diff --git "a/log/log-train-2022-05-03-11-40-27-6" "b/log/log-train-2022-05-03-11-40-27-6" new file mode 100644--- /dev/null +++ "b/log/log-train-2022-05-03-11-40-27-6" @@ -0,0 +1,14399 @@ +2022-05-03 11:40:27,488 INFO [train.py:775] (6/8) Training started +2022-05-03 11:40:27,488 INFO [train.py:785] (6/8) Device: cuda:6 +2022-05-03 11:40:27,490 INFO [train.py:794] (6/8) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'encoder_dim': 512, 'nhead': 8, 'dim_feedforward': 2048, 'num_encoder_layers': 12, 'decoder_dim': 512, 'joiner_dim': 512, 'model_warm_step': 3000, 'env_info': {'k2-version': '1.14', 'k2-build-type': 'Debug', 'k2-with-cuda': True, 'k2-git-sha1': '1b29f0a946f50186aaa82df46a59f492ade9692b', 'k2-git-date': 'Tue Apr 12 20:46:49 2022', 'lhotse-version': '1.1.0', 'torch-version': '1.10.1+cu111', 'torch-cuda-available': True, 'torch-cuda-version': '11.1', 'python-version': '3.8', 'icefall-git-branch': 'spgi', 'icefall-git-sha1': 'e2e5c77-dirty', 'icefall-git-date': 'Mon May 2 14:38:25 2022', 'icefall-path': '/exp/draj/mini_scale_2022/icefall', 'k2-path': '/exp/draj/mini_scale_2022/k2/k2/python/k2/__init__.py', 'lhotse-path': '/exp/draj/mini_scale_2022/lhotse/lhotse/__init__.py', 'hostname': 'r8n04', 'IP address': '10.1.8.4'}, 'world_size': 8, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 0, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless2/exp/v2'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'initial_lr': 0.003, 'lr_batches': 5000, 'lr_epochs': 4, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'save_every_n': 8000, 'keep_last_k': 10, 'use_fp16': True, 'manifest_dir': PosixPath('data/manifests'), 'enable_musan': True, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'max_duration': 200, 'num_buckets': 30, 'on_the_fly_feats': False, 'shuffle': True, 'num_workers': 8, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'blank_id': 0, 'vocab_size': 500} +2022-05-03 11:40:27,491 INFO [train.py:796] (6/8) About to create model +2022-05-03 11:40:27,825 INFO [train.py:800] (6/8) Number of model parameters: 78648040 +2022-05-03 11:40:33,452 INFO [train.py:806] (6/8) Using DDP +2022-05-03 11:40:34,137 INFO [asr_datamodule.py:321] (6/8) About to get SPGISpeech train cuts +2022-05-03 11:40:34,139 INFO [asr_datamodule.py:179] (6/8) About to get Musan cuts +2022-05-03 11:40:35,852 INFO [asr_datamodule.py:184] (6/8) Enable MUSAN +2022-05-03 11:40:35,853 INFO [asr_datamodule.py:207] (6/8) Enable SpecAugment +2022-05-03 11:40:35,853 INFO [asr_datamodule.py:208] (6/8) Time warp factor: 80 +2022-05-03 11:40:35,853 INFO [asr_datamodule.py:221] (6/8) About to create train dataset +2022-05-03 11:40:35,853 INFO [asr_datamodule.py:234] (6/8) Using DynamicBucketingSampler. 
+2022-05-03 11:40:36,249 INFO [asr_datamodule.py:242] (6/8) About to create train dataloader +2022-05-03 11:40:36,249 INFO [asr_datamodule.py:326] (6/8) About to get SPGISpeech dev cuts +2022-05-03 11:40:36,250 INFO [asr_datamodule.py:274] (6/8) About to create dev dataset +2022-05-03 11:40:36,396 INFO [asr_datamodule.py:289] (6/8) About to create dev dataloader +2022-05-03 11:41:08,012 INFO [train.py:715] (6/8) Epoch 0, batch 0, loss[loss=3.407, simple_loss=6.815, pruned_loss=5.822, over 4971.00 frames.], tot_loss[loss=3.407, simple_loss=6.815, pruned_loss=5.822, over 4971.00 frames.], batch size: 25, lr: 3.00e-03 +2022-05-03 11:41:08,561 INFO [distributed.py:874] (6/8) Reducer buckets have been rebuilt in this iteration. +2022-05-03 11:41:46,319 INFO [train.py:715] (6/8) Epoch 0, batch 50, loss[loss=0.4903, simple_loss=0.9806, pruned_loss=6.776, over 4947.00 frames.], tot_loss[loss=1.32, simple_loss=2.641, pruned_loss=6.476, over 218991.21 frames.], batch size: 21, lr: 3.00e-03 +2022-05-03 11:42:25,582 INFO [train.py:715] (6/8) Epoch 0, batch 100, loss[loss=0.409, simple_loss=0.8181, pruned_loss=6.735, over 4859.00 frames.], tot_loss[loss=0.8154, simple_loss=1.631, pruned_loss=6.582, over 386657.07 frames.], batch size: 20, lr: 3.00e-03 +2022-05-03 11:43:04,761 INFO [train.py:715] (6/8) Epoch 0, batch 150, loss[loss=0.3813, simple_loss=0.7625, pruned_loss=6.712, over 4759.00 frames.], tot_loss[loss=0.6267, simple_loss=1.253, pruned_loss=6.588, over 516895.37 frames.], batch size: 19, lr: 3.00e-03 +2022-05-03 11:43:43,128 INFO [train.py:715] (6/8) Epoch 0, batch 200, loss[loss=0.3454, simple_loss=0.6909, pruned_loss=6.679, over 4966.00 frames.], tot_loss[loss=0.5301, simple_loss=1.06, pruned_loss=6.583, over 618500.68 frames.], batch size: 15, lr: 3.00e-03 +2022-05-03 11:44:22,070 INFO [train.py:715] (6/8) Epoch 0, batch 250, loss[loss=0.3422, simple_loss=0.6844, pruned_loss=6.612, over 4941.00 frames.], tot_loss[loss=0.4715, simple_loss=0.9431, pruned_loss=6.598, over 697008.92 frames.], batch size: 21, lr: 3.00e-03 +2022-05-03 11:45:01,539 INFO [train.py:715] (6/8) Epoch 0, batch 300, loss[loss=0.3404, simple_loss=0.6807, pruned_loss=6.639, over 4838.00 frames.], tot_loss[loss=0.4323, simple_loss=0.8647, pruned_loss=6.608, over 757945.81 frames.], batch size: 32, lr: 3.00e-03 +2022-05-03 11:45:41,193 INFO [train.py:715] (6/8) Epoch 0, batch 350, loss[loss=0.3294, simple_loss=0.6589, pruned_loss=6.703, over 4972.00 frames.], tot_loss[loss=0.4043, simple_loss=0.8086, pruned_loss=6.622, over 805569.33 frames.], batch size: 24, lr: 3.00e-03 +2022-05-03 11:46:19,556 INFO [train.py:715] (6/8) Epoch 0, batch 400, loss[loss=0.3305, simple_loss=0.6609, pruned_loss=6.573, over 4941.00 frames.], tot_loss[loss=0.3837, simple_loss=0.7674, pruned_loss=6.632, over 843524.86 frames.], batch size: 35, lr: 3.00e-03 +2022-05-03 11:46:58,912 INFO [train.py:715] (6/8) Epoch 0, batch 450, loss[loss=0.3274, simple_loss=0.6548, pruned_loss=6.733, over 4688.00 frames.], tot_loss[loss=0.369, simple_loss=0.7381, pruned_loss=6.645, over 871995.72 frames.], batch size: 15, lr: 2.99e-03 +2022-05-03 11:47:38,007 INFO [train.py:715] (6/8) Epoch 0, batch 500, loss[loss=0.3303, simple_loss=0.6607, pruned_loss=6.704, over 4836.00 frames.], tot_loss[loss=0.3569, simple_loss=0.7137, pruned_loss=6.648, over 894114.94 frames.], batch size: 30, lr: 2.99e-03 +2022-05-03 11:48:17,112 INFO [train.py:715] (6/8) Epoch 0, batch 550, loss[loss=0.3223, simple_loss=0.6445, pruned_loss=6.859, over 4751.00 frames.], 
tot_loss[loss=0.3469, simple_loss=0.6938, pruned_loss=6.652, over 910322.54 frames.], batch size: 16, lr: 2.99e-03 +2022-05-03 11:48:55,931 INFO [train.py:715] (6/8) Epoch 0, batch 600, loss[loss=0.2879, simple_loss=0.5758, pruned_loss=6.68, over 4800.00 frames.], tot_loss[loss=0.3373, simple_loss=0.6747, pruned_loss=6.665, over 924214.02 frames.], batch size: 14, lr: 2.99e-03 +2022-05-03 11:49:35,152 INFO [train.py:715] (6/8) Epoch 0, batch 650, loss[loss=0.3064, simple_loss=0.6128, pruned_loss=6.833, over 4863.00 frames.], tot_loss[loss=0.3267, simple_loss=0.6534, pruned_loss=6.683, over 935167.89 frames.], batch size: 20, lr: 2.99e-03 +2022-05-03 11:50:14,495 INFO [train.py:715] (6/8) Epoch 0, batch 700, loss[loss=0.2815, simple_loss=0.563, pruned_loss=6.849, over 4862.00 frames.], tot_loss[loss=0.3141, simple_loss=0.6282, pruned_loss=6.701, over 943492.84 frames.], batch size: 30, lr: 2.99e-03 +2022-05-03 11:50:53,003 INFO [train.py:715] (6/8) Epoch 0, batch 750, loss[loss=0.2675, simple_loss=0.5349, pruned_loss=6.783, over 4895.00 frames.], tot_loss[loss=0.3014, simple_loss=0.6028, pruned_loss=6.711, over 949746.69 frames.], batch size: 19, lr: 2.98e-03 +2022-05-03 11:51:32,799 INFO [train.py:715] (6/8) Epoch 0, batch 800, loss[loss=0.2802, simple_loss=0.5604, pruned_loss=6.842, over 4973.00 frames.], tot_loss[loss=0.2899, simple_loss=0.5798, pruned_loss=6.713, over 955647.89 frames.], batch size: 24, lr: 2.98e-03 +2022-05-03 11:52:12,747 INFO [train.py:715] (6/8) Epoch 0, batch 850, loss[loss=0.229, simple_loss=0.4579, pruned_loss=6.716, over 4875.00 frames.], tot_loss[loss=0.2793, simple_loss=0.5586, pruned_loss=6.714, over 959809.03 frames.], batch size: 16, lr: 2.98e-03 +2022-05-03 11:52:51,641 INFO [train.py:715] (6/8) Epoch 0, batch 900, loss[loss=0.2316, simple_loss=0.4631, pruned_loss=6.665, over 4778.00 frames.], tot_loss[loss=0.2694, simple_loss=0.5389, pruned_loss=6.71, over 962158.76 frames.], batch size: 14, lr: 2.98e-03 +2022-05-03 11:53:30,234 INFO [train.py:715] (6/8) Epoch 0, batch 950, loss[loss=0.2039, simple_loss=0.4079, pruned_loss=6.541, over 4962.00 frames.], tot_loss[loss=0.2592, simple_loss=0.5183, pruned_loss=6.703, over 964740.51 frames.], batch size: 29, lr: 2.97e-03 +2022-05-03 11:54:09,541 INFO [train.py:715] (6/8) Epoch 0, batch 1000, loss[loss=0.2223, simple_loss=0.4446, pruned_loss=6.695, over 4869.00 frames.], tot_loss[loss=0.251, simple_loss=0.502, pruned_loss=6.702, over 966348.16 frames.], batch size: 20, lr: 2.97e-03 +2022-05-03 11:54:48,904 INFO [train.py:715] (6/8) Epoch 0, batch 1050, loss[loss=0.1972, simple_loss=0.3944, pruned_loss=6.694, over 4975.00 frames.], tot_loss[loss=0.2436, simple_loss=0.4871, pruned_loss=6.707, over 966173.55 frames.], batch size: 28, lr: 2.97e-03 +2022-05-03 11:55:27,471 INFO [train.py:715] (6/8) Epoch 0, batch 1100, loss[loss=0.228, simple_loss=0.456, pruned_loss=6.761, over 4779.00 frames.], tot_loss[loss=0.2374, simple_loss=0.4747, pruned_loss=6.709, over 966375.29 frames.], batch size: 17, lr: 2.96e-03 +2022-05-03 11:56:07,477 INFO [train.py:715] (6/8) Epoch 0, batch 1150, loss[loss=0.2048, simple_loss=0.4095, pruned_loss=6.698, over 4819.00 frames.], tot_loss[loss=0.232, simple_loss=0.4639, pruned_loss=6.715, over 967630.20 frames.], batch size: 27, lr: 2.96e-03 +2022-05-03 11:56:47,809 INFO [train.py:715] (6/8) Epoch 0, batch 1200, loss[loss=0.2057, simple_loss=0.4114, pruned_loss=6.724, over 4939.00 frames.], tot_loss[loss=0.2268, simple_loss=0.4535, pruned_loss=6.715, over 968331.15 frames.], batch size: 
35, lr: 2.96e-03 +2022-05-03 11:57:28,433 INFO [train.py:715] (6/8) Epoch 0, batch 1250, loss[loss=0.2189, simple_loss=0.4378, pruned_loss=6.908, over 4898.00 frames.], tot_loss[loss=0.2219, simple_loss=0.4438, pruned_loss=6.711, over 969632.91 frames.], batch size: 17, lr: 2.95e-03 +2022-05-03 11:58:07,333 INFO [train.py:715] (6/8) Epoch 0, batch 1300, loss[loss=0.1972, simple_loss=0.3943, pruned_loss=6.702, over 4854.00 frames.], tot_loss[loss=0.2177, simple_loss=0.4355, pruned_loss=6.711, over 970178.67 frames.], batch size: 20, lr: 2.95e-03 +2022-05-03 11:58:47,746 INFO [train.py:715] (6/8) Epoch 0, batch 1350, loss[loss=0.1771, simple_loss=0.3541, pruned_loss=6.642, over 4795.00 frames.], tot_loss[loss=0.2133, simple_loss=0.4266, pruned_loss=6.709, over 970339.57 frames.], batch size: 24, lr: 2.95e-03 +2022-05-03 11:59:28,709 INFO [train.py:715] (6/8) Epoch 0, batch 1400, loss[loss=0.2021, simple_loss=0.4043, pruned_loss=6.559, over 4933.00 frames.], tot_loss[loss=0.2101, simple_loss=0.4202, pruned_loss=6.709, over 970918.03 frames.], batch size: 29, lr: 2.94e-03 +2022-05-03 12:00:09,326 INFO [train.py:715] (6/8) Epoch 0, batch 1450, loss[loss=0.2101, simple_loss=0.4202, pruned_loss=6.747, over 4855.00 frames.], tot_loss[loss=0.2069, simple_loss=0.4139, pruned_loss=6.706, over 971848.64 frames.], batch size: 30, lr: 2.94e-03 +2022-05-03 12:00:48,853 INFO [train.py:715] (6/8) Epoch 0, batch 1500, loss[loss=0.1594, simple_loss=0.3188, pruned_loss=6.722, over 4808.00 frames.], tot_loss[loss=0.2032, simple_loss=0.4064, pruned_loss=6.7, over 971649.17 frames.], batch size: 12, lr: 2.94e-03 +2022-05-03 12:01:29,920 INFO [train.py:715] (6/8) Epoch 0, batch 1550, loss[loss=0.1628, simple_loss=0.3255, pruned_loss=6.495, over 4863.00 frames.], tot_loss[loss=0.2005, simple_loss=0.4011, pruned_loss=6.701, over 971636.93 frames.], batch size: 20, lr: 2.93e-03 +2022-05-03 12:02:11,269 INFO [train.py:715] (6/8) Epoch 0, batch 1600, loss[loss=0.2231, simple_loss=0.4462, pruned_loss=6.756, over 4974.00 frames.], tot_loss[loss=0.198, simple_loss=0.396, pruned_loss=6.697, over 972117.29 frames.], batch size: 15, lr: 2.93e-03 +2022-05-03 12:02:51,031 INFO [train.py:715] (6/8) Epoch 0, batch 1650, loss[loss=0.1712, simple_loss=0.3423, pruned_loss=6.597, over 4902.00 frames.], tot_loss[loss=0.1952, simple_loss=0.3904, pruned_loss=6.694, over 972705.02 frames.], batch size: 19, lr: 2.92e-03 +2022-05-03 12:03:32,824 INFO [train.py:715] (6/8) Epoch 0, batch 1700, loss[loss=0.2036, simple_loss=0.4071, pruned_loss=6.615, over 4918.00 frames.], tot_loss[loss=0.1926, simple_loss=0.3852, pruned_loss=6.691, over 973233.24 frames.], batch size: 29, lr: 2.92e-03 +2022-05-03 12:04:14,556 INFO [train.py:715] (6/8) Epoch 0, batch 1750, loss[loss=0.1735, simple_loss=0.347, pruned_loss=6.546, over 4968.00 frames.], tot_loss[loss=0.1912, simple_loss=0.3823, pruned_loss=6.691, over 973341.16 frames.], batch size: 33, lr: 2.91e-03 +2022-05-03 12:04:56,004 INFO [train.py:715] (6/8) Epoch 0, batch 1800, loss[loss=0.1708, simple_loss=0.3417, pruned_loss=6.606, over 4951.00 frames.], tot_loss[loss=0.1895, simple_loss=0.379, pruned_loss=6.682, over 972588.85 frames.], batch size: 15, lr: 2.91e-03 +2022-05-03 12:05:36,580 INFO [train.py:715] (6/8) Epoch 0, batch 1850, loss[loss=0.1895, simple_loss=0.3789, pruned_loss=6.694, over 4848.00 frames.], tot_loss[loss=0.1869, simple_loss=0.3738, pruned_loss=6.672, over 971731.73 frames.], batch size: 34, lr: 2.91e-03 +2022-05-03 12:06:18,616 INFO [train.py:715] (6/8) Epoch 0, batch 1900, 
loss[loss=0.1553, simple_loss=0.3105, pruned_loss=6.525, over 4950.00 frames.], tot_loss[loss=0.1857, simple_loss=0.3715, pruned_loss=6.67, over 972326.84 frames.], batch size: 21, lr: 2.90e-03 +2022-05-03 12:07:00,152 INFO [train.py:715] (6/8) Epoch 0, batch 1950, loss[loss=0.1552, simple_loss=0.3104, pruned_loss=6.603, over 4778.00 frames.], tot_loss[loss=0.1843, simple_loss=0.3686, pruned_loss=6.67, over 972071.27 frames.], batch size: 12, lr: 2.90e-03 +2022-05-03 12:07:38,876 INFO [train.py:715] (6/8) Epoch 0, batch 2000, loss[loss=0.1737, simple_loss=0.3474, pruned_loss=6.645, over 4933.00 frames.], tot_loss[loss=0.1834, simple_loss=0.3668, pruned_loss=6.668, over 972756.53 frames.], batch size: 18, lr: 2.89e-03 +2022-05-03 12:08:19,998 INFO [train.py:715] (6/8) Epoch 0, batch 2050, loss[loss=0.1887, simple_loss=0.3774, pruned_loss=6.754, over 4805.00 frames.], tot_loss[loss=0.183, simple_loss=0.366, pruned_loss=6.666, over 973346.64 frames.], batch size: 24, lr: 2.89e-03 +2022-05-03 12:09:00,598 INFO [train.py:715] (6/8) Epoch 0, batch 2100, loss[loss=0.171, simple_loss=0.3421, pruned_loss=6.594, over 4973.00 frames.], tot_loss[loss=0.1829, simple_loss=0.3657, pruned_loss=6.666, over 972918.31 frames.], batch size: 24, lr: 2.88e-03 +2022-05-03 12:09:41,206 INFO [train.py:715] (6/8) Epoch 0, batch 2150, loss[loss=0.1852, simple_loss=0.3703, pruned_loss=6.718, over 4869.00 frames.], tot_loss[loss=0.1827, simple_loss=0.3654, pruned_loss=6.672, over 973442.00 frames.], batch size: 16, lr: 2.88e-03 +2022-05-03 12:10:20,509 INFO [train.py:715] (6/8) Epoch 0, batch 2200, loss[loss=0.1649, simple_loss=0.3297, pruned_loss=6.653, over 4877.00 frames.], tot_loss[loss=0.1817, simple_loss=0.3634, pruned_loss=6.673, over 972547.23 frames.], batch size: 22, lr: 2.87e-03 +2022-05-03 12:11:01,495 INFO [train.py:715] (6/8) Epoch 0, batch 2250, loss[loss=0.1635, simple_loss=0.3271, pruned_loss=6.746, over 4934.00 frames.], tot_loss[loss=0.1817, simple_loss=0.3635, pruned_loss=6.67, over 972579.94 frames.], batch size: 29, lr: 2.86e-03 +2022-05-03 12:11:42,778 INFO [train.py:715] (6/8) Epoch 0, batch 2300, loss[loss=0.1527, simple_loss=0.3054, pruned_loss=6.553, over 4853.00 frames.], tot_loss[loss=0.1803, simple_loss=0.3605, pruned_loss=6.671, over 973510.20 frames.], batch size: 13, lr: 2.86e-03 +2022-05-03 12:12:22,385 INFO [train.py:715] (6/8) Epoch 0, batch 2350, loss[loss=0.1732, simple_loss=0.3463, pruned_loss=6.636, over 4947.00 frames.], tot_loss[loss=0.1793, simple_loss=0.3586, pruned_loss=6.671, over 973425.90 frames.], batch size: 21, lr: 2.85e-03 +2022-05-03 12:13:03,129 INFO [train.py:715] (6/8) Epoch 0, batch 2400, loss[loss=0.1693, simple_loss=0.3386, pruned_loss=6.723, over 4856.00 frames.], tot_loss[loss=0.1781, simple_loss=0.3562, pruned_loss=6.671, over 972719.88 frames.], batch size: 34, lr: 2.85e-03 +2022-05-03 12:13:43,814 INFO [train.py:715] (6/8) Epoch 0, batch 2450, loss[loss=0.1797, simple_loss=0.3594, pruned_loss=6.702, over 4930.00 frames.], tot_loss[loss=0.177, simple_loss=0.3539, pruned_loss=6.673, over 972688.06 frames.], batch size: 18, lr: 2.84e-03 +2022-05-03 12:14:24,682 INFO [train.py:715] (6/8) Epoch 0, batch 2500, loss[loss=0.1773, simple_loss=0.3547, pruned_loss=6.638, over 4791.00 frames.], tot_loss[loss=0.1764, simple_loss=0.3528, pruned_loss=6.675, over 972489.93 frames.], batch size: 24, lr: 2.84e-03 +2022-05-03 12:15:03,914 INFO [train.py:715] (6/8) Epoch 0, batch 2550, loss[loss=0.1787, simple_loss=0.3574, pruned_loss=6.682, over 4852.00 frames.], 
tot_loss[loss=0.1759, simple_loss=0.3517, pruned_loss=6.672, over 972921.32 frames.], batch size: 32, lr: 2.83e-03 +2022-05-03 12:15:44,628 INFO [train.py:715] (6/8) Epoch 0, batch 2600, loss[loss=0.1789, simple_loss=0.3578, pruned_loss=6.737, over 4842.00 frames.], tot_loss[loss=0.176, simple_loss=0.352, pruned_loss=6.67, over 972263.62 frames.], batch size: 30, lr: 2.83e-03 +2022-05-03 12:16:25,717 INFO [train.py:715] (6/8) Epoch 0, batch 2650, loss[loss=0.1573, simple_loss=0.3147, pruned_loss=6.69, over 4772.00 frames.], tot_loss[loss=0.1743, simple_loss=0.3486, pruned_loss=6.667, over 972067.74 frames.], batch size: 17, lr: 2.82e-03 +2022-05-03 12:17:08,090 INFO [train.py:715] (6/8) Epoch 0, batch 2700, loss[loss=0.172, simple_loss=0.344, pruned_loss=6.625, over 4684.00 frames.], tot_loss[loss=0.173, simple_loss=0.3459, pruned_loss=6.658, over 972011.41 frames.], batch size: 15, lr: 2.81e-03 +2022-05-03 12:17:48,873 INFO [train.py:715] (6/8) Epoch 0, batch 2750, loss[loss=0.1761, simple_loss=0.3522, pruned_loss=6.601, over 4866.00 frames.], tot_loss[loss=0.1728, simple_loss=0.3456, pruned_loss=6.653, over 972173.16 frames.], batch size: 38, lr: 2.81e-03 +2022-05-03 12:18:29,716 INFO [train.py:715] (6/8) Epoch 0, batch 2800, loss[loss=0.1694, simple_loss=0.3388, pruned_loss=6.632, over 4973.00 frames.], tot_loss[loss=0.172, simple_loss=0.344, pruned_loss=6.648, over 972358.23 frames.], batch size: 24, lr: 2.80e-03 +2022-05-03 12:19:10,262 INFO [train.py:715] (6/8) Epoch 0, batch 2850, loss[loss=0.1939, simple_loss=0.3878, pruned_loss=6.678, over 4743.00 frames.], tot_loss[loss=0.172, simple_loss=0.344, pruned_loss=6.65, over 972278.81 frames.], batch size: 16, lr: 2.80e-03 +2022-05-03 12:19:49,116 INFO [train.py:715] (6/8) Epoch 0, batch 2900, loss[loss=0.2171, simple_loss=0.4342, pruned_loss=6.84, over 4984.00 frames.], tot_loss[loss=0.1709, simple_loss=0.3419, pruned_loss=6.645, over 972778.61 frames.], batch size: 28, lr: 2.79e-03 +2022-05-03 12:20:29,368 INFO [train.py:715] (6/8) Epoch 0, batch 2950, loss[loss=0.1704, simple_loss=0.3408, pruned_loss=6.688, over 4968.00 frames.], tot_loss[loss=0.1702, simple_loss=0.3405, pruned_loss=6.647, over 972794.18 frames.], batch size: 15, lr: 2.78e-03 +2022-05-03 12:21:11,360 INFO [train.py:715] (6/8) Epoch 0, batch 3000, loss[loss=0.8027, simple_loss=0.2995, pruned_loss=6.53, over 4833.00 frames.], tot_loss[loss=0.2083, simple_loss=0.3419, pruned_loss=6.656, over 972962.63 frames.], batch size: 13, lr: 2.78e-03 +2022-05-03 12:21:11,361 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 12:21:21,130 INFO [train.py:742] (6/8) Epoch 0, validation: loss=2.223, simple_loss=0.2788, pruned_loss=2.083, over 914524.00 frames. 
+2022-05-03 12:22:02,151 INFO [train.py:715] (6/8) Epoch 0, batch 3050, loss[loss=0.2425, simple_loss=0.349, pruned_loss=0.6803, over 4984.00 frames.], tot_loss[loss=0.2237, simple_loss=0.3426, pruned_loss=5.411, over 973382.98 frames.], batch size: 15, lr: 2.77e-03 +2022-05-03 12:22:41,555 INFO [train.py:715] (6/8) Epoch 0, batch 3100, loss[loss=0.1942, simple_loss=0.3188, pruned_loss=0.3482, over 4925.00 frames.], tot_loss[loss=0.2229, simple_loss=0.3422, pruned_loss=4.324, over 972902.03 frames.], batch size: 18, lr: 2.77e-03 +2022-05-03 12:23:22,412 INFO [train.py:715] (6/8) Epoch 0, batch 3150, loss[loss=0.2244, simple_loss=0.3835, pruned_loss=0.3268, over 4688.00 frames.], tot_loss[loss=0.2178, simple_loss=0.3412, pruned_loss=3.431, over 973870.80 frames.], batch size: 15, lr: 2.76e-03 +2022-05-03 12:24:03,657 INFO [train.py:715] (6/8) Epoch 0, batch 3200, loss[loss=0.2105, simple_loss=0.3651, pruned_loss=0.2796, over 4764.00 frames.], tot_loss[loss=0.2116, simple_loss=0.3387, pruned_loss=2.727, over 972830.55 frames.], batch size: 16, lr: 2.75e-03 +2022-05-03 12:24:44,873 INFO [train.py:715] (6/8) Epoch 0, batch 3250, loss[loss=0.1737, simple_loss=0.311, pruned_loss=0.1816, over 4816.00 frames.], tot_loss[loss=0.2066, simple_loss=0.3376, pruned_loss=2.174, over 971816.67 frames.], batch size: 21, lr: 2.75e-03 +2022-05-03 12:25:24,110 INFO [train.py:715] (6/8) Epoch 0, batch 3300, loss[loss=0.2051, simple_loss=0.3622, pruned_loss=0.2394, over 4842.00 frames.], tot_loss[loss=0.2024, simple_loss=0.3366, pruned_loss=1.738, over 972141.16 frames.], batch size: 13, lr: 2.74e-03 +2022-05-03 12:26:05,354 INFO [train.py:715] (6/8) Epoch 0, batch 3350, loss[loss=0.1906, simple_loss=0.3391, pruned_loss=0.2104, over 4858.00 frames.], tot_loss[loss=0.1993, simple_loss=0.3365, pruned_loss=1.399, over 972047.99 frames.], batch size: 20, lr: 2.73e-03 +2022-05-03 12:26:46,183 INFO [train.py:715] (6/8) Epoch 0, batch 3400, loss[loss=0.2013, simple_loss=0.3575, pruned_loss=0.2258, over 4781.00 frames.], tot_loss[loss=0.1966, simple_loss=0.3358, pruned_loss=1.133, over 972222.00 frames.], batch size: 19, lr: 2.73e-03 +2022-05-03 12:27:25,313 INFO [train.py:715] (6/8) Epoch 0, batch 3450, loss[loss=0.1979, simple_loss=0.3501, pruned_loss=0.2282, over 4816.00 frames.], tot_loss[loss=0.193, simple_loss=0.3332, pruned_loss=0.9237, over 971293.02 frames.], batch size: 25, lr: 2.72e-03 +2022-05-03 12:28:06,922 INFO [train.py:715] (6/8) Epoch 0, batch 3500, loss[loss=0.1793, simple_loss=0.3236, pruned_loss=0.1753, over 4797.00 frames.], tot_loss[loss=0.1911, simple_loss=0.3327, pruned_loss=0.761, over 970555.00 frames.], batch size: 18, lr: 2.72e-03 +2022-05-03 12:28:48,559 INFO [train.py:715] (6/8) Epoch 0, batch 3550, loss[loss=0.1637, simple_loss=0.2962, pruned_loss=0.156, over 4968.00 frames.], tot_loss[loss=0.1898, simple_loss=0.3328, pruned_loss=0.6332, over 971383.98 frames.], batch size: 15, lr: 2.71e-03 +2022-05-03 12:29:29,805 INFO [train.py:715] (6/8) Epoch 0, batch 3600, loss[loss=0.1627, simple_loss=0.2969, pruned_loss=0.1422, over 4951.00 frames.], tot_loss[loss=0.1875, simple_loss=0.3307, pruned_loss=0.532, over 971363.62 frames.], batch size: 21, lr: 2.70e-03 +2022-05-03 12:30:08,994 INFO [train.py:715] (6/8) Epoch 0, batch 3650, loss[loss=0.2096, simple_loss=0.3762, pruned_loss=0.2155, over 4907.00 frames.], tot_loss[loss=0.1853, simple_loss=0.3286, pruned_loss=0.4521, over 971519.25 frames.], batch size: 39, lr: 2.70e-03 +2022-05-03 12:30:50,505 INFO [train.py:715] (6/8) Epoch 0, batch 3700, 
loss[loss=0.1823, simple_loss=0.3313, pruned_loss=0.1666, over 4880.00 frames.], tot_loss[loss=0.1846, simple_loss=0.3287, pruned_loss=0.3911, over 972022.53 frames.], batch size: 22, lr: 2.69e-03 +2022-05-03 12:31:32,101 INFO [train.py:715] (6/8) Epoch 0, batch 3750, loss[loss=0.1801, simple_loss=0.3271, pruned_loss=0.1651, over 4782.00 frames.], tot_loss[loss=0.1838, simple_loss=0.3284, pruned_loss=0.3422, over 972516.96 frames.], batch size: 18, lr: 2.68e-03 +2022-05-03 12:32:11,307 INFO [train.py:715] (6/8) Epoch 0, batch 3800, loss[loss=0.1917, simple_loss=0.3444, pruned_loss=0.1953, over 4972.00 frames.], tot_loss[loss=0.1829, simple_loss=0.3277, pruned_loss=0.3046, over 972131.49 frames.], batch size: 15, lr: 2.68e-03 +2022-05-03 12:33:05,647 INFO [train.py:715] (6/8) Epoch 0, batch 3850, loss[loss=0.1759, simple_loss=0.3194, pruned_loss=0.162, over 4738.00 frames.], tot_loss[loss=0.1827, simple_loss=0.3281, pruned_loss=0.2754, over 971682.32 frames.], batch size: 16, lr: 2.67e-03 +2022-05-03 12:33:46,699 INFO [train.py:715] (6/8) Epoch 0, batch 3900, loss[loss=0.1714, simple_loss=0.3124, pruned_loss=0.1521, over 4856.00 frames.], tot_loss[loss=0.1817, simple_loss=0.327, pruned_loss=0.2512, over 971765.29 frames.], batch size: 20, lr: 2.66e-03 +2022-05-03 12:34:26,860 INFO [train.py:715] (6/8) Epoch 0, batch 3950, loss[loss=0.1701, simple_loss=0.3092, pruned_loss=0.155, over 4881.00 frames.], tot_loss[loss=0.1812, simple_loss=0.3266, pruned_loss=0.2327, over 971473.54 frames.], batch size: 16, lr: 2.66e-03 +2022-05-03 12:35:06,670 INFO [train.py:715] (6/8) Epoch 0, batch 4000, loss[loss=0.1348, simple_loss=0.2484, pruned_loss=0.106, over 4804.00 frames.], tot_loss[loss=0.1796, simple_loss=0.3243, pruned_loss=0.2161, over 971575.21 frames.], batch size: 12, lr: 2.65e-03 +2022-05-03 12:35:47,595 INFO [train.py:715] (6/8) Epoch 0, batch 4050, loss[loss=0.1751, simple_loss=0.3181, pruned_loss=0.1607, over 4798.00 frames.], tot_loss[loss=0.1797, simple_loss=0.3248, pruned_loss=0.2053, over 972654.45 frames.], batch size: 13, lr: 2.64e-03 +2022-05-03 12:36:28,805 INFO [train.py:715] (6/8) Epoch 0, batch 4100, loss[loss=0.173, simple_loss=0.3149, pruned_loss=0.1555, over 4966.00 frames.], tot_loss[loss=0.1796, simple_loss=0.3249, pruned_loss=0.1966, over 972263.95 frames.], batch size: 35, lr: 2.64e-03 +2022-05-03 12:37:07,950 INFO [train.py:715] (6/8) Epoch 0, batch 4150, loss[loss=0.1907, simple_loss=0.3414, pruned_loss=0.1998, over 4806.00 frames.], tot_loss[loss=0.1795, simple_loss=0.3249, pruned_loss=0.1903, over 971606.64 frames.], batch size: 14, lr: 2.63e-03 +2022-05-03 12:37:49,186 INFO [train.py:715] (6/8) Epoch 0, batch 4200, loss[loss=0.155, simple_loss=0.2859, pruned_loss=0.1204, over 4828.00 frames.], tot_loss[loss=0.1787, simple_loss=0.3237, pruned_loss=0.1837, over 972093.85 frames.], batch size: 13, lr: 2.63e-03 +2022-05-03 12:38:30,913 INFO [train.py:715] (6/8) Epoch 0, batch 4250, loss[loss=0.1553, simple_loss=0.2884, pruned_loss=0.1111, over 4892.00 frames.], tot_loss[loss=0.178, simple_loss=0.3228, pruned_loss=0.1777, over 971747.63 frames.], batch size: 19, lr: 2.62e-03 +2022-05-03 12:39:11,501 INFO [train.py:715] (6/8) Epoch 0, batch 4300, loss[loss=0.1745, simple_loss=0.318, pruned_loss=0.1552, over 4777.00 frames.], tot_loss[loss=0.176, simple_loss=0.3197, pruned_loss=0.171, over 972077.86 frames.], batch size: 17, lr: 2.61e-03 +2022-05-03 12:39:51,573 INFO [train.py:715] (6/8) Epoch 0, batch 4350, loss[loss=0.1779, simple_loss=0.325, pruned_loss=0.1537, over 
4944.00 frames.], tot_loss[loss=0.1762, simple_loss=0.3201, pruned_loss=0.1691, over 972743.54 frames.], batch size: 21, lr: 2.61e-03 +2022-05-03 12:40:33,087 INFO [train.py:715] (6/8) Epoch 0, batch 4400, loss[loss=0.1735, simple_loss=0.3145, pruned_loss=0.163, over 4699.00 frames.], tot_loss[loss=0.1758, simple_loss=0.3194, pruned_loss=0.1661, over 972197.05 frames.], batch size: 15, lr: 2.60e-03 +2022-05-03 12:41:14,314 INFO [train.py:715] (6/8) Epoch 0, batch 4450, loss[loss=0.1911, simple_loss=0.3451, pruned_loss=0.1851, over 4878.00 frames.], tot_loss[loss=0.1744, simple_loss=0.3173, pruned_loss=0.1622, over 972527.06 frames.], batch size: 22, lr: 2.59e-03 +2022-05-03 12:41:53,447 INFO [train.py:715] (6/8) Epoch 0, batch 4500, loss[loss=0.192, simple_loss=0.3468, pruned_loss=0.1865, over 4805.00 frames.], tot_loss[loss=0.1734, simple_loss=0.3158, pruned_loss=0.1583, over 972705.70 frames.], batch size: 14, lr: 2.59e-03 +2022-05-03 12:42:34,808 INFO [train.py:715] (6/8) Epoch 0, batch 4550, loss[loss=0.1632, simple_loss=0.2999, pruned_loss=0.1322, over 4900.00 frames.], tot_loss[loss=0.1743, simple_loss=0.3176, pruned_loss=0.1582, over 972981.20 frames.], batch size: 17, lr: 2.58e-03 +2022-05-03 12:43:16,363 INFO [train.py:715] (6/8) Epoch 0, batch 4600, loss[loss=0.1584, simple_loss=0.2894, pruned_loss=0.1365, over 4938.00 frames.], tot_loss[loss=0.1732, simple_loss=0.3158, pruned_loss=0.1552, over 973171.81 frames.], batch size: 29, lr: 2.57e-03 +2022-05-03 12:43:56,536 INFO [train.py:715] (6/8) Epoch 0, batch 4650, loss[loss=0.1894, simple_loss=0.3358, pruned_loss=0.2147, over 4844.00 frames.], tot_loss[loss=0.173, simple_loss=0.3154, pruned_loss=0.1541, over 973259.23 frames.], batch size: 13, lr: 2.57e-03 +2022-05-03 12:44:36,470 INFO [train.py:715] (6/8) Epoch 0, batch 4700, loss[loss=0.1693, simple_loss=0.3116, pruned_loss=0.1357, over 4895.00 frames.], tot_loss[loss=0.1721, simple_loss=0.3141, pruned_loss=0.1517, over 973604.82 frames.], batch size: 17, lr: 2.56e-03 +2022-05-03 12:45:17,609 INFO [train.py:715] (6/8) Epoch 0, batch 4750, loss[loss=0.1493, simple_loss=0.2759, pruned_loss=0.1135, over 4982.00 frames.], tot_loss[loss=0.1724, simple_loss=0.3148, pruned_loss=0.1512, over 973753.26 frames.], batch size: 28, lr: 2.55e-03 +2022-05-03 12:45:58,875 INFO [train.py:715] (6/8) Epoch 0, batch 4800, loss[loss=0.1692, simple_loss=0.3095, pruned_loss=0.1441, over 4970.00 frames.], tot_loss[loss=0.173, simple_loss=0.3158, pruned_loss=0.1522, over 973980.04 frames.], batch size: 14, lr: 2.55e-03 +2022-05-03 12:46:38,841 INFO [train.py:715] (6/8) Epoch 0, batch 4850, loss[loss=0.1838, simple_loss=0.3351, pruned_loss=0.1628, over 4878.00 frames.], tot_loss[loss=0.1724, simple_loss=0.3149, pruned_loss=0.1503, over 973830.01 frames.], batch size: 22, lr: 2.54e-03 +2022-05-03 12:47:19,642 INFO [train.py:715] (6/8) Epoch 0, batch 4900, loss[loss=0.1791, simple_loss=0.3275, pruned_loss=0.1532, over 4904.00 frames.], tot_loss[loss=0.1718, simple_loss=0.3139, pruned_loss=0.149, over 974056.12 frames.], batch size: 39, lr: 2.54e-03 +2022-05-03 12:48:01,141 INFO [train.py:715] (6/8) Epoch 0, batch 4950, loss[loss=0.1659, simple_loss=0.3062, pruned_loss=0.1286, over 4974.00 frames.], tot_loss[loss=0.1718, simple_loss=0.314, pruned_loss=0.1484, over 973830.48 frames.], batch size: 15, lr: 2.53e-03 +2022-05-03 12:48:41,421 INFO [train.py:715] (6/8) Epoch 0, batch 5000, loss[loss=0.1957, simple_loss=0.3538, pruned_loss=0.1878, over 4874.00 frames.], tot_loss[loss=0.1713, simple_loss=0.3132, 
pruned_loss=0.1473, over 973769.77 frames.], batch size: 16, lr: 2.52e-03 +2022-05-03 12:49:22,148 INFO [train.py:715] (6/8) Epoch 0, batch 5050, loss[loss=0.1716, simple_loss=0.3145, pruned_loss=0.1442, over 4757.00 frames.], tot_loss[loss=0.1716, simple_loss=0.3138, pruned_loss=0.1476, over 974453.11 frames.], batch size: 19, lr: 2.52e-03 +2022-05-03 12:50:04,999 INFO [train.py:715] (6/8) Epoch 0, batch 5100, loss[loss=0.1665, simple_loss=0.3032, pruned_loss=0.1488, over 4731.00 frames.], tot_loss[loss=0.1717, simple_loss=0.314, pruned_loss=0.1476, over 973652.53 frames.], batch size: 16, lr: 2.51e-03 +2022-05-03 12:50:48,200 INFO [train.py:715] (6/8) Epoch 0, batch 5150, loss[loss=0.16, simple_loss=0.2928, pruned_loss=0.1357, over 4906.00 frames.], tot_loss[loss=0.1722, simple_loss=0.3148, pruned_loss=0.1478, over 972721.15 frames.], batch size: 18, lr: 2.50e-03 +2022-05-03 12:51:28,091 INFO [train.py:715] (6/8) Epoch 0, batch 5200, loss[loss=0.1552, simple_loss=0.2856, pruned_loss=0.1238, over 4812.00 frames.], tot_loss[loss=0.1707, simple_loss=0.3124, pruned_loss=0.1451, over 972188.61 frames.], batch size: 12, lr: 2.50e-03 +2022-05-03 12:52:08,693 INFO [train.py:715] (6/8) Epoch 0, batch 5250, loss[loss=0.1409, simple_loss=0.26, pruned_loss=0.1096, over 4770.00 frames.], tot_loss[loss=0.1698, simple_loss=0.3109, pruned_loss=0.1432, over 972671.93 frames.], batch size: 19, lr: 2.49e-03 +2022-05-03 12:52:49,809 INFO [train.py:715] (6/8) Epoch 0, batch 5300, loss[loss=0.1684, simple_loss=0.3084, pruned_loss=0.1416, over 4857.00 frames.], tot_loss[loss=0.1698, simple_loss=0.3111, pruned_loss=0.1428, over 972425.47 frames.], batch size: 30, lr: 2.49e-03 +2022-05-03 12:53:30,344 INFO [train.py:715] (6/8) Epoch 0, batch 5350, loss[loss=0.1811, simple_loss=0.3293, pruned_loss=0.1639, over 4821.00 frames.], tot_loss[loss=0.1696, simple_loss=0.3107, pruned_loss=0.1428, over 972589.35 frames.], batch size: 26, lr: 2.48e-03 +2022-05-03 12:54:10,014 INFO [train.py:715] (6/8) Epoch 0, batch 5400, loss[loss=0.1819, simple_loss=0.3311, pruned_loss=0.1637, over 4874.00 frames.], tot_loss[loss=0.17, simple_loss=0.3113, pruned_loss=0.1432, over 972753.77 frames.], batch size: 39, lr: 2.47e-03 +2022-05-03 12:54:50,451 INFO [train.py:715] (6/8) Epoch 0, batch 5450, loss[loss=0.1831, simple_loss=0.3358, pruned_loss=0.1518, over 4801.00 frames.], tot_loss[loss=0.169, simple_loss=0.3098, pruned_loss=0.1411, over 972519.90 frames.], batch size: 24, lr: 2.47e-03 +2022-05-03 12:55:31,403 INFO [train.py:715] (6/8) Epoch 0, batch 5500, loss[loss=0.1996, simple_loss=0.36, pruned_loss=0.1964, over 4843.00 frames.], tot_loss[loss=0.1686, simple_loss=0.3092, pruned_loss=0.1399, over 972631.90 frames.], batch size: 30, lr: 2.46e-03 +2022-05-03 12:56:11,118 INFO [train.py:715] (6/8) Epoch 0, batch 5550, loss[loss=0.134, simple_loss=0.2469, pruned_loss=0.1052, over 4826.00 frames.], tot_loss[loss=0.1674, simple_loss=0.3071, pruned_loss=0.1383, over 972291.19 frames.], batch size: 15, lr: 2.45e-03 +2022-05-03 12:56:51,159 INFO [train.py:715] (6/8) Epoch 0, batch 5600, loss[loss=0.1612, simple_loss=0.2936, pruned_loss=0.1443, over 4833.00 frames.], tot_loss[loss=0.1666, simple_loss=0.3057, pruned_loss=0.1372, over 972084.02 frames.], batch size: 13, lr: 2.45e-03 +2022-05-03 12:57:32,373 INFO [train.py:715] (6/8) Epoch 0, batch 5650, loss[loss=0.1699, simple_loss=0.3109, pruned_loss=0.145, over 4913.00 frames.], tot_loss[loss=0.166, simple_loss=0.3049, pruned_loss=0.1358, over 972037.57 frames.], batch size: 23, lr: 
2.44e-03 +2022-05-03 12:58:12,918 INFO [train.py:715] (6/8) Epoch 0, batch 5700, loss[loss=0.1569, simple_loss=0.2881, pruned_loss=0.1289, over 4935.00 frames.], tot_loss[loss=0.1657, simple_loss=0.3045, pruned_loss=0.1349, over 972369.76 frames.], batch size: 18, lr: 2.44e-03 +2022-05-03 12:58:52,119 INFO [train.py:715] (6/8) Epoch 0, batch 5750, loss[loss=0.1889, simple_loss=0.3444, pruned_loss=0.1673, over 4930.00 frames.], tot_loss[loss=0.1665, simple_loss=0.3059, pruned_loss=0.1356, over 972908.90 frames.], batch size: 23, lr: 2.43e-03 +2022-05-03 12:59:33,130 INFO [train.py:715] (6/8) Epoch 0, batch 5800, loss[loss=0.1744, simple_loss=0.3154, pruned_loss=0.1667, over 4893.00 frames.], tot_loss[loss=0.1659, simple_loss=0.3049, pruned_loss=0.1346, over 973423.14 frames.], batch size: 22, lr: 2.42e-03 +2022-05-03 13:00:14,316 INFO [train.py:715] (6/8) Epoch 0, batch 5850, loss[loss=0.1719, simple_loss=0.316, pruned_loss=0.139, over 4783.00 frames.], tot_loss[loss=0.1664, simple_loss=0.3057, pruned_loss=0.1351, over 973541.90 frames.], batch size: 17, lr: 2.42e-03 +2022-05-03 13:00:54,230 INFO [train.py:715] (6/8) Epoch 0, batch 5900, loss[loss=0.1683, simple_loss=0.3122, pruned_loss=0.1217, over 4766.00 frames.], tot_loss[loss=0.1665, simple_loss=0.306, pruned_loss=0.1351, over 972952.52 frames.], batch size: 14, lr: 2.41e-03 +2022-05-03 13:01:33,975 INFO [train.py:715] (6/8) Epoch 0, batch 5950, loss[loss=0.1858, simple_loss=0.3399, pruned_loss=0.1582, over 4806.00 frames.], tot_loss[loss=0.1666, simple_loss=0.3062, pruned_loss=0.1353, over 972707.09 frames.], batch size: 21, lr: 2.41e-03 +2022-05-03 13:02:14,775 INFO [train.py:715] (6/8) Epoch 0, batch 6000, loss[loss=0.3078, simple_loss=0.311, pruned_loss=0.1523, over 4973.00 frames.], tot_loss[loss=0.1664, simple_loss=0.3039, pruned_loss=0.1329, over 972641.07 frames.], batch size: 35, lr: 2.40e-03 +2022-05-03 13:02:14,776 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 13:02:25,809 INFO [train.py:742] (6/8) Epoch 0, validation: loss=0.1779, simple_loss=0.2457, pruned_loss=0.05502, over 914524.00 frames. 
+2022-05-03 13:03:07,298 INFO [train.py:715] (6/8) Epoch 0, batch 6050, loss[loss=0.3062, simple_loss=0.2986, pruned_loss=0.1569, over 4825.00 frames.], tot_loss[loss=0.198, simple_loss=0.3067, pruned_loss=0.1369, over 972058.65 frames.], batch size: 13, lr: 2.39e-03 +2022-05-03 13:03:47,843 INFO [train.py:715] (6/8) Epoch 0, batch 6100, loss[loss=0.2696, simple_loss=0.2952, pruned_loss=0.122, over 4845.00 frames.], tot_loss[loss=0.2176, simple_loss=0.3068, pruned_loss=0.136, over 972126.36 frames.], batch size: 30, lr: 2.39e-03 +2022-05-03 13:04:27,374 INFO [train.py:715] (6/8) Epoch 0, batch 6150, loss[loss=0.3177, simple_loss=0.3285, pruned_loss=0.1534, over 4918.00 frames.], tot_loss[loss=0.2342, simple_loss=0.3078, pruned_loss=0.1361, over 972402.66 frames.], batch size: 18, lr: 2.38e-03 +2022-05-03 13:05:08,102 INFO [train.py:715] (6/8) Epoch 0, batch 6200, loss[loss=0.2973, simple_loss=0.3192, pruned_loss=0.1377, over 4786.00 frames.], tot_loss[loss=0.2451, simple_loss=0.3076, pruned_loss=0.1347, over 972536.24 frames.], batch size: 17, lr: 2.38e-03 +2022-05-03 13:05:48,917 INFO [train.py:715] (6/8) Epoch 0, batch 6250, loss[loss=0.2554, simple_loss=0.2884, pruned_loss=0.1112, over 4917.00 frames.], tot_loss[loss=0.2507, simple_loss=0.3055, pruned_loss=0.1319, over 971676.89 frames.], batch size: 29, lr: 2.37e-03 +2022-05-03 13:06:29,114 INFO [train.py:715] (6/8) Epoch 0, batch 6300, loss[loss=0.3071, simple_loss=0.3127, pruned_loss=0.1507, over 4917.00 frames.], tot_loss[loss=0.2564, simple_loss=0.3046, pruned_loss=0.1304, over 972448.08 frames.], batch size: 29, lr: 2.37e-03 +2022-05-03 13:07:09,793 INFO [train.py:715] (6/8) Epoch 0, batch 6350, loss[loss=0.3085, simple_loss=0.319, pruned_loss=0.149, over 4796.00 frames.], tot_loss[loss=0.2629, simple_loss=0.3057, pruned_loss=0.1305, over 972397.27 frames.], batch size: 21, lr: 2.36e-03 +2022-05-03 13:07:50,717 INFO [train.py:715] (6/8) Epoch 0, batch 6400, loss[loss=0.2327, simple_loss=0.2651, pruned_loss=0.1002, over 4906.00 frames.], tot_loss[loss=0.2696, simple_loss=0.3077, pruned_loss=0.1317, over 972494.43 frames.], batch size: 19, lr: 2.35e-03 +2022-05-03 13:08:30,738 INFO [train.py:715] (6/8) Epoch 0, batch 6450, loss[loss=0.2487, simple_loss=0.279, pruned_loss=0.1092, over 4948.00 frames.], tot_loss[loss=0.2706, simple_loss=0.3065, pruned_loss=0.1297, over 972943.25 frames.], batch size: 23, lr: 2.35e-03 +2022-05-03 13:09:10,065 INFO [train.py:715] (6/8) Epoch 0, batch 6500, loss[loss=0.3021, simple_loss=0.328, pruned_loss=0.1381, over 4905.00 frames.], tot_loss[loss=0.2733, simple_loss=0.3075, pruned_loss=0.1292, over 972662.71 frames.], batch size: 19, lr: 2.34e-03 +2022-05-03 13:09:50,947 INFO [train.py:715] (6/8) Epoch 0, batch 6550, loss[loss=0.2748, simple_loss=0.2912, pruned_loss=0.1292, over 4906.00 frames.], tot_loss[loss=0.2758, simple_loss=0.3078, pruned_loss=0.1294, over 973966.77 frames.], batch size: 18, lr: 2.34e-03 +2022-05-03 13:10:31,733 INFO [train.py:715] (6/8) Epoch 0, batch 6600, loss[loss=0.3455, simple_loss=0.3592, pruned_loss=0.1659, over 4763.00 frames.], tot_loss[loss=0.2771, simple_loss=0.3078, pruned_loss=0.129, over 973398.74 frames.], batch size: 18, lr: 2.33e-03 +2022-05-03 13:11:11,209 INFO [train.py:715] (6/8) Epoch 0, batch 6650, loss[loss=0.3006, simple_loss=0.332, pruned_loss=0.1346, over 4922.00 frames.], tot_loss[loss=0.2784, simple_loss=0.3081, pruned_loss=0.1289, over 973435.52 frames.], batch size: 39, lr: 2.33e-03 +2022-05-03 13:11:51,654 INFO [train.py:715] (6/8) Epoch 0, batch 
6700, loss[loss=0.2954, simple_loss=0.3189, pruned_loss=0.1359, over 4951.00 frames.], tot_loss[loss=0.2792, simple_loss=0.3084, pruned_loss=0.1285, over 972523.65 frames.], batch size: 21, lr: 2.32e-03 +2022-05-03 13:12:32,418 INFO [train.py:715] (6/8) Epoch 0, batch 6750, loss[loss=0.3076, simple_loss=0.3287, pruned_loss=0.1432, over 4792.00 frames.], tot_loss[loss=0.2787, simple_loss=0.3079, pruned_loss=0.1275, over 971921.41 frames.], batch size: 21, lr: 2.31e-03 +2022-05-03 13:13:12,495 INFO [train.py:715] (6/8) Epoch 0, batch 6800, loss[loss=0.3259, simple_loss=0.3415, pruned_loss=0.1552, over 4704.00 frames.], tot_loss[loss=0.2779, simple_loss=0.3076, pruned_loss=0.1263, over 971854.43 frames.], batch size: 15, lr: 2.31e-03 +2022-05-03 13:13:52,209 INFO [train.py:715] (6/8) Epoch 0, batch 6850, loss[loss=0.2402, simple_loss=0.2847, pruned_loss=0.09783, over 4806.00 frames.], tot_loss[loss=0.2767, simple_loss=0.3067, pruned_loss=0.125, over 971810.28 frames.], batch size: 25, lr: 2.30e-03 +2022-05-03 13:14:32,488 INFO [train.py:715] (6/8) Epoch 0, batch 6900, loss[loss=0.303, simple_loss=0.3313, pruned_loss=0.1374, over 4768.00 frames.], tot_loss[loss=0.279, simple_loss=0.308, pruned_loss=0.1263, over 971691.00 frames.], batch size: 14, lr: 2.30e-03 +2022-05-03 13:15:12,914 INFO [train.py:715] (6/8) Epoch 0, batch 6950, loss[loss=0.2489, simple_loss=0.2955, pruned_loss=0.1012, over 4822.00 frames.], tot_loss[loss=0.2789, simple_loss=0.3084, pruned_loss=0.1257, over 971725.78 frames.], batch size: 21, lr: 2.29e-03 +2022-05-03 13:15:53,031 INFO [train.py:715] (6/8) Epoch 0, batch 7000, loss[loss=0.2567, simple_loss=0.2924, pruned_loss=0.1105, over 4802.00 frames.], tot_loss[loss=0.276, simple_loss=0.3061, pruned_loss=0.1237, over 971042.58 frames.], batch size: 21, lr: 2.29e-03 +2022-05-03 13:16:33,740 INFO [train.py:715] (6/8) Epoch 0, batch 7050, loss[loss=0.2474, simple_loss=0.2967, pruned_loss=0.09904, over 4896.00 frames.], tot_loss[loss=0.2739, simple_loss=0.305, pruned_loss=0.122, over 971447.14 frames.], batch size: 19, lr: 2.28e-03 +2022-05-03 13:17:14,925 INFO [train.py:715] (6/8) Epoch 0, batch 7100, loss[loss=0.2635, simple_loss=0.301, pruned_loss=0.113, over 4790.00 frames.], tot_loss[loss=0.2744, simple_loss=0.3055, pruned_loss=0.1221, over 971163.59 frames.], batch size: 18, lr: 2.28e-03 +2022-05-03 13:17:55,870 INFO [train.py:715] (6/8) Epoch 0, batch 7150, loss[loss=0.2588, simple_loss=0.3107, pruned_loss=0.1035, over 4892.00 frames.], tot_loss[loss=0.2752, simple_loss=0.3062, pruned_loss=0.1225, over 971334.87 frames.], batch size: 22, lr: 2.27e-03 +2022-05-03 13:18:35,511 INFO [train.py:715] (6/8) Epoch 0, batch 7200, loss[loss=0.2537, simple_loss=0.2924, pruned_loss=0.1075, over 4808.00 frames.], tot_loss[loss=0.274, simple_loss=0.3057, pruned_loss=0.1214, over 970884.99 frames.], batch size: 25, lr: 2.27e-03 +2022-05-03 13:19:16,096 INFO [train.py:715] (6/8) Epoch 0, batch 7250, loss[loss=0.3036, simple_loss=0.3258, pruned_loss=0.1407, over 4985.00 frames.], tot_loss[loss=0.2736, simple_loss=0.3052, pruned_loss=0.1212, over 971948.16 frames.], batch size: 31, lr: 2.26e-03 +2022-05-03 13:19:55,969 INFO [train.py:715] (6/8) Epoch 0, batch 7300, loss[loss=0.3049, simple_loss=0.3366, pruned_loss=0.1366, over 4955.00 frames.], tot_loss[loss=0.2752, simple_loss=0.3063, pruned_loss=0.1222, over 971565.69 frames.], batch size: 21, lr: 2.26e-03 +2022-05-03 13:20:36,066 INFO [train.py:715] (6/8) Epoch 0, batch 7350, loss[loss=0.2692, simple_loss=0.299, pruned_loss=0.1198, 
over 4907.00 frames.], tot_loss[loss=0.2742, simple_loss=0.3058, pruned_loss=0.1215, over 971429.98 frames.], batch size: 19, lr: 2.25e-03 +2022-05-03 13:21:16,439 INFO [train.py:715] (6/8) Epoch 0, batch 7400, loss[loss=0.2544, simple_loss=0.2887, pruned_loss=0.11, over 4853.00 frames.], tot_loss[loss=0.273, simple_loss=0.3052, pruned_loss=0.1205, over 971371.07 frames.], batch size: 32, lr: 2.24e-03 +2022-05-03 13:21:57,036 INFO [train.py:715] (6/8) Epoch 0, batch 7450, loss[loss=0.2628, simple_loss=0.2968, pruned_loss=0.1144, over 4934.00 frames.], tot_loss[loss=0.2734, simple_loss=0.3059, pruned_loss=0.1206, over 971312.93 frames.], batch size: 35, lr: 2.24e-03 +2022-05-03 13:22:36,840 INFO [train.py:715] (6/8) Epoch 0, batch 7500, loss[loss=0.2656, simple_loss=0.3007, pruned_loss=0.1152, over 4817.00 frames.], tot_loss[loss=0.2732, simple_loss=0.3059, pruned_loss=0.1204, over 971073.12 frames.], batch size: 25, lr: 2.23e-03 +2022-05-03 13:23:16,565 INFO [train.py:715] (6/8) Epoch 0, batch 7550, loss[loss=0.3055, simple_loss=0.3184, pruned_loss=0.1462, over 4923.00 frames.], tot_loss[loss=0.2735, simple_loss=0.3061, pruned_loss=0.1205, over 972274.85 frames.], batch size: 23, lr: 2.23e-03 +2022-05-03 13:23:57,043 INFO [train.py:715] (6/8) Epoch 0, batch 7600, loss[loss=0.3504, simple_loss=0.3649, pruned_loss=0.1679, over 4689.00 frames.], tot_loss[loss=0.2724, simple_loss=0.3056, pruned_loss=0.1196, over 972665.05 frames.], batch size: 15, lr: 2.22e-03 +2022-05-03 13:24:37,501 INFO [train.py:715] (6/8) Epoch 0, batch 7650, loss[loss=0.2923, simple_loss=0.3084, pruned_loss=0.1381, over 4980.00 frames.], tot_loss[loss=0.2715, simple_loss=0.3048, pruned_loss=0.1191, over 972156.27 frames.], batch size: 14, lr: 2.22e-03 +2022-05-03 13:25:16,992 INFO [train.py:715] (6/8) Epoch 0, batch 7700, loss[loss=0.2857, simple_loss=0.3133, pruned_loss=0.129, over 4801.00 frames.], tot_loss[loss=0.2716, simple_loss=0.3049, pruned_loss=0.1192, over 972593.46 frames.], batch size: 14, lr: 2.21e-03 +2022-05-03 13:25:57,322 INFO [train.py:715] (6/8) Epoch 0, batch 7750, loss[loss=0.3213, simple_loss=0.3325, pruned_loss=0.155, over 4881.00 frames.], tot_loss[loss=0.2691, simple_loss=0.303, pruned_loss=0.1176, over 972824.32 frames.], batch size: 32, lr: 2.21e-03 +2022-05-03 13:26:38,377 INFO [train.py:715] (6/8) Epoch 0, batch 7800, loss[loss=0.2817, simple_loss=0.31, pruned_loss=0.1267, over 4785.00 frames.], tot_loss[loss=0.2697, simple_loss=0.3035, pruned_loss=0.118, over 972138.77 frames.], batch size: 17, lr: 2.20e-03 +2022-05-03 13:27:18,714 INFO [train.py:715] (6/8) Epoch 0, batch 7850, loss[loss=0.3282, simple_loss=0.348, pruned_loss=0.1542, over 4980.00 frames.], tot_loss[loss=0.2724, simple_loss=0.3055, pruned_loss=0.1196, over 972246.34 frames.], batch size: 35, lr: 2.20e-03 +2022-05-03 13:27:58,878 INFO [train.py:715] (6/8) Epoch 0, batch 7900, loss[loss=0.2593, simple_loss=0.3067, pruned_loss=0.106, over 4913.00 frames.], tot_loss[loss=0.2719, simple_loss=0.3058, pruned_loss=0.1191, over 972215.36 frames.], batch size: 17, lr: 2.19e-03 +2022-05-03 13:28:39,530 INFO [train.py:715] (6/8) Epoch 0, batch 7950, loss[loss=0.2414, simple_loss=0.2805, pruned_loss=0.1011, over 4701.00 frames.], tot_loss[loss=0.2718, simple_loss=0.3058, pruned_loss=0.1189, over 972513.40 frames.], batch size: 15, lr: 2.19e-03 +2022-05-03 13:29:22,247 INFO [train.py:715] (6/8) Epoch 0, batch 8000, loss[loss=0.2901, simple_loss=0.3019, pruned_loss=0.1391, over 4864.00 frames.], tot_loss[loss=0.2685, simple_loss=0.3033, 
pruned_loss=0.1169, over 971699.22 frames.], batch size: 32, lr: 2.18e-03 +2022-05-03 13:30:02,112 INFO [train.py:715] (6/8) Epoch 0, batch 8050, loss[loss=0.3138, simple_loss=0.3495, pruned_loss=0.139, over 4846.00 frames.], tot_loss[loss=0.2676, simple_loss=0.3029, pruned_loss=0.1162, over 971337.12 frames.], batch size: 20, lr: 2.18e-03 +2022-05-03 13:30:41,976 INFO [train.py:715] (6/8) Epoch 0, batch 8100, loss[loss=0.3084, simple_loss=0.3248, pruned_loss=0.146, over 4795.00 frames.], tot_loss[loss=0.2685, simple_loss=0.3036, pruned_loss=0.1166, over 971021.27 frames.], batch size: 21, lr: 2.17e-03 +2022-05-03 13:31:22,999 INFO [train.py:715] (6/8) Epoch 0, batch 8150, loss[loss=0.2509, simple_loss=0.2737, pruned_loss=0.114, over 4903.00 frames.], tot_loss[loss=0.2673, simple_loss=0.3027, pruned_loss=0.1159, over 971537.85 frames.], batch size: 18, lr: 2.17e-03 +2022-05-03 13:32:02,629 INFO [train.py:715] (6/8) Epoch 0, batch 8200, loss[loss=0.2712, simple_loss=0.2995, pruned_loss=0.1214, over 4912.00 frames.], tot_loss[loss=0.2696, simple_loss=0.3045, pruned_loss=0.1174, over 970898.94 frames.], batch size: 18, lr: 2.16e-03 +2022-05-03 13:32:42,138 INFO [train.py:715] (6/8) Epoch 0, batch 8250, loss[loss=0.2605, simple_loss=0.3127, pruned_loss=0.1042, over 4690.00 frames.], tot_loss[loss=0.2702, simple_loss=0.3049, pruned_loss=0.1178, over 971101.21 frames.], batch size: 15, lr: 2.16e-03 +2022-05-03 13:33:23,000 INFO [train.py:715] (6/8) Epoch 0, batch 8300, loss[loss=0.2856, simple_loss=0.3216, pruned_loss=0.1248, over 4798.00 frames.], tot_loss[loss=0.2722, simple_loss=0.3061, pruned_loss=0.1191, over 971225.21 frames.], batch size: 21, lr: 2.15e-03 +2022-05-03 13:34:03,427 INFO [train.py:715] (6/8) Epoch 0, batch 8350, loss[loss=0.2113, simple_loss=0.257, pruned_loss=0.08278, over 4815.00 frames.], tot_loss[loss=0.2694, simple_loss=0.3042, pruned_loss=0.1173, over 971402.34 frames.], batch size: 15, lr: 2.15e-03 +2022-05-03 13:34:43,098 INFO [train.py:715] (6/8) Epoch 0, batch 8400, loss[loss=0.252, simple_loss=0.302, pruned_loss=0.101, over 4751.00 frames.], tot_loss[loss=0.2686, simple_loss=0.3037, pruned_loss=0.1168, over 971260.71 frames.], batch size: 16, lr: 2.15e-03 +2022-05-03 13:35:23,384 INFO [train.py:715] (6/8) Epoch 0, batch 8450, loss[loss=0.2781, simple_loss=0.325, pruned_loss=0.1156, over 4743.00 frames.], tot_loss[loss=0.2667, simple_loss=0.3025, pruned_loss=0.1154, over 970688.25 frames.], batch size: 16, lr: 2.14e-03 +2022-05-03 13:36:04,640 INFO [train.py:715] (6/8) Epoch 0, batch 8500, loss[loss=0.2808, simple_loss=0.3172, pruned_loss=0.1222, over 4818.00 frames.], tot_loss[loss=0.266, simple_loss=0.3021, pruned_loss=0.115, over 970426.04 frames.], batch size: 25, lr: 2.14e-03 +2022-05-03 13:36:45,704 INFO [train.py:715] (6/8) Epoch 0, batch 8550, loss[loss=0.2702, simple_loss=0.3158, pruned_loss=0.1123, over 4778.00 frames.], tot_loss[loss=0.267, simple_loss=0.3031, pruned_loss=0.1155, over 971077.29 frames.], batch size: 18, lr: 2.13e-03 +2022-05-03 13:37:25,355 INFO [train.py:715] (6/8) Epoch 0, batch 8600, loss[loss=0.2837, simple_loss=0.3244, pruned_loss=0.1215, over 4906.00 frames.], tot_loss[loss=0.2647, simple_loss=0.3013, pruned_loss=0.114, over 971381.81 frames.], batch size: 18, lr: 2.13e-03 +2022-05-03 13:38:06,734 INFO [train.py:715] (6/8) Epoch 0, batch 8650, loss[loss=0.267, simple_loss=0.3046, pruned_loss=0.1147, over 4825.00 frames.], tot_loss[loss=0.2615, simple_loss=0.2992, pruned_loss=0.1119, over 970865.61 frames.], batch size: 15, lr: 
2.12e-03 +2022-05-03 13:38:47,676 INFO [train.py:715] (6/8) Epoch 0, batch 8700, loss[loss=0.3022, simple_loss=0.331, pruned_loss=0.1367, over 4742.00 frames.], tot_loss[loss=0.2613, simple_loss=0.2991, pruned_loss=0.1117, over 971195.26 frames.], batch size: 19, lr: 2.12e-03 +2022-05-03 13:39:27,759 INFO [train.py:715] (6/8) Epoch 0, batch 8750, loss[loss=0.303, simple_loss=0.3321, pruned_loss=0.1369, over 4933.00 frames.], tot_loss[loss=0.2609, simple_loss=0.2987, pruned_loss=0.1115, over 970670.65 frames.], batch size: 29, lr: 2.11e-03 +2022-05-03 13:40:08,241 INFO [train.py:715] (6/8) Epoch 0, batch 8800, loss[loss=0.1822, simple_loss=0.2464, pruned_loss=0.05904, over 4762.00 frames.], tot_loss[loss=0.2606, simple_loss=0.2986, pruned_loss=0.1112, over 971341.88 frames.], batch size: 19, lr: 2.11e-03 +2022-05-03 13:40:48,805 INFO [train.py:715] (6/8) Epoch 0, batch 8850, loss[loss=0.2431, simple_loss=0.2919, pruned_loss=0.09713, over 4918.00 frames.], tot_loss[loss=0.2613, simple_loss=0.2992, pruned_loss=0.1117, over 972092.97 frames.], batch size: 19, lr: 2.10e-03 +2022-05-03 13:41:29,534 INFO [train.py:715] (6/8) Epoch 0, batch 8900, loss[loss=0.2309, simple_loss=0.2662, pruned_loss=0.09778, over 4928.00 frames.], tot_loss[loss=0.2623, simple_loss=0.2997, pruned_loss=0.1124, over 972616.00 frames.], batch size: 29, lr: 2.10e-03 +2022-05-03 13:42:09,386 INFO [train.py:715] (6/8) Epoch 0, batch 8950, loss[loss=0.2614, simple_loss=0.2947, pruned_loss=0.1141, over 4686.00 frames.], tot_loss[loss=0.2622, simple_loss=0.2991, pruned_loss=0.1126, over 972517.55 frames.], batch size: 15, lr: 2.10e-03 +2022-05-03 13:42:49,921 INFO [train.py:715] (6/8) Epoch 0, batch 9000, loss[loss=0.2315, simple_loss=0.2657, pruned_loss=0.09865, over 4763.00 frames.], tot_loss[loss=0.2609, simple_loss=0.2987, pruned_loss=0.1115, over 972191.08 frames.], batch size: 12, lr: 2.09e-03 +2022-05-03 13:42:49,921 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 13:43:03,384 INFO [train.py:742] (6/8) Epoch 0, validation: loss=0.1592, simple_loss=0.2426, pruned_loss=0.03794, over 914524.00 frames. 
+2022-05-03 13:43:44,297 INFO [train.py:715] (6/8) Epoch 0, batch 9050, loss[loss=0.3109, simple_loss=0.3406, pruned_loss=0.1406, over 4862.00 frames.], tot_loss[loss=0.2604, simple_loss=0.2987, pruned_loss=0.1111, over 972350.64 frames.], batch size: 30, lr: 2.09e-03 +2022-05-03 13:44:24,656 INFO [train.py:715] (6/8) Epoch 0, batch 9100, loss[loss=0.2604, simple_loss=0.2885, pruned_loss=0.1162, over 4837.00 frames.], tot_loss[loss=0.2588, simple_loss=0.2975, pruned_loss=0.1101, over 972653.36 frames.], batch size: 15, lr: 2.08e-03 +2022-05-03 13:45:04,786 INFO [train.py:715] (6/8) Epoch 0, batch 9150, loss[loss=0.2682, simple_loss=0.3089, pruned_loss=0.1138, over 4917.00 frames.], tot_loss[loss=0.2594, simple_loss=0.298, pruned_loss=0.1104, over 972093.61 frames.], batch size: 23, lr: 2.08e-03 +2022-05-03 13:45:44,982 INFO [train.py:715] (6/8) Epoch 0, batch 9200, loss[loss=0.2536, simple_loss=0.2913, pruned_loss=0.1079, over 4848.00 frames.], tot_loss[loss=0.2593, simple_loss=0.2978, pruned_loss=0.1104, over 972004.04 frames.], batch size: 20, lr: 2.07e-03 +2022-05-03 13:46:26,068 INFO [train.py:715] (6/8) Epoch 0, batch 9250, loss[loss=0.2923, simple_loss=0.3192, pruned_loss=0.1327, over 4875.00 frames.], tot_loss[loss=0.2582, simple_loss=0.2975, pruned_loss=0.1095, over 971932.21 frames.], batch size: 20, lr: 2.07e-03 +2022-05-03 13:47:06,378 INFO [train.py:715] (6/8) Epoch 0, batch 9300, loss[loss=0.2464, simple_loss=0.2864, pruned_loss=0.1032, over 4980.00 frames.], tot_loss[loss=0.2599, simple_loss=0.2991, pruned_loss=0.1103, over 972699.58 frames.], batch size: 28, lr: 2.06e-03 +2022-05-03 13:47:45,670 INFO [train.py:715] (6/8) Epoch 0, batch 9350, loss[loss=0.2869, simple_loss=0.3207, pruned_loss=0.1266, over 4946.00 frames.], tot_loss[loss=0.2593, simple_loss=0.2988, pruned_loss=0.1099, over 972472.43 frames.], batch size: 21, lr: 2.06e-03 +2022-05-03 13:48:27,109 INFO [train.py:715] (6/8) Epoch 0, batch 9400, loss[loss=0.2137, simple_loss=0.2666, pruned_loss=0.08038, over 4832.00 frames.], tot_loss[loss=0.2588, simple_loss=0.2982, pruned_loss=0.1097, over 972175.10 frames.], batch size: 26, lr: 2.06e-03 +2022-05-03 13:49:07,601 INFO [train.py:715] (6/8) Epoch 0, batch 9450, loss[loss=0.2377, simple_loss=0.2763, pruned_loss=0.09951, over 4959.00 frames.], tot_loss[loss=0.2579, simple_loss=0.2977, pruned_loss=0.109, over 972334.67 frames.], batch size: 21, lr: 2.05e-03 +2022-05-03 13:49:47,928 INFO [train.py:715] (6/8) Epoch 0, batch 9500, loss[loss=0.2729, simple_loss=0.2987, pruned_loss=0.1236, over 4798.00 frames.], tot_loss[loss=0.256, simple_loss=0.2965, pruned_loss=0.1078, over 972148.87 frames.], batch size: 24, lr: 2.05e-03 +2022-05-03 13:50:28,007 INFO [train.py:715] (6/8) Epoch 0, batch 9550, loss[loss=0.3499, simple_loss=0.3679, pruned_loss=0.166, over 4911.00 frames.], tot_loss[loss=0.2585, simple_loss=0.2981, pruned_loss=0.1094, over 972608.65 frames.], batch size: 18, lr: 2.04e-03 +2022-05-03 13:51:08,465 INFO [train.py:715] (6/8) Epoch 0, batch 9600, loss[loss=0.2086, simple_loss=0.2545, pruned_loss=0.08132, over 4865.00 frames.], tot_loss[loss=0.2567, simple_loss=0.2968, pruned_loss=0.1083, over 972317.53 frames.], batch size: 34, lr: 2.04e-03 +2022-05-03 13:51:48,908 INFO [train.py:715] (6/8) Epoch 0, batch 9650, loss[loss=0.2507, simple_loss=0.3008, pruned_loss=0.1003, over 4884.00 frames.], tot_loss[loss=0.2552, simple_loss=0.2962, pruned_loss=0.1071, over 972124.69 frames.], batch size: 19, lr: 2.03e-03 +2022-05-03 13:52:27,672 INFO [train.py:715] (6/8) Epoch 
0, batch 9700, loss[loss=0.2825, simple_loss=0.3278, pruned_loss=0.1186, over 4980.00 frames.], tot_loss[loss=0.2552, simple_loss=0.2961, pruned_loss=0.1072, over 972106.62 frames.], batch size: 14, lr: 2.03e-03 +2022-05-03 13:53:08,241 INFO [train.py:715] (6/8) Epoch 0, batch 9750, loss[loss=0.2825, simple_loss=0.3086, pruned_loss=0.1282, over 4989.00 frames.], tot_loss[loss=0.2538, simple_loss=0.2949, pruned_loss=0.1064, over 972472.17 frames.], batch size: 25, lr: 2.03e-03 +2022-05-03 13:53:47,975 INFO [train.py:715] (6/8) Epoch 0, batch 9800, loss[loss=0.2544, simple_loss=0.2973, pruned_loss=0.1057, over 4844.00 frames.], tot_loss[loss=0.2538, simple_loss=0.2954, pruned_loss=0.1061, over 972637.65 frames.], batch size: 30, lr: 2.02e-03 +2022-05-03 13:54:27,879 INFO [train.py:715] (6/8) Epoch 0, batch 9850, loss[loss=0.2129, simple_loss=0.2757, pruned_loss=0.07502, over 4840.00 frames.], tot_loss[loss=0.2552, simple_loss=0.2964, pruned_loss=0.107, over 972573.80 frames.], batch size: 15, lr: 2.02e-03 +2022-05-03 13:55:07,631 INFO [train.py:715] (6/8) Epoch 0, batch 9900, loss[loss=0.2309, simple_loss=0.2845, pruned_loss=0.08867, over 4907.00 frames.], tot_loss[loss=0.2541, simple_loss=0.2954, pruned_loss=0.1064, over 973090.25 frames.], batch size: 19, lr: 2.01e-03 +2022-05-03 13:55:47,713 INFO [train.py:715] (6/8) Epoch 0, batch 9950, loss[loss=0.2801, simple_loss=0.3184, pruned_loss=0.1209, over 4857.00 frames.], tot_loss[loss=0.2528, simple_loss=0.2944, pruned_loss=0.1056, over 972525.19 frames.], batch size: 20, lr: 2.01e-03 +2022-05-03 13:56:27,937 INFO [train.py:715] (6/8) Epoch 0, batch 10000, loss[loss=0.2222, simple_loss=0.286, pruned_loss=0.07914, over 4969.00 frames.], tot_loss[loss=0.2537, simple_loss=0.2955, pruned_loss=0.1059, over 972191.60 frames.], batch size: 24, lr: 2.01e-03 +2022-05-03 13:57:07,307 INFO [train.py:715] (6/8) Epoch 0, batch 10050, loss[loss=0.2559, simple_loss=0.2949, pruned_loss=0.1085, over 4754.00 frames.], tot_loss[loss=0.2542, simple_loss=0.2956, pruned_loss=0.1064, over 971960.01 frames.], batch size: 19, lr: 2.00e-03 +2022-05-03 13:57:47,859 INFO [train.py:715] (6/8) Epoch 0, batch 10100, loss[loss=0.2826, simple_loss=0.3048, pruned_loss=0.1302, over 4974.00 frames.], tot_loss[loss=0.2529, simple_loss=0.2944, pruned_loss=0.1057, over 972469.60 frames.], batch size: 39, lr: 2.00e-03 +2022-05-03 13:58:27,704 INFO [train.py:715] (6/8) Epoch 0, batch 10150, loss[loss=0.1727, simple_loss=0.232, pruned_loss=0.05667, over 4991.00 frames.], tot_loss[loss=0.2517, simple_loss=0.2934, pruned_loss=0.105, over 972163.69 frames.], batch size: 16, lr: 1.99e-03 +2022-05-03 13:59:07,285 INFO [train.py:715] (6/8) Epoch 0, batch 10200, loss[loss=0.1866, simple_loss=0.257, pruned_loss=0.05807, over 4889.00 frames.], tot_loss[loss=0.2533, simple_loss=0.2948, pruned_loss=0.1058, over 972934.50 frames.], batch size: 16, lr: 1.99e-03 +2022-05-03 13:59:47,202 INFO [train.py:715] (6/8) Epoch 0, batch 10250, loss[loss=0.2958, simple_loss=0.3148, pruned_loss=0.1384, over 4847.00 frames.], tot_loss[loss=0.2517, simple_loss=0.294, pruned_loss=0.1047, over 973371.60 frames.], batch size: 32, lr: 1.99e-03 +2022-05-03 14:00:28,079 INFO [train.py:715] (6/8) Epoch 0, batch 10300, loss[loss=0.2546, simple_loss=0.3015, pruned_loss=0.1038, over 4791.00 frames.], tot_loss[loss=0.2498, simple_loss=0.2927, pruned_loss=0.1034, over 973047.67 frames.], batch size: 24, lr: 1.98e-03 +2022-05-03 14:01:08,334 INFO [train.py:715] (6/8) Epoch 0, batch 10350, loss[loss=0.3044, 
simple_loss=0.3318, pruned_loss=0.1385, over 4972.00 frames.], tot_loss[loss=0.2505, simple_loss=0.2933, pruned_loss=0.1039, over 973036.67 frames.], batch size: 25, lr: 1.98e-03 +2022-05-03 14:01:47,792 INFO [train.py:715] (6/8) Epoch 0, batch 10400, loss[loss=0.2622, simple_loss=0.3056, pruned_loss=0.1094, over 4688.00 frames.], tot_loss[loss=0.2501, simple_loss=0.2929, pruned_loss=0.1036, over 972369.88 frames.], batch size: 15, lr: 1.97e-03 +2022-05-03 14:02:28,430 INFO [train.py:715] (6/8) Epoch 0, batch 10450, loss[loss=0.2516, simple_loss=0.293, pruned_loss=0.1051, over 4924.00 frames.], tot_loss[loss=0.2498, simple_loss=0.2928, pruned_loss=0.1034, over 973167.12 frames.], batch size: 18, lr: 1.97e-03 +2022-05-03 14:03:09,164 INFO [train.py:715] (6/8) Epoch 0, batch 10500, loss[loss=0.2917, simple_loss=0.3219, pruned_loss=0.1308, over 4948.00 frames.], tot_loss[loss=0.2507, simple_loss=0.2932, pruned_loss=0.1041, over 973184.44 frames.], batch size: 39, lr: 1.97e-03 +2022-05-03 14:03:48,869 INFO [train.py:715] (6/8) Epoch 0, batch 10550, loss[loss=0.2648, simple_loss=0.3021, pruned_loss=0.1137, over 4862.00 frames.], tot_loss[loss=0.2501, simple_loss=0.2931, pruned_loss=0.1036, over 972408.69 frames.], batch size: 20, lr: 1.96e-03 +2022-05-03 14:04:28,879 INFO [train.py:715] (6/8) Epoch 0, batch 10600, loss[loss=0.2465, simple_loss=0.2894, pruned_loss=0.1018, over 4844.00 frames.], tot_loss[loss=0.249, simple_loss=0.2922, pruned_loss=0.1029, over 972887.55 frames.], batch size: 34, lr: 1.96e-03 +2022-05-03 14:05:09,750 INFO [train.py:715] (6/8) Epoch 0, batch 10650, loss[loss=0.1933, simple_loss=0.2441, pruned_loss=0.07121, over 4732.00 frames.], tot_loss[loss=0.2478, simple_loss=0.2911, pruned_loss=0.1023, over 972460.90 frames.], batch size: 12, lr: 1.96e-03 +2022-05-03 14:05:49,660 INFO [train.py:715] (6/8) Epoch 0, batch 10700, loss[loss=0.2362, simple_loss=0.2838, pruned_loss=0.09429, over 4909.00 frames.], tot_loss[loss=0.249, simple_loss=0.2921, pruned_loss=0.103, over 972005.78 frames.], batch size: 23, lr: 1.95e-03 +2022-05-03 14:06:29,549 INFO [train.py:715] (6/8) Epoch 0, batch 10750, loss[loss=0.2092, simple_loss=0.2591, pruned_loss=0.07964, over 4788.00 frames.], tot_loss[loss=0.2481, simple_loss=0.2915, pruned_loss=0.1023, over 972642.17 frames.], batch size: 17, lr: 1.95e-03 +2022-05-03 14:07:09,725 INFO [train.py:715] (6/8) Epoch 0, batch 10800, loss[loss=0.2112, simple_loss=0.2677, pruned_loss=0.07739, over 4773.00 frames.], tot_loss[loss=0.2467, simple_loss=0.2906, pruned_loss=0.1013, over 972974.33 frames.], batch size: 17, lr: 1.94e-03 +2022-05-03 14:07:50,569 INFO [train.py:715] (6/8) Epoch 0, batch 10850, loss[loss=0.2766, simple_loss=0.3114, pruned_loss=0.1209, over 4976.00 frames.], tot_loss[loss=0.2467, simple_loss=0.2912, pruned_loss=0.1011, over 973393.47 frames.], batch size: 25, lr: 1.94e-03 +2022-05-03 14:08:30,099 INFO [train.py:715] (6/8) Epoch 0, batch 10900, loss[loss=0.2599, simple_loss=0.3093, pruned_loss=0.1053, over 4930.00 frames.], tot_loss[loss=0.2465, simple_loss=0.2912, pruned_loss=0.1009, over 973288.45 frames.], batch size: 23, lr: 1.94e-03 +2022-05-03 14:09:10,040 INFO [train.py:715] (6/8) Epoch 0, batch 10950, loss[loss=0.2145, simple_loss=0.2521, pruned_loss=0.08838, over 4974.00 frames.], tot_loss[loss=0.2459, simple_loss=0.2906, pruned_loss=0.1006, over 972838.31 frames.], batch size: 15, lr: 1.93e-03 +2022-05-03 14:09:50,815 INFO [train.py:715] (6/8) Epoch 0, batch 11000, loss[loss=0.2619, simple_loss=0.3084, pruned_loss=0.1077, 
over 4923.00 frames.], tot_loss[loss=0.2443, simple_loss=0.2894, pruned_loss=0.09957, over 973010.58 frames.], batch size: 39, lr: 1.93e-03 +2022-05-03 14:10:31,100 INFO [train.py:715] (6/8) Epoch 0, batch 11050, loss[loss=0.2106, simple_loss=0.2681, pruned_loss=0.0765, over 4982.00 frames.], tot_loss[loss=0.2442, simple_loss=0.2895, pruned_loss=0.09947, over 973360.51 frames.], batch size: 31, lr: 1.93e-03 +2022-05-03 14:11:11,145 INFO [train.py:715] (6/8) Epoch 0, batch 11100, loss[loss=0.2444, simple_loss=0.2847, pruned_loss=0.1021, over 4688.00 frames.], tot_loss[loss=0.2442, simple_loss=0.2899, pruned_loss=0.09927, over 973122.72 frames.], batch size: 15, lr: 1.92e-03 +2022-05-03 14:11:51,019 INFO [train.py:715] (6/8) Epoch 0, batch 11150, loss[loss=0.2501, simple_loss=0.3099, pruned_loss=0.09512, over 4964.00 frames.], tot_loss[loss=0.2453, simple_loss=0.2903, pruned_loss=0.1001, over 972425.66 frames.], batch size: 15, lr: 1.92e-03 +2022-05-03 14:12:31,468 INFO [train.py:715] (6/8) Epoch 0, batch 11200, loss[loss=0.2213, simple_loss=0.2757, pruned_loss=0.0835, over 4916.00 frames.], tot_loss[loss=0.2441, simple_loss=0.2892, pruned_loss=0.09946, over 971801.75 frames.], batch size: 18, lr: 1.92e-03 +2022-05-03 14:13:10,946 INFO [train.py:715] (6/8) Epoch 0, batch 11250, loss[loss=0.2282, simple_loss=0.2762, pruned_loss=0.09009, over 4794.00 frames.], tot_loss[loss=0.2424, simple_loss=0.2882, pruned_loss=0.09832, over 971990.00 frames.], batch size: 21, lr: 1.91e-03 +2022-05-03 14:13:51,034 INFO [train.py:715] (6/8) Epoch 0, batch 11300, loss[loss=0.2226, simple_loss=0.2807, pruned_loss=0.08227, over 4792.00 frames.], tot_loss[loss=0.2419, simple_loss=0.2877, pruned_loss=0.09809, over 971806.21 frames.], batch size: 14, lr: 1.91e-03 +2022-05-03 14:14:31,686 INFO [train.py:715] (6/8) Epoch 0, batch 11350, loss[loss=0.2185, simple_loss=0.2699, pruned_loss=0.08353, over 4983.00 frames.], tot_loss[loss=0.2432, simple_loss=0.2885, pruned_loss=0.0989, over 971929.97 frames.], batch size: 14, lr: 1.90e-03 +2022-05-03 14:15:12,114 INFO [train.py:715] (6/8) Epoch 0, batch 11400, loss[loss=0.2272, simple_loss=0.276, pruned_loss=0.08917, over 4944.00 frames.], tot_loss[loss=0.2447, simple_loss=0.2895, pruned_loss=0.0999, over 971828.29 frames.], batch size: 29, lr: 1.90e-03 +2022-05-03 14:15:51,360 INFO [train.py:715] (6/8) Epoch 0, batch 11450, loss[loss=0.2235, simple_loss=0.2644, pruned_loss=0.09126, over 4844.00 frames.], tot_loss[loss=0.2447, simple_loss=0.2895, pruned_loss=0.09994, over 971970.45 frames.], batch size: 13, lr: 1.90e-03 +2022-05-03 14:16:32,012 INFO [train.py:715] (6/8) Epoch 0, batch 11500, loss[loss=0.1954, simple_loss=0.2533, pruned_loss=0.06877, over 4975.00 frames.], tot_loss[loss=0.2441, simple_loss=0.289, pruned_loss=0.09964, over 971652.46 frames.], batch size: 14, lr: 1.89e-03 +2022-05-03 14:17:12,409 INFO [train.py:715] (6/8) Epoch 0, batch 11550, loss[loss=0.22, simple_loss=0.2732, pruned_loss=0.08335, over 4816.00 frames.], tot_loss[loss=0.2436, simple_loss=0.2887, pruned_loss=0.09925, over 971469.57 frames.], batch size: 25, lr: 1.89e-03 +2022-05-03 14:17:52,479 INFO [train.py:715] (6/8) Epoch 0, batch 11600, loss[loss=0.2678, simple_loss=0.3027, pruned_loss=0.1164, over 4986.00 frames.], tot_loss[loss=0.2437, simple_loss=0.2886, pruned_loss=0.09946, over 971549.53 frames.], batch size: 31, lr: 1.89e-03 +2022-05-03 14:18:32,575 INFO [train.py:715] (6/8) Epoch 0, batch 11650, loss[loss=0.2537, simple_loss=0.2879, pruned_loss=0.1097, over 4860.00 frames.], 
tot_loss[loss=0.2441, simple_loss=0.289, pruned_loss=0.09963, over 972288.70 frames.], batch size: 20, lr: 1.88e-03 +2022-05-03 14:19:13,487 INFO [train.py:715] (6/8) Epoch 0, batch 11700, loss[loss=0.2337, simple_loss=0.2822, pruned_loss=0.09255, over 4939.00 frames.], tot_loss[loss=0.2438, simple_loss=0.289, pruned_loss=0.09937, over 971359.96 frames.], batch size: 23, lr: 1.88e-03 +2022-05-03 14:19:53,843 INFO [train.py:715] (6/8) Epoch 0, batch 11750, loss[loss=0.2324, simple_loss=0.2817, pruned_loss=0.09151, over 4946.00 frames.], tot_loss[loss=0.2447, simple_loss=0.2898, pruned_loss=0.0998, over 971736.78 frames.], batch size: 29, lr: 1.88e-03 +2022-05-03 14:20:34,220 INFO [train.py:715] (6/8) Epoch 0, batch 11800, loss[loss=0.3032, simple_loss=0.3322, pruned_loss=0.1371, over 4852.00 frames.], tot_loss[loss=0.2446, simple_loss=0.2896, pruned_loss=0.09978, over 971673.75 frames.], batch size: 16, lr: 1.87e-03 +2022-05-03 14:21:14,576 INFO [train.py:715] (6/8) Epoch 0, batch 11850, loss[loss=0.2196, simple_loss=0.2712, pruned_loss=0.08397, over 4701.00 frames.], tot_loss[loss=0.2442, simple_loss=0.2895, pruned_loss=0.0994, over 971308.14 frames.], batch size: 15, lr: 1.87e-03 +2022-05-03 14:21:55,694 INFO [train.py:715] (6/8) Epoch 0, batch 11900, loss[loss=0.2054, simple_loss=0.2497, pruned_loss=0.08059, over 4806.00 frames.], tot_loss[loss=0.2444, simple_loss=0.29, pruned_loss=0.09939, over 971746.45 frames.], batch size: 12, lr: 1.87e-03 +2022-05-03 14:22:35,872 INFO [train.py:715] (6/8) Epoch 0, batch 11950, loss[loss=0.1881, simple_loss=0.2554, pruned_loss=0.06034, over 4832.00 frames.], tot_loss[loss=0.2446, simple_loss=0.2903, pruned_loss=0.09945, over 971843.81 frames.], batch size: 12, lr: 1.86e-03 +2022-05-03 14:23:15,990 INFO [train.py:715] (6/8) Epoch 0, batch 12000, loss[loss=0.22, simple_loss=0.2602, pruned_loss=0.08991, over 4784.00 frames.], tot_loss[loss=0.2448, simple_loss=0.2903, pruned_loss=0.09968, over 971594.71 frames.], batch size: 14, lr: 1.86e-03 +2022-05-03 14:23:15,991 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 14:23:31,274 INFO [train.py:742] (6/8) Epoch 0, validation: loss=0.1516, simple_loss=0.2368, pruned_loss=0.03315, over 914524.00 frames. 
+2022-05-03 14:24:11,266 INFO [train.py:715] (6/8) Epoch 0, batch 12050, loss[loss=0.2789, simple_loss=0.3185, pruned_loss=0.1196, over 4799.00 frames.], tot_loss[loss=0.2457, simple_loss=0.2908, pruned_loss=0.1003, over 971764.19 frames.], batch size: 25, lr: 1.86e-03 +2022-05-03 14:24:51,297 INFO [train.py:715] (6/8) Epoch 0, batch 12100, loss[loss=0.2528, simple_loss=0.3049, pruned_loss=0.1003, over 4820.00 frames.], tot_loss[loss=0.2456, simple_loss=0.2904, pruned_loss=0.1005, over 971559.85 frames.], batch size: 27, lr: 1.85e-03 +2022-05-03 14:25:31,595 INFO [train.py:715] (6/8) Epoch 0, batch 12150, loss[loss=0.216, simple_loss=0.2666, pruned_loss=0.08269, over 4921.00 frames.], tot_loss[loss=0.2463, simple_loss=0.291, pruned_loss=0.1008, over 971713.42 frames.], batch size: 18, lr: 1.85e-03 +2022-05-03 14:26:11,157 INFO [train.py:715] (6/8) Epoch 0, batch 12200, loss[loss=0.2285, simple_loss=0.2752, pruned_loss=0.09083, over 4781.00 frames.], tot_loss[loss=0.2448, simple_loss=0.2905, pruned_loss=0.09957, over 970802.86 frames.], batch size: 14, lr: 1.85e-03 +2022-05-03 14:26:51,065 INFO [train.py:715] (6/8) Epoch 0, batch 12250, loss[loss=0.2147, simple_loss=0.2729, pruned_loss=0.0783, over 4903.00 frames.], tot_loss[loss=0.2438, simple_loss=0.2899, pruned_loss=0.09889, over 971048.01 frames.], batch size: 17, lr: 1.84e-03 +2022-05-03 14:27:31,552 INFO [train.py:715] (6/8) Epoch 0, batch 12300, loss[loss=0.2278, simple_loss=0.2884, pruned_loss=0.08364, over 4909.00 frames.], tot_loss[loss=0.2441, simple_loss=0.2903, pruned_loss=0.09899, over 971666.08 frames.], batch size: 19, lr: 1.84e-03 +2022-05-03 14:28:10,861 INFO [train.py:715] (6/8) Epoch 0, batch 12350, loss[loss=0.2708, simple_loss=0.3118, pruned_loss=0.1149, over 4845.00 frames.], tot_loss[loss=0.245, simple_loss=0.2911, pruned_loss=0.09949, over 971711.81 frames.], batch size: 30, lr: 1.84e-03 +2022-05-03 14:28:50,835 INFO [train.py:715] (6/8) Epoch 0, batch 12400, loss[loss=0.2142, simple_loss=0.2625, pruned_loss=0.08294, over 4772.00 frames.], tot_loss[loss=0.242, simple_loss=0.289, pruned_loss=0.0975, over 971619.25 frames.], batch size: 14, lr: 1.83e-03 +2022-05-03 14:29:31,159 INFO [train.py:715] (6/8) Epoch 0, batch 12450, loss[loss=0.2323, simple_loss=0.2868, pruned_loss=0.08894, over 4695.00 frames.], tot_loss[loss=0.2423, simple_loss=0.289, pruned_loss=0.09783, over 971843.30 frames.], batch size: 15, lr: 1.83e-03 +2022-05-03 14:30:11,386 INFO [train.py:715] (6/8) Epoch 0, batch 12500, loss[loss=0.243, simple_loss=0.2937, pruned_loss=0.09622, over 4884.00 frames.], tot_loss[loss=0.2422, simple_loss=0.2892, pruned_loss=0.09762, over 972333.24 frames.], batch size: 16, lr: 1.83e-03 +2022-05-03 14:30:50,310 INFO [train.py:715] (6/8) Epoch 0, batch 12550, loss[loss=0.2584, simple_loss=0.2997, pruned_loss=0.1086, over 4843.00 frames.], tot_loss[loss=0.2435, simple_loss=0.2904, pruned_loss=0.09828, over 971950.99 frames.], batch size: 15, lr: 1.83e-03 +2022-05-03 14:31:30,337 INFO [train.py:715] (6/8) Epoch 0, batch 12600, loss[loss=0.2192, simple_loss=0.2714, pruned_loss=0.08355, over 4979.00 frames.], tot_loss[loss=0.2407, simple_loss=0.2878, pruned_loss=0.09683, over 972779.14 frames.], batch size: 28, lr: 1.82e-03 +2022-05-03 14:32:11,362 INFO [train.py:715] (6/8) Epoch 0, batch 12650, loss[loss=0.2412, simple_loss=0.2896, pruned_loss=0.09639, over 4933.00 frames.], tot_loss[loss=0.2407, simple_loss=0.2878, pruned_loss=0.09675, over 972110.20 frames.], batch size: 21, lr: 1.82e-03 +2022-05-03 14:32:51,086 INFO 
[train.py:715] (6/8) Epoch 0, batch 12700, loss[loss=0.2699, simple_loss=0.3141, pruned_loss=0.1128, over 4767.00 frames.], tot_loss[loss=0.2397, simple_loss=0.2872, pruned_loss=0.09614, over 972188.37 frames.], batch size: 18, lr: 1.82e-03 +2022-05-03 14:33:30,734 INFO [train.py:715] (6/8) Epoch 0, batch 12750, loss[loss=0.1964, simple_loss=0.2512, pruned_loss=0.07074, over 4984.00 frames.], tot_loss[loss=0.2386, simple_loss=0.2862, pruned_loss=0.09555, over 971664.93 frames.], batch size: 25, lr: 1.81e-03 +2022-05-03 14:34:11,175 INFO [train.py:715] (6/8) Epoch 0, batch 12800, loss[loss=0.248, simple_loss=0.3097, pruned_loss=0.09315, over 4864.00 frames.], tot_loss[loss=0.2385, simple_loss=0.2865, pruned_loss=0.09529, over 972161.73 frames.], batch size: 16, lr: 1.81e-03 +2022-05-03 14:34:51,655 INFO [train.py:715] (6/8) Epoch 0, batch 12850, loss[loss=0.2768, simple_loss=0.3205, pruned_loss=0.1165, over 4782.00 frames.], tot_loss[loss=0.2373, simple_loss=0.2856, pruned_loss=0.09455, over 973285.52 frames.], batch size: 17, lr: 1.81e-03 +2022-05-03 14:35:31,484 INFO [train.py:715] (6/8) Epoch 0, batch 12900, loss[loss=0.2488, simple_loss=0.2932, pruned_loss=0.1022, over 4748.00 frames.], tot_loss[loss=0.2379, simple_loss=0.2856, pruned_loss=0.09508, over 973044.87 frames.], batch size: 19, lr: 1.80e-03 +2022-05-03 14:36:11,742 INFO [train.py:715] (6/8) Epoch 0, batch 12950, loss[loss=0.1677, simple_loss=0.2214, pruned_loss=0.05707, over 4819.00 frames.], tot_loss[loss=0.2371, simple_loss=0.2849, pruned_loss=0.0947, over 972883.95 frames.], batch size: 12, lr: 1.80e-03 +2022-05-03 14:36:52,263 INFO [train.py:715] (6/8) Epoch 0, batch 13000, loss[loss=0.1987, simple_loss=0.2625, pruned_loss=0.06741, over 4922.00 frames.], tot_loss[loss=0.2375, simple_loss=0.2854, pruned_loss=0.09481, over 972130.59 frames.], batch size: 23, lr: 1.80e-03 +2022-05-03 14:37:32,727 INFO [train.py:715] (6/8) Epoch 0, batch 13050, loss[loss=0.2711, simple_loss=0.3166, pruned_loss=0.1128, over 4808.00 frames.], tot_loss[loss=0.2403, simple_loss=0.2874, pruned_loss=0.09661, over 971832.44 frames.], batch size: 21, lr: 1.79e-03 +2022-05-03 14:38:12,069 INFO [train.py:715] (6/8) Epoch 0, batch 13100, loss[loss=0.2066, simple_loss=0.2553, pruned_loss=0.07891, over 4645.00 frames.], tot_loss[loss=0.24, simple_loss=0.287, pruned_loss=0.09655, over 971806.63 frames.], batch size: 13, lr: 1.79e-03 +2022-05-03 14:38:52,502 INFO [train.py:715] (6/8) Epoch 0, batch 13150, loss[loss=0.2424, simple_loss=0.2872, pruned_loss=0.09881, over 4863.00 frames.], tot_loss[loss=0.2403, simple_loss=0.2873, pruned_loss=0.09667, over 971348.08 frames.], batch size: 30, lr: 1.79e-03 +2022-05-03 14:39:32,993 INFO [train.py:715] (6/8) Epoch 0, batch 13200, loss[loss=0.1892, simple_loss=0.2572, pruned_loss=0.06056, over 4951.00 frames.], tot_loss[loss=0.2416, simple_loss=0.2886, pruned_loss=0.09735, over 972017.81 frames.], batch size: 29, lr: 1.79e-03 +2022-05-03 14:40:12,566 INFO [train.py:715] (6/8) Epoch 0, batch 13250, loss[loss=0.2179, simple_loss=0.2708, pruned_loss=0.08251, over 4908.00 frames.], tot_loss[loss=0.2412, simple_loss=0.288, pruned_loss=0.0972, over 972178.82 frames.], batch size: 19, lr: 1.78e-03 +2022-05-03 14:40:52,444 INFO [train.py:715] (6/8) Epoch 0, batch 13300, loss[loss=0.2705, simple_loss=0.318, pruned_loss=0.1116, over 4658.00 frames.], tot_loss[loss=0.2393, simple_loss=0.2867, pruned_loss=0.09599, over 971724.63 frames.], batch size: 13, lr: 1.78e-03 +2022-05-03 14:41:32,821 INFO [train.py:715] (6/8) Epoch 0, 
batch 13350, loss[loss=0.2461, simple_loss=0.2858, pruned_loss=0.1032, over 4841.00 frames.], tot_loss[loss=0.2385, simple_loss=0.2859, pruned_loss=0.09557, over 971527.29 frames.], batch size: 13, lr: 1.78e-03 +2022-05-03 14:42:13,150 INFO [train.py:715] (6/8) Epoch 0, batch 13400, loss[loss=0.2008, simple_loss=0.2536, pruned_loss=0.07399, over 4962.00 frames.], tot_loss[loss=0.2376, simple_loss=0.2855, pruned_loss=0.09487, over 972831.53 frames.], batch size: 28, lr: 1.77e-03 +2022-05-03 14:42:52,946 INFO [train.py:715] (6/8) Epoch 0, batch 13450, loss[loss=0.2436, simple_loss=0.2888, pruned_loss=0.09924, over 4924.00 frames.], tot_loss[loss=0.2368, simple_loss=0.2852, pruned_loss=0.09424, over 971708.80 frames.], batch size: 23, lr: 1.77e-03 +2022-05-03 14:43:33,177 INFO [train.py:715] (6/8) Epoch 0, batch 13500, loss[loss=0.2336, simple_loss=0.282, pruned_loss=0.09262, over 4848.00 frames.], tot_loss[loss=0.2374, simple_loss=0.2856, pruned_loss=0.09463, over 971279.80 frames.], batch size: 32, lr: 1.77e-03 +2022-05-03 14:44:13,340 INFO [train.py:715] (6/8) Epoch 0, batch 13550, loss[loss=0.242, simple_loss=0.29, pruned_loss=0.09696, over 4822.00 frames.], tot_loss[loss=0.2381, simple_loss=0.286, pruned_loss=0.09507, over 971351.38 frames.], batch size: 26, lr: 1.77e-03 +2022-05-03 14:44:52,793 INFO [train.py:715] (6/8) Epoch 0, batch 13600, loss[loss=0.2491, simple_loss=0.2841, pruned_loss=0.1071, over 4880.00 frames.], tot_loss[loss=0.239, simple_loss=0.2869, pruned_loss=0.09554, over 971248.94 frames.], batch size: 22, lr: 1.76e-03 +2022-05-03 14:45:32,771 INFO [train.py:715] (6/8) Epoch 0, batch 13650, loss[loss=0.3172, simple_loss=0.3352, pruned_loss=0.1496, over 4924.00 frames.], tot_loss[loss=0.239, simple_loss=0.2866, pruned_loss=0.09566, over 972309.74 frames.], batch size: 17, lr: 1.76e-03 +2022-05-03 14:46:12,698 INFO [train.py:715] (6/8) Epoch 0, batch 13700, loss[loss=0.2633, simple_loss=0.3029, pruned_loss=0.1118, over 4898.00 frames.], tot_loss[loss=0.2382, simple_loss=0.2859, pruned_loss=0.09526, over 972433.39 frames.], batch size: 22, lr: 1.76e-03 +2022-05-03 14:46:52,708 INFO [train.py:715] (6/8) Epoch 0, batch 13750, loss[loss=0.2387, simple_loss=0.2913, pruned_loss=0.09306, over 4748.00 frames.], tot_loss[loss=0.2376, simple_loss=0.2856, pruned_loss=0.09484, over 972830.35 frames.], batch size: 19, lr: 1.75e-03 +2022-05-03 14:47:32,539 INFO [train.py:715] (6/8) Epoch 0, batch 13800, loss[loss=0.2262, simple_loss=0.2782, pruned_loss=0.08708, over 4798.00 frames.], tot_loss[loss=0.2374, simple_loss=0.2855, pruned_loss=0.09462, over 972238.93 frames.], batch size: 24, lr: 1.75e-03 +2022-05-03 14:48:12,868 INFO [train.py:715] (6/8) Epoch 0, batch 13850, loss[loss=0.1901, simple_loss=0.2498, pruned_loss=0.06516, over 4743.00 frames.], tot_loss[loss=0.2373, simple_loss=0.2858, pruned_loss=0.09442, over 972606.84 frames.], batch size: 19, lr: 1.75e-03 +2022-05-03 14:48:53,749 INFO [train.py:715] (6/8) Epoch 0, batch 13900, loss[loss=0.2145, simple_loss=0.2733, pruned_loss=0.07783, over 4760.00 frames.], tot_loss[loss=0.236, simple_loss=0.2847, pruned_loss=0.09368, over 972504.76 frames.], batch size: 19, lr: 1.75e-03 +2022-05-03 14:49:33,798 INFO [train.py:715] (6/8) Epoch 0, batch 13950, loss[loss=0.274, simple_loss=0.3083, pruned_loss=0.1198, over 4969.00 frames.], tot_loss[loss=0.2363, simple_loss=0.2845, pruned_loss=0.09405, over 972517.68 frames.], batch size: 15, lr: 1.74e-03 +2022-05-03 14:50:14,390 INFO [train.py:715] (6/8) Epoch 0, batch 14000, loss[loss=0.2547, 
simple_loss=0.2926, pruned_loss=0.1084, over 4848.00 frames.], tot_loss[loss=0.2381, simple_loss=0.2864, pruned_loss=0.09492, over 972129.57 frames.], batch size: 20, lr: 1.74e-03 +2022-05-03 14:50:55,255 INFO [train.py:715] (6/8) Epoch 0, batch 14050, loss[loss=0.234, simple_loss=0.292, pruned_loss=0.08799, over 4806.00 frames.], tot_loss[loss=0.2383, simple_loss=0.2866, pruned_loss=0.09497, over 971453.28 frames.], batch size: 25, lr: 1.74e-03 +2022-05-03 14:51:35,710 INFO [train.py:715] (6/8) Epoch 0, batch 14100, loss[loss=0.2747, simple_loss=0.3142, pruned_loss=0.1176, over 4954.00 frames.], tot_loss[loss=0.2383, simple_loss=0.2862, pruned_loss=0.09517, over 971377.99 frames.], batch size: 15, lr: 1.73e-03 +2022-05-03 14:52:16,218 INFO [train.py:715] (6/8) Epoch 0, batch 14150, loss[loss=0.1876, simple_loss=0.2458, pruned_loss=0.06464, over 4982.00 frames.], tot_loss[loss=0.2378, simple_loss=0.286, pruned_loss=0.09481, over 972043.99 frames.], batch size: 25, lr: 1.73e-03 +2022-05-03 14:52:56,875 INFO [train.py:715] (6/8) Epoch 0, batch 14200, loss[loss=0.2179, simple_loss=0.2736, pruned_loss=0.08107, over 4866.00 frames.], tot_loss[loss=0.2364, simple_loss=0.2851, pruned_loss=0.09384, over 972021.78 frames.], batch size: 16, lr: 1.73e-03 +2022-05-03 14:53:37,725 INFO [train.py:715] (6/8) Epoch 0, batch 14250, loss[loss=0.2356, simple_loss=0.2806, pruned_loss=0.09536, over 4749.00 frames.], tot_loss[loss=0.2364, simple_loss=0.2847, pruned_loss=0.09402, over 972106.02 frames.], batch size: 16, lr: 1.73e-03 +2022-05-03 14:54:18,426 INFO [train.py:715] (6/8) Epoch 0, batch 14300, loss[loss=0.2594, simple_loss=0.2883, pruned_loss=0.1152, over 4775.00 frames.], tot_loss[loss=0.2352, simple_loss=0.2836, pruned_loss=0.09343, over 971535.57 frames.], batch size: 18, lr: 1.72e-03 +2022-05-03 14:54:59,496 INFO [train.py:715] (6/8) Epoch 0, batch 14350, loss[loss=0.2249, simple_loss=0.2793, pruned_loss=0.08524, over 4806.00 frames.], tot_loss[loss=0.2339, simple_loss=0.2828, pruned_loss=0.09249, over 971328.73 frames.], batch size: 26, lr: 1.72e-03 +2022-05-03 14:55:40,732 INFO [train.py:715] (6/8) Epoch 0, batch 14400, loss[loss=0.2292, simple_loss=0.2853, pruned_loss=0.08659, over 4858.00 frames.], tot_loss[loss=0.2337, simple_loss=0.2828, pruned_loss=0.09224, over 971388.89 frames.], batch size: 20, lr: 1.72e-03 +2022-05-03 14:56:21,203 INFO [train.py:715] (6/8) Epoch 0, batch 14450, loss[loss=0.2844, simple_loss=0.3128, pruned_loss=0.128, over 4807.00 frames.], tot_loss[loss=0.2333, simple_loss=0.283, pruned_loss=0.0918, over 971958.64 frames.], batch size: 21, lr: 1.72e-03 +2022-05-03 14:57:01,537 INFO [train.py:715] (6/8) Epoch 0, batch 14500, loss[loss=0.1952, simple_loss=0.2504, pruned_loss=0.06998, over 4790.00 frames.], tot_loss[loss=0.2333, simple_loss=0.2829, pruned_loss=0.09185, over 971793.33 frames.], batch size: 12, lr: 1.71e-03 +2022-05-03 14:57:42,207 INFO [train.py:715] (6/8) Epoch 0, batch 14550, loss[loss=0.1994, simple_loss=0.2601, pruned_loss=0.0693, over 4981.00 frames.], tot_loss[loss=0.2322, simple_loss=0.2818, pruned_loss=0.09131, over 972684.80 frames.], batch size: 25, lr: 1.71e-03 +2022-05-03 14:58:22,166 INFO [train.py:715] (6/8) Epoch 0, batch 14600, loss[loss=0.2592, simple_loss=0.3082, pruned_loss=0.1051, over 4744.00 frames.], tot_loss[loss=0.2332, simple_loss=0.2826, pruned_loss=0.09188, over 972790.43 frames.], batch size: 16, lr: 1.71e-03 +2022-05-03 14:59:01,457 INFO [train.py:715] (6/8) Epoch 0, batch 14650, loss[loss=0.2579, simple_loss=0.2898, 
pruned_loss=0.113, over 4895.00 frames.], tot_loss[loss=0.2333, simple_loss=0.2829, pruned_loss=0.09181, over 973435.62 frames.], batch size: 22, lr: 1.70e-03 +2022-05-03 14:59:41,813 INFO [train.py:715] (6/8) Epoch 0, batch 14700, loss[loss=0.2388, simple_loss=0.2858, pruned_loss=0.09585, over 4796.00 frames.], tot_loss[loss=0.2338, simple_loss=0.2832, pruned_loss=0.09223, over 972997.24 frames.], batch size: 24, lr: 1.70e-03 +2022-05-03 15:00:22,079 INFO [train.py:715] (6/8) Epoch 0, batch 14750, loss[loss=0.2118, simple_loss=0.2656, pruned_loss=0.07901, over 4904.00 frames.], tot_loss[loss=0.2352, simple_loss=0.2845, pruned_loss=0.09302, over 972676.88 frames.], batch size: 17, lr: 1.70e-03 +2022-05-03 15:01:02,120 INFO [train.py:715] (6/8) Epoch 0, batch 14800, loss[loss=0.2117, simple_loss=0.2563, pruned_loss=0.08359, over 4959.00 frames.], tot_loss[loss=0.2368, simple_loss=0.2857, pruned_loss=0.09392, over 972506.33 frames.], batch size: 35, lr: 1.70e-03 +2022-05-03 15:01:41,998 INFO [train.py:715] (6/8) Epoch 0, batch 14850, loss[loss=0.2395, simple_loss=0.2795, pruned_loss=0.0998, over 4924.00 frames.], tot_loss[loss=0.2371, simple_loss=0.286, pruned_loss=0.09412, over 972480.30 frames.], batch size: 21, lr: 1.69e-03 +2022-05-03 15:02:22,717 INFO [train.py:715] (6/8) Epoch 0, batch 14900, loss[loss=0.2343, simple_loss=0.2864, pruned_loss=0.09108, over 4856.00 frames.], tot_loss[loss=0.2343, simple_loss=0.2838, pruned_loss=0.09241, over 972246.89 frames.], batch size: 32, lr: 1.69e-03 +2022-05-03 15:03:02,607 INFO [train.py:715] (6/8) Epoch 0, batch 14950, loss[loss=0.3105, simple_loss=0.343, pruned_loss=0.139, over 4918.00 frames.], tot_loss[loss=0.2337, simple_loss=0.2839, pruned_loss=0.09176, over 972652.87 frames.], batch size: 23, lr: 1.69e-03 +2022-05-03 15:03:42,037 INFO [train.py:715] (6/8) Epoch 0, batch 15000, loss[loss=0.2128, simple_loss=0.2676, pruned_loss=0.07898, over 4886.00 frames.], tot_loss[loss=0.2332, simple_loss=0.2835, pruned_loss=0.09147, over 972832.30 frames.], batch size: 22, lr: 1.69e-03 +2022-05-03 15:03:42,038 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 15:03:53,633 INFO [train.py:742] (6/8) Epoch 0, validation: loss=0.1454, simple_loss=0.2314, pruned_loss=0.02968, over 914524.00 frames. 
+2022-05-03 15:04:32,992 INFO [train.py:715] (6/8) Epoch 0, batch 15050, loss[loss=0.2548, simple_loss=0.3003, pruned_loss=0.1046, over 4919.00 frames.], tot_loss[loss=0.2323, simple_loss=0.2828, pruned_loss=0.09087, over 972384.12 frames.], batch size: 29, lr: 1.68e-03 +2022-05-03 15:05:13,563 INFO [train.py:715] (6/8) Epoch 0, batch 15100, loss[loss=0.2809, simple_loss=0.3181, pruned_loss=0.1218, over 4856.00 frames.], tot_loss[loss=0.2327, simple_loss=0.2835, pruned_loss=0.091, over 972471.37 frames.], batch size: 20, lr: 1.68e-03 +2022-05-03 15:05:53,899 INFO [train.py:715] (6/8) Epoch 0, batch 15150, loss[loss=0.2146, simple_loss=0.2654, pruned_loss=0.08194, over 4942.00 frames.], tot_loss[loss=0.231, simple_loss=0.2818, pruned_loss=0.09005, over 972743.67 frames.], batch size: 21, lr: 1.68e-03 +2022-05-03 15:06:33,821 INFO [train.py:715] (6/8) Epoch 0, batch 15200, loss[loss=0.2561, simple_loss=0.2974, pruned_loss=0.1074, over 4827.00 frames.], tot_loss[loss=0.233, simple_loss=0.2829, pruned_loss=0.09157, over 973181.76 frames.], batch size: 26, lr: 1.68e-03 +2022-05-03 15:07:13,394 INFO [train.py:715] (6/8) Epoch 0, batch 15250, loss[loss=0.2445, simple_loss=0.2946, pruned_loss=0.09719, over 4928.00 frames.], tot_loss[loss=0.2325, simple_loss=0.2829, pruned_loss=0.09102, over 972582.82 frames.], batch size: 18, lr: 1.67e-03 +2022-05-03 15:07:53,256 INFO [train.py:715] (6/8) Epoch 0, batch 15300, loss[loss=0.2804, simple_loss=0.3177, pruned_loss=0.1215, over 4820.00 frames.], tot_loss[loss=0.2319, simple_loss=0.2827, pruned_loss=0.09054, over 972778.85 frames.], batch size: 26, lr: 1.67e-03 +2022-05-03 15:08:33,627 INFO [train.py:715] (6/8) Epoch 0, batch 15350, loss[loss=0.2093, simple_loss=0.2607, pruned_loss=0.07894, over 4972.00 frames.], tot_loss[loss=0.232, simple_loss=0.2824, pruned_loss=0.09084, over 973056.89 frames.], batch size: 35, lr: 1.67e-03 +2022-05-03 15:09:13,457 INFO [train.py:715] (6/8) Epoch 0, batch 15400, loss[loss=0.2596, simple_loss=0.2939, pruned_loss=0.1127, over 4908.00 frames.], tot_loss[loss=0.2329, simple_loss=0.2833, pruned_loss=0.0913, over 972610.67 frames.], batch size: 18, lr: 1.67e-03 +2022-05-03 15:09:53,910 INFO [train.py:715] (6/8) Epoch 0, batch 15450, loss[loss=0.2498, simple_loss=0.2978, pruned_loss=0.1009, over 4814.00 frames.], tot_loss[loss=0.2329, simple_loss=0.2833, pruned_loss=0.09124, over 973858.53 frames.], batch size: 13, lr: 1.66e-03 +2022-05-03 15:10:33,373 INFO [train.py:715] (6/8) Epoch 0, batch 15500, loss[loss=0.201, simple_loss=0.2534, pruned_loss=0.07425, over 4645.00 frames.], tot_loss[loss=0.2314, simple_loss=0.2822, pruned_loss=0.09032, over 973805.78 frames.], batch size: 13, lr: 1.66e-03 +2022-05-03 15:11:12,572 INFO [train.py:715] (6/8) Epoch 0, batch 15550, loss[loss=0.2439, simple_loss=0.2943, pruned_loss=0.09671, over 4877.00 frames.], tot_loss[loss=0.2304, simple_loss=0.2811, pruned_loss=0.08988, over 973127.52 frames.], batch size: 22, lr: 1.66e-03 +2022-05-03 15:11:52,062 INFO [train.py:715] (6/8) Epoch 0, batch 15600, loss[loss=0.229, simple_loss=0.2839, pruned_loss=0.08698, over 4944.00 frames.], tot_loss[loss=0.23, simple_loss=0.281, pruned_loss=0.08949, over 973418.19 frames.], batch size: 21, lr: 1.66e-03 +2022-05-03 15:12:31,507 INFO [train.py:715] (6/8) Epoch 0, batch 15650, loss[loss=0.2479, simple_loss=0.2766, pruned_loss=0.1096, over 4982.00 frames.], tot_loss[loss=0.2298, simple_loss=0.2804, pruned_loss=0.08963, over 972799.14 frames.], batch size: 35, lr: 1.65e-03 +2022-05-03 15:13:11,303 INFO 
[train.py:715] (6/8) Epoch 0, batch 15700, loss[loss=0.2158, simple_loss=0.2688, pruned_loss=0.08143, over 4793.00 frames.], tot_loss[loss=0.2302, simple_loss=0.2807, pruned_loss=0.08988, over 973145.70 frames.], batch size: 14, lr: 1.65e-03 +2022-05-03 15:13:50,906 INFO [train.py:715] (6/8) Epoch 0, batch 15750, loss[loss=0.2521, simple_loss=0.304, pruned_loss=0.1, over 4839.00 frames.], tot_loss[loss=0.2289, simple_loss=0.2799, pruned_loss=0.08892, over 973008.22 frames.], batch size: 15, lr: 1.65e-03 +2022-05-03 15:14:30,848 INFO [train.py:715] (6/8) Epoch 0, batch 15800, loss[loss=0.1699, simple_loss=0.233, pruned_loss=0.05339, over 4924.00 frames.], tot_loss[loss=0.2282, simple_loss=0.279, pruned_loss=0.08869, over 972467.65 frames.], batch size: 18, lr: 1.65e-03 +2022-05-03 15:15:10,667 INFO [train.py:715] (6/8) Epoch 0, batch 15850, loss[loss=0.3096, simple_loss=0.3422, pruned_loss=0.1386, over 4811.00 frames.], tot_loss[loss=0.2272, simple_loss=0.2783, pruned_loss=0.08806, over 972904.11 frames.], batch size: 15, lr: 1.65e-03 +2022-05-03 15:15:50,243 INFO [train.py:715] (6/8) Epoch 0, batch 15900, loss[loss=0.2695, simple_loss=0.3065, pruned_loss=0.1163, over 4915.00 frames.], tot_loss[loss=0.2282, simple_loss=0.2787, pruned_loss=0.08884, over 971615.39 frames.], batch size: 17, lr: 1.64e-03 +2022-05-03 15:16:30,476 INFO [train.py:715] (6/8) Epoch 0, batch 15950, loss[loss=0.2016, simple_loss=0.2518, pruned_loss=0.07569, over 4864.00 frames.], tot_loss[loss=0.2293, simple_loss=0.2799, pruned_loss=0.08933, over 972073.89 frames.], batch size: 15, lr: 1.64e-03 +2022-05-03 15:17:12,827 INFO [train.py:715] (6/8) Epoch 0, batch 16000, loss[loss=0.2782, simple_loss=0.31, pruned_loss=0.1232, over 4944.00 frames.], tot_loss[loss=0.2297, simple_loss=0.2803, pruned_loss=0.08953, over 971960.31 frames.], batch size: 21, lr: 1.64e-03 +2022-05-03 15:17:52,725 INFO [train.py:715] (6/8) Epoch 0, batch 16050, loss[loss=0.2686, simple_loss=0.3032, pruned_loss=0.117, over 4781.00 frames.], tot_loss[loss=0.2312, simple_loss=0.2817, pruned_loss=0.09039, over 971686.20 frames.], batch size: 17, lr: 1.64e-03 +2022-05-03 15:18:33,270 INFO [train.py:715] (6/8) Epoch 0, batch 16100, loss[loss=0.2407, simple_loss=0.283, pruned_loss=0.09926, over 4977.00 frames.], tot_loss[loss=0.2304, simple_loss=0.2808, pruned_loss=0.08998, over 971944.04 frames.], batch size: 35, lr: 1.63e-03 +2022-05-03 15:19:13,431 INFO [train.py:715] (6/8) Epoch 0, batch 16150, loss[loss=0.2757, simple_loss=0.3143, pruned_loss=0.1186, over 4941.00 frames.], tot_loss[loss=0.2316, simple_loss=0.2818, pruned_loss=0.09075, over 971977.93 frames.], batch size: 35, lr: 1.63e-03 +2022-05-03 15:19:52,898 INFO [train.py:715] (6/8) Epoch 0, batch 16200, loss[loss=0.1662, simple_loss=0.2376, pruned_loss=0.04747, over 4989.00 frames.], tot_loss[loss=0.23, simple_loss=0.2807, pruned_loss=0.08971, over 971880.88 frames.], batch size: 28, lr: 1.63e-03 +2022-05-03 15:20:32,322 INFO [train.py:715] (6/8) Epoch 0, batch 16250, loss[loss=0.2307, simple_loss=0.2874, pruned_loss=0.08698, over 4804.00 frames.], tot_loss[loss=0.229, simple_loss=0.2802, pruned_loss=0.08891, over 971636.91 frames.], batch size: 21, lr: 1.63e-03 +2022-05-03 15:21:12,243 INFO [train.py:715] (6/8) Epoch 0, batch 16300, loss[loss=0.2704, simple_loss=0.3038, pruned_loss=0.1185, over 4797.00 frames.], tot_loss[loss=0.2279, simple_loss=0.2793, pruned_loss=0.08829, over 972007.24 frames.], batch size: 14, lr: 1.62e-03 +2022-05-03 15:21:51,670 INFO [train.py:715] (6/8) Epoch 0, batch 
16350, loss[loss=0.2612, simple_loss=0.3137, pruned_loss=0.1043, over 4817.00 frames.], tot_loss[loss=0.2275, simple_loss=0.2787, pruned_loss=0.08818, over 971042.43 frames.], batch size: 27, lr: 1.62e-03 +2022-05-03 15:22:31,099 INFO [train.py:715] (6/8) Epoch 0, batch 16400, loss[loss=0.2361, simple_loss=0.2983, pruned_loss=0.08699, over 4842.00 frames.], tot_loss[loss=0.2265, simple_loss=0.2777, pruned_loss=0.0876, over 970871.58 frames.], batch size: 15, lr: 1.62e-03 +2022-05-03 15:23:11,047 INFO [train.py:715] (6/8) Epoch 0, batch 16450, loss[loss=0.205, simple_loss=0.2692, pruned_loss=0.07035, over 4801.00 frames.], tot_loss[loss=0.2256, simple_loss=0.2776, pruned_loss=0.08679, over 971086.64 frames.], batch size: 25, lr: 1.62e-03 +2022-05-03 15:23:51,581 INFO [train.py:715] (6/8) Epoch 0, batch 16500, loss[loss=0.2494, simple_loss=0.2955, pruned_loss=0.1017, over 4868.00 frames.], tot_loss[loss=0.2278, simple_loss=0.2791, pruned_loss=0.08819, over 970573.90 frames.], batch size: 20, lr: 1.62e-03 +2022-05-03 15:24:31,536 INFO [train.py:715] (6/8) Epoch 0, batch 16550, loss[loss=0.3019, simple_loss=0.3437, pruned_loss=0.13, over 4861.00 frames.], tot_loss[loss=0.2267, simple_loss=0.2784, pruned_loss=0.08751, over 972092.47 frames.], batch size: 20, lr: 1.61e-03 +2022-05-03 15:25:11,225 INFO [train.py:715] (6/8) Epoch 0, batch 16600, loss[loss=0.3359, simple_loss=0.3396, pruned_loss=0.1661, over 4801.00 frames.], tot_loss[loss=0.2262, simple_loss=0.278, pruned_loss=0.08723, over 972218.42 frames.], batch size: 13, lr: 1.61e-03 +2022-05-03 15:25:50,679 INFO [train.py:715] (6/8) Epoch 0, batch 16650, loss[loss=0.2414, simple_loss=0.2949, pruned_loss=0.09391, over 4832.00 frames.], tot_loss[loss=0.2273, simple_loss=0.2785, pruned_loss=0.08805, over 973347.31 frames.], batch size: 15, lr: 1.61e-03 +2022-05-03 15:26:30,541 INFO [train.py:715] (6/8) Epoch 0, batch 16700, loss[loss=0.2833, simple_loss=0.3202, pruned_loss=0.1232, over 4992.00 frames.], tot_loss[loss=0.2267, simple_loss=0.2786, pruned_loss=0.08739, over 972934.57 frames.], batch size: 14, lr: 1.61e-03 +2022-05-03 15:27:09,633 INFO [train.py:715] (6/8) Epoch 0, batch 16750, loss[loss=0.2152, simple_loss=0.273, pruned_loss=0.07869, over 4855.00 frames.], tot_loss[loss=0.228, simple_loss=0.2797, pruned_loss=0.08814, over 971710.72 frames.], batch size: 30, lr: 1.60e-03 +2022-05-03 15:27:48,780 INFO [train.py:715] (6/8) Epoch 0, batch 16800, loss[loss=0.2077, simple_loss=0.2628, pruned_loss=0.07627, over 4950.00 frames.], tot_loss[loss=0.2282, simple_loss=0.2798, pruned_loss=0.08824, over 971602.50 frames.], batch size: 29, lr: 1.60e-03 +2022-05-03 15:28:28,413 INFO [train.py:715] (6/8) Epoch 0, batch 16850, loss[loss=0.2073, simple_loss=0.2698, pruned_loss=0.07246, over 4733.00 frames.], tot_loss[loss=0.2274, simple_loss=0.2789, pruned_loss=0.08793, over 970937.37 frames.], batch size: 16, lr: 1.60e-03 +2022-05-03 15:29:08,021 INFO [train.py:715] (6/8) Epoch 0, batch 16900, loss[loss=0.1718, simple_loss=0.2373, pruned_loss=0.05318, over 4799.00 frames.], tot_loss[loss=0.2282, simple_loss=0.2796, pruned_loss=0.08838, over 971570.84 frames.], batch size: 21, lr: 1.60e-03 +2022-05-03 15:29:47,267 INFO [train.py:715] (6/8) Epoch 0, batch 16950, loss[loss=0.2313, simple_loss=0.2947, pruned_loss=0.08394, over 4963.00 frames.], tot_loss[loss=0.2299, simple_loss=0.2809, pruned_loss=0.08941, over 972164.86 frames.], batch size: 24, lr: 1.60e-03 +2022-05-03 15:30:27,236 INFO [train.py:715] (6/8) Epoch 0, batch 17000, loss[loss=0.1857, 
simple_loss=0.2386, pruned_loss=0.06642, over 4901.00 frames.], tot_loss[loss=0.2277, simple_loss=0.2791, pruned_loss=0.08811, over 972282.17 frames.], batch size: 19, lr: 1.59e-03 +2022-05-03 15:31:07,732 INFO [train.py:715] (6/8) Epoch 0, batch 17050, loss[loss=0.2294, simple_loss=0.2774, pruned_loss=0.09068, over 4768.00 frames.], tot_loss[loss=0.2263, simple_loss=0.2781, pruned_loss=0.08728, over 971307.61 frames.], batch size: 17, lr: 1.59e-03 +2022-05-03 15:31:47,487 INFO [train.py:715] (6/8) Epoch 0, batch 17100, loss[loss=0.1896, simple_loss=0.2498, pruned_loss=0.06474, over 4793.00 frames.], tot_loss[loss=0.2251, simple_loss=0.2771, pruned_loss=0.08651, over 971651.91 frames.], batch size: 18, lr: 1.59e-03 +2022-05-03 15:32:26,653 INFO [train.py:715] (6/8) Epoch 0, batch 17150, loss[loss=0.1958, simple_loss=0.2522, pruned_loss=0.06969, over 4700.00 frames.], tot_loss[loss=0.2268, simple_loss=0.2785, pruned_loss=0.08755, over 971147.67 frames.], batch size: 15, lr: 1.59e-03 +2022-05-03 15:33:06,903 INFO [train.py:715] (6/8) Epoch 0, batch 17200, loss[loss=0.2531, simple_loss=0.3083, pruned_loss=0.09895, over 4881.00 frames.], tot_loss[loss=0.2268, simple_loss=0.2784, pruned_loss=0.08759, over 972196.94 frames.], batch size: 22, lr: 1.58e-03 +2022-05-03 15:33:46,680 INFO [train.py:715] (6/8) Epoch 0, batch 17250, loss[loss=0.2029, simple_loss=0.2704, pruned_loss=0.06774, over 4825.00 frames.], tot_loss[loss=0.2243, simple_loss=0.2767, pruned_loss=0.08597, over 971650.80 frames.], batch size: 25, lr: 1.58e-03 +2022-05-03 15:34:26,235 INFO [train.py:715] (6/8) Epoch 0, batch 17300, loss[loss=0.2218, simple_loss=0.2775, pruned_loss=0.08304, over 4981.00 frames.], tot_loss[loss=0.2243, simple_loss=0.277, pruned_loss=0.0858, over 972147.81 frames.], batch size: 25, lr: 1.58e-03 +2022-05-03 15:35:06,294 INFO [train.py:715] (6/8) Epoch 0, batch 17350, loss[loss=0.2396, simple_loss=0.2861, pruned_loss=0.09654, over 4865.00 frames.], tot_loss[loss=0.2248, simple_loss=0.2775, pruned_loss=0.08603, over 971912.93 frames.], batch size: 32, lr: 1.58e-03 +2022-05-03 15:35:46,527 INFO [train.py:715] (6/8) Epoch 0, batch 17400, loss[loss=0.2293, simple_loss=0.2953, pruned_loss=0.08166, over 4817.00 frames.], tot_loss[loss=0.2254, simple_loss=0.2778, pruned_loss=0.08646, over 972272.17 frames.], batch size: 26, lr: 1.58e-03 +2022-05-03 15:36:26,425 INFO [train.py:715] (6/8) Epoch 0, batch 17450, loss[loss=0.203, simple_loss=0.266, pruned_loss=0.07003, over 4893.00 frames.], tot_loss[loss=0.2256, simple_loss=0.2778, pruned_loss=0.08674, over 972494.27 frames.], batch size: 22, lr: 1.57e-03 +2022-05-03 15:37:07,037 INFO [train.py:715] (6/8) Epoch 0, batch 17500, loss[loss=0.2044, simple_loss=0.2709, pruned_loss=0.06898, over 4983.00 frames.], tot_loss[loss=0.2254, simple_loss=0.278, pruned_loss=0.08634, over 972976.92 frames.], batch size: 15, lr: 1.57e-03 +2022-05-03 15:37:47,464 INFO [train.py:715] (6/8) Epoch 0, batch 17550, loss[loss=0.1941, simple_loss=0.253, pruned_loss=0.0676, over 4955.00 frames.], tot_loss[loss=0.2253, simple_loss=0.278, pruned_loss=0.08632, over 973208.07 frames.], batch size: 24, lr: 1.57e-03 +2022-05-03 15:38:27,021 INFO [train.py:715] (6/8) Epoch 0, batch 17600, loss[loss=0.2417, simple_loss=0.2902, pruned_loss=0.09657, over 4891.00 frames.], tot_loss[loss=0.2236, simple_loss=0.2765, pruned_loss=0.08536, over 973329.49 frames.], batch size: 19, lr: 1.57e-03 +2022-05-03 15:39:06,939 INFO [train.py:715] (6/8) Epoch 0, batch 17650, loss[loss=0.2275, simple_loss=0.2851, 
pruned_loss=0.08496, over 4865.00 frames.], tot_loss[loss=0.2233, simple_loss=0.2762, pruned_loss=0.0852, over 972757.71 frames.], batch size: 16, lr: 1.57e-03 +2022-05-03 15:39:47,483 INFO [train.py:715] (6/8) Epoch 0, batch 17700, loss[loss=0.2381, simple_loss=0.2902, pruned_loss=0.093, over 4695.00 frames.], tot_loss[loss=0.2227, simple_loss=0.2756, pruned_loss=0.08495, over 972195.19 frames.], batch size: 15, lr: 1.56e-03 +2022-05-03 15:40:27,387 INFO [train.py:715] (6/8) Epoch 0, batch 17750, loss[loss=0.1883, simple_loss=0.2582, pruned_loss=0.05922, over 4871.00 frames.], tot_loss[loss=0.2232, simple_loss=0.2758, pruned_loss=0.08532, over 971751.38 frames.], batch size: 20, lr: 1.56e-03 +2022-05-03 15:41:07,057 INFO [train.py:715] (6/8) Epoch 0, batch 17800, loss[loss=0.2401, simple_loss=0.2883, pruned_loss=0.09591, over 4870.00 frames.], tot_loss[loss=0.2234, simple_loss=0.2762, pruned_loss=0.08528, over 972515.75 frames.], batch size: 30, lr: 1.56e-03 +2022-05-03 15:41:47,857 INFO [train.py:715] (6/8) Epoch 0, batch 17850, loss[loss=0.2005, simple_loss=0.2576, pruned_loss=0.0717, over 4989.00 frames.], tot_loss[loss=0.223, simple_loss=0.2757, pruned_loss=0.08512, over 972502.63 frames.], batch size: 14, lr: 1.56e-03 +2022-05-03 15:42:28,486 INFO [train.py:715] (6/8) Epoch 0, batch 17900, loss[loss=0.2919, simple_loss=0.3232, pruned_loss=0.1303, over 4901.00 frames.], tot_loss[loss=0.2241, simple_loss=0.2764, pruned_loss=0.08587, over 972465.40 frames.], batch size: 19, lr: 1.56e-03 +2022-05-03 15:43:07,992 INFO [train.py:715] (6/8) Epoch 0, batch 17950, loss[loss=0.2019, simple_loss=0.2646, pruned_loss=0.06958, over 4823.00 frames.], tot_loss[loss=0.2233, simple_loss=0.2758, pruned_loss=0.08538, over 972193.14 frames.], batch size: 13, lr: 1.55e-03 +2022-05-03 15:43:48,223 INFO [train.py:715] (6/8) Epoch 0, batch 18000, loss[loss=0.2185, simple_loss=0.2648, pruned_loss=0.08611, over 4930.00 frames.], tot_loss[loss=0.2256, simple_loss=0.2768, pruned_loss=0.08718, over 971752.59 frames.], batch size: 29, lr: 1.55e-03 +2022-05-03 15:43:48,224 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 15:43:57,827 INFO [train.py:742] (6/8) Epoch 0, validation: loss=0.141, simple_loss=0.228, pruned_loss=0.02706, over 914524.00 frames. 
+2022-05-03 15:44:38,094 INFO [train.py:715] (6/8) Epoch 0, batch 18050, loss[loss=0.2424, simple_loss=0.2984, pruned_loss=0.09322, over 4995.00 frames.], tot_loss[loss=0.2259, simple_loss=0.2771, pruned_loss=0.08736, over 972682.53 frames.], batch size: 16, lr: 1.55e-03 +2022-05-03 15:45:18,343 INFO [train.py:715] (6/8) Epoch 0, batch 18100, loss[loss=0.1835, simple_loss=0.2404, pruned_loss=0.06332, over 4981.00 frames.], tot_loss[loss=0.225, simple_loss=0.2764, pruned_loss=0.08681, over 973261.49 frames.], batch size: 14, lr: 1.55e-03 +2022-05-03 15:45:58,158 INFO [train.py:715] (6/8) Epoch 0, batch 18150, loss[loss=0.2275, simple_loss=0.2868, pruned_loss=0.08407, over 4805.00 frames.], tot_loss[loss=0.2234, simple_loss=0.2754, pruned_loss=0.08572, over 973065.46 frames.], batch size: 14, lr: 1.55e-03 +2022-05-03 15:46:37,569 INFO [train.py:715] (6/8) Epoch 0, batch 18200, loss[loss=0.2003, simple_loss=0.2561, pruned_loss=0.07224, over 4914.00 frames.], tot_loss[loss=0.2233, simple_loss=0.2751, pruned_loss=0.08576, over 972977.92 frames.], batch size: 19, lr: 1.54e-03 +2022-05-03 15:47:17,747 INFO [train.py:715] (6/8) Epoch 0, batch 18250, loss[loss=0.2451, simple_loss=0.2808, pruned_loss=0.1047, over 4988.00 frames.], tot_loss[loss=0.2223, simple_loss=0.2743, pruned_loss=0.08512, over 972553.10 frames.], batch size: 24, lr: 1.54e-03 +2022-05-03 15:47:59,026 INFO [train.py:715] (6/8) Epoch 0, batch 18300, loss[loss=0.1966, simple_loss=0.2559, pruned_loss=0.06866, over 4749.00 frames.], tot_loss[loss=0.224, simple_loss=0.2758, pruned_loss=0.08612, over 972913.26 frames.], batch size: 12, lr: 1.54e-03 +2022-05-03 15:48:38,806 INFO [train.py:715] (6/8) Epoch 0, batch 18350, loss[loss=0.1858, simple_loss=0.249, pruned_loss=0.06132, over 4650.00 frames.], tot_loss[loss=0.2226, simple_loss=0.275, pruned_loss=0.08508, over 972384.08 frames.], batch size: 13, lr: 1.54e-03 +2022-05-03 15:49:19,083 INFO [train.py:715] (6/8) Epoch 0, batch 18400, loss[loss=0.2368, simple_loss=0.2949, pruned_loss=0.08939, over 4963.00 frames.], tot_loss[loss=0.2241, simple_loss=0.2764, pruned_loss=0.08591, over 971784.94 frames.], batch size: 24, lr: 1.54e-03 +2022-05-03 15:49:59,581 INFO [train.py:715] (6/8) Epoch 0, batch 18450, loss[loss=0.2036, simple_loss=0.2662, pruned_loss=0.07048, over 4776.00 frames.], tot_loss[loss=0.2251, simple_loss=0.2779, pruned_loss=0.08611, over 970146.53 frames.], batch size: 14, lr: 1.53e-03 +2022-05-03 15:50:39,244 INFO [train.py:715] (6/8) Epoch 0, batch 18500, loss[loss=0.2775, simple_loss=0.3263, pruned_loss=0.1144, over 4947.00 frames.], tot_loss[loss=0.2253, simple_loss=0.2781, pruned_loss=0.08627, over 970085.57 frames.], batch size: 39, lr: 1.53e-03 +2022-05-03 15:51:19,777 INFO [train.py:715] (6/8) Epoch 0, batch 18550, loss[loss=0.2471, simple_loss=0.2863, pruned_loss=0.104, over 4779.00 frames.], tot_loss[loss=0.2255, simple_loss=0.2777, pruned_loss=0.08665, over 970714.52 frames.], batch size: 17, lr: 1.53e-03 +2022-05-03 15:52:00,085 INFO [train.py:715] (6/8) Epoch 0, batch 18600, loss[loss=0.2095, simple_loss=0.2689, pruned_loss=0.07505, over 4950.00 frames.], tot_loss[loss=0.2245, simple_loss=0.277, pruned_loss=0.08596, over 971300.49 frames.], batch size: 21, lr: 1.53e-03 +2022-05-03 15:52:40,190 INFO [train.py:715] (6/8) Epoch 0, batch 18650, loss[loss=0.222, simple_loss=0.2703, pruned_loss=0.08683, over 4884.00 frames.], tot_loss[loss=0.2228, simple_loss=0.2758, pruned_loss=0.08486, over 971892.20 frames.], batch size: 39, lr: 1.53e-03 +2022-05-03 15:53:19,601 
INFO [train.py:715] (6/8) Epoch 0, batch 18700, loss[loss=0.1908, simple_loss=0.2565, pruned_loss=0.06254, over 4908.00 frames.], tot_loss[loss=0.2239, simple_loss=0.2763, pruned_loss=0.08579, over 971483.66 frames.], batch size: 39, lr: 1.52e-03 +2022-05-03 15:53:59,909 INFO [train.py:715] (6/8) Epoch 0, batch 18750, loss[loss=0.1952, simple_loss=0.2607, pruned_loss=0.06487, over 4962.00 frames.], tot_loss[loss=0.2239, simple_loss=0.2764, pruned_loss=0.08574, over 971120.48 frames.], batch size: 14, lr: 1.52e-03 +2022-05-03 15:54:41,180 INFO [train.py:715] (6/8) Epoch 0, batch 18800, loss[loss=0.2298, simple_loss=0.2832, pruned_loss=0.08818, over 4795.00 frames.], tot_loss[loss=0.2244, simple_loss=0.2771, pruned_loss=0.08582, over 971909.69 frames.], batch size: 24, lr: 1.52e-03 +2022-05-03 15:55:20,401 INFO [train.py:715] (6/8) Epoch 0, batch 18850, loss[loss=0.2092, simple_loss=0.2637, pruned_loss=0.07737, over 4745.00 frames.], tot_loss[loss=0.2242, simple_loss=0.2765, pruned_loss=0.08595, over 972427.97 frames.], batch size: 16, lr: 1.52e-03 +2022-05-03 15:56:01,311 INFO [train.py:715] (6/8) Epoch 0, batch 18900, loss[loss=0.1848, simple_loss=0.2556, pruned_loss=0.05698, over 4760.00 frames.], tot_loss[loss=0.2245, simple_loss=0.2775, pruned_loss=0.08575, over 972248.07 frames.], batch size: 18, lr: 1.52e-03 +2022-05-03 15:56:41,744 INFO [train.py:715] (6/8) Epoch 0, batch 18950, loss[loss=0.2002, simple_loss=0.2645, pruned_loss=0.06794, over 4797.00 frames.], tot_loss[loss=0.2232, simple_loss=0.2764, pruned_loss=0.08501, over 971167.61 frames.], batch size: 24, lr: 1.52e-03 +2022-05-03 15:57:21,405 INFO [train.py:715] (6/8) Epoch 0, batch 19000, loss[loss=0.2283, simple_loss=0.2725, pruned_loss=0.09206, over 4889.00 frames.], tot_loss[loss=0.2222, simple_loss=0.2755, pruned_loss=0.08444, over 972160.51 frames.], batch size: 22, lr: 1.51e-03 +2022-05-03 15:58:01,852 INFO [train.py:715] (6/8) Epoch 0, batch 19050, loss[loss=0.2229, simple_loss=0.2859, pruned_loss=0.07996, over 4963.00 frames.], tot_loss[loss=0.2244, simple_loss=0.2771, pruned_loss=0.08583, over 972442.54 frames.], batch size: 24, lr: 1.51e-03 +2022-05-03 15:58:42,184 INFO [train.py:715] (6/8) Epoch 0, batch 19100, loss[loss=0.239, simple_loss=0.2861, pruned_loss=0.09601, over 4920.00 frames.], tot_loss[loss=0.2225, simple_loss=0.2753, pruned_loss=0.08485, over 972167.46 frames.], batch size: 29, lr: 1.51e-03 +2022-05-03 15:59:22,505 INFO [train.py:715] (6/8) Epoch 0, batch 19150, loss[loss=0.1713, simple_loss=0.2343, pruned_loss=0.05415, over 4921.00 frames.], tot_loss[loss=0.2227, simple_loss=0.2755, pruned_loss=0.085, over 972040.77 frames.], batch size: 29, lr: 1.51e-03 +2022-05-03 16:00:01,713 INFO [train.py:715] (6/8) Epoch 0, batch 19200, loss[loss=0.2165, simple_loss=0.2697, pruned_loss=0.08163, over 4984.00 frames.], tot_loss[loss=0.2209, simple_loss=0.274, pruned_loss=0.08391, over 972725.64 frames.], batch size: 25, lr: 1.51e-03 +2022-05-03 16:00:42,582 INFO [train.py:715] (6/8) Epoch 0, batch 19250, loss[loss=0.2394, simple_loss=0.2834, pruned_loss=0.09773, over 4981.00 frames.], tot_loss[loss=0.2201, simple_loss=0.2737, pruned_loss=0.08327, over 972225.86 frames.], batch size: 28, lr: 1.50e-03 +2022-05-03 16:01:23,353 INFO [train.py:715] (6/8) Epoch 0, batch 19300, loss[loss=0.2171, simple_loss=0.2725, pruned_loss=0.08082, over 4851.00 frames.], tot_loss[loss=0.2192, simple_loss=0.2733, pruned_loss=0.08253, over 972153.80 frames.], batch size: 13, lr: 1.50e-03 +2022-05-03 16:02:03,056 INFO [train.py:715] 
(6/8) Epoch 0, batch 19350, loss[loss=0.1922, simple_loss=0.2514, pruned_loss=0.06655, over 4757.00 frames.], tot_loss[loss=0.2205, simple_loss=0.2742, pruned_loss=0.08338, over 972515.53 frames.], batch size: 14, lr: 1.50e-03 +2022-05-03 16:02:43,210 INFO [train.py:715] (6/8) Epoch 0, batch 19400, loss[loss=0.1969, simple_loss=0.2366, pruned_loss=0.0786, over 4988.00 frames.], tot_loss[loss=0.2205, simple_loss=0.2741, pruned_loss=0.08343, over 972552.37 frames.], batch size: 14, lr: 1.50e-03 +2022-05-03 16:03:24,065 INFO [train.py:715] (6/8) Epoch 0, batch 19450, loss[loss=0.2008, simple_loss=0.2518, pruned_loss=0.07493, over 4899.00 frames.], tot_loss[loss=0.2205, simple_loss=0.2742, pruned_loss=0.08339, over 972462.32 frames.], batch size: 17, lr: 1.50e-03 +2022-05-03 16:04:03,574 INFO [train.py:715] (6/8) Epoch 0, batch 19500, loss[loss=0.2724, simple_loss=0.3004, pruned_loss=0.1222, over 4878.00 frames.], tot_loss[loss=0.2204, simple_loss=0.2739, pruned_loss=0.08343, over 973119.37 frames.], batch size: 39, lr: 1.50e-03 +2022-05-03 16:04:42,928 INFO [train.py:715] (6/8) Epoch 0, batch 19550, loss[loss=0.169, simple_loss=0.2335, pruned_loss=0.05228, over 4963.00 frames.], tot_loss[loss=0.2202, simple_loss=0.2738, pruned_loss=0.08333, over 973530.85 frames.], batch size: 24, lr: 1.49e-03 +2022-05-03 16:05:23,274 INFO [train.py:715] (6/8) Epoch 0, batch 19600, loss[loss=0.2194, simple_loss=0.2678, pruned_loss=0.08549, over 4923.00 frames.], tot_loss[loss=0.2198, simple_loss=0.2738, pruned_loss=0.08295, over 973381.61 frames.], batch size: 23, lr: 1.49e-03 +2022-05-03 16:06:03,063 INFO [train.py:715] (6/8) Epoch 0, batch 19650, loss[loss=0.2275, simple_loss=0.281, pruned_loss=0.087, over 4971.00 frames.], tot_loss[loss=0.2189, simple_loss=0.2729, pruned_loss=0.08241, over 972699.16 frames.], batch size: 24, lr: 1.49e-03 +2022-05-03 16:06:42,548 INFO [train.py:715] (6/8) Epoch 0, batch 19700, loss[loss=0.246, simple_loss=0.2881, pruned_loss=0.1019, over 4797.00 frames.], tot_loss[loss=0.218, simple_loss=0.2724, pruned_loss=0.08179, over 973071.74 frames.], batch size: 21, lr: 1.49e-03 +2022-05-03 16:07:22,620 INFO [train.py:715] (6/8) Epoch 0, batch 19750, loss[loss=0.3134, simple_loss=0.3416, pruned_loss=0.1426, over 4873.00 frames.], tot_loss[loss=0.2198, simple_loss=0.2739, pruned_loss=0.08283, over 974027.65 frames.], batch size: 32, lr: 1.49e-03 +2022-05-03 16:08:02,296 INFO [train.py:715] (6/8) Epoch 0, batch 19800, loss[loss=0.2074, simple_loss=0.2541, pruned_loss=0.08036, over 4764.00 frames.], tot_loss[loss=0.2182, simple_loss=0.2722, pruned_loss=0.08206, over 973434.13 frames.], batch size: 12, lr: 1.48e-03 +2022-05-03 16:08:42,111 INFO [train.py:715] (6/8) Epoch 0, batch 19850, loss[loss=0.2468, simple_loss=0.2958, pruned_loss=0.09883, over 4825.00 frames.], tot_loss[loss=0.2193, simple_loss=0.2732, pruned_loss=0.08272, over 973200.71 frames.], batch size: 15, lr: 1.48e-03 +2022-05-03 16:09:21,345 INFO [train.py:715] (6/8) Epoch 0, batch 19900, loss[loss=0.236, simple_loss=0.2954, pruned_loss=0.08834, over 4778.00 frames.], tot_loss[loss=0.2183, simple_loss=0.2725, pruned_loss=0.08204, over 972588.48 frames.], batch size: 17, lr: 1.48e-03 +2022-05-03 16:10:02,122 INFO [train.py:715] (6/8) Epoch 0, batch 19950, loss[loss=0.1941, simple_loss=0.2379, pruned_loss=0.07517, over 4965.00 frames.], tot_loss[loss=0.2187, simple_loss=0.2729, pruned_loss=0.08223, over 972675.86 frames.], batch size: 24, lr: 1.48e-03 +2022-05-03 16:10:42,173 INFO [train.py:715] (6/8) Epoch 0, batch 20000, 
loss[loss=0.1952, simple_loss=0.2577, pruned_loss=0.06634, over 4760.00 frames.], tot_loss[loss=0.2176, simple_loss=0.272, pruned_loss=0.08157, over 972141.73 frames.], batch size: 14, lr: 1.48e-03 +2022-05-03 16:11:21,525 INFO [train.py:715] (6/8) Epoch 0, batch 20050, loss[loss=0.2526, simple_loss=0.313, pruned_loss=0.09615, over 4871.00 frames.], tot_loss[loss=0.2167, simple_loss=0.2713, pruned_loss=0.08105, over 972742.29 frames.], batch size: 38, lr: 1.48e-03 +2022-05-03 16:12:01,704 INFO [train.py:715] (6/8) Epoch 0, batch 20100, loss[loss=0.2606, simple_loss=0.2953, pruned_loss=0.113, over 4855.00 frames.], tot_loss[loss=0.2188, simple_loss=0.2729, pruned_loss=0.08238, over 972880.12 frames.], batch size: 32, lr: 1.47e-03 +2022-05-03 16:12:41,690 INFO [train.py:715] (6/8) Epoch 0, batch 20150, loss[loss=0.2035, simple_loss=0.2558, pruned_loss=0.07562, over 4802.00 frames.], tot_loss[loss=0.2192, simple_loss=0.2733, pruned_loss=0.0825, over 972391.71 frames.], batch size: 21, lr: 1.47e-03 +2022-05-03 16:13:21,728 INFO [train.py:715] (6/8) Epoch 0, batch 20200, loss[loss=0.2012, simple_loss=0.2727, pruned_loss=0.06486, over 4785.00 frames.], tot_loss[loss=0.2175, simple_loss=0.2725, pruned_loss=0.08127, over 972773.96 frames.], batch size: 21, lr: 1.47e-03 +2022-05-03 16:14:01,258 INFO [train.py:715] (6/8) Epoch 0, batch 20250, loss[loss=0.2555, simple_loss=0.3121, pruned_loss=0.09943, over 4984.00 frames.], tot_loss[loss=0.2178, simple_loss=0.2725, pruned_loss=0.08158, over 973349.68 frames.], batch size: 25, lr: 1.47e-03 +2022-05-03 16:14:42,002 INFO [train.py:715] (6/8) Epoch 0, batch 20300, loss[loss=0.1765, simple_loss=0.2369, pruned_loss=0.05808, over 4776.00 frames.], tot_loss[loss=0.2194, simple_loss=0.2738, pruned_loss=0.08252, over 973334.35 frames.], batch size: 18, lr: 1.47e-03 +2022-05-03 16:15:21,889 INFO [train.py:715] (6/8) Epoch 0, batch 20350, loss[loss=0.2217, simple_loss=0.2724, pruned_loss=0.08547, over 4775.00 frames.], tot_loss[loss=0.2184, simple_loss=0.2725, pruned_loss=0.08212, over 972335.18 frames.], batch size: 18, lr: 1.47e-03 +2022-05-03 16:16:00,949 INFO [train.py:715] (6/8) Epoch 0, batch 20400, loss[loss=0.2693, simple_loss=0.308, pruned_loss=0.1153, over 4837.00 frames.], tot_loss[loss=0.2181, simple_loss=0.2726, pruned_loss=0.08186, over 972223.26 frames.], batch size: 30, lr: 1.46e-03 +2022-05-03 16:16:40,900 INFO [train.py:715] (6/8) Epoch 0, batch 20450, loss[loss=0.2141, simple_loss=0.2649, pruned_loss=0.08163, over 4930.00 frames.], tot_loss[loss=0.2177, simple_loss=0.2721, pruned_loss=0.08164, over 971757.56 frames.], batch size: 18, lr: 1.46e-03 +2022-05-03 16:17:20,440 INFO [train.py:715] (6/8) Epoch 0, batch 20500, loss[loss=0.1941, simple_loss=0.2472, pruned_loss=0.0705, over 4930.00 frames.], tot_loss[loss=0.2182, simple_loss=0.2724, pruned_loss=0.08194, over 971912.75 frames.], batch size: 29, lr: 1.46e-03 +2022-05-03 16:18:00,499 INFO [train.py:715] (6/8) Epoch 0, batch 20550, loss[loss=0.1827, simple_loss=0.229, pruned_loss=0.06824, over 4980.00 frames.], tot_loss[loss=0.2189, simple_loss=0.2731, pruned_loss=0.08234, over 971351.75 frames.], batch size: 14, lr: 1.46e-03 +2022-05-03 16:18:39,959 INFO [train.py:715] (6/8) Epoch 0, batch 20600, loss[loss=0.2278, simple_loss=0.2804, pruned_loss=0.08762, over 4942.00 frames.], tot_loss[loss=0.218, simple_loss=0.2728, pruned_loss=0.08163, over 971747.83 frames.], batch size: 35, lr: 1.46e-03 +2022-05-03 16:19:19,650 INFO [train.py:715] (6/8) Epoch 0, batch 20650, loss[loss=0.1954, 
simple_loss=0.2583, pruned_loss=0.0663, over 4893.00 frames.], tot_loss[loss=0.2172, simple_loss=0.2722, pruned_loss=0.08111, over 971262.44 frames.], batch size: 32, lr: 1.46e-03 +2022-05-03 16:20:00,378 INFO [train.py:715] (6/8) Epoch 0, batch 20700, loss[loss=0.211, simple_loss=0.2759, pruned_loss=0.07306, over 4759.00 frames.], tot_loss[loss=0.2169, simple_loss=0.2717, pruned_loss=0.08099, over 971114.66 frames.], batch size: 18, lr: 1.45e-03 +2022-05-03 16:20:39,701 INFO [train.py:715] (6/8) Epoch 0, batch 20750, loss[loss=0.2105, simple_loss=0.2643, pruned_loss=0.07836, over 4830.00 frames.], tot_loss[loss=0.2156, simple_loss=0.2708, pruned_loss=0.08022, over 970778.51 frames.], batch size: 27, lr: 1.45e-03 +2022-05-03 16:21:19,880 INFO [train.py:715] (6/8) Epoch 0, batch 20800, loss[loss=0.1663, simple_loss=0.2369, pruned_loss=0.04785, over 4758.00 frames.], tot_loss[loss=0.2155, simple_loss=0.2704, pruned_loss=0.08025, over 971235.86 frames.], batch size: 19, lr: 1.45e-03 +2022-05-03 16:21:59,640 INFO [train.py:715] (6/8) Epoch 0, batch 20850, loss[loss=0.1859, simple_loss=0.2533, pruned_loss=0.0593, over 4876.00 frames.], tot_loss[loss=0.2151, simple_loss=0.2703, pruned_loss=0.07994, over 970904.07 frames.], batch size: 16, lr: 1.45e-03 +2022-05-03 16:22:39,142 INFO [train.py:715] (6/8) Epoch 0, batch 20900, loss[loss=0.1879, simple_loss=0.2342, pruned_loss=0.07078, over 4751.00 frames.], tot_loss[loss=0.2152, simple_loss=0.2701, pruned_loss=0.08013, over 970886.62 frames.], batch size: 12, lr: 1.45e-03 +2022-05-03 16:23:19,660 INFO [train.py:715] (6/8) Epoch 0, batch 20950, loss[loss=0.1884, simple_loss=0.2579, pruned_loss=0.05948, over 4737.00 frames.], tot_loss[loss=0.2143, simple_loss=0.2693, pruned_loss=0.07968, over 972127.52 frames.], batch size: 16, lr: 1.45e-03 +2022-05-03 16:24:00,696 INFO [train.py:715] (6/8) Epoch 0, batch 21000, loss[loss=0.1989, simple_loss=0.257, pruned_loss=0.0704, over 4774.00 frames.], tot_loss[loss=0.2166, simple_loss=0.271, pruned_loss=0.08107, over 972189.26 frames.], batch size: 17, lr: 1.44e-03 +2022-05-03 16:24:00,697 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 16:24:16,220 INFO [train.py:742] (6/8) Epoch 0, validation: loss=0.1386, simple_loss=0.2255, pruned_loss=0.02581, over 914524.00 frames. 
+2022-05-03 16:24:57,035 INFO [train.py:715] (6/8) Epoch 0, batch 21050, loss[loss=0.1981, simple_loss=0.2527, pruned_loss=0.07171, over 4985.00 frames.], tot_loss[loss=0.2185, simple_loss=0.2724, pruned_loss=0.08225, over 972820.74 frames.], batch size: 15, lr: 1.44e-03 +2022-05-03 16:25:36,600 INFO [train.py:715] (6/8) Epoch 0, batch 21100, loss[loss=0.1811, simple_loss=0.2454, pruned_loss=0.05843, over 4859.00 frames.], tot_loss[loss=0.2183, simple_loss=0.2724, pruned_loss=0.0821, over 972379.82 frames.], batch size: 20, lr: 1.44e-03 +2022-05-03 16:26:16,952 INFO [train.py:715] (6/8) Epoch 0, batch 21150, loss[loss=0.1748, simple_loss=0.2436, pruned_loss=0.05297, over 4796.00 frames.], tot_loss[loss=0.219, simple_loss=0.2733, pruned_loss=0.08235, over 972801.05 frames.], batch size: 18, lr: 1.44e-03 +2022-05-03 16:26:56,818 INFO [train.py:715] (6/8) Epoch 0, batch 21200, loss[loss=0.1964, simple_loss=0.2505, pruned_loss=0.07115, over 4798.00 frames.], tot_loss[loss=0.2186, simple_loss=0.2727, pruned_loss=0.0822, over 972663.79 frames.], batch size: 21, lr: 1.44e-03 +2022-05-03 16:27:37,355 INFO [train.py:715] (6/8) Epoch 0, batch 21250, loss[loss=0.1957, simple_loss=0.2516, pruned_loss=0.06984, over 4935.00 frames.], tot_loss[loss=0.2188, simple_loss=0.2725, pruned_loss=0.08249, over 972344.45 frames.], batch size: 18, lr: 1.44e-03 +2022-05-03 16:28:17,120 INFO [train.py:715] (6/8) Epoch 0, batch 21300, loss[loss=0.2443, simple_loss=0.2826, pruned_loss=0.103, over 4897.00 frames.], tot_loss[loss=0.2181, simple_loss=0.2718, pruned_loss=0.08217, over 972675.57 frames.], batch size: 18, lr: 1.43e-03 +2022-05-03 16:28:57,542 INFO [train.py:715] (6/8) Epoch 0, batch 21350, loss[loss=0.2432, simple_loss=0.2997, pruned_loss=0.09333, over 4909.00 frames.], tot_loss[loss=0.2182, simple_loss=0.2716, pruned_loss=0.08239, over 973093.11 frames.], batch size: 19, lr: 1.43e-03 +2022-05-03 16:29:38,280 INFO [train.py:715] (6/8) Epoch 0, batch 21400, loss[loss=0.1863, simple_loss=0.2417, pruned_loss=0.06549, over 4878.00 frames.], tot_loss[loss=0.2168, simple_loss=0.2707, pruned_loss=0.08139, over 972866.43 frames.], batch size: 32, lr: 1.43e-03 +2022-05-03 16:30:17,951 INFO [train.py:715] (6/8) Epoch 0, batch 21450, loss[loss=0.2382, simple_loss=0.2885, pruned_loss=0.09392, over 4820.00 frames.], tot_loss[loss=0.2154, simple_loss=0.2701, pruned_loss=0.08035, over 972421.06 frames.], batch size: 15, lr: 1.43e-03 +2022-05-03 16:30:57,793 INFO [train.py:715] (6/8) Epoch 0, batch 21500, loss[loss=0.1561, simple_loss=0.2161, pruned_loss=0.04804, over 4681.00 frames.], tot_loss[loss=0.2146, simple_loss=0.2697, pruned_loss=0.07972, over 972371.66 frames.], batch size: 13, lr: 1.43e-03 +2022-05-03 16:31:38,011 INFO [train.py:715] (6/8) Epoch 0, batch 21550, loss[loss=0.2038, simple_loss=0.261, pruned_loss=0.07326, over 4953.00 frames.], tot_loss[loss=0.2145, simple_loss=0.2698, pruned_loss=0.07965, over 972289.58 frames.], batch size: 24, lr: 1.43e-03 +2022-05-03 16:32:18,470 INFO [train.py:715] (6/8) Epoch 0, batch 21600, loss[loss=0.271, simple_loss=0.3065, pruned_loss=0.1178, over 4966.00 frames.], tot_loss[loss=0.2144, simple_loss=0.2693, pruned_loss=0.07977, over 972283.95 frames.], batch size: 24, lr: 1.42e-03 +2022-05-03 16:32:58,238 INFO [train.py:715] (6/8) Epoch 0, batch 21650, loss[loss=0.2071, simple_loss=0.2671, pruned_loss=0.0736, over 4833.00 frames.], tot_loss[loss=0.2149, simple_loss=0.2698, pruned_loss=0.07995, over 972078.21 frames.], batch size: 26, lr: 1.42e-03 +2022-05-03 16:33:39,050 
INFO [train.py:715] (6/8) Epoch 0, batch 21700, loss[loss=0.2488, simple_loss=0.2941, pruned_loss=0.1018, over 4926.00 frames.], tot_loss[loss=0.216, simple_loss=0.2705, pruned_loss=0.08075, over 973004.35 frames.], batch size: 39, lr: 1.42e-03 +2022-05-03 16:34:19,207 INFO [train.py:715] (6/8) Epoch 0, batch 21750, loss[loss=0.1914, simple_loss=0.2652, pruned_loss=0.05878, over 4752.00 frames.], tot_loss[loss=0.2157, simple_loss=0.2706, pruned_loss=0.08034, over 973687.87 frames.], batch size: 16, lr: 1.42e-03 +2022-05-03 16:34:58,793 INFO [train.py:715] (6/8) Epoch 0, batch 21800, loss[loss=0.2453, simple_loss=0.2944, pruned_loss=0.09812, over 4760.00 frames.], tot_loss[loss=0.2151, simple_loss=0.2703, pruned_loss=0.07998, over 972990.44 frames.], batch size: 14, lr: 1.42e-03 +2022-05-03 16:35:38,621 INFO [train.py:715] (6/8) Epoch 0, batch 21850, loss[loss=0.2472, simple_loss=0.2941, pruned_loss=0.1002, over 4926.00 frames.], tot_loss[loss=0.2161, simple_loss=0.271, pruned_loss=0.08057, over 973411.81 frames.], batch size: 29, lr: 1.42e-03 +2022-05-03 16:36:19,097 INFO [train.py:715] (6/8) Epoch 0, batch 21900, loss[loss=0.1601, simple_loss=0.2228, pruned_loss=0.04875, over 4761.00 frames.], tot_loss[loss=0.2147, simple_loss=0.2699, pruned_loss=0.07973, over 972516.06 frames.], batch size: 17, lr: 1.42e-03 +2022-05-03 16:36:59,005 INFO [train.py:715] (6/8) Epoch 0, batch 21950, loss[loss=0.244, simple_loss=0.2978, pruned_loss=0.0951, over 4831.00 frames.], tot_loss[loss=0.2136, simple_loss=0.2692, pruned_loss=0.07899, over 972615.19 frames.], batch size: 15, lr: 1.41e-03 +2022-05-03 16:37:38,290 INFO [train.py:715] (6/8) Epoch 0, batch 22000, loss[loss=0.1978, simple_loss=0.2578, pruned_loss=0.0689, over 4773.00 frames.], tot_loss[loss=0.2133, simple_loss=0.2689, pruned_loss=0.07879, over 972098.82 frames.], batch size: 18, lr: 1.41e-03 +2022-05-03 16:38:18,448 INFO [train.py:715] (6/8) Epoch 0, batch 22050, loss[loss=0.2296, simple_loss=0.273, pruned_loss=0.09309, over 4775.00 frames.], tot_loss[loss=0.2129, simple_loss=0.2684, pruned_loss=0.07865, over 971776.07 frames.], batch size: 18, lr: 1.41e-03 +2022-05-03 16:38:58,600 INFO [train.py:715] (6/8) Epoch 0, batch 22100, loss[loss=0.224, simple_loss=0.2859, pruned_loss=0.08104, over 4782.00 frames.], tot_loss[loss=0.2127, simple_loss=0.2684, pruned_loss=0.07853, over 972058.85 frames.], batch size: 17, lr: 1.41e-03 +2022-05-03 16:39:38,118 INFO [train.py:715] (6/8) Epoch 0, batch 22150, loss[loss=0.2184, simple_loss=0.2724, pruned_loss=0.0822, over 4925.00 frames.], tot_loss[loss=0.213, simple_loss=0.2686, pruned_loss=0.07875, over 972067.41 frames.], batch size: 23, lr: 1.41e-03 +2022-05-03 16:40:17,927 INFO [train.py:715] (6/8) Epoch 0, batch 22200, loss[loss=0.1904, simple_loss=0.2471, pruned_loss=0.06688, over 4755.00 frames.], tot_loss[loss=0.2144, simple_loss=0.2697, pruned_loss=0.0796, over 971342.94 frames.], batch size: 16, lr: 1.41e-03 +2022-05-03 16:40:58,311 INFO [train.py:715] (6/8) Epoch 0, batch 22250, loss[loss=0.2743, simple_loss=0.3127, pruned_loss=0.1179, over 4803.00 frames.], tot_loss[loss=0.216, simple_loss=0.2708, pruned_loss=0.08064, over 971415.58 frames.], batch size: 21, lr: 1.40e-03 +2022-05-03 16:41:38,378 INFO [train.py:715] (6/8) Epoch 0, batch 22300, loss[loss=0.2175, simple_loss=0.2698, pruned_loss=0.08265, over 4957.00 frames.], tot_loss[loss=0.2161, simple_loss=0.2709, pruned_loss=0.0807, over 971374.76 frames.], batch size: 35, lr: 1.40e-03 +2022-05-03 16:42:18,083 INFO [train.py:715] (6/8) Epoch 
0, batch 22350, loss[loss=0.2217, simple_loss=0.2798, pruned_loss=0.08184, over 4636.00 frames.], tot_loss[loss=0.2154, simple_loss=0.2702, pruned_loss=0.08026, over 971851.55 frames.], batch size: 13, lr: 1.40e-03 +2022-05-03 16:42:58,254 INFO [train.py:715] (6/8) Epoch 0, batch 22400, loss[loss=0.2862, simple_loss=0.312, pruned_loss=0.1302, over 4940.00 frames.], tot_loss[loss=0.2151, simple_loss=0.2701, pruned_loss=0.08006, over 971688.16 frames.], batch size: 23, lr: 1.40e-03 +2022-05-03 16:43:38,085 INFO [train.py:715] (6/8) Epoch 0, batch 22450, loss[loss=0.1962, simple_loss=0.2614, pruned_loss=0.06545, over 4777.00 frames.], tot_loss[loss=0.2163, simple_loss=0.2709, pruned_loss=0.08086, over 972653.66 frames.], batch size: 17, lr: 1.40e-03 +2022-05-03 16:44:17,449 INFO [train.py:715] (6/8) Epoch 0, batch 22500, loss[loss=0.1905, simple_loss=0.2549, pruned_loss=0.06303, over 4916.00 frames.], tot_loss[loss=0.2168, simple_loss=0.2716, pruned_loss=0.08103, over 973005.92 frames.], batch size: 23, lr: 1.40e-03 +2022-05-03 16:44:57,228 INFO [train.py:715] (6/8) Epoch 0, batch 22550, loss[loss=0.1931, simple_loss=0.2477, pruned_loss=0.06926, over 4793.00 frames.], tot_loss[loss=0.2155, simple_loss=0.2707, pruned_loss=0.08012, over 973105.62 frames.], batch size: 21, lr: 1.40e-03 +2022-05-03 16:45:37,440 INFO [train.py:715] (6/8) Epoch 0, batch 22600, loss[loss=0.2994, simple_loss=0.3213, pruned_loss=0.1387, over 4969.00 frames.], tot_loss[loss=0.2161, simple_loss=0.2716, pruned_loss=0.08028, over 973766.80 frames.], batch size: 35, lr: 1.39e-03 +2022-05-03 16:46:18,083 INFO [train.py:715] (6/8) Epoch 0, batch 22650, loss[loss=0.1638, simple_loss=0.2282, pruned_loss=0.04974, over 4799.00 frames.], tot_loss[loss=0.2179, simple_loss=0.2728, pruned_loss=0.08145, over 973578.43 frames.], batch size: 12, lr: 1.39e-03 +2022-05-03 16:46:57,299 INFO [train.py:715] (6/8) Epoch 0, batch 22700, loss[loss=0.2378, simple_loss=0.2853, pruned_loss=0.09516, over 4889.00 frames.], tot_loss[loss=0.2171, simple_loss=0.2725, pruned_loss=0.08083, over 974065.64 frames.], batch size: 22, lr: 1.39e-03 +2022-05-03 16:47:37,378 INFO [train.py:715] (6/8) Epoch 0, batch 22750, loss[loss=0.1958, simple_loss=0.2604, pruned_loss=0.06566, over 4804.00 frames.], tot_loss[loss=0.218, simple_loss=0.273, pruned_loss=0.08144, over 973841.42 frames.], batch size: 21, lr: 1.39e-03 +2022-05-03 16:48:17,861 INFO [train.py:715] (6/8) Epoch 0, batch 22800, loss[loss=0.1785, simple_loss=0.2492, pruned_loss=0.05391, over 4839.00 frames.], tot_loss[loss=0.2175, simple_loss=0.2726, pruned_loss=0.08117, over 972815.18 frames.], batch size: 15, lr: 1.39e-03 +2022-05-03 16:48:57,454 INFO [train.py:715] (6/8) Epoch 0, batch 22850, loss[loss=0.2296, simple_loss=0.2745, pruned_loss=0.09239, over 4789.00 frames.], tot_loss[loss=0.2175, simple_loss=0.2725, pruned_loss=0.08123, over 972554.36 frames.], batch size: 18, lr: 1.39e-03 +2022-05-03 16:49:37,567 INFO [train.py:715] (6/8) Epoch 0, batch 22900, loss[loss=0.1754, simple_loss=0.2533, pruned_loss=0.0488, over 4939.00 frames.], tot_loss[loss=0.2166, simple_loss=0.2717, pruned_loss=0.08081, over 972457.92 frames.], batch size: 39, lr: 1.39e-03 +2022-05-03 16:50:17,834 INFO [train.py:715] (6/8) Epoch 0, batch 22950, loss[loss=0.2527, simple_loss=0.2872, pruned_loss=0.1091, over 4759.00 frames.], tot_loss[loss=0.2158, simple_loss=0.2711, pruned_loss=0.0802, over 971820.12 frames.], batch size: 12, lr: 1.38e-03 +2022-05-03 16:50:58,461 INFO [train.py:715] (6/8) Epoch 0, batch 23000, 
loss[loss=0.1993, simple_loss=0.2557, pruned_loss=0.0714, over 4874.00 frames.], tot_loss[loss=0.2156, simple_loss=0.2709, pruned_loss=0.08012, over 971922.53 frames.], batch size: 38, lr: 1.38e-03 +2022-05-03 16:51:37,477 INFO [train.py:715] (6/8) Epoch 0, batch 23050, loss[loss=0.2027, simple_loss=0.2651, pruned_loss=0.07012, over 4863.00 frames.], tot_loss[loss=0.2159, simple_loss=0.2712, pruned_loss=0.08029, over 972071.49 frames.], batch size: 20, lr: 1.38e-03 +2022-05-03 16:52:18,417 INFO [train.py:715] (6/8) Epoch 0, batch 23100, loss[loss=0.2236, simple_loss=0.2733, pruned_loss=0.08692, over 4709.00 frames.], tot_loss[loss=0.2162, simple_loss=0.2717, pruned_loss=0.08039, over 971721.95 frames.], batch size: 15, lr: 1.38e-03 +2022-05-03 16:52:59,451 INFO [train.py:715] (6/8) Epoch 0, batch 23150, loss[loss=0.2003, simple_loss=0.2604, pruned_loss=0.07011, over 4757.00 frames.], tot_loss[loss=0.2163, simple_loss=0.2713, pruned_loss=0.08066, over 971806.30 frames.], batch size: 16, lr: 1.38e-03 +2022-05-03 16:53:39,202 INFO [train.py:715] (6/8) Epoch 0, batch 23200, loss[loss=0.2002, simple_loss=0.2569, pruned_loss=0.07176, over 4971.00 frames.], tot_loss[loss=0.2159, simple_loss=0.2712, pruned_loss=0.08024, over 971559.53 frames.], batch size: 14, lr: 1.38e-03 +2022-05-03 16:54:19,769 INFO [train.py:715] (6/8) Epoch 0, batch 23250, loss[loss=0.2312, simple_loss=0.282, pruned_loss=0.09015, over 4966.00 frames.], tot_loss[loss=0.2156, simple_loss=0.271, pruned_loss=0.08014, over 971956.46 frames.], batch size: 24, lr: 1.38e-03 +2022-05-03 16:55:00,195 INFO [train.py:715] (6/8) Epoch 0, batch 23300, loss[loss=0.1887, simple_loss=0.2493, pruned_loss=0.06399, over 4977.00 frames.], tot_loss[loss=0.2144, simple_loss=0.2696, pruned_loss=0.07957, over 971856.90 frames.], batch size: 28, lr: 1.37e-03 +2022-05-03 16:55:40,671 INFO [train.py:715] (6/8) Epoch 0, batch 23350, loss[loss=0.2191, simple_loss=0.2746, pruned_loss=0.08186, over 4850.00 frames.], tot_loss[loss=0.2131, simple_loss=0.2687, pruned_loss=0.07875, over 971919.54 frames.], batch size: 32, lr: 1.37e-03 +2022-05-03 16:56:21,269 INFO [train.py:715] (6/8) Epoch 0, batch 23400, loss[loss=0.1701, simple_loss=0.2375, pruned_loss=0.05129, over 4897.00 frames.], tot_loss[loss=0.2122, simple_loss=0.2678, pruned_loss=0.07828, over 972324.95 frames.], batch size: 22, lr: 1.37e-03 +2022-05-03 16:57:02,286 INFO [train.py:715] (6/8) Epoch 0, batch 23450, loss[loss=0.2164, simple_loss=0.2508, pruned_loss=0.09106, over 4827.00 frames.], tot_loss[loss=0.2104, simple_loss=0.2668, pruned_loss=0.07695, over 972396.21 frames.], batch size: 13, lr: 1.37e-03 +2022-05-03 16:57:43,388 INFO [train.py:715] (6/8) Epoch 0, batch 23500, loss[loss=0.2193, simple_loss=0.2713, pruned_loss=0.08363, over 4929.00 frames.], tot_loss[loss=0.2101, simple_loss=0.2664, pruned_loss=0.07687, over 972583.18 frames.], batch size: 29, lr: 1.37e-03 +2022-05-03 16:58:23,240 INFO [train.py:715] (6/8) Epoch 0, batch 23550, loss[loss=0.2087, simple_loss=0.2715, pruned_loss=0.07301, over 4883.00 frames.], tot_loss[loss=0.2121, simple_loss=0.268, pruned_loss=0.07805, over 972034.27 frames.], batch size: 20, lr: 1.37e-03 +2022-05-03 16:59:04,082 INFO [train.py:715] (6/8) Epoch 0, batch 23600, loss[loss=0.2095, simple_loss=0.2819, pruned_loss=0.06858, over 4978.00 frames.], tot_loss[loss=0.2126, simple_loss=0.269, pruned_loss=0.07807, over 972576.33 frames.], batch size: 24, lr: 1.37e-03 +2022-05-03 16:59:44,350 INFO [train.py:715] (6/8) Epoch 0, batch 23650, loss[loss=0.2293, 
simple_loss=0.2819, pruned_loss=0.08834, over 4927.00 frames.], tot_loss[loss=0.2141, simple_loss=0.27, pruned_loss=0.07913, over 972365.83 frames.], batch size: 23, lr: 1.36e-03 +2022-05-03 17:00:24,471 INFO [train.py:715] (6/8) Epoch 0, batch 23700, loss[loss=0.1948, simple_loss=0.2465, pruned_loss=0.07156, over 4825.00 frames.], tot_loss[loss=0.2135, simple_loss=0.2694, pruned_loss=0.07873, over 972522.38 frames.], batch size: 13, lr: 1.36e-03 +2022-05-03 17:01:03,659 INFO [train.py:715] (6/8) Epoch 0, batch 23750, loss[loss=0.2374, simple_loss=0.2972, pruned_loss=0.08883, over 4981.00 frames.], tot_loss[loss=0.2132, simple_loss=0.2691, pruned_loss=0.07864, over 972476.77 frames.], batch size: 24, lr: 1.36e-03 +2022-05-03 17:01:43,662 INFO [train.py:715] (6/8) Epoch 0, batch 23800, loss[loss=0.213, simple_loss=0.2719, pruned_loss=0.07703, over 4895.00 frames.], tot_loss[loss=0.2142, simple_loss=0.2697, pruned_loss=0.07937, over 971983.88 frames.], batch size: 19, lr: 1.36e-03 +2022-05-03 17:02:24,145 INFO [train.py:715] (6/8) Epoch 0, batch 23850, loss[loss=0.258, simple_loss=0.2998, pruned_loss=0.1081, over 4969.00 frames.], tot_loss[loss=0.2147, simple_loss=0.2703, pruned_loss=0.07958, over 971717.24 frames.], batch size: 24, lr: 1.36e-03 +2022-05-03 17:03:03,307 INFO [train.py:715] (6/8) Epoch 0, batch 23900, loss[loss=0.2141, simple_loss=0.2837, pruned_loss=0.07227, over 4898.00 frames.], tot_loss[loss=0.2135, simple_loss=0.2696, pruned_loss=0.07869, over 971881.82 frames.], batch size: 19, lr: 1.36e-03 +2022-05-03 17:03:43,456 INFO [train.py:715] (6/8) Epoch 0, batch 23950, loss[loss=0.236, simple_loss=0.2924, pruned_loss=0.08982, over 4946.00 frames.], tot_loss[loss=0.2148, simple_loss=0.2704, pruned_loss=0.0796, over 972176.19 frames.], batch size: 39, lr: 1.36e-03 +2022-05-03 17:04:26,569 INFO [train.py:715] (6/8) Epoch 0, batch 24000, loss[loss=0.1868, simple_loss=0.244, pruned_loss=0.06477, over 4792.00 frames.], tot_loss[loss=0.213, simple_loss=0.2689, pruned_loss=0.07861, over 972057.27 frames.], batch size: 24, lr: 1.35e-03 +2022-05-03 17:04:26,570 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 17:04:40,850 INFO [train.py:742] (6/8) Epoch 0, validation: loss=0.1357, simple_loss=0.2226, pruned_loss=0.02435, over 914524.00 frames. 
+2022-05-03 17:05:21,171 INFO [train.py:715] (6/8) Epoch 0, batch 24050, loss[loss=0.177, simple_loss=0.252, pruned_loss=0.05106, over 4749.00 frames.], tot_loss[loss=0.2122, simple_loss=0.2688, pruned_loss=0.07782, over 972933.61 frames.], batch size: 19, lr: 1.35e-03 +2022-05-03 17:06:00,595 INFO [train.py:715] (6/8) Epoch 0, batch 24100, loss[loss=0.1666, simple_loss=0.2425, pruned_loss=0.04532, over 4898.00 frames.], tot_loss[loss=0.21, simple_loss=0.2669, pruned_loss=0.07658, over 973129.58 frames.], batch size: 19, lr: 1.35e-03 +2022-05-03 17:06:40,583 INFO [train.py:715] (6/8) Epoch 0, batch 24150, loss[loss=0.2222, simple_loss=0.2732, pruned_loss=0.08561, over 4707.00 frames.], tot_loss[loss=0.2103, simple_loss=0.2671, pruned_loss=0.07679, over 972312.48 frames.], batch size: 15, lr: 1.35e-03 +2022-05-03 17:07:20,600 INFO [train.py:715] (6/8) Epoch 0, batch 24200, loss[loss=0.2328, simple_loss=0.2849, pruned_loss=0.0904, over 4742.00 frames.], tot_loss[loss=0.2097, simple_loss=0.2667, pruned_loss=0.07631, over 973515.74 frames.], batch size: 16, lr: 1.35e-03 +2022-05-03 17:08:01,227 INFO [train.py:715] (6/8) Epoch 0, batch 24250, loss[loss=0.2792, simple_loss=0.3128, pruned_loss=0.1228, over 4690.00 frames.], tot_loss[loss=0.2098, simple_loss=0.2667, pruned_loss=0.07643, over 972581.00 frames.], batch size: 15, lr: 1.35e-03 +2022-05-03 17:08:40,835 INFO [train.py:715] (6/8) Epoch 0, batch 24300, loss[loss=0.2116, simple_loss=0.2671, pruned_loss=0.07803, over 4855.00 frames.], tot_loss[loss=0.2101, simple_loss=0.2668, pruned_loss=0.0767, over 972607.71 frames.], batch size: 32, lr: 1.35e-03 +2022-05-03 17:09:21,014 INFO [train.py:715] (6/8) Epoch 0, batch 24350, loss[loss=0.234, simple_loss=0.2753, pruned_loss=0.09637, over 4981.00 frames.], tot_loss[loss=0.2107, simple_loss=0.2671, pruned_loss=0.07713, over 972503.86 frames.], batch size: 35, lr: 1.35e-03 +2022-05-03 17:10:01,416 INFO [train.py:715] (6/8) Epoch 0, batch 24400, loss[loss=0.2689, simple_loss=0.3072, pruned_loss=0.1153, over 4856.00 frames.], tot_loss[loss=0.2108, simple_loss=0.2672, pruned_loss=0.07724, over 972508.86 frames.], batch size: 38, lr: 1.34e-03 +2022-05-03 17:10:40,941 INFO [train.py:715] (6/8) Epoch 0, batch 24450, loss[loss=0.2256, simple_loss=0.2892, pruned_loss=0.08098, over 4786.00 frames.], tot_loss[loss=0.2106, simple_loss=0.2673, pruned_loss=0.07702, over 971213.54 frames.], batch size: 14, lr: 1.34e-03 +2022-05-03 17:11:21,052 INFO [train.py:715] (6/8) Epoch 0, batch 24500, loss[loss=0.2088, simple_loss=0.2738, pruned_loss=0.07193, over 4808.00 frames.], tot_loss[loss=0.211, simple_loss=0.2677, pruned_loss=0.07722, over 971639.40 frames.], batch size: 21, lr: 1.34e-03 +2022-05-03 17:12:01,321 INFO [train.py:715] (6/8) Epoch 0, batch 24550, loss[loss=0.2091, simple_loss=0.273, pruned_loss=0.07257, over 4937.00 frames.], tot_loss[loss=0.2119, simple_loss=0.2684, pruned_loss=0.07772, over 972389.98 frames.], batch size: 21, lr: 1.34e-03 +2022-05-03 17:12:41,515 INFO [train.py:715] (6/8) Epoch 0, batch 24600, loss[loss=0.2123, simple_loss=0.2752, pruned_loss=0.07468, over 4803.00 frames.], tot_loss[loss=0.2118, simple_loss=0.2686, pruned_loss=0.07751, over 971080.93 frames.], batch size: 21, lr: 1.34e-03 +2022-05-03 17:13:20,994 INFO [train.py:715] (6/8) Epoch 0, batch 24650, loss[loss=0.1937, simple_loss=0.2435, pruned_loss=0.07197, over 4847.00 frames.], tot_loss[loss=0.2122, simple_loss=0.2686, pruned_loss=0.07787, over 971530.49 frames.], batch size: 32, lr: 1.34e-03 +2022-05-03 17:14:01,417 INFO 
[train.py:715] (6/8) Epoch 0, batch 24700, loss[loss=0.203, simple_loss=0.2686, pruned_loss=0.0687, over 4772.00 frames.], tot_loss[loss=0.2123, simple_loss=0.2687, pruned_loss=0.07797, over 971632.35 frames.], batch size: 18, lr: 1.34e-03 +2022-05-03 17:14:42,122 INFO [train.py:715] (6/8) Epoch 0, batch 24750, loss[loss=0.1752, simple_loss=0.2256, pruned_loss=0.06238, over 4841.00 frames.], tot_loss[loss=0.2119, simple_loss=0.2682, pruned_loss=0.07782, over 971999.33 frames.], batch size: 34, lr: 1.33e-03 +2022-05-03 17:15:21,175 INFO [train.py:715] (6/8) Epoch 0, batch 24800, loss[loss=0.1917, simple_loss=0.2519, pruned_loss=0.0658, over 4735.00 frames.], tot_loss[loss=0.2117, simple_loss=0.2678, pruned_loss=0.07781, over 972565.25 frames.], batch size: 16, lr: 1.33e-03 +2022-05-03 17:16:01,309 INFO [train.py:715] (6/8) Epoch 0, batch 24850, loss[loss=0.1867, simple_loss=0.252, pruned_loss=0.06074, over 4777.00 frames.], tot_loss[loss=0.2142, simple_loss=0.2696, pruned_loss=0.07937, over 972876.62 frames.], batch size: 17, lr: 1.33e-03 +2022-05-03 17:16:41,591 INFO [train.py:715] (6/8) Epoch 0, batch 24900, loss[loss=0.2166, simple_loss=0.2732, pruned_loss=0.07998, over 4850.00 frames.], tot_loss[loss=0.2126, simple_loss=0.2686, pruned_loss=0.0783, over 972810.12 frames.], batch size: 20, lr: 1.33e-03 +2022-05-03 17:17:21,632 INFO [train.py:715] (6/8) Epoch 0, batch 24950, loss[loss=0.2173, simple_loss=0.2804, pruned_loss=0.07707, over 4778.00 frames.], tot_loss[loss=0.2121, simple_loss=0.2684, pruned_loss=0.07792, over 972512.80 frames.], batch size: 17, lr: 1.33e-03 +2022-05-03 17:18:01,151 INFO [train.py:715] (6/8) Epoch 0, batch 25000, loss[loss=0.1875, simple_loss=0.2472, pruned_loss=0.06385, over 4962.00 frames.], tot_loss[loss=0.2119, simple_loss=0.2682, pruned_loss=0.07784, over 972382.70 frames.], batch size: 24, lr: 1.33e-03 +2022-05-03 17:18:41,403 INFO [train.py:715] (6/8) Epoch 0, batch 25050, loss[loss=0.2134, simple_loss=0.2593, pruned_loss=0.0838, over 4818.00 frames.], tot_loss[loss=0.2119, simple_loss=0.2678, pruned_loss=0.07795, over 972726.06 frames.], batch size: 13, lr: 1.33e-03 +2022-05-03 17:19:21,104 INFO [train.py:715] (6/8) Epoch 0, batch 25100, loss[loss=0.1646, simple_loss=0.2311, pruned_loss=0.04904, over 4867.00 frames.], tot_loss[loss=0.2116, simple_loss=0.2673, pruned_loss=0.07794, over 972672.45 frames.], batch size: 16, lr: 1.33e-03 +2022-05-03 17:20:00,603 INFO [train.py:715] (6/8) Epoch 0, batch 25150, loss[loss=0.179, simple_loss=0.2405, pruned_loss=0.05873, over 4979.00 frames.], tot_loss[loss=0.2111, simple_loss=0.2676, pruned_loss=0.07732, over 973432.14 frames.], batch size: 28, lr: 1.32e-03 +2022-05-03 17:20:41,134 INFO [train.py:715] (6/8) Epoch 0, batch 25200, loss[loss=0.1856, simple_loss=0.2557, pruned_loss=0.0577, over 4638.00 frames.], tot_loss[loss=0.2112, simple_loss=0.2677, pruned_loss=0.07734, over 973507.29 frames.], batch size: 13, lr: 1.32e-03 +2022-05-03 17:21:21,702 INFO [train.py:715] (6/8) Epoch 0, batch 25250, loss[loss=0.2157, simple_loss=0.2658, pruned_loss=0.0828, over 4905.00 frames.], tot_loss[loss=0.2104, simple_loss=0.2672, pruned_loss=0.07675, over 973068.51 frames.], batch size: 17, lr: 1.32e-03 +2022-05-03 17:22:02,264 INFO [train.py:715] (6/8) Epoch 0, batch 25300, loss[loss=0.199, simple_loss=0.2619, pruned_loss=0.06803, over 4991.00 frames.], tot_loss[loss=0.2098, simple_loss=0.2667, pruned_loss=0.07649, over 972390.49 frames.], batch size: 16, lr: 1.32e-03 +2022-05-03 17:22:42,093 INFO [train.py:715] (6/8) Epoch 
0, batch 25350, loss[loss=0.1995, simple_loss=0.2545, pruned_loss=0.07225, over 4783.00 frames.], tot_loss[loss=0.209, simple_loss=0.266, pruned_loss=0.07596, over 972420.63 frames.], batch size: 14, lr: 1.32e-03 +2022-05-03 17:23:22,553 INFO [train.py:715] (6/8) Epoch 0, batch 25400, loss[loss=0.2107, simple_loss=0.256, pruned_loss=0.08265, over 4971.00 frames.], tot_loss[loss=0.2086, simple_loss=0.2657, pruned_loss=0.07573, over 972578.51 frames.], batch size: 31, lr: 1.32e-03 +2022-05-03 17:24:02,724 INFO [train.py:715] (6/8) Epoch 0, batch 25450, loss[loss=0.1896, simple_loss=0.2534, pruned_loss=0.06284, over 4889.00 frames.], tot_loss[loss=0.2094, simple_loss=0.2665, pruned_loss=0.07619, over 972829.79 frames.], batch size: 22, lr: 1.32e-03 +2022-05-03 17:24:41,711 INFO [train.py:715] (6/8) Epoch 0, batch 25500, loss[loss=0.2452, simple_loss=0.2939, pruned_loss=0.09828, over 4855.00 frames.], tot_loss[loss=0.2118, simple_loss=0.2682, pruned_loss=0.07772, over 972932.95 frames.], batch size: 32, lr: 1.32e-03 +2022-05-03 17:25:22,417 INFO [train.py:715] (6/8) Epoch 0, batch 25550, loss[loss=0.2035, simple_loss=0.2556, pruned_loss=0.07573, over 4690.00 frames.], tot_loss[loss=0.2106, simple_loss=0.2672, pruned_loss=0.077, over 972110.81 frames.], batch size: 15, lr: 1.31e-03 +2022-05-03 17:26:02,030 INFO [train.py:715] (6/8) Epoch 0, batch 25600, loss[loss=0.216, simple_loss=0.2793, pruned_loss=0.07635, over 4918.00 frames.], tot_loss[loss=0.2114, simple_loss=0.2675, pruned_loss=0.0776, over 970804.73 frames.], batch size: 23, lr: 1.31e-03 +2022-05-03 17:26:41,739 INFO [train.py:715] (6/8) Epoch 0, batch 25650, loss[loss=0.2167, simple_loss=0.2645, pruned_loss=0.08449, over 4939.00 frames.], tot_loss[loss=0.2094, simple_loss=0.2661, pruned_loss=0.07634, over 972189.00 frames.], batch size: 29, lr: 1.31e-03 +2022-05-03 17:27:21,449 INFO [train.py:715] (6/8) Epoch 0, batch 25700, loss[loss=0.19, simple_loss=0.2512, pruned_loss=0.06434, over 4845.00 frames.], tot_loss[loss=0.2074, simple_loss=0.2646, pruned_loss=0.0751, over 971874.89 frames.], batch size: 30, lr: 1.31e-03 +2022-05-03 17:28:01,734 INFO [train.py:715] (6/8) Epoch 0, batch 25750, loss[loss=0.1791, simple_loss=0.2481, pruned_loss=0.05505, over 4965.00 frames.], tot_loss[loss=0.2086, simple_loss=0.2658, pruned_loss=0.07573, over 972358.79 frames.], batch size: 21, lr: 1.31e-03 +2022-05-03 17:28:41,513 INFO [train.py:715] (6/8) Epoch 0, batch 25800, loss[loss=0.2113, simple_loss=0.2636, pruned_loss=0.07951, over 4884.00 frames.], tot_loss[loss=0.2096, simple_loss=0.2663, pruned_loss=0.07642, over 972244.60 frames.], batch size: 19, lr: 1.31e-03 +2022-05-03 17:29:20,757 INFO [train.py:715] (6/8) Epoch 0, batch 25850, loss[loss=0.1835, simple_loss=0.2494, pruned_loss=0.05876, over 4892.00 frames.], tot_loss[loss=0.2084, simple_loss=0.2653, pruned_loss=0.07575, over 972090.72 frames.], batch size: 22, lr: 1.31e-03 +2022-05-03 17:30:01,474 INFO [train.py:715] (6/8) Epoch 0, batch 25900, loss[loss=0.226, simple_loss=0.2706, pruned_loss=0.09077, over 4813.00 frames.], tot_loss[loss=0.2093, simple_loss=0.2661, pruned_loss=0.07626, over 971106.79 frames.], batch size: 15, lr: 1.31e-03 +2022-05-03 17:30:41,211 INFO [train.py:715] (6/8) Epoch 0, batch 25950, loss[loss=0.2173, simple_loss=0.2656, pruned_loss=0.0845, over 4809.00 frames.], tot_loss[loss=0.2085, simple_loss=0.2658, pruned_loss=0.07558, over 970865.73 frames.], batch size: 13, lr: 1.30e-03 +2022-05-03 17:31:21,228 INFO [train.py:715] (6/8) Epoch 0, batch 26000, 
loss[loss=0.2264, simple_loss=0.2799, pruned_loss=0.08644, over 4794.00 frames.], tot_loss[loss=0.2082, simple_loss=0.2651, pruned_loss=0.07564, over 971284.56 frames.], batch size: 21, lr: 1.30e-03 +2022-05-03 17:32:01,174 INFO [train.py:715] (6/8) Epoch 0, batch 26050, loss[loss=0.154, simple_loss=0.2229, pruned_loss=0.04261, over 4661.00 frames.], tot_loss[loss=0.2087, simple_loss=0.2657, pruned_loss=0.07588, over 971213.79 frames.], batch size: 13, lr: 1.30e-03 +2022-05-03 17:32:41,633 INFO [train.py:715] (6/8) Epoch 0, batch 26100, loss[loss=0.1964, simple_loss=0.2536, pruned_loss=0.06956, over 4783.00 frames.], tot_loss[loss=0.2085, simple_loss=0.2657, pruned_loss=0.0757, over 971904.63 frames.], batch size: 18, lr: 1.30e-03 +2022-05-03 17:33:21,954 INFO [train.py:715] (6/8) Epoch 0, batch 26150, loss[loss=0.2067, simple_loss=0.2714, pruned_loss=0.07103, over 4980.00 frames.], tot_loss[loss=0.2092, simple_loss=0.2659, pruned_loss=0.07627, over 971292.72 frames.], batch size: 28, lr: 1.30e-03 +2022-05-03 17:34:00,860 INFO [train.py:715] (6/8) Epoch 0, batch 26200, loss[loss=0.1979, simple_loss=0.2589, pruned_loss=0.06842, over 4774.00 frames.], tot_loss[loss=0.2085, simple_loss=0.2652, pruned_loss=0.0759, over 971956.41 frames.], batch size: 18, lr: 1.30e-03 +2022-05-03 17:34:41,488 INFO [train.py:715] (6/8) Epoch 0, batch 26250, loss[loss=0.2046, simple_loss=0.2715, pruned_loss=0.06882, over 4943.00 frames.], tot_loss[loss=0.2069, simple_loss=0.2641, pruned_loss=0.07486, over 972863.70 frames.], batch size: 29, lr: 1.30e-03 +2022-05-03 17:35:21,434 INFO [train.py:715] (6/8) Epoch 0, batch 26300, loss[loss=0.2063, simple_loss=0.2586, pruned_loss=0.07699, over 4980.00 frames.], tot_loss[loss=0.2071, simple_loss=0.264, pruned_loss=0.07513, over 972603.68 frames.], batch size: 15, lr: 1.30e-03 +2022-05-03 17:36:01,273 INFO [train.py:715] (6/8) Epoch 0, batch 26350, loss[loss=0.1889, simple_loss=0.2547, pruned_loss=0.06154, over 4667.00 frames.], tot_loss[loss=0.208, simple_loss=0.2647, pruned_loss=0.07564, over 972775.58 frames.], batch size: 14, lr: 1.30e-03 +2022-05-03 17:36:41,221 INFO [train.py:715] (6/8) Epoch 0, batch 26400, loss[loss=0.1675, simple_loss=0.2315, pruned_loss=0.05172, over 4986.00 frames.], tot_loss[loss=0.2078, simple_loss=0.2647, pruned_loss=0.07547, over 972257.19 frames.], batch size: 28, lr: 1.29e-03 +2022-05-03 17:37:21,343 INFO [train.py:715] (6/8) Epoch 0, batch 26450, loss[loss=0.1644, simple_loss=0.2363, pruned_loss=0.04626, over 4866.00 frames.], tot_loss[loss=0.2077, simple_loss=0.2647, pruned_loss=0.0754, over 971801.08 frames.], batch size: 16, lr: 1.29e-03 +2022-05-03 17:38:02,049 INFO [train.py:715] (6/8) Epoch 0, batch 26500, loss[loss=0.2326, simple_loss=0.2788, pruned_loss=0.09319, over 4915.00 frames.], tot_loss[loss=0.2086, simple_loss=0.2653, pruned_loss=0.07596, over 971579.78 frames.], batch size: 18, lr: 1.29e-03 +2022-05-03 17:38:41,412 INFO [train.py:715] (6/8) Epoch 0, batch 26550, loss[loss=0.1776, simple_loss=0.2462, pruned_loss=0.05452, over 4787.00 frames.], tot_loss[loss=0.2096, simple_loss=0.2662, pruned_loss=0.07647, over 971062.70 frames.], batch size: 18, lr: 1.29e-03 +2022-05-03 17:39:21,087 INFO [train.py:715] (6/8) Epoch 0, batch 26600, loss[loss=0.2008, simple_loss=0.26, pruned_loss=0.07083, over 4796.00 frames.], tot_loss[loss=0.2078, simple_loss=0.265, pruned_loss=0.07529, over 971567.39 frames.], batch size: 25, lr: 1.29e-03 +2022-05-03 17:40:01,337 INFO [train.py:715] (6/8) Epoch 0, batch 26650, loss[loss=0.1808, 
simple_loss=0.2478, pruned_loss=0.05683, over 4989.00 frames.], tot_loss[loss=0.2073, simple_loss=0.2649, pruned_loss=0.07486, over 971708.17 frames.], batch size: 25, lr: 1.29e-03 +2022-05-03 17:40:40,798 INFO [train.py:715] (6/8) Epoch 0, batch 26700, loss[loss=0.2576, simple_loss=0.3086, pruned_loss=0.1033, over 4827.00 frames.], tot_loss[loss=0.2092, simple_loss=0.2663, pruned_loss=0.07608, over 971531.65 frames.], batch size: 15, lr: 1.29e-03 +2022-05-03 17:41:20,823 INFO [train.py:715] (6/8) Epoch 0, batch 26750, loss[loss=0.2045, simple_loss=0.2549, pruned_loss=0.07701, over 4745.00 frames.], tot_loss[loss=0.2093, simple_loss=0.2664, pruned_loss=0.0761, over 971610.58 frames.], batch size: 16, lr: 1.29e-03 +2022-05-03 17:42:01,253 INFO [train.py:715] (6/8) Epoch 0, batch 26800, loss[loss=0.2076, simple_loss=0.2639, pruned_loss=0.07567, over 4815.00 frames.], tot_loss[loss=0.2086, simple_loss=0.2662, pruned_loss=0.07548, over 971252.98 frames.], batch size: 26, lr: 1.28e-03 +2022-05-03 17:42:41,672 INFO [train.py:715] (6/8) Epoch 0, batch 26850, loss[loss=0.2125, simple_loss=0.2706, pruned_loss=0.07723, over 4984.00 frames.], tot_loss[loss=0.2073, simple_loss=0.2652, pruned_loss=0.07473, over 972039.08 frames.], batch size: 39, lr: 1.28e-03 +2022-05-03 17:43:21,535 INFO [train.py:715] (6/8) Epoch 0, batch 26900, loss[loss=0.216, simple_loss=0.265, pruned_loss=0.0835, over 4770.00 frames.], tot_loss[loss=0.2092, simple_loss=0.2665, pruned_loss=0.07596, over 972763.33 frames.], batch size: 14, lr: 1.28e-03 +2022-05-03 17:44:02,263 INFO [train.py:715] (6/8) Epoch 0, batch 26950, loss[loss=0.253, simple_loss=0.3064, pruned_loss=0.09975, over 4913.00 frames.], tot_loss[loss=0.2092, simple_loss=0.2664, pruned_loss=0.07607, over 973213.01 frames.], batch size: 29, lr: 1.28e-03 +2022-05-03 17:44:42,420 INFO [train.py:715] (6/8) Epoch 0, batch 27000, loss[loss=0.1915, simple_loss=0.2389, pruned_loss=0.07202, over 4823.00 frames.], tot_loss[loss=0.2081, simple_loss=0.2656, pruned_loss=0.07529, over 972936.87 frames.], batch size: 13, lr: 1.28e-03 +2022-05-03 17:44:42,421 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 17:44:51,201 INFO [train.py:742] (6/8) Epoch 0, validation: loss=0.1338, simple_loss=0.2208, pruned_loss=0.02337, over 914524.00 frames. 
+2022-05-03 17:45:31,273 INFO [train.py:715] (6/8) Epoch 0, batch 27050, loss[loss=0.2148, simple_loss=0.2661, pruned_loss=0.08171, over 4900.00 frames.], tot_loss[loss=0.2068, simple_loss=0.2645, pruned_loss=0.07454, over 973476.68 frames.], batch size: 17, lr: 1.28e-03 +2022-05-03 17:46:10,747 INFO [train.py:715] (6/8) Epoch 0, batch 27100, loss[loss=0.2637, simple_loss=0.3073, pruned_loss=0.1101, over 4836.00 frames.], tot_loss[loss=0.2073, simple_loss=0.265, pruned_loss=0.07485, over 972685.32 frames.], batch size: 15, lr: 1.28e-03 +2022-05-03 17:46:51,329 INFO [train.py:715] (6/8) Epoch 0, batch 27150, loss[loss=0.2117, simple_loss=0.2654, pruned_loss=0.07895, over 4969.00 frames.], tot_loss[loss=0.2076, simple_loss=0.2654, pruned_loss=0.07494, over 973258.16 frames.], batch size: 14, lr: 1.28e-03 +2022-05-03 17:47:31,713 INFO [train.py:715] (6/8) Epoch 0, batch 27200, loss[loss=0.2039, simple_loss=0.2606, pruned_loss=0.07356, over 4782.00 frames.], tot_loss[loss=0.2076, simple_loss=0.2652, pruned_loss=0.07497, over 973329.88 frames.], batch size: 14, lr: 1.28e-03 +2022-05-03 17:48:11,815 INFO [train.py:715] (6/8) Epoch 0, batch 27250, loss[loss=0.215, simple_loss=0.2811, pruned_loss=0.07451, over 4764.00 frames.], tot_loss[loss=0.2088, simple_loss=0.2663, pruned_loss=0.07561, over 973554.77 frames.], batch size: 19, lr: 1.27e-03 +2022-05-03 17:48:51,955 INFO [train.py:715] (6/8) Epoch 0, batch 27300, loss[loss=0.2527, simple_loss=0.2982, pruned_loss=0.1036, over 4840.00 frames.], tot_loss[loss=0.2072, simple_loss=0.2647, pruned_loss=0.07484, over 972964.67 frames.], batch size: 30, lr: 1.27e-03 +2022-05-03 17:49:31,860 INFO [train.py:715] (6/8) Epoch 0, batch 27350, loss[loss=0.2269, simple_loss=0.2908, pruned_loss=0.08155, over 4926.00 frames.], tot_loss[loss=0.2064, simple_loss=0.2641, pruned_loss=0.0743, over 972815.85 frames.], batch size: 29, lr: 1.27e-03 +2022-05-03 17:50:11,821 INFO [train.py:715] (6/8) Epoch 0, batch 27400, loss[loss=0.2771, simple_loss=0.3121, pruned_loss=0.121, over 4744.00 frames.], tot_loss[loss=0.2064, simple_loss=0.264, pruned_loss=0.07444, over 971788.96 frames.], batch size: 16, lr: 1.27e-03 +2022-05-03 17:50:51,097 INFO [train.py:715] (6/8) Epoch 0, batch 27450, loss[loss=0.2072, simple_loss=0.258, pruned_loss=0.0782, over 4983.00 frames.], tot_loss[loss=0.2052, simple_loss=0.2629, pruned_loss=0.07377, over 971837.30 frames.], batch size: 25, lr: 1.27e-03 +2022-05-03 17:51:31,243 INFO [train.py:715] (6/8) Epoch 0, batch 27500, loss[loss=0.2641, simple_loss=0.3126, pruned_loss=0.1078, over 4757.00 frames.], tot_loss[loss=0.2048, simple_loss=0.2627, pruned_loss=0.07341, over 971924.20 frames.], batch size: 19, lr: 1.27e-03 +2022-05-03 17:52:11,052 INFO [train.py:715] (6/8) Epoch 0, batch 27550, loss[loss=0.2035, simple_loss=0.245, pruned_loss=0.08095, over 4927.00 frames.], tot_loss[loss=0.2069, simple_loss=0.2642, pruned_loss=0.07475, over 972093.95 frames.], batch size: 21, lr: 1.27e-03 +2022-05-03 17:52:50,538 INFO [train.py:715] (6/8) Epoch 0, batch 27600, loss[loss=0.2062, simple_loss=0.2609, pruned_loss=0.07579, over 4802.00 frames.], tot_loss[loss=0.2087, simple_loss=0.2653, pruned_loss=0.07607, over 971264.40 frames.], batch size: 24, lr: 1.27e-03 +2022-05-03 17:53:29,972 INFO [train.py:715] (6/8) Epoch 0, batch 27650, loss[loss=0.2554, simple_loss=0.31, pruned_loss=0.1004, over 4774.00 frames.], tot_loss[loss=0.2075, simple_loss=0.2643, pruned_loss=0.07535, over 971469.23 frames.], batch size: 16, lr: 1.27e-03 +2022-05-03 17:54:09,974 INFO 
[train.py:715] (6/8) Epoch 0, batch 27700, loss[loss=0.2152, simple_loss=0.2788, pruned_loss=0.07576, over 4909.00 frames.], tot_loss[loss=0.2078, simple_loss=0.2648, pruned_loss=0.0754, over 972179.17 frames.], batch size: 19, lr: 1.26e-03 +2022-05-03 17:54:50,345 INFO [train.py:715] (6/8) Epoch 0, batch 27750, loss[loss=0.2291, simple_loss=0.2905, pruned_loss=0.08387, over 4753.00 frames.], tot_loss[loss=0.2075, simple_loss=0.2647, pruned_loss=0.07515, over 971640.44 frames.], batch size: 16, lr: 1.26e-03 +2022-05-03 17:55:30,108 INFO [train.py:715] (6/8) Epoch 0, batch 27800, loss[loss=0.1922, simple_loss=0.2445, pruned_loss=0.06994, over 4778.00 frames.], tot_loss[loss=0.2058, simple_loss=0.2638, pruned_loss=0.07392, over 971713.42 frames.], batch size: 17, lr: 1.26e-03 +2022-05-03 17:56:10,360 INFO [train.py:715] (6/8) Epoch 0, batch 27850, loss[loss=0.1826, simple_loss=0.2284, pruned_loss=0.06839, over 4800.00 frames.], tot_loss[loss=0.2059, simple_loss=0.2636, pruned_loss=0.0741, over 971419.21 frames.], batch size: 12, lr: 1.26e-03 +2022-05-03 17:56:49,945 INFO [train.py:715] (6/8) Epoch 0, batch 27900, loss[loss=0.2158, simple_loss=0.2798, pruned_loss=0.07591, over 4929.00 frames.], tot_loss[loss=0.2046, simple_loss=0.2627, pruned_loss=0.07324, over 971899.72 frames.], batch size: 21, lr: 1.26e-03 +2022-05-03 17:57:29,409 INFO [train.py:715] (6/8) Epoch 0, batch 27950, loss[loss=0.212, simple_loss=0.2604, pruned_loss=0.08181, over 4831.00 frames.], tot_loss[loss=0.2041, simple_loss=0.2621, pruned_loss=0.07305, over 971389.30 frames.], batch size: 25, lr: 1.26e-03 +2022-05-03 17:58:09,429 INFO [train.py:715] (6/8) Epoch 0, batch 28000, loss[loss=0.2261, simple_loss=0.2728, pruned_loss=0.08967, over 4909.00 frames.], tot_loss[loss=0.2043, simple_loss=0.2624, pruned_loss=0.07308, over 972212.27 frames.], batch size: 19, lr: 1.26e-03 +2022-05-03 17:58:49,660 INFO [train.py:715] (6/8) Epoch 0, batch 28050, loss[loss=0.2597, simple_loss=0.2989, pruned_loss=0.1103, over 4810.00 frames.], tot_loss[loss=0.2053, simple_loss=0.2631, pruned_loss=0.07379, over 972169.87 frames.], batch size: 25, lr: 1.26e-03 +2022-05-03 17:59:29,711 INFO [train.py:715] (6/8) Epoch 0, batch 28100, loss[loss=0.1881, simple_loss=0.253, pruned_loss=0.0616, over 4780.00 frames.], tot_loss[loss=0.2055, simple_loss=0.2628, pruned_loss=0.07405, over 972122.82 frames.], batch size: 17, lr: 1.26e-03 +2022-05-03 18:00:08,962 INFO [train.py:715] (6/8) Epoch 0, batch 28150, loss[loss=0.1748, simple_loss=0.2362, pruned_loss=0.0567, over 4791.00 frames.], tot_loss[loss=0.2052, simple_loss=0.2625, pruned_loss=0.07396, over 972676.74 frames.], batch size: 24, lr: 1.25e-03 +2022-05-03 18:00:49,204 INFO [train.py:715] (6/8) Epoch 0, batch 28200, loss[loss=0.2323, simple_loss=0.2755, pruned_loss=0.09458, over 4878.00 frames.], tot_loss[loss=0.2062, simple_loss=0.2634, pruned_loss=0.07447, over 973078.83 frames.], batch size: 22, lr: 1.25e-03 +2022-05-03 18:01:28,911 INFO [train.py:715] (6/8) Epoch 0, batch 28250, loss[loss=0.2585, simple_loss=0.3098, pruned_loss=0.1036, over 4707.00 frames.], tot_loss[loss=0.2053, simple_loss=0.2625, pruned_loss=0.0741, over 972904.86 frames.], batch size: 15, lr: 1.25e-03 +2022-05-03 18:02:07,675 INFO [train.py:715] (6/8) Epoch 0, batch 28300, loss[loss=0.2034, simple_loss=0.265, pruned_loss=0.07088, over 4781.00 frames.], tot_loss[loss=0.206, simple_loss=0.2633, pruned_loss=0.07439, over 973369.59 frames.], batch size: 18, lr: 1.25e-03 +2022-05-03 18:02:48,213 INFO [train.py:715] (6/8) Epoch 0, 
batch 28350, loss[loss=0.2078, simple_loss=0.2753, pruned_loss=0.07014, over 4907.00 frames.], tot_loss[loss=0.205, simple_loss=0.2627, pruned_loss=0.0737, over 972877.01 frames.], batch size: 17, lr: 1.25e-03 +2022-05-03 18:03:27,715 INFO [train.py:715] (6/8) Epoch 0, batch 28400, loss[loss=0.234, simple_loss=0.2778, pruned_loss=0.09506, over 4811.00 frames.], tot_loss[loss=0.205, simple_loss=0.2628, pruned_loss=0.07359, over 973311.07 frames.], batch size: 26, lr: 1.25e-03 +2022-05-03 18:04:07,957 INFO [train.py:715] (6/8) Epoch 0, batch 28450, loss[loss=0.1896, simple_loss=0.2439, pruned_loss=0.06765, over 4793.00 frames.], tot_loss[loss=0.2052, simple_loss=0.2627, pruned_loss=0.07384, over 972841.43 frames.], batch size: 12, lr: 1.25e-03 +2022-05-03 18:04:47,634 INFO [train.py:715] (6/8) Epoch 0, batch 28500, loss[loss=0.1896, simple_loss=0.2597, pruned_loss=0.05975, over 4872.00 frames.], tot_loss[loss=0.2038, simple_loss=0.262, pruned_loss=0.07274, over 972612.50 frames.], batch size: 22, lr: 1.25e-03 +2022-05-03 18:05:28,119 INFO [train.py:715] (6/8) Epoch 0, batch 28550, loss[loss=0.1809, simple_loss=0.2328, pruned_loss=0.06445, over 4795.00 frames.], tot_loss[loss=0.2032, simple_loss=0.2615, pruned_loss=0.07247, over 972549.91 frames.], batch size: 24, lr: 1.25e-03 +2022-05-03 18:06:07,732 INFO [train.py:715] (6/8) Epoch 0, batch 28600, loss[loss=0.208, simple_loss=0.2609, pruned_loss=0.07749, over 4756.00 frames.], tot_loss[loss=0.2027, simple_loss=0.2611, pruned_loss=0.07213, over 972259.88 frames.], batch size: 16, lr: 1.24e-03 +2022-05-03 18:06:46,958 INFO [train.py:715] (6/8) Epoch 0, batch 28650, loss[loss=0.205, simple_loss=0.264, pruned_loss=0.07299, over 4926.00 frames.], tot_loss[loss=0.2042, simple_loss=0.2626, pruned_loss=0.07289, over 971821.96 frames.], batch size: 21, lr: 1.24e-03 +2022-05-03 18:07:26,844 INFO [train.py:715] (6/8) Epoch 0, batch 28700, loss[loss=0.2068, simple_loss=0.2663, pruned_loss=0.07363, over 4901.00 frames.], tot_loss[loss=0.2035, simple_loss=0.262, pruned_loss=0.07251, over 972635.89 frames.], batch size: 16, lr: 1.24e-03 +2022-05-03 18:08:06,488 INFO [train.py:715] (6/8) Epoch 0, batch 28750, loss[loss=0.2596, simple_loss=0.299, pruned_loss=0.1101, over 4896.00 frames.], tot_loss[loss=0.2042, simple_loss=0.2629, pruned_loss=0.07274, over 972052.68 frames.], batch size: 39, lr: 1.24e-03 +2022-05-03 18:08:46,800 INFO [train.py:715] (6/8) Epoch 0, batch 28800, loss[loss=0.1758, simple_loss=0.2343, pruned_loss=0.05867, over 4892.00 frames.], tot_loss[loss=0.2045, simple_loss=0.2632, pruned_loss=0.07291, over 971766.59 frames.], batch size: 19, lr: 1.24e-03 +2022-05-03 18:09:25,927 INFO [train.py:715] (6/8) Epoch 0, batch 28850, loss[loss=0.2111, simple_loss=0.2625, pruned_loss=0.07988, over 4688.00 frames.], tot_loss[loss=0.2049, simple_loss=0.2634, pruned_loss=0.07321, over 971270.08 frames.], batch size: 15, lr: 1.24e-03 +2022-05-03 18:10:05,955 INFO [train.py:715] (6/8) Epoch 0, batch 28900, loss[loss=0.2173, simple_loss=0.2794, pruned_loss=0.07756, over 4958.00 frames.], tot_loss[loss=0.2049, simple_loss=0.2633, pruned_loss=0.07326, over 970998.08 frames.], batch size: 24, lr: 1.24e-03 +2022-05-03 18:10:45,833 INFO [train.py:715] (6/8) Epoch 0, batch 28950, loss[loss=0.2069, simple_loss=0.2674, pruned_loss=0.07317, over 4959.00 frames.], tot_loss[loss=0.205, simple_loss=0.2633, pruned_loss=0.07337, over 971111.78 frames.], batch size: 35, lr: 1.24e-03 +2022-05-03 18:11:24,710 INFO [train.py:715] (6/8) Epoch 0, batch 29000, loss[loss=0.2295, 
simple_loss=0.2831, pruned_loss=0.08797, over 4754.00 frames.], tot_loss[loss=0.2055, simple_loss=0.2634, pruned_loss=0.07378, over 971615.67 frames.], batch size: 16, lr: 1.24e-03 +2022-05-03 18:12:05,313 INFO [train.py:715] (6/8) Epoch 0, batch 29050, loss[loss=0.2063, simple_loss=0.2652, pruned_loss=0.07372, over 4968.00 frames.], tot_loss[loss=0.2061, simple_loss=0.264, pruned_loss=0.07406, over 972470.08 frames.], batch size: 15, lr: 1.24e-03 +2022-05-03 18:12:45,444 INFO [train.py:715] (6/8) Epoch 0, batch 29100, loss[loss=0.2002, simple_loss=0.2578, pruned_loss=0.07133, over 4854.00 frames.], tot_loss[loss=0.2051, simple_loss=0.2633, pruned_loss=0.07348, over 973258.66 frames.], batch size: 20, lr: 1.23e-03 +2022-05-03 18:13:25,065 INFO [train.py:715] (6/8) Epoch 0, batch 29150, loss[loss=0.2238, simple_loss=0.2695, pruned_loss=0.08904, over 4807.00 frames.], tot_loss[loss=0.2054, simple_loss=0.2634, pruned_loss=0.07373, over 973003.69 frames.], batch size: 15, lr: 1.23e-03 +2022-05-03 18:14:04,269 INFO [train.py:715] (6/8) Epoch 0, batch 29200, loss[loss=0.1851, simple_loss=0.2387, pruned_loss=0.06579, over 4844.00 frames.], tot_loss[loss=0.2052, simple_loss=0.263, pruned_loss=0.07368, over 972455.72 frames.], batch size: 32, lr: 1.23e-03 +2022-05-03 18:14:44,211 INFO [train.py:715] (6/8) Epoch 0, batch 29250, loss[loss=0.197, simple_loss=0.2476, pruned_loss=0.0732, over 4856.00 frames.], tot_loss[loss=0.2037, simple_loss=0.2617, pruned_loss=0.07284, over 972163.53 frames.], batch size: 32, lr: 1.23e-03 +2022-05-03 18:15:24,225 INFO [train.py:715] (6/8) Epoch 0, batch 29300, loss[loss=0.1448, simple_loss=0.215, pruned_loss=0.03728, over 4967.00 frames.], tot_loss[loss=0.2036, simple_loss=0.262, pruned_loss=0.07261, over 971762.88 frames.], batch size: 14, lr: 1.23e-03 +2022-05-03 18:16:04,639 INFO [train.py:715] (6/8) Epoch 0, batch 29350, loss[loss=0.1987, simple_loss=0.2681, pruned_loss=0.06462, over 4766.00 frames.], tot_loss[loss=0.2051, simple_loss=0.2633, pruned_loss=0.07345, over 972515.14 frames.], batch size: 19, lr: 1.23e-03 +2022-05-03 18:16:44,085 INFO [train.py:715] (6/8) Epoch 0, batch 29400, loss[loss=0.1981, simple_loss=0.2552, pruned_loss=0.07056, over 4938.00 frames.], tot_loss[loss=0.2045, simple_loss=0.2628, pruned_loss=0.07311, over 972974.29 frames.], batch size: 29, lr: 1.23e-03 +2022-05-03 18:17:23,557 INFO [train.py:715] (6/8) Epoch 0, batch 29450, loss[loss=0.2052, simple_loss=0.2623, pruned_loss=0.074, over 4852.00 frames.], tot_loss[loss=0.2048, simple_loss=0.2631, pruned_loss=0.07324, over 973044.61 frames.], batch size: 32, lr: 1.23e-03 +2022-05-03 18:18:03,752 INFO [train.py:715] (6/8) Epoch 0, batch 29500, loss[loss=0.1772, simple_loss=0.2431, pruned_loss=0.05569, over 4985.00 frames.], tot_loss[loss=0.2039, simple_loss=0.2623, pruned_loss=0.07273, over 972440.88 frames.], batch size: 25, lr: 1.23e-03 +2022-05-03 18:18:42,858 INFO [train.py:715] (6/8) Epoch 0, batch 29550, loss[loss=0.1972, simple_loss=0.2634, pruned_loss=0.06546, over 4969.00 frames.], tot_loss[loss=0.2038, simple_loss=0.2623, pruned_loss=0.07263, over 971626.16 frames.], batch size: 15, lr: 1.23e-03 +2022-05-03 18:19:23,022 INFO [train.py:715] (6/8) Epoch 0, batch 29600, loss[loss=0.2046, simple_loss=0.2708, pruned_loss=0.06925, over 4940.00 frames.], tot_loss[loss=0.2028, simple_loss=0.2611, pruned_loss=0.07225, over 971877.14 frames.], batch size: 29, lr: 1.22e-03 +2022-05-03 18:20:02,964 INFO [train.py:715] (6/8) Epoch 0, batch 29650, loss[loss=0.2208, simple_loss=0.267, 
pruned_loss=0.0873, over 4977.00 frames.], tot_loss[loss=0.2026, simple_loss=0.261, pruned_loss=0.07211, over 971751.07 frames.], batch size: 33, lr: 1.22e-03 +2022-05-03 18:20:42,830 INFO [train.py:715] (6/8) Epoch 0, batch 29700, loss[loss=0.2352, simple_loss=0.2949, pruned_loss=0.08779, over 4930.00 frames.], tot_loss[loss=0.2016, simple_loss=0.2605, pruned_loss=0.07142, over 972278.02 frames.], batch size: 18, lr: 1.22e-03 +2022-05-03 18:21:23,327 INFO [train.py:715] (6/8) Epoch 0, batch 29750, loss[loss=0.1932, simple_loss=0.2544, pruned_loss=0.06595, over 4820.00 frames.], tot_loss[loss=0.2018, simple_loss=0.2604, pruned_loss=0.07163, over 971967.99 frames.], batch size: 13, lr: 1.22e-03 +2022-05-03 18:22:03,172 INFO [train.py:715] (6/8) Epoch 0, batch 29800, loss[loss=0.1884, simple_loss=0.2384, pruned_loss=0.0692, over 4744.00 frames.], tot_loss[loss=0.2019, simple_loss=0.2604, pruned_loss=0.07174, over 971992.53 frames.], batch size: 12, lr: 1.22e-03 +2022-05-03 18:22:44,077 INFO [train.py:715] (6/8) Epoch 0, batch 29850, loss[loss=0.2288, simple_loss=0.2862, pruned_loss=0.0857, over 4825.00 frames.], tot_loss[loss=0.2027, simple_loss=0.2613, pruned_loss=0.07201, over 972083.55 frames.], batch size: 25, lr: 1.22e-03 +2022-05-03 18:23:24,008 INFO [train.py:715] (6/8) Epoch 0, batch 29900, loss[loss=0.2557, simple_loss=0.2946, pruned_loss=0.1084, over 4989.00 frames.], tot_loss[loss=0.2041, simple_loss=0.2627, pruned_loss=0.07281, over 972252.67 frames.], batch size: 24, lr: 1.22e-03 +2022-05-03 18:24:03,890 INFO [train.py:715] (6/8) Epoch 0, batch 29950, loss[loss=0.2044, simple_loss=0.2639, pruned_loss=0.07239, over 4954.00 frames.], tot_loss[loss=0.2043, simple_loss=0.2627, pruned_loss=0.07297, over 973057.71 frames.], batch size: 29, lr: 1.22e-03 +2022-05-03 18:24:43,765 INFO [train.py:715] (6/8) Epoch 0, batch 30000, loss[loss=0.2025, simple_loss=0.2534, pruned_loss=0.07575, over 4835.00 frames.], tot_loss[loss=0.2048, simple_loss=0.2633, pruned_loss=0.07316, over 972806.74 frames.], batch size: 30, lr: 1.22e-03 +2022-05-03 18:24:43,766 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 18:25:00,380 INFO [train.py:742] (6/8) Epoch 0, validation: loss=0.1316, simple_loss=0.2189, pruned_loss=0.02213, over 914524.00 frames. 
+2022-05-03 18:25:40,704 INFO [train.py:715] (6/8) Epoch 0, batch 30050, loss[loss=0.2055, simple_loss=0.2729, pruned_loss=0.06906, over 4964.00 frames.], tot_loss[loss=0.2053, simple_loss=0.2636, pruned_loss=0.07352, over 973151.34 frames.], batch size: 24, lr: 1.22e-03 +2022-05-03 18:26:21,248 INFO [train.py:715] (6/8) Epoch 0, batch 30100, loss[loss=0.1747, simple_loss=0.256, pruned_loss=0.04664, over 4981.00 frames.], tot_loss[loss=0.2053, simple_loss=0.2634, pruned_loss=0.07357, over 974152.95 frames.], batch size: 15, lr: 1.21e-03 +2022-05-03 18:27:01,933 INFO [train.py:715] (6/8) Epoch 0, batch 30150, loss[loss=0.2346, simple_loss=0.2826, pruned_loss=0.0933, over 4896.00 frames.], tot_loss[loss=0.2046, simple_loss=0.2631, pruned_loss=0.07303, over 973787.91 frames.], batch size: 19, lr: 1.21e-03 +2022-05-03 18:27:42,071 INFO [train.py:715] (6/8) Epoch 0, batch 30200, loss[loss=0.1711, simple_loss=0.2424, pruned_loss=0.04994, over 4941.00 frames.], tot_loss[loss=0.2033, simple_loss=0.2616, pruned_loss=0.07254, over 972817.54 frames.], batch size: 21, lr: 1.21e-03 +2022-05-03 18:28:22,562 INFO [train.py:715] (6/8) Epoch 0, batch 30250, loss[loss=0.216, simple_loss=0.2779, pruned_loss=0.0771, over 4699.00 frames.], tot_loss[loss=0.2048, simple_loss=0.2624, pruned_loss=0.07356, over 972381.28 frames.], batch size: 15, lr: 1.21e-03 +2022-05-03 18:29:02,661 INFO [train.py:715] (6/8) Epoch 0, batch 30300, loss[loss=0.2203, simple_loss=0.2631, pruned_loss=0.08881, over 4829.00 frames.], tot_loss[loss=0.2043, simple_loss=0.2619, pruned_loss=0.07329, over 972985.07 frames.], batch size: 15, lr: 1.21e-03 +2022-05-03 18:29:43,087 INFO [train.py:715] (6/8) Epoch 0, batch 30350, loss[loss=0.1824, simple_loss=0.252, pruned_loss=0.05643, over 4904.00 frames.], tot_loss[loss=0.2036, simple_loss=0.2617, pruned_loss=0.07276, over 972535.60 frames.], batch size: 19, lr: 1.21e-03 +2022-05-03 18:30:23,203 INFO [train.py:715] (6/8) Epoch 0, batch 30400, loss[loss=0.2045, simple_loss=0.2619, pruned_loss=0.07359, over 4762.00 frames.], tot_loss[loss=0.203, simple_loss=0.2612, pruned_loss=0.07237, over 972302.55 frames.], batch size: 16, lr: 1.21e-03 +2022-05-03 18:31:02,971 INFO [train.py:715] (6/8) Epoch 0, batch 30450, loss[loss=0.2145, simple_loss=0.2661, pruned_loss=0.0814, over 4879.00 frames.], tot_loss[loss=0.2038, simple_loss=0.262, pruned_loss=0.07277, over 973179.86 frames.], batch size: 16, lr: 1.21e-03 +2022-05-03 18:31:42,726 INFO [train.py:715] (6/8) Epoch 0, batch 30500, loss[loss=0.2448, simple_loss=0.2979, pruned_loss=0.09579, over 4991.00 frames.], tot_loss[loss=0.2032, simple_loss=0.2615, pruned_loss=0.07244, over 972750.20 frames.], batch size: 20, lr: 1.21e-03 +2022-05-03 18:32:22,644 INFO [train.py:715] (6/8) Epoch 0, batch 30550, loss[loss=0.1528, simple_loss=0.2159, pruned_loss=0.04478, over 4758.00 frames.], tot_loss[loss=0.2017, simple_loss=0.2605, pruned_loss=0.0715, over 973262.90 frames.], batch size: 19, lr: 1.21e-03 +2022-05-03 18:33:01,762 INFO [train.py:715] (6/8) Epoch 0, batch 30600, loss[loss=0.1859, simple_loss=0.2479, pruned_loss=0.062, over 4922.00 frames.], tot_loss[loss=0.2005, simple_loss=0.2591, pruned_loss=0.07091, over 973027.78 frames.], batch size: 29, lr: 1.20e-03 +2022-05-03 18:33:41,706 INFO [train.py:715] (6/8) Epoch 0, batch 30650, loss[loss=0.1988, simple_loss=0.2546, pruned_loss=0.07146, over 4643.00 frames.], tot_loss[loss=0.2021, simple_loss=0.2602, pruned_loss=0.07202, over 973016.69 frames.], batch size: 13, lr: 1.20e-03 +2022-05-03 18:34:21,524 INFO 
[train.py:715] (6/8) Epoch 0, batch 30700, loss[loss=0.1697, simple_loss=0.2399, pruned_loss=0.04977, over 4879.00 frames.], tot_loss[loss=0.2014, simple_loss=0.2598, pruned_loss=0.0715, over 972053.12 frames.], batch size: 22, lr: 1.20e-03 +2022-05-03 18:35:01,625 INFO [train.py:715] (6/8) Epoch 0, batch 30750, loss[loss=0.1776, simple_loss=0.252, pruned_loss=0.05164, over 4652.00 frames.], tot_loss[loss=0.2017, simple_loss=0.2598, pruned_loss=0.07179, over 972216.52 frames.], batch size: 13, lr: 1.20e-03 +2022-05-03 18:35:40,975 INFO [train.py:715] (6/8) Epoch 0, batch 30800, loss[loss=0.188, simple_loss=0.2416, pruned_loss=0.06721, over 4756.00 frames.], tot_loss[loss=0.202, simple_loss=0.2604, pruned_loss=0.07184, over 971292.22 frames.], batch size: 12, lr: 1.20e-03 +2022-05-03 18:36:21,307 INFO [train.py:715] (6/8) Epoch 0, batch 30850, loss[loss=0.234, simple_loss=0.2814, pruned_loss=0.09327, over 4971.00 frames.], tot_loss[loss=0.202, simple_loss=0.2608, pruned_loss=0.07156, over 972224.20 frames.], batch size: 39, lr: 1.20e-03 +2022-05-03 18:37:01,149 INFO [train.py:715] (6/8) Epoch 0, batch 30900, loss[loss=0.1694, simple_loss=0.2402, pruned_loss=0.04928, over 4882.00 frames.], tot_loss[loss=0.2007, simple_loss=0.2595, pruned_loss=0.07094, over 972547.78 frames.], batch size: 16, lr: 1.20e-03 +2022-05-03 18:37:40,863 INFO [train.py:715] (6/8) Epoch 0, batch 30950, loss[loss=0.1926, simple_loss=0.2464, pruned_loss=0.06934, over 4846.00 frames.], tot_loss[loss=0.2014, simple_loss=0.2598, pruned_loss=0.07154, over 973400.17 frames.], batch size: 13, lr: 1.20e-03 +2022-05-03 18:38:20,950 INFO [train.py:715] (6/8) Epoch 0, batch 31000, loss[loss=0.1686, simple_loss=0.2319, pruned_loss=0.05265, over 4807.00 frames.], tot_loss[loss=0.2017, simple_loss=0.2601, pruned_loss=0.07168, over 973281.93 frames.], batch size: 25, lr: 1.20e-03 +2022-05-03 18:39:00,969 INFO [train.py:715] (6/8) Epoch 0, batch 31050, loss[loss=0.2313, simple_loss=0.2903, pruned_loss=0.08615, over 4773.00 frames.], tot_loss[loss=0.2015, simple_loss=0.2599, pruned_loss=0.07155, over 973590.91 frames.], batch size: 14, lr: 1.20e-03 +2022-05-03 18:39:40,374 INFO [train.py:715] (6/8) Epoch 0, batch 31100, loss[loss=0.1997, simple_loss=0.2405, pruned_loss=0.07946, over 4796.00 frames.], tot_loss[loss=0.2024, simple_loss=0.2608, pruned_loss=0.07199, over 973115.10 frames.], batch size: 12, lr: 1.20e-03 +2022-05-03 18:40:19,536 INFO [train.py:715] (6/8) Epoch 0, batch 31150, loss[loss=0.2202, simple_loss=0.2829, pruned_loss=0.07878, over 4880.00 frames.], tot_loss[loss=0.2016, simple_loss=0.2607, pruned_loss=0.07125, over 973031.89 frames.], batch size: 22, lr: 1.19e-03 +2022-05-03 18:40:59,615 INFO [train.py:715] (6/8) Epoch 0, batch 31200, loss[loss=0.2152, simple_loss=0.2736, pruned_loss=0.07842, over 4900.00 frames.], tot_loss[loss=0.2017, simple_loss=0.2609, pruned_loss=0.07121, over 972943.33 frames.], batch size: 22, lr: 1.19e-03 +2022-05-03 18:41:39,411 INFO [train.py:715] (6/8) Epoch 0, batch 31250, loss[loss=0.1829, simple_loss=0.2645, pruned_loss=0.05067, over 4949.00 frames.], tot_loss[loss=0.2012, simple_loss=0.2608, pruned_loss=0.0708, over 972421.25 frames.], batch size: 21, lr: 1.19e-03 +2022-05-03 18:42:18,879 INFO [train.py:715] (6/8) Epoch 0, batch 31300, loss[loss=0.161, simple_loss=0.2333, pruned_loss=0.0443, over 4981.00 frames.], tot_loss[loss=0.2, simple_loss=0.2596, pruned_loss=0.0702, over 971581.17 frames.], batch size: 14, lr: 1.19e-03 +2022-05-03 18:42:59,216 INFO [train.py:715] (6/8) Epoch 0, 
batch 31350, loss[loss=0.2489, simple_loss=0.3124, pruned_loss=0.09274, over 4905.00 frames.], tot_loss[loss=0.2014, simple_loss=0.2606, pruned_loss=0.07104, over 971387.28 frames.], batch size: 39, lr: 1.19e-03 +2022-05-03 18:43:38,900 INFO [train.py:715] (6/8) Epoch 0, batch 31400, loss[loss=0.1919, simple_loss=0.2512, pruned_loss=0.06632, over 4877.00 frames.], tot_loss[loss=0.2003, simple_loss=0.2596, pruned_loss=0.07048, over 971734.09 frames.], batch size: 22, lr: 1.19e-03 +2022-05-03 18:44:18,171 INFO [train.py:715] (6/8) Epoch 0, batch 31450, loss[loss=0.1917, simple_loss=0.2503, pruned_loss=0.06661, over 4694.00 frames.], tot_loss[loss=0.2005, simple_loss=0.2596, pruned_loss=0.07068, over 972412.98 frames.], batch size: 15, lr: 1.19e-03 +2022-05-03 18:44:57,277 INFO [train.py:715] (6/8) Epoch 0, batch 31500, loss[loss=0.1767, simple_loss=0.2393, pruned_loss=0.05702, over 4791.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2589, pruned_loss=0.07035, over 972790.60 frames.], batch size: 14, lr: 1.19e-03 +2022-05-03 18:45:37,325 INFO [train.py:715] (6/8) Epoch 0, batch 31550, loss[loss=0.2045, simple_loss=0.2703, pruned_loss=0.06938, over 4900.00 frames.], tot_loss[loss=0.2009, simple_loss=0.2602, pruned_loss=0.07084, over 972296.03 frames.], batch size: 38, lr: 1.19e-03 +2022-05-03 18:46:17,103 INFO [train.py:715] (6/8) Epoch 0, batch 31600, loss[loss=0.1547, simple_loss=0.214, pruned_loss=0.0477, over 4781.00 frames.], tot_loss[loss=0.202, simple_loss=0.2609, pruned_loss=0.07155, over 972301.73 frames.], batch size: 14, lr: 1.19e-03 +2022-05-03 18:46:56,338 INFO [train.py:715] (6/8) Epoch 0, batch 31650, loss[loss=0.1696, simple_loss=0.2387, pruned_loss=0.05025, over 4761.00 frames.], tot_loss[loss=0.2034, simple_loss=0.2621, pruned_loss=0.07232, over 972572.90 frames.], batch size: 18, lr: 1.19e-03 +2022-05-03 18:47:36,249 INFO [train.py:715] (6/8) Epoch 0, batch 31700, loss[loss=0.1977, simple_loss=0.2583, pruned_loss=0.06854, over 4813.00 frames.], tot_loss[loss=0.203, simple_loss=0.2618, pruned_loss=0.07213, over 972902.94 frames.], batch size: 21, lr: 1.18e-03 +2022-05-03 18:48:16,475 INFO [train.py:715] (6/8) Epoch 0, batch 31750, loss[loss=0.2214, simple_loss=0.2794, pruned_loss=0.08163, over 4852.00 frames.], tot_loss[loss=0.2035, simple_loss=0.2624, pruned_loss=0.07232, over 972690.76 frames.], batch size: 30, lr: 1.18e-03 +2022-05-03 18:48:56,201 INFO [train.py:715] (6/8) Epoch 0, batch 31800, loss[loss=0.1627, simple_loss=0.2298, pruned_loss=0.04779, over 4942.00 frames.], tot_loss[loss=0.2029, simple_loss=0.2618, pruned_loss=0.07203, over 972544.76 frames.], batch size: 21, lr: 1.18e-03 +2022-05-03 18:49:35,471 INFO [train.py:715] (6/8) Epoch 0, batch 31850, loss[loss=0.2055, simple_loss=0.2446, pruned_loss=0.08318, over 4810.00 frames.], tot_loss[loss=0.2023, simple_loss=0.2611, pruned_loss=0.07179, over 972403.31 frames.], batch size: 13, lr: 1.18e-03 +2022-05-03 18:50:15,970 INFO [train.py:715] (6/8) Epoch 0, batch 31900, loss[loss=0.2072, simple_loss=0.256, pruned_loss=0.07924, over 4881.00 frames.], tot_loss[loss=0.2018, simple_loss=0.2601, pruned_loss=0.07175, over 972148.11 frames.], batch size: 16, lr: 1.18e-03 +2022-05-03 18:50:55,677 INFO [train.py:715] (6/8) Epoch 0, batch 31950, loss[loss=0.1813, simple_loss=0.2429, pruned_loss=0.0599, over 4806.00 frames.], tot_loss[loss=0.2014, simple_loss=0.2603, pruned_loss=0.07124, over 972318.35 frames.], batch size: 13, lr: 1.18e-03 +2022-05-03 18:51:37,247 INFO [train.py:715] (6/8) Epoch 0, batch 32000, 
loss[loss=0.1987, simple_loss=0.2689, pruned_loss=0.06429, over 4922.00 frames.], tot_loss[loss=0.201, simple_loss=0.2599, pruned_loss=0.07102, over 972404.87 frames.], batch size: 23, lr: 1.18e-03 +2022-05-03 18:52:17,406 INFO [train.py:715] (6/8) Epoch 0, batch 32050, loss[loss=0.1652, simple_loss=0.2274, pruned_loss=0.05151, over 4918.00 frames.], tot_loss[loss=0.2012, simple_loss=0.2598, pruned_loss=0.07124, over 972689.47 frames.], batch size: 18, lr: 1.18e-03 +2022-05-03 18:52:57,280 INFO [train.py:715] (6/8) Epoch 0, batch 32100, loss[loss=0.153, simple_loss=0.2231, pruned_loss=0.04146, over 4898.00 frames.], tot_loss[loss=0.2011, simple_loss=0.2599, pruned_loss=0.07115, over 972755.53 frames.], batch size: 22, lr: 1.18e-03 +2022-05-03 18:53:36,630 INFO [train.py:715] (6/8) Epoch 0, batch 32150, loss[loss=0.1999, simple_loss=0.253, pruned_loss=0.07339, over 4807.00 frames.], tot_loss[loss=0.2017, simple_loss=0.2603, pruned_loss=0.07157, over 971983.27 frames.], batch size: 21, lr: 1.18e-03 +2022-05-03 18:54:15,808 INFO [train.py:715] (6/8) Epoch 0, batch 32200, loss[loss=0.2098, simple_loss=0.2721, pruned_loss=0.0738, over 4945.00 frames.], tot_loss[loss=0.2015, simple_loss=0.2605, pruned_loss=0.07124, over 972305.20 frames.], batch size: 21, lr: 1.18e-03 +2022-05-03 18:54:55,966 INFO [train.py:715] (6/8) Epoch 0, batch 32250, loss[loss=0.2303, simple_loss=0.2881, pruned_loss=0.0863, over 4746.00 frames.], tot_loss[loss=0.2015, simple_loss=0.2608, pruned_loss=0.07108, over 972863.45 frames.], batch size: 16, lr: 1.17e-03 +2022-05-03 18:55:35,811 INFO [train.py:715] (6/8) Epoch 0, batch 32300, loss[loss=0.1829, simple_loss=0.2468, pruned_loss=0.05944, over 4841.00 frames.], tot_loss[loss=0.201, simple_loss=0.2604, pruned_loss=0.07082, over 973477.00 frames.], batch size: 32, lr: 1.17e-03 +2022-05-03 18:56:15,321 INFO [train.py:715] (6/8) Epoch 0, batch 32350, loss[loss=0.2044, simple_loss=0.2503, pruned_loss=0.0793, over 4783.00 frames.], tot_loss[loss=0.2013, simple_loss=0.2604, pruned_loss=0.07111, over 973531.42 frames.], batch size: 18, lr: 1.17e-03 +2022-05-03 18:56:55,316 INFO [train.py:715] (6/8) Epoch 0, batch 32400, loss[loss=0.1904, simple_loss=0.2574, pruned_loss=0.06164, over 4967.00 frames.], tot_loss[loss=0.2009, simple_loss=0.2601, pruned_loss=0.07084, over 973046.26 frames.], batch size: 24, lr: 1.17e-03 +2022-05-03 18:57:35,392 INFO [train.py:715] (6/8) Epoch 0, batch 32450, loss[loss=0.2098, simple_loss=0.2768, pruned_loss=0.07136, over 4777.00 frames.], tot_loss[loss=0.2013, simple_loss=0.2605, pruned_loss=0.07104, over 973047.36 frames.], batch size: 18, lr: 1.17e-03 +2022-05-03 18:58:15,187 INFO [train.py:715] (6/8) Epoch 0, batch 32500, loss[loss=0.163, simple_loss=0.2363, pruned_loss=0.04489, over 4767.00 frames.], tot_loss[loss=0.2005, simple_loss=0.2601, pruned_loss=0.07045, over 971762.15 frames.], batch size: 14, lr: 1.17e-03 +2022-05-03 18:58:54,511 INFO [train.py:715] (6/8) Epoch 0, batch 32550, loss[loss=0.2092, simple_loss=0.2508, pruned_loss=0.08381, over 4870.00 frames.], tot_loss[loss=0.2003, simple_loss=0.2597, pruned_loss=0.0705, over 971288.71 frames.], batch size: 16, lr: 1.17e-03 +2022-05-03 18:59:34,022 INFO [train.py:715] (6/8) Epoch 0, batch 32600, loss[loss=0.2159, simple_loss=0.2628, pruned_loss=0.08456, over 4866.00 frames.], tot_loss[loss=0.2012, simple_loss=0.2606, pruned_loss=0.0709, over 970997.46 frames.], batch size: 32, lr: 1.17e-03 +2022-05-03 19:00:13,284 INFO [train.py:715] (6/8) Epoch 0, batch 32650, loss[loss=0.165, 
simple_loss=0.2383, pruned_loss=0.0458, over 4982.00 frames.], tot_loss[loss=0.2006, simple_loss=0.2598, pruned_loss=0.0707, over 971515.79 frames.], batch size: 31, lr: 1.17e-03 +2022-05-03 19:00:52,622 INFO [train.py:715] (6/8) Epoch 0, batch 32700, loss[loss=0.1572, simple_loss=0.2283, pruned_loss=0.04308, over 4745.00 frames.], tot_loss[loss=0.2001, simple_loss=0.2595, pruned_loss=0.07035, over 972137.46 frames.], batch size: 19, lr: 1.17e-03 +2022-05-03 19:01:32,100 INFO [train.py:715] (6/8) Epoch 0, batch 32750, loss[loss=0.2487, simple_loss=0.2959, pruned_loss=0.1008, over 4832.00 frames.], tot_loss[loss=0.1999, simple_loss=0.2592, pruned_loss=0.07036, over 972677.13 frames.], batch size: 13, lr: 1.17e-03 +2022-05-03 19:02:12,130 INFO [train.py:715] (6/8) Epoch 0, batch 32800, loss[loss=0.2156, simple_loss=0.2737, pruned_loss=0.07876, over 4792.00 frames.], tot_loss[loss=0.2012, simple_loss=0.26, pruned_loss=0.0712, over 973008.12 frames.], batch size: 18, lr: 1.16e-03 +2022-05-03 19:02:51,639 INFO [train.py:715] (6/8) Epoch 0, batch 32850, loss[loss=0.1783, simple_loss=0.2421, pruned_loss=0.05729, over 4951.00 frames.], tot_loss[loss=0.2001, simple_loss=0.2592, pruned_loss=0.07052, over 971814.80 frames.], batch size: 21, lr: 1.16e-03 +2022-05-03 19:03:31,130 INFO [train.py:715] (6/8) Epoch 0, batch 32900, loss[loss=0.2379, simple_loss=0.2851, pruned_loss=0.09537, over 4657.00 frames.], tot_loss[loss=0.1995, simple_loss=0.2586, pruned_loss=0.07025, over 971656.04 frames.], batch size: 13, lr: 1.16e-03 +2022-05-03 19:04:11,182 INFO [train.py:715] (6/8) Epoch 0, batch 32950, loss[loss=0.1485, simple_loss=0.2305, pruned_loss=0.03323, over 4773.00 frames.], tot_loss[loss=0.2002, simple_loss=0.2589, pruned_loss=0.07069, over 971757.77 frames.], batch size: 18, lr: 1.16e-03 +2022-05-03 19:04:50,690 INFO [train.py:715] (6/8) Epoch 0, batch 33000, loss[loss=0.2287, simple_loss=0.2763, pruned_loss=0.09057, over 4947.00 frames.], tot_loss[loss=0.2003, simple_loss=0.2594, pruned_loss=0.0706, over 971278.52 frames.], batch size: 29, lr: 1.16e-03 +2022-05-03 19:04:50,691 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 19:05:00,797 INFO [train.py:742] (6/8) Epoch 0, validation: loss=0.1303, simple_loss=0.2174, pruned_loss=0.02158, over 914524.00 frames. 
+2022-05-03 19:05:40,738 INFO [train.py:715] (6/8) Epoch 0, batch 33050, loss[loss=0.2049, simple_loss=0.2625, pruned_loss=0.07364, over 4821.00 frames.], tot_loss[loss=0.2, simple_loss=0.2593, pruned_loss=0.07037, over 971299.88 frames.], batch size: 15, lr: 1.16e-03 +2022-05-03 19:06:20,350 INFO [train.py:715] (6/8) Epoch 0, batch 33100, loss[loss=0.199, simple_loss=0.2569, pruned_loss=0.07058, over 4925.00 frames.], tot_loss[loss=0.1986, simple_loss=0.258, pruned_loss=0.06959, over 971597.11 frames.], batch size: 21, lr: 1.16e-03 +2022-05-03 19:07:01,021 INFO [train.py:715] (6/8) Epoch 0, batch 33150, loss[loss=0.2175, simple_loss=0.2682, pruned_loss=0.08344, over 4981.00 frames.], tot_loss[loss=0.1997, simple_loss=0.2589, pruned_loss=0.07019, over 971939.77 frames.], batch size: 14, lr: 1.16e-03 +2022-05-03 19:07:41,360 INFO [train.py:715] (6/8) Epoch 0, batch 33200, loss[loss=0.2331, simple_loss=0.2879, pruned_loss=0.08916, over 4919.00 frames.], tot_loss[loss=0.1989, simple_loss=0.2581, pruned_loss=0.06984, over 972601.82 frames.], batch size: 35, lr: 1.16e-03 +2022-05-03 19:08:21,596 INFO [train.py:715] (6/8) Epoch 0, batch 33250, loss[loss=0.1928, simple_loss=0.2574, pruned_loss=0.06414, over 4945.00 frames.], tot_loss[loss=0.1979, simple_loss=0.2576, pruned_loss=0.0691, over 972323.92 frames.], batch size: 29, lr: 1.16e-03 +2022-05-03 19:09:01,810 INFO [train.py:715] (6/8) Epoch 0, batch 33300, loss[loss=0.2046, simple_loss=0.2642, pruned_loss=0.07247, over 4768.00 frames.], tot_loss[loss=0.1992, simple_loss=0.2585, pruned_loss=0.06992, over 972526.01 frames.], batch size: 18, lr: 1.16e-03 +2022-05-03 19:09:42,529 INFO [train.py:715] (6/8) Epoch 0, batch 33350, loss[loss=0.2154, simple_loss=0.2719, pruned_loss=0.07942, over 4930.00 frames.], tot_loss[loss=0.1991, simple_loss=0.2583, pruned_loss=0.06993, over 972414.42 frames.], batch size: 21, lr: 1.16e-03 +2022-05-03 19:10:22,679 INFO [train.py:715] (6/8) Epoch 0, batch 33400, loss[loss=0.2538, simple_loss=0.3006, pruned_loss=0.1035, over 4802.00 frames.], tot_loss[loss=0.2002, simple_loss=0.2592, pruned_loss=0.07057, over 972402.44 frames.], batch size: 21, lr: 1.15e-03 +2022-05-03 19:11:02,703 INFO [train.py:715] (6/8) Epoch 0, batch 33450, loss[loss=0.2361, simple_loss=0.2834, pruned_loss=0.09435, over 4865.00 frames.], tot_loss[loss=0.2007, simple_loss=0.2598, pruned_loss=0.07076, over 972497.26 frames.], batch size: 22, lr: 1.15e-03 +2022-05-03 19:11:43,357 INFO [train.py:715] (6/8) Epoch 0, batch 33500, loss[loss=0.1893, simple_loss=0.2459, pruned_loss=0.06642, over 4863.00 frames.], tot_loss[loss=0.2002, simple_loss=0.2593, pruned_loss=0.07051, over 972821.17 frames.], batch size: 30, lr: 1.15e-03 +2022-05-03 19:12:23,715 INFO [train.py:715] (6/8) Epoch 0, batch 33550, loss[loss=0.2149, simple_loss=0.2802, pruned_loss=0.07476, over 4806.00 frames.], tot_loss[loss=0.2, simple_loss=0.2597, pruned_loss=0.07021, over 973207.67 frames.], batch size: 24, lr: 1.15e-03 +2022-05-03 19:13:02,898 INFO [train.py:715] (6/8) Epoch 0, batch 33600, loss[loss=0.1685, simple_loss=0.2391, pruned_loss=0.04889, over 4933.00 frames.], tot_loss[loss=0.199, simple_loss=0.2592, pruned_loss=0.06944, over 972984.31 frames.], batch size: 18, lr: 1.15e-03 +2022-05-03 19:13:43,470 INFO [train.py:715] (6/8) Epoch 0, batch 33650, loss[loss=0.19, simple_loss=0.2555, pruned_loss=0.06222, over 4903.00 frames.], tot_loss[loss=0.1993, simple_loss=0.2593, pruned_loss=0.06967, over 973427.19 frames.], batch size: 22, lr: 1.15e-03 +2022-05-03 19:14:23,807 INFO 
[train.py:715] (6/8) Epoch 0, batch 33700, loss[loss=0.2273, simple_loss=0.2956, pruned_loss=0.07956, over 4764.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2597, pruned_loss=0.06998, over 973554.62 frames.], batch size: 14, lr: 1.15e-03 +2022-05-03 19:15:03,034 INFO [train.py:715] (6/8) Epoch 0, batch 33750, loss[loss=0.1999, simple_loss=0.2689, pruned_loss=0.06542, over 4978.00 frames.], tot_loss[loss=0.1997, simple_loss=0.2599, pruned_loss=0.06979, over 973531.04 frames.], batch size: 15, lr: 1.15e-03 +2022-05-03 19:15:42,517 INFO [train.py:715] (6/8) Epoch 0, batch 33800, loss[loss=0.2469, simple_loss=0.3116, pruned_loss=0.09112, over 4840.00 frames.], tot_loss[loss=0.1993, simple_loss=0.2596, pruned_loss=0.06945, over 972256.74 frames.], batch size: 26, lr: 1.15e-03 +2022-05-03 19:16:22,772 INFO [train.py:715] (6/8) Epoch 0, batch 33850, loss[loss=0.1643, simple_loss=0.2248, pruned_loss=0.05192, over 4838.00 frames.], tot_loss[loss=0.1996, simple_loss=0.2602, pruned_loss=0.06952, over 972953.04 frames.], batch size: 13, lr: 1.15e-03 +2022-05-03 19:17:02,062 INFO [train.py:715] (6/8) Epoch 0, batch 33900, loss[loss=0.2141, simple_loss=0.2647, pruned_loss=0.08171, over 4736.00 frames.], tot_loss[loss=0.1999, simple_loss=0.2604, pruned_loss=0.06968, over 973345.04 frames.], batch size: 16, lr: 1.15e-03 +2022-05-03 19:17:41,117 INFO [train.py:715] (6/8) Epoch 0, batch 33950, loss[loss=0.2246, simple_loss=0.2949, pruned_loss=0.07719, over 4820.00 frames.], tot_loss[loss=0.1995, simple_loss=0.26, pruned_loss=0.06952, over 972874.68 frames.], batch size: 26, lr: 1.15e-03 +2022-05-03 19:18:21,086 INFO [train.py:715] (6/8) Epoch 0, batch 34000, loss[loss=0.2603, simple_loss=0.2886, pruned_loss=0.116, over 4787.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2601, pruned_loss=0.06979, over 972223.74 frames.], batch size: 14, lr: 1.14e-03 +2022-05-03 19:19:00,968 INFO [train.py:715] (6/8) Epoch 0, batch 34050, loss[loss=0.2223, simple_loss=0.2897, pruned_loss=0.07747, over 4878.00 frames.], tot_loss[loss=0.2005, simple_loss=0.2606, pruned_loss=0.07023, over 972733.03 frames.], batch size: 20, lr: 1.14e-03 +2022-05-03 19:19:40,633 INFO [train.py:715] (6/8) Epoch 0, batch 34100, loss[loss=0.1903, simple_loss=0.2582, pruned_loss=0.0612, over 4931.00 frames.], tot_loss[loss=0.2001, simple_loss=0.26, pruned_loss=0.07004, over 972729.74 frames.], batch size: 21, lr: 1.14e-03 +2022-05-03 19:20:19,827 INFO [train.py:715] (6/8) Epoch 0, batch 34150, loss[loss=0.1757, simple_loss=0.2361, pruned_loss=0.05762, over 4835.00 frames.], tot_loss[loss=0.1986, simple_loss=0.2588, pruned_loss=0.06918, over 972496.33 frames.], batch size: 26, lr: 1.14e-03 +2022-05-03 19:20:59,757 INFO [train.py:715] (6/8) Epoch 0, batch 34200, loss[loss=0.2015, simple_loss=0.2646, pruned_loss=0.06916, over 4784.00 frames.], tot_loss[loss=0.1987, simple_loss=0.2593, pruned_loss=0.06908, over 971884.11 frames.], batch size: 12, lr: 1.14e-03 +2022-05-03 19:21:39,301 INFO [train.py:715] (6/8) Epoch 0, batch 34250, loss[loss=0.2169, simple_loss=0.2755, pruned_loss=0.07917, over 4795.00 frames.], tot_loss[loss=0.1987, simple_loss=0.259, pruned_loss=0.06919, over 971034.27 frames.], batch size: 17, lr: 1.14e-03 +2022-05-03 19:22:18,600 INFO [train.py:715] (6/8) Epoch 0, batch 34300, loss[loss=0.2325, simple_loss=0.2795, pruned_loss=0.09278, over 4961.00 frames.], tot_loss[loss=0.1996, simple_loss=0.2596, pruned_loss=0.06979, over 971981.04 frames.], batch size: 24, lr: 1.14e-03 +2022-05-03 19:22:58,858 INFO [train.py:715] (6/8) Epoch 
0, batch 34350, loss[loss=0.1833, simple_loss=0.2627, pruned_loss=0.05195, over 4820.00 frames.], tot_loss[loss=0.1984, simple_loss=0.2588, pruned_loss=0.06897, over 971023.03 frames.], batch size: 27, lr: 1.14e-03 +2022-05-03 19:23:39,062 INFO [train.py:715] (6/8) Epoch 0, batch 34400, loss[loss=0.1889, simple_loss=0.2477, pruned_loss=0.06505, over 4829.00 frames.], tot_loss[loss=0.1987, simple_loss=0.2594, pruned_loss=0.06904, over 971123.63 frames.], batch size: 26, lr: 1.14e-03 +2022-05-03 19:24:18,633 INFO [train.py:715] (6/8) Epoch 0, batch 34450, loss[loss=0.2022, simple_loss=0.2636, pruned_loss=0.07038, over 4935.00 frames.], tot_loss[loss=0.2003, simple_loss=0.2602, pruned_loss=0.07018, over 970111.20 frames.], batch size: 29, lr: 1.14e-03 +2022-05-03 19:24:57,906 INFO [train.py:715] (6/8) Epoch 0, batch 34500, loss[loss=0.2247, simple_loss=0.271, pruned_loss=0.08916, over 4987.00 frames.], tot_loss[loss=0.1994, simple_loss=0.2597, pruned_loss=0.0695, over 971461.04 frames.], batch size: 14, lr: 1.14e-03 +2022-05-03 19:25:38,246 INFO [train.py:715] (6/8) Epoch 0, batch 34550, loss[loss=0.2184, simple_loss=0.2812, pruned_loss=0.07777, over 4964.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2599, pruned_loss=0.06979, over 972255.19 frames.], batch size: 21, lr: 1.14e-03 +2022-05-03 19:26:17,985 INFO [train.py:715] (6/8) Epoch 0, batch 34600, loss[loss=0.1856, simple_loss=0.2527, pruned_loss=0.05922, over 4928.00 frames.], tot_loss[loss=0.1995, simple_loss=0.2597, pruned_loss=0.06966, over 971915.46 frames.], batch size: 18, lr: 1.13e-03 +2022-05-03 19:26:57,216 INFO [train.py:715] (6/8) Epoch 0, batch 34650, loss[loss=0.2729, simple_loss=0.2993, pruned_loss=0.1232, over 4833.00 frames.], tot_loss[loss=0.2002, simple_loss=0.2599, pruned_loss=0.07022, over 972170.57 frames.], batch size: 15, lr: 1.13e-03 +2022-05-03 19:27:37,742 INFO [train.py:715] (6/8) Epoch 0, batch 34700, loss[loss=0.1894, simple_loss=0.2495, pruned_loss=0.06471, over 4948.00 frames.], tot_loss[loss=0.1984, simple_loss=0.2581, pruned_loss=0.0693, over 972411.10 frames.], batch size: 39, lr: 1.13e-03 +2022-05-03 19:28:15,925 INFO [train.py:715] (6/8) Epoch 0, batch 34750, loss[loss=0.1897, simple_loss=0.262, pruned_loss=0.05876, over 4918.00 frames.], tot_loss[loss=0.1979, simple_loss=0.2577, pruned_loss=0.06907, over 971727.50 frames.], batch size: 17, lr: 1.13e-03 +2022-05-03 19:28:53,217 INFO [train.py:715] (6/8) Epoch 0, batch 34800, loss[loss=0.208, simple_loss=0.269, pruned_loss=0.07345, over 4780.00 frames.], tot_loss[loss=0.1973, simple_loss=0.2571, pruned_loss=0.06882, over 971903.87 frames.], batch size: 12, lr: 1.13e-03 +2022-05-03 19:29:42,573 INFO [train.py:715] (6/8) Epoch 1, batch 0, loss[loss=0.1641, simple_loss=0.2324, pruned_loss=0.04787, over 4971.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2324, pruned_loss=0.04787, over 4971.00 frames.], batch size: 25, lr: 1.11e-03 +2022-05-03 19:30:21,873 INFO [train.py:715] (6/8) Epoch 1, batch 50, loss[loss=0.1764, simple_loss=0.236, pruned_loss=0.05839, over 4961.00 frames.], tot_loss[loss=0.199, simple_loss=0.2577, pruned_loss=0.07017, over 220382.75 frames.], batch size: 35, lr: 1.11e-03 +2022-05-03 19:31:01,846 INFO [train.py:715] (6/8) Epoch 1, batch 100, loss[loss=0.177, simple_loss=0.2405, pruned_loss=0.05673, over 4825.00 frames.], tot_loss[loss=0.1979, simple_loss=0.2573, pruned_loss=0.06924, over 387163.07 frames.], batch size: 27, lr: 1.11e-03 +2022-05-03 19:31:41,283 INFO [train.py:715] (6/8) Epoch 1, batch 150, loss[loss=0.2075, 
simple_loss=0.2766, pruned_loss=0.06916, over 4801.00 frames.], tot_loss[loss=0.1984, simple_loss=0.2578, pruned_loss=0.06953, over 516593.28 frames.], batch size: 21, lr: 1.11e-03 +2022-05-03 19:32:20,521 INFO [train.py:715] (6/8) Epoch 1, batch 200, loss[loss=0.1384, simple_loss=0.2043, pruned_loss=0.03624, over 4851.00 frames.], tot_loss[loss=0.1966, simple_loss=0.2566, pruned_loss=0.06827, over 617824.27 frames.], batch size: 13, lr: 1.11e-03 +2022-05-03 19:33:00,056 INFO [train.py:715] (6/8) Epoch 1, batch 250, loss[loss=0.1761, simple_loss=0.2303, pruned_loss=0.06094, over 4961.00 frames.], tot_loss[loss=0.196, simple_loss=0.2565, pruned_loss=0.0678, over 697303.32 frames.], batch size: 14, lr: 1.11e-03 +2022-05-03 19:33:40,740 INFO [train.py:715] (6/8) Epoch 1, batch 300, loss[loss=0.2159, simple_loss=0.2715, pruned_loss=0.08017, over 4907.00 frames.], tot_loss[loss=0.196, simple_loss=0.2562, pruned_loss=0.06786, over 757877.05 frames.], batch size: 17, lr: 1.11e-03 +2022-05-03 19:34:21,111 INFO [train.py:715] (6/8) Epoch 1, batch 350, loss[loss=0.2459, simple_loss=0.2929, pruned_loss=0.09945, over 4883.00 frames.], tot_loss[loss=0.1977, simple_loss=0.2577, pruned_loss=0.06879, over 806920.02 frames.], batch size: 16, lr: 1.11e-03 +2022-05-03 19:35:01,382 INFO [train.py:715] (6/8) Epoch 1, batch 400, loss[loss=0.2159, simple_loss=0.2609, pruned_loss=0.08545, over 4689.00 frames.], tot_loss[loss=0.1973, simple_loss=0.2572, pruned_loss=0.06865, over 843456.30 frames.], batch size: 15, lr: 1.11e-03 +2022-05-03 19:35:42,057 INFO [train.py:715] (6/8) Epoch 1, batch 450, loss[loss=0.2047, simple_loss=0.256, pruned_loss=0.0767, over 4926.00 frames.], tot_loss[loss=0.1961, simple_loss=0.2563, pruned_loss=0.06797, over 873139.67 frames.], batch size: 23, lr: 1.11e-03 +2022-05-03 19:36:22,767 INFO [train.py:715] (6/8) Epoch 1, batch 500, loss[loss=0.2222, simple_loss=0.2785, pruned_loss=0.08295, over 4827.00 frames.], tot_loss[loss=0.1968, simple_loss=0.2572, pruned_loss=0.06821, over 895797.97 frames.], batch size: 15, lr: 1.11e-03 +2022-05-03 19:37:03,290 INFO [train.py:715] (6/8) Epoch 1, batch 550, loss[loss=0.2346, simple_loss=0.29, pruned_loss=0.08954, over 4849.00 frames.], tot_loss[loss=0.1987, simple_loss=0.2586, pruned_loss=0.06945, over 913023.27 frames.], batch size: 20, lr: 1.11e-03 +2022-05-03 19:37:43,270 INFO [train.py:715] (6/8) Epoch 1, batch 600, loss[loss=0.1659, simple_loss=0.2264, pruned_loss=0.05271, over 4987.00 frames.], tot_loss[loss=0.1981, simple_loss=0.258, pruned_loss=0.06903, over 925845.58 frames.], batch size: 14, lr: 1.10e-03 +2022-05-03 19:38:23,976 INFO [train.py:715] (6/8) Epoch 1, batch 650, loss[loss=0.2166, simple_loss=0.2912, pruned_loss=0.07101, over 4925.00 frames.], tot_loss[loss=0.1968, simple_loss=0.2572, pruned_loss=0.06822, over 936954.73 frames.], batch size: 23, lr: 1.10e-03 +2022-05-03 19:39:04,144 INFO [train.py:715] (6/8) Epoch 1, batch 700, loss[loss=0.2095, simple_loss=0.2712, pruned_loss=0.07392, over 4770.00 frames.], tot_loss[loss=0.1957, simple_loss=0.2563, pruned_loss=0.06759, over 944480.47 frames.], batch size: 16, lr: 1.10e-03 +2022-05-03 19:39:44,121 INFO [train.py:715] (6/8) Epoch 1, batch 750, loss[loss=0.215, simple_loss=0.2638, pruned_loss=0.08312, over 4901.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2557, pruned_loss=0.06699, over 950791.44 frames.], batch size: 19, lr: 1.10e-03 +2022-05-03 19:40:24,219 INFO [train.py:715] (6/8) Epoch 1, batch 800, loss[loss=0.1697, simple_loss=0.2328, pruned_loss=0.05326, over 4894.00 
frames.], tot_loss[loss=0.1946, simple_loss=0.2551, pruned_loss=0.06706, over 955184.26 frames.], batch size: 16, lr: 1.10e-03 +2022-05-03 19:41:04,464 INFO [train.py:715] (6/8) Epoch 1, batch 850, loss[loss=0.1967, simple_loss=0.251, pruned_loss=0.07123, over 4960.00 frames.], tot_loss[loss=0.194, simple_loss=0.2549, pruned_loss=0.06652, over 959316.88 frames.], batch size: 35, lr: 1.10e-03 +2022-05-03 19:41:43,692 INFO [train.py:715] (6/8) Epoch 1, batch 900, loss[loss=0.2113, simple_loss=0.2685, pruned_loss=0.07708, over 4796.00 frames.], tot_loss[loss=0.1947, simple_loss=0.2558, pruned_loss=0.0668, over 962378.49 frames.], batch size: 13, lr: 1.10e-03 +2022-05-03 19:42:22,972 INFO [train.py:715] (6/8) Epoch 1, batch 950, loss[loss=0.2067, simple_loss=0.2623, pruned_loss=0.07557, over 4841.00 frames.], tot_loss[loss=0.1954, simple_loss=0.2561, pruned_loss=0.06733, over 964870.59 frames.], batch size: 32, lr: 1.10e-03 +2022-05-03 19:43:02,562 INFO [train.py:715] (6/8) Epoch 1, batch 1000, loss[loss=0.2006, simple_loss=0.2481, pruned_loss=0.07654, over 4859.00 frames.], tot_loss[loss=0.1964, simple_loss=0.2565, pruned_loss=0.06819, over 966916.77 frames.], batch size: 30, lr: 1.10e-03 +2022-05-03 19:43:41,899 INFO [train.py:715] (6/8) Epoch 1, batch 1050, loss[loss=0.192, simple_loss=0.2494, pruned_loss=0.06733, over 4842.00 frames.], tot_loss[loss=0.1956, simple_loss=0.2559, pruned_loss=0.06767, over 967594.32 frames.], batch size: 30, lr: 1.10e-03 +2022-05-03 19:44:20,965 INFO [train.py:715] (6/8) Epoch 1, batch 1100, loss[loss=0.197, simple_loss=0.2589, pruned_loss=0.06757, over 4686.00 frames.], tot_loss[loss=0.1948, simple_loss=0.2551, pruned_loss=0.06724, over 967773.36 frames.], batch size: 15, lr: 1.10e-03 +2022-05-03 19:45:00,276 INFO [train.py:715] (6/8) Epoch 1, batch 1150, loss[loss=0.208, simple_loss=0.2699, pruned_loss=0.07301, over 4785.00 frames.], tot_loss[loss=0.1957, simple_loss=0.2563, pruned_loss=0.06756, over 968559.41 frames.], batch size: 18, lr: 1.10e-03 +2022-05-03 19:45:40,271 INFO [train.py:715] (6/8) Epoch 1, batch 1200, loss[loss=0.1889, simple_loss=0.244, pruned_loss=0.06689, over 4912.00 frames.], tot_loss[loss=0.1962, simple_loss=0.2568, pruned_loss=0.06783, over 969434.91 frames.], batch size: 19, lr: 1.10e-03 +2022-05-03 19:46:19,429 INFO [train.py:715] (6/8) Epoch 1, batch 1250, loss[loss=0.1721, simple_loss=0.236, pruned_loss=0.05407, over 4795.00 frames.], tot_loss[loss=0.1944, simple_loss=0.2554, pruned_loss=0.06669, over 969903.25 frames.], batch size: 24, lr: 1.10e-03 +2022-05-03 19:46:58,959 INFO [train.py:715] (6/8) Epoch 1, batch 1300, loss[loss=0.1623, simple_loss=0.2266, pruned_loss=0.04899, over 4823.00 frames.], tot_loss[loss=0.1941, simple_loss=0.2553, pruned_loss=0.06648, over 969944.93 frames.], batch size: 13, lr: 1.09e-03 +2022-05-03 19:47:39,267 INFO [train.py:715] (6/8) Epoch 1, batch 1350, loss[loss=0.2091, simple_loss=0.2723, pruned_loss=0.07292, over 4885.00 frames.], tot_loss[loss=0.1943, simple_loss=0.2553, pruned_loss=0.06663, over 970680.38 frames.], batch size: 32, lr: 1.09e-03 +2022-05-03 19:48:18,896 INFO [train.py:715] (6/8) Epoch 1, batch 1400, loss[loss=0.2071, simple_loss=0.2637, pruned_loss=0.07525, over 4984.00 frames.], tot_loss[loss=0.1952, simple_loss=0.256, pruned_loss=0.06719, over 970912.37 frames.], batch size: 35, lr: 1.09e-03 +2022-05-03 19:48:58,746 INFO [train.py:715] (6/8) Epoch 1, batch 1450, loss[loss=0.215, simple_loss=0.2828, pruned_loss=0.07359, over 4995.00 frames.], tot_loss[loss=0.1953, 
simple_loss=0.2557, pruned_loss=0.06741, over 971537.56 frames.], batch size: 16, lr: 1.09e-03 +2022-05-03 19:49:38,350 INFO [train.py:715] (6/8) Epoch 1, batch 1500, loss[loss=0.1876, simple_loss=0.2482, pruned_loss=0.06354, over 4761.00 frames.], tot_loss[loss=0.1961, simple_loss=0.2567, pruned_loss=0.06782, over 972009.70 frames.], batch size: 14, lr: 1.09e-03 +2022-05-03 19:50:17,878 INFO [train.py:715] (6/8) Epoch 1, batch 1550, loss[loss=0.205, simple_loss=0.275, pruned_loss=0.0675, over 4930.00 frames.], tot_loss[loss=0.1966, simple_loss=0.2574, pruned_loss=0.06794, over 972598.59 frames.], batch size: 18, lr: 1.09e-03 +2022-05-03 19:50:57,100 INFO [train.py:715] (6/8) Epoch 1, batch 1600, loss[loss=0.1972, simple_loss=0.2697, pruned_loss=0.06237, over 4835.00 frames.], tot_loss[loss=0.1961, simple_loss=0.2568, pruned_loss=0.0677, over 972719.72 frames.], batch size: 25, lr: 1.09e-03 +2022-05-03 19:51:36,402 INFO [train.py:715] (6/8) Epoch 1, batch 1650, loss[loss=0.169, simple_loss=0.2367, pruned_loss=0.05063, over 4769.00 frames.], tot_loss[loss=0.1941, simple_loss=0.2553, pruned_loss=0.0664, over 972333.50 frames.], batch size: 12, lr: 1.09e-03 +2022-05-03 19:52:16,982 INFO [train.py:715] (6/8) Epoch 1, batch 1700, loss[loss=0.2138, simple_loss=0.2705, pruned_loss=0.07853, over 4868.00 frames.], tot_loss[loss=0.1948, simple_loss=0.2556, pruned_loss=0.06699, over 971877.95 frames.], batch size: 16, lr: 1.09e-03 +2022-05-03 19:52:56,162 INFO [train.py:715] (6/8) Epoch 1, batch 1750, loss[loss=0.1763, simple_loss=0.236, pruned_loss=0.05829, over 4904.00 frames.], tot_loss[loss=0.1963, simple_loss=0.2564, pruned_loss=0.06811, over 972559.15 frames.], batch size: 23, lr: 1.09e-03 +2022-05-03 19:53:35,893 INFO [train.py:715] (6/8) Epoch 1, batch 1800, loss[loss=0.1834, simple_loss=0.2385, pruned_loss=0.06412, over 4965.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2554, pruned_loss=0.06725, over 972873.94 frames.], batch size: 33, lr: 1.09e-03 +2022-05-03 19:54:15,260 INFO [train.py:715] (6/8) Epoch 1, batch 1850, loss[loss=0.2245, simple_loss=0.2835, pruned_loss=0.08272, over 4901.00 frames.], tot_loss[loss=0.195, simple_loss=0.2554, pruned_loss=0.06731, over 972419.97 frames.], batch size: 19, lr: 1.09e-03 +2022-05-03 19:54:54,779 INFO [train.py:715] (6/8) Epoch 1, batch 1900, loss[loss=0.1574, simple_loss=0.2287, pruned_loss=0.04301, over 4774.00 frames.], tot_loss[loss=0.1964, simple_loss=0.2565, pruned_loss=0.06816, over 972926.92 frames.], batch size: 18, lr: 1.09e-03 +2022-05-03 19:55:34,086 INFO [train.py:715] (6/8) Epoch 1, batch 1950, loss[loss=0.2378, simple_loss=0.2619, pruned_loss=0.1069, over 4809.00 frames.], tot_loss[loss=0.1975, simple_loss=0.2573, pruned_loss=0.06888, over 973229.18 frames.], batch size: 12, lr: 1.08e-03 +2022-05-03 19:56:14,081 INFO [train.py:715] (6/8) Epoch 1, batch 2000, loss[loss=0.1837, simple_loss=0.2584, pruned_loss=0.05452, over 4821.00 frames.], tot_loss[loss=0.1972, simple_loss=0.2573, pruned_loss=0.06859, over 973622.23 frames.], batch size: 25, lr: 1.08e-03 +2022-05-03 19:56:53,565 INFO [train.py:715] (6/8) Epoch 1, batch 2050, loss[loss=0.1804, simple_loss=0.2431, pruned_loss=0.05883, over 4831.00 frames.], tot_loss[loss=0.1963, simple_loss=0.2563, pruned_loss=0.06814, over 973585.31 frames.], batch size: 30, lr: 1.08e-03 +2022-05-03 19:57:33,042 INFO [train.py:715] (6/8) Epoch 1, batch 2100, loss[loss=0.1987, simple_loss=0.2588, pruned_loss=0.06929, over 4835.00 frames.], tot_loss[loss=0.196, simple_loss=0.256, pruned_loss=0.06797, 
over 974008.26 frames.], batch size: 32, lr: 1.08e-03 +2022-05-03 19:58:12,720 INFO [train.py:715] (6/8) Epoch 1, batch 2150, loss[loss=0.189, simple_loss=0.241, pruned_loss=0.06852, over 4779.00 frames.], tot_loss[loss=0.1965, simple_loss=0.2564, pruned_loss=0.06829, over 973982.36 frames.], batch size: 12, lr: 1.08e-03 +2022-05-03 19:58:52,404 INFO [train.py:715] (6/8) Epoch 1, batch 2200, loss[loss=0.2339, simple_loss=0.3002, pruned_loss=0.08377, over 4862.00 frames.], tot_loss[loss=0.1957, simple_loss=0.2557, pruned_loss=0.06782, over 973523.53 frames.], batch size: 16, lr: 1.08e-03 +2022-05-03 19:59:32,131 INFO [train.py:715] (6/8) Epoch 1, batch 2250, loss[loss=0.1264, simple_loss=0.1923, pruned_loss=0.03028, over 4755.00 frames.], tot_loss[loss=0.1953, simple_loss=0.2552, pruned_loss=0.0677, over 973117.77 frames.], batch size: 12, lr: 1.08e-03 +2022-05-03 20:00:11,174 INFO [train.py:715] (6/8) Epoch 1, batch 2300, loss[loss=0.2016, simple_loss=0.2646, pruned_loss=0.0693, over 4767.00 frames.], tot_loss[loss=0.1965, simple_loss=0.2561, pruned_loss=0.06841, over 973352.65 frames.], batch size: 19, lr: 1.08e-03 +2022-05-03 20:00:51,311 INFO [train.py:715] (6/8) Epoch 1, batch 2350, loss[loss=0.1899, simple_loss=0.2639, pruned_loss=0.05792, over 4913.00 frames.], tot_loss[loss=0.1947, simple_loss=0.2549, pruned_loss=0.06725, over 973152.81 frames.], batch size: 18, lr: 1.08e-03 +2022-05-03 20:01:30,586 INFO [train.py:715] (6/8) Epoch 1, batch 2400, loss[loss=0.2147, simple_loss=0.2579, pruned_loss=0.08572, over 4844.00 frames.], tot_loss[loss=0.1937, simple_loss=0.2543, pruned_loss=0.0666, over 972349.58 frames.], batch size: 13, lr: 1.08e-03 +2022-05-03 20:02:09,733 INFO [train.py:715] (6/8) Epoch 1, batch 2450, loss[loss=0.196, simple_loss=0.2495, pruned_loss=0.0712, over 4990.00 frames.], tot_loss[loss=0.1935, simple_loss=0.2545, pruned_loss=0.06632, over 971987.06 frames.], batch size: 25, lr: 1.08e-03 +2022-05-03 20:02:48,985 INFO [train.py:715] (6/8) Epoch 1, batch 2500, loss[loss=0.1925, simple_loss=0.2557, pruned_loss=0.06468, over 4852.00 frames.], tot_loss[loss=0.1932, simple_loss=0.2544, pruned_loss=0.06606, over 972305.97 frames.], batch size: 32, lr: 1.08e-03 +2022-05-03 20:03:28,534 INFO [train.py:715] (6/8) Epoch 1, batch 2550, loss[loss=0.2443, simple_loss=0.2939, pruned_loss=0.09736, over 4776.00 frames.], tot_loss[loss=0.1932, simple_loss=0.2546, pruned_loss=0.06591, over 972079.31 frames.], batch size: 18, lr: 1.08e-03 +2022-05-03 20:04:08,262 INFO [train.py:715] (6/8) Epoch 1, batch 2600, loss[loss=0.1606, simple_loss=0.2312, pruned_loss=0.04501, over 4761.00 frames.], tot_loss[loss=0.1923, simple_loss=0.2537, pruned_loss=0.06549, over 970824.10 frames.], batch size: 18, lr: 1.08e-03 +2022-05-03 20:04:47,474 INFO [train.py:715] (6/8) Epoch 1, batch 2650, loss[loss=0.1836, simple_loss=0.2399, pruned_loss=0.06369, over 4822.00 frames.], tot_loss[loss=0.1927, simple_loss=0.2542, pruned_loss=0.06553, over 971948.45 frames.], batch size: 27, lr: 1.07e-03 +2022-05-03 20:05:27,542 INFO [train.py:715] (6/8) Epoch 1, batch 2700, loss[loss=0.1931, simple_loss=0.2587, pruned_loss=0.06374, over 4886.00 frames.], tot_loss[loss=0.1943, simple_loss=0.2556, pruned_loss=0.06656, over 971468.88 frames.], batch size: 22, lr: 1.07e-03 +2022-05-03 20:06:06,958 INFO [train.py:715] (6/8) Epoch 1, batch 2750, loss[loss=0.2213, simple_loss=0.2714, pruned_loss=0.08557, over 4786.00 frames.], tot_loss[loss=0.1935, simple_loss=0.2546, pruned_loss=0.06625, over 971750.09 frames.], batch size: 
14, lr: 1.07e-03 +2022-05-03 20:06:45,687 INFO [train.py:715] (6/8) Epoch 1, batch 2800, loss[loss=0.1941, simple_loss=0.2496, pruned_loss=0.0693, over 4738.00 frames.], tot_loss[loss=0.1931, simple_loss=0.2541, pruned_loss=0.0661, over 972294.65 frames.], batch size: 16, lr: 1.07e-03 +2022-05-03 20:07:25,353 INFO [train.py:715] (6/8) Epoch 1, batch 2850, loss[loss=0.2163, simple_loss=0.2617, pruned_loss=0.08544, over 4778.00 frames.], tot_loss[loss=0.1938, simple_loss=0.2547, pruned_loss=0.06648, over 973335.79 frames.], batch size: 12, lr: 1.07e-03 +2022-05-03 20:08:05,010 INFO [train.py:715] (6/8) Epoch 1, batch 2900, loss[loss=0.226, simple_loss=0.2586, pruned_loss=0.09669, over 4808.00 frames.], tot_loss[loss=0.1951, simple_loss=0.2557, pruned_loss=0.06723, over 974101.34 frames.], batch size: 12, lr: 1.07e-03 +2022-05-03 20:08:44,124 INFO [train.py:715] (6/8) Epoch 1, batch 2950, loss[loss=0.1812, simple_loss=0.2404, pruned_loss=0.06095, over 4738.00 frames.], tot_loss[loss=0.1956, simple_loss=0.2565, pruned_loss=0.06732, over 973400.13 frames.], batch size: 16, lr: 1.07e-03 +2022-05-03 20:09:22,836 INFO [train.py:715] (6/8) Epoch 1, batch 3000, loss[loss=0.2131, simple_loss=0.2617, pruned_loss=0.08222, over 4820.00 frames.], tot_loss[loss=0.1973, simple_loss=0.2579, pruned_loss=0.06833, over 973192.71 frames.], batch size: 25, lr: 1.07e-03 +2022-05-03 20:09:22,836 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 20:09:34,565 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.1276, simple_loss=0.2149, pruned_loss=0.0201, over 914524.00 frames. +2022-05-03 20:10:13,443 INFO [train.py:715] (6/8) Epoch 1, batch 3050, loss[loss=0.2188, simple_loss=0.2643, pruned_loss=0.08668, over 4788.00 frames.], tot_loss[loss=0.1959, simple_loss=0.2565, pruned_loss=0.06759, over 972683.44 frames.], batch size: 14, lr: 1.07e-03 +2022-05-03 20:10:53,453 INFO [train.py:715] (6/8) Epoch 1, batch 3100, loss[loss=0.1614, simple_loss=0.2144, pruned_loss=0.05425, over 4836.00 frames.], tot_loss[loss=0.1948, simple_loss=0.2558, pruned_loss=0.06691, over 973177.69 frames.], batch size: 12, lr: 1.07e-03 +2022-05-03 20:11:32,602 INFO [train.py:715] (6/8) Epoch 1, batch 3150, loss[loss=0.2182, simple_loss=0.2802, pruned_loss=0.07804, over 4895.00 frames.], tot_loss[loss=0.1957, simple_loss=0.2566, pruned_loss=0.06736, over 973918.91 frames.], batch size: 17, lr: 1.07e-03 +2022-05-03 20:12:11,817 INFO [train.py:715] (6/8) Epoch 1, batch 3200, loss[loss=0.2346, simple_loss=0.2975, pruned_loss=0.08582, over 4780.00 frames.], tot_loss[loss=0.1961, simple_loss=0.2566, pruned_loss=0.06777, over 973284.40 frames.], batch size: 14, lr: 1.07e-03 +2022-05-03 20:12:51,459 INFO [train.py:715] (6/8) Epoch 1, batch 3250, loss[loss=0.1908, simple_loss=0.2496, pruned_loss=0.06602, over 4825.00 frames.], tot_loss[loss=0.1955, simple_loss=0.2562, pruned_loss=0.06739, over 973239.34 frames.], batch size: 26, lr: 1.07e-03 +2022-05-03 20:13:31,211 INFO [train.py:715] (6/8) Epoch 1, batch 3300, loss[loss=0.1721, simple_loss=0.2379, pruned_loss=0.05315, over 4861.00 frames.], tot_loss[loss=0.1954, simple_loss=0.2561, pruned_loss=0.06738, over 973294.40 frames.], batch size: 16, lr: 1.07e-03 +2022-05-03 20:14:10,770 INFO [train.py:715] (6/8) Epoch 1, batch 3350, loss[loss=0.1983, simple_loss=0.2573, pruned_loss=0.06963, over 4805.00 frames.], tot_loss[loss=0.1957, simple_loss=0.2564, pruned_loss=0.06749, over 972704.80 frames.], batch size: 24, lr: 1.07e-03 +2022-05-03 20:14:50,050 INFO [train.py:715] (6/8) Epoch 1, 
batch 3400, loss[loss=0.1966, simple_loss=0.2632, pruned_loss=0.06501, over 4836.00 frames.], tot_loss[loss=0.1948, simple_loss=0.2555, pruned_loss=0.067, over 972181.43 frames.], batch size: 15, lr: 1.06e-03 +2022-05-03 20:15:30,671 INFO [train.py:715] (6/8) Epoch 1, batch 3450, loss[loss=0.1722, simple_loss=0.2405, pruned_loss=0.05196, over 4980.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2554, pruned_loss=0.0672, over 971647.65 frames.], batch size: 15, lr: 1.06e-03 +2022-05-03 20:16:09,595 INFO [train.py:715] (6/8) Epoch 1, batch 3500, loss[loss=0.1628, simple_loss=0.2251, pruned_loss=0.05027, over 4867.00 frames.], tot_loss[loss=0.1947, simple_loss=0.2551, pruned_loss=0.06713, over 971834.24 frames.], batch size: 16, lr: 1.06e-03 +2022-05-03 20:16:48,619 INFO [train.py:715] (6/8) Epoch 1, batch 3550, loss[loss=0.1827, simple_loss=0.2402, pruned_loss=0.06257, over 4846.00 frames.], tot_loss[loss=0.1942, simple_loss=0.2547, pruned_loss=0.06684, over 971598.37 frames.], batch size: 13, lr: 1.06e-03 +2022-05-03 20:17:28,377 INFO [train.py:715] (6/8) Epoch 1, batch 3600, loss[loss=0.1954, simple_loss=0.2513, pruned_loss=0.06973, over 4812.00 frames.], tot_loss[loss=0.1936, simple_loss=0.2543, pruned_loss=0.06646, over 971811.43 frames.], batch size: 21, lr: 1.06e-03 +2022-05-03 20:18:08,020 INFO [train.py:715] (6/8) Epoch 1, batch 3650, loss[loss=0.1692, simple_loss=0.2224, pruned_loss=0.058, over 4885.00 frames.], tot_loss[loss=0.1937, simple_loss=0.2544, pruned_loss=0.06652, over 971317.30 frames.], batch size: 17, lr: 1.06e-03 +2022-05-03 20:18:46,986 INFO [train.py:715] (6/8) Epoch 1, batch 3700, loss[loss=0.2127, simple_loss=0.2622, pruned_loss=0.0816, over 4988.00 frames.], tot_loss[loss=0.1934, simple_loss=0.2543, pruned_loss=0.06624, over 972381.13 frames.], batch size: 35, lr: 1.06e-03 +2022-05-03 20:19:25,662 INFO [train.py:715] (6/8) Epoch 1, batch 3750, loss[loss=0.1741, simple_loss=0.2415, pruned_loss=0.05339, over 4960.00 frames.], tot_loss[loss=0.1935, simple_loss=0.2543, pruned_loss=0.06635, over 972674.35 frames.], batch size: 21, lr: 1.06e-03 +2022-05-03 20:20:05,935 INFO [train.py:715] (6/8) Epoch 1, batch 3800, loss[loss=0.2341, simple_loss=0.2861, pruned_loss=0.09104, over 4851.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2543, pruned_loss=0.06614, over 971940.33 frames.], batch size: 30, lr: 1.06e-03 +2022-05-03 20:20:44,903 INFO [train.py:715] (6/8) Epoch 1, batch 3850, loss[loss=0.2008, simple_loss=0.2626, pruned_loss=0.06952, over 4955.00 frames.], tot_loss[loss=0.1928, simple_loss=0.2537, pruned_loss=0.06593, over 972684.20 frames.], batch size: 24, lr: 1.06e-03 +2022-05-03 20:21:23,760 INFO [train.py:715] (6/8) Epoch 1, batch 3900, loss[loss=0.2911, simple_loss=0.3245, pruned_loss=0.1289, over 4905.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2532, pruned_loss=0.06546, over 972801.16 frames.], batch size: 19, lr: 1.06e-03 +2022-05-03 20:22:03,286 INFO [train.py:715] (6/8) Epoch 1, batch 3950, loss[loss=0.2281, simple_loss=0.2861, pruned_loss=0.085, over 4882.00 frames.], tot_loss[loss=0.1918, simple_loss=0.2531, pruned_loss=0.06529, over 972723.79 frames.], batch size: 22, lr: 1.06e-03 +2022-05-03 20:22:42,798 INFO [train.py:715] (6/8) Epoch 1, batch 4000, loss[loss=0.1884, simple_loss=0.241, pruned_loss=0.06793, over 4868.00 frames.], tot_loss[loss=0.1931, simple_loss=0.2542, pruned_loss=0.06601, over 973466.39 frames.], batch size: 32, lr: 1.06e-03 +2022-05-03 20:23:21,458 INFO [train.py:715] (6/8) Epoch 1, batch 4050, loss[loss=0.3209, 
simple_loss=0.3631, pruned_loss=0.1393, over 4820.00 frames.], tot_loss[loss=0.1946, simple_loss=0.2559, pruned_loss=0.06662, over 972845.66 frames.], batch size: 25, lr: 1.06e-03 +2022-05-03 20:24:00,886 INFO [train.py:715] (6/8) Epoch 1, batch 4100, loss[loss=0.1543, simple_loss=0.214, pruned_loss=0.04727, over 4922.00 frames.], tot_loss[loss=0.1948, simple_loss=0.2563, pruned_loss=0.06666, over 972879.42 frames.], batch size: 18, lr: 1.05e-03 +2022-05-03 20:24:40,536 INFO [train.py:715] (6/8) Epoch 1, batch 4150, loss[loss=0.2026, simple_loss=0.2557, pruned_loss=0.07469, over 4934.00 frames.], tot_loss[loss=0.1952, simple_loss=0.2561, pruned_loss=0.06709, over 972132.13 frames.], batch size: 23, lr: 1.05e-03 +2022-05-03 20:25:19,586 INFO [train.py:715] (6/8) Epoch 1, batch 4200, loss[loss=0.1803, simple_loss=0.2339, pruned_loss=0.06336, over 4855.00 frames.], tot_loss[loss=0.1951, simple_loss=0.2559, pruned_loss=0.06715, over 971926.23 frames.], batch size: 30, lr: 1.05e-03 +2022-05-03 20:25:58,626 INFO [train.py:715] (6/8) Epoch 1, batch 4250, loss[loss=0.2125, simple_loss=0.2747, pruned_loss=0.07511, over 4771.00 frames.], tot_loss[loss=0.1947, simple_loss=0.2556, pruned_loss=0.06694, over 971901.69 frames.], batch size: 18, lr: 1.05e-03 +2022-05-03 20:26:38,142 INFO [train.py:715] (6/8) Epoch 1, batch 4300, loss[loss=0.1429, simple_loss=0.2099, pruned_loss=0.038, over 4780.00 frames.], tot_loss[loss=0.1934, simple_loss=0.2547, pruned_loss=0.06603, over 971990.10 frames.], batch size: 18, lr: 1.05e-03 +2022-05-03 20:27:17,806 INFO [train.py:715] (6/8) Epoch 1, batch 4350, loss[loss=0.2071, simple_loss=0.2581, pruned_loss=0.07808, over 4974.00 frames.], tot_loss[loss=0.1934, simple_loss=0.2544, pruned_loss=0.06614, over 971635.82 frames.], batch size: 15, lr: 1.05e-03 +2022-05-03 20:27:56,256 INFO [train.py:715] (6/8) Epoch 1, batch 4400, loss[loss=0.2219, simple_loss=0.2846, pruned_loss=0.07955, over 4860.00 frames.], tot_loss[loss=0.1935, simple_loss=0.2545, pruned_loss=0.06631, over 972309.77 frames.], batch size: 32, lr: 1.05e-03 +2022-05-03 20:28:35,843 INFO [train.py:715] (6/8) Epoch 1, batch 4450, loss[loss=0.21, simple_loss=0.2699, pruned_loss=0.07505, over 4903.00 frames.], tot_loss[loss=0.1931, simple_loss=0.2544, pruned_loss=0.06588, over 972453.33 frames.], batch size: 39, lr: 1.05e-03 +2022-05-03 20:29:15,597 INFO [train.py:715] (6/8) Epoch 1, batch 4500, loss[loss=0.1755, simple_loss=0.2456, pruned_loss=0.05267, over 4859.00 frames.], tot_loss[loss=0.1943, simple_loss=0.2554, pruned_loss=0.06661, over 972885.67 frames.], batch size: 32, lr: 1.05e-03 +2022-05-03 20:29:54,820 INFO [train.py:715] (6/8) Epoch 1, batch 4550, loss[loss=0.1651, simple_loss=0.2321, pruned_loss=0.04906, over 4963.00 frames.], tot_loss[loss=0.1935, simple_loss=0.2547, pruned_loss=0.06609, over 972887.26 frames.], batch size: 14, lr: 1.05e-03 +2022-05-03 20:30:33,522 INFO [train.py:715] (6/8) Epoch 1, batch 4600, loss[loss=0.2072, simple_loss=0.2624, pruned_loss=0.07599, over 4750.00 frames.], tot_loss[loss=0.1931, simple_loss=0.2544, pruned_loss=0.06587, over 972751.61 frames.], batch size: 16, lr: 1.05e-03 +2022-05-03 20:31:13,057 INFO [train.py:715] (6/8) Epoch 1, batch 4650, loss[loss=0.1652, simple_loss=0.216, pruned_loss=0.05717, over 4644.00 frames.], tot_loss[loss=0.1928, simple_loss=0.2543, pruned_loss=0.0657, over 972089.11 frames.], batch size: 13, lr: 1.05e-03 +2022-05-03 20:31:52,528 INFO [train.py:715] (6/8) Epoch 1, batch 4700, loss[loss=0.1918, simple_loss=0.2448, 
pruned_loss=0.06939, over 4763.00 frames.], tot_loss[loss=0.1935, simple_loss=0.2548, pruned_loss=0.06608, over 971493.62 frames.], batch size: 18, lr: 1.05e-03 +2022-05-03 20:32:31,325 INFO [train.py:715] (6/8) Epoch 1, batch 4750, loss[loss=0.2796, simple_loss=0.3113, pruned_loss=0.1239, over 4973.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2557, pruned_loss=0.06703, over 972022.34 frames.], batch size: 15, lr: 1.05e-03 +2022-05-03 20:33:11,345 INFO [train.py:715] (6/8) Epoch 1, batch 4800, loss[loss=0.1676, simple_loss=0.2228, pruned_loss=0.05622, over 4794.00 frames.], tot_loss[loss=0.1946, simple_loss=0.2547, pruned_loss=0.06731, over 971966.36 frames.], batch size: 12, lr: 1.05e-03 +2022-05-03 20:33:51,189 INFO [train.py:715] (6/8) Epoch 1, batch 4850, loss[loss=0.2212, simple_loss=0.2951, pruned_loss=0.07368, over 4898.00 frames.], tot_loss[loss=0.194, simple_loss=0.254, pruned_loss=0.06703, over 971855.48 frames.], batch size: 19, lr: 1.05e-03 +2022-05-03 20:34:30,469 INFO [train.py:715] (6/8) Epoch 1, batch 4900, loss[loss=0.1739, simple_loss=0.2368, pruned_loss=0.05546, over 4774.00 frames.], tot_loss[loss=0.1939, simple_loss=0.2539, pruned_loss=0.06695, over 972075.32 frames.], batch size: 17, lr: 1.04e-03 +2022-05-03 20:35:09,823 INFO [train.py:715] (6/8) Epoch 1, batch 4950, loss[loss=0.2203, simple_loss=0.2741, pruned_loss=0.08324, over 4890.00 frames.], tot_loss[loss=0.1945, simple_loss=0.2544, pruned_loss=0.06732, over 972387.29 frames.], batch size: 19, lr: 1.04e-03 +2022-05-03 20:35:50,163 INFO [train.py:715] (6/8) Epoch 1, batch 5000, loss[loss=0.2077, simple_loss=0.2582, pruned_loss=0.07856, over 4854.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2534, pruned_loss=0.06661, over 972325.28 frames.], batch size: 13, lr: 1.04e-03 +2022-05-03 20:36:29,724 INFO [train.py:715] (6/8) Epoch 1, batch 5050, loss[loss=0.1801, simple_loss=0.2491, pruned_loss=0.05554, over 4833.00 frames.], tot_loss[loss=0.1934, simple_loss=0.2536, pruned_loss=0.06657, over 972115.49 frames.], batch size: 27, lr: 1.04e-03 +2022-05-03 20:37:08,715 INFO [train.py:715] (6/8) Epoch 1, batch 5100, loss[loss=0.2023, simple_loss=0.2437, pruned_loss=0.0805, over 4782.00 frames.], tot_loss[loss=0.1928, simple_loss=0.2531, pruned_loss=0.06631, over 971947.20 frames.], batch size: 17, lr: 1.04e-03 +2022-05-03 20:37:48,747 INFO [train.py:715] (6/8) Epoch 1, batch 5150, loss[loss=0.1928, simple_loss=0.2606, pruned_loss=0.06247, over 4951.00 frames.], tot_loss[loss=0.1927, simple_loss=0.2539, pruned_loss=0.06574, over 972880.62 frames.], batch size: 40, lr: 1.04e-03 +2022-05-03 20:38:30,132 INFO [train.py:715] (6/8) Epoch 1, batch 5200, loss[loss=0.1886, simple_loss=0.2487, pruned_loss=0.06429, over 4915.00 frames.], tot_loss[loss=0.1918, simple_loss=0.253, pruned_loss=0.06526, over 972556.81 frames.], batch size: 18, lr: 1.04e-03 +2022-05-03 20:39:09,108 INFO [train.py:715] (6/8) Epoch 1, batch 5250, loss[loss=0.1999, simple_loss=0.2497, pruned_loss=0.07506, over 4869.00 frames.], tot_loss[loss=0.1926, simple_loss=0.2534, pruned_loss=0.06591, over 973193.98 frames.], batch size: 32, lr: 1.04e-03 +2022-05-03 20:39:48,467 INFO [train.py:715] (6/8) Epoch 1, batch 5300, loss[loss=0.19, simple_loss=0.2482, pruned_loss=0.0659, over 4950.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2531, pruned_loss=0.06552, over 973403.44 frames.], batch size: 39, lr: 1.04e-03 +2022-05-03 20:40:28,106 INFO [train.py:715] (6/8) Epoch 1, batch 5350, loss[loss=0.1928, simple_loss=0.249, pruned_loss=0.06827, over 4960.00 
frames.], tot_loss[loss=0.1926, simple_loss=0.2538, pruned_loss=0.06568, over 973387.96 frames.], batch size: 24, lr: 1.04e-03 +2022-05-03 20:41:07,649 INFO [train.py:715] (6/8) Epoch 1, batch 5400, loss[loss=0.2026, simple_loss=0.2597, pruned_loss=0.07268, over 4883.00 frames.], tot_loss[loss=0.194, simple_loss=0.2546, pruned_loss=0.06667, over 973319.44 frames.], batch size: 16, lr: 1.04e-03 +2022-05-03 20:41:46,698 INFO [train.py:715] (6/8) Epoch 1, batch 5450, loss[loss=0.2065, simple_loss=0.2657, pruned_loss=0.07361, over 4790.00 frames.], tot_loss[loss=0.1962, simple_loss=0.2567, pruned_loss=0.06788, over 973548.22 frames.], batch size: 17, lr: 1.04e-03 +2022-05-03 20:42:26,580 INFO [train.py:715] (6/8) Epoch 1, batch 5500, loss[loss=0.1555, simple_loss=0.2313, pruned_loss=0.03984, over 4877.00 frames.], tot_loss[loss=0.196, simple_loss=0.2563, pruned_loss=0.06782, over 973084.06 frames.], batch size: 20, lr: 1.04e-03 +2022-05-03 20:43:06,481 INFO [train.py:715] (6/8) Epoch 1, batch 5550, loss[loss=0.2499, simple_loss=0.2857, pruned_loss=0.1071, over 4872.00 frames.], tot_loss[loss=0.1948, simple_loss=0.2551, pruned_loss=0.06722, over 973464.07 frames.], batch size: 16, lr: 1.04e-03 +2022-05-03 20:43:45,490 INFO [train.py:715] (6/8) Epoch 1, batch 5600, loss[loss=0.1894, simple_loss=0.2506, pruned_loss=0.06415, over 4923.00 frames.], tot_loss[loss=0.1946, simple_loss=0.2551, pruned_loss=0.06698, over 973719.50 frames.], batch size: 18, lr: 1.04e-03 +2022-05-03 20:44:24,787 INFO [train.py:715] (6/8) Epoch 1, batch 5650, loss[loss=0.1331, simple_loss=0.2017, pruned_loss=0.0322, over 4869.00 frames.], tot_loss[loss=0.1941, simple_loss=0.2542, pruned_loss=0.06698, over 972754.97 frames.], batch size: 22, lr: 1.03e-03 +2022-05-03 20:45:04,555 INFO [train.py:715] (6/8) Epoch 1, batch 5700, loss[loss=0.1939, simple_loss=0.2518, pruned_loss=0.06796, over 4819.00 frames.], tot_loss[loss=0.1928, simple_loss=0.2537, pruned_loss=0.06599, over 971846.89 frames.], batch size: 25, lr: 1.03e-03 +2022-05-03 20:45:44,082 INFO [train.py:715] (6/8) Epoch 1, batch 5750, loss[loss=0.177, simple_loss=0.2318, pruned_loss=0.0611, over 4909.00 frames.], tot_loss[loss=0.1925, simple_loss=0.2535, pruned_loss=0.06574, over 972096.69 frames.], batch size: 18, lr: 1.03e-03 +2022-05-03 20:46:23,088 INFO [train.py:715] (6/8) Epoch 1, batch 5800, loss[loss=0.1832, simple_loss=0.2537, pruned_loss=0.05639, over 4817.00 frames.], tot_loss[loss=0.1921, simple_loss=0.253, pruned_loss=0.06555, over 971830.60 frames.], batch size: 27, lr: 1.03e-03 +2022-05-03 20:47:03,043 INFO [train.py:715] (6/8) Epoch 1, batch 5850, loss[loss=0.2367, simple_loss=0.2849, pruned_loss=0.09423, over 4738.00 frames.], tot_loss[loss=0.1917, simple_loss=0.253, pruned_loss=0.0652, over 972163.89 frames.], batch size: 16, lr: 1.03e-03 +2022-05-03 20:47:42,849 INFO [train.py:715] (6/8) Epoch 1, batch 5900, loss[loss=0.211, simple_loss=0.264, pruned_loss=0.07896, over 4934.00 frames.], tot_loss[loss=0.1925, simple_loss=0.2539, pruned_loss=0.06552, over 972268.63 frames.], batch size: 21, lr: 1.03e-03 +2022-05-03 20:48:21,958 INFO [train.py:715] (6/8) Epoch 1, batch 5950, loss[loss=0.2109, simple_loss=0.2594, pruned_loss=0.08122, over 4845.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2533, pruned_loss=0.06545, over 972880.50 frames.], batch size: 30, lr: 1.03e-03 +2022-05-03 20:49:01,788 INFO [train.py:715] (6/8) Epoch 1, batch 6000, loss[loss=0.2099, simple_loss=0.2707, pruned_loss=0.07458, over 4957.00 frames.], tot_loss[loss=0.1913, 
simple_loss=0.253, pruned_loss=0.06481, over 972848.90 frames.], batch size: 39, lr: 1.03e-03 +2022-05-03 20:49:01,789 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 20:49:14,259 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.1267, simple_loss=0.2135, pruned_loss=0.01993, over 914524.00 frames. +2022-05-03 20:49:53,685 INFO [train.py:715] (6/8) Epoch 1, batch 6050, loss[loss=0.1933, simple_loss=0.2531, pruned_loss=0.06677, over 4976.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2525, pruned_loss=0.06445, over 972865.75 frames.], batch size: 15, lr: 1.03e-03 +2022-05-03 20:50:33,753 INFO [train.py:715] (6/8) Epoch 1, batch 6100, loss[loss=0.1692, simple_loss=0.2333, pruned_loss=0.05256, over 4958.00 frames.], tot_loss[loss=0.1906, simple_loss=0.2526, pruned_loss=0.06433, over 972743.29 frames.], batch size: 24, lr: 1.03e-03 +2022-05-03 20:51:13,277 INFO [train.py:715] (6/8) Epoch 1, batch 6150, loss[loss=0.1823, simple_loss=0.2412, pruned_loss=0.06166, over 4781.00 frames.], tot_loss[loss=0.1915, simple_loss=0.2533, pruned_loss=0.06484, over 973270.35 frames.], batch size: 17, lr: 1.03e-03 +2022-05-03 20:51:51,978 INFO [train.py:715] (6/8) Epoch 1, batch 6200, loss[loss=0.188, simple_loss=0.2628, pruned_loss=0.05662, over 4943.00 frames.], tot_loss[loss=0.1908, simple_loss=0.2529, pruned_loss=0.06436, over 973347.23 frames.], batch size: 21, lr: 1.03e-03 +2022-05-03 20:52:32,164 INFO [train.py:715] (6/8) Epoch 1, batch 6250, loss[loss=0.1618, simple_loss=0.2348, pruned_loss=0.04443, over 4835.00 frames.], tot_loss[loss=0.1911, simple_loss=0.2532, pruned_loss=0.06448, over 973174.30 frames.], batch size: 13, lr: 1.03e-03 +2022-05-03 20:53:11,879 INFO [train.py:715] (6/8) Epoch 1, batch 6300, loss[loss=0.1963, simple_loss=0.2543, pruned_loss=0.06915, over 4828.00 frames.], tot_loss[loss=0.1918, simple_loss=0.2537, pruned_loss=0.06499, over 972413.17 frames.], batch size: 15, lr: 1.03e-03 +2022-05-03 20:53:51,077 INFO [train.py:715] (6/8) Epoch 1, batch 6350, loss[loss=0.1814, simple_loss=0.2504, pruned_loss=0.05618, over 4776.00 frames.], tot_loss[loss=0.193, simple_loss=0.2546, pruned_loss=0.0657, over 972788.09 frames.], batch size: 17, lr: 1.03e-03 +2022-05-03 20:54:30,388 INFO [train.py:715] (6/8) Epoch 1, batch 6400, loss[loss=0.1662, simple_loss=0.2245, pruned_loss=0.05397, over 4965.00 frames.], tot_loss[loss=0.1937, simple_loss=0.2549, pruned_loss=0.06625, over 972761.63 frames.], batch size: 14, lr: 1.03e-03 +2022-05-03 20:55:09,944 INFO [train.py:715] (6/8) Epoch 1, batch 6450, loss[loss=0.1888, simple_loss=0.2425, pruned_loss=0.06759, over 4916.00 frames.], tot_loss[loss=0.1923, simple_loss=0.2535, pruned_loss=0.06559, over 972318.31 frames.], batch size: 39, lr: 1.02e-03 +2022-05-03 20:55:49,583 INFO [train.py:715] (6/8) Epoch 1, batch 6500, loss[loss=0.2025, simple_loss=0.2614, pruned_loss=0.07184, over 4961.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2535, pruned_loss=0.06542, over 972011.26 frames.], batch size: 15, lr: 1.02e-03 +2022-05-03 20:56:28,198 INFO [train.py:715] (6/8) Epoch 1, batch 6550, loss[loss=0.2204, simple_loss=0.2794, pruned_loss=0.08073, over 4939.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2532, pruned_loss=0.06547, over 972178.30 frames.], batch size: 39, lr: 1.02e-03 +2022-05-03 20:57:08,079 INFO [train.py:715] (6/8) Epoch 1, batch 6600, loss[loss=0.2309, simple_loss=0.2712, pruned_loss=0.09531, over 4748.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2531, pruned_loss=0.06561, over 971457.82 frames.], batch size: 
12, lr: 1.02e-03 +2022-05-03 20:57:48,551 INFO [train.py:715] (6/8) Epoch 1, batch 6650, loss[loss=0.1707, simple_loss=0.2275, pruned_loss=0.05692, over 4833.00 frames.], tot_loss[loss=0.1926, simple_loss=0.2536, pruned_loss=0.06584, over 972115.96 frames.], batch size: 13, lr: 1.02e-03 +2022-05-03 20:58:28,003 INFO [train.py:715] (6/8) Epoch 1, batch 6700, loss[loss=0.2001, simple_loss=0.2513, pruned_loss=0.07439, over 4951.00 frames.], tot_loss[loss=0.1925, simple_loss=0.2537, pruned_loss=0.06565, over 972458.24 frames.], batch size: 35, lr: 1.02e-03 +2022-05-03 20:59:07,324 INFO [train.py:715] (6/8) Epoch 1, batch 6750, loss[loss=0.215, simple_loss=0.2678, pruned_loss=0.08106, over 4992.00 frames.], tot_loss[loss=0.1924, simple_loss=0.2534, pruned_loss=0.06571, over 972764.27 frames.], batch size: 28, lr: 1.02e-03 +2022-05-03 20:59:47,258 INFO [train.py:715] (6/8) Epoch 1, batch 6800, loss[loss=0.1917, simple_loss=0.2469, pruned_loss=0.06821, over 4706.00 frames.], tot_loss[loss=0.1924, simple_loss=0.2535, pruned_loss=0.06565, over 972660.38 frames.], batch size: 15, lr: 1.02e-03 +2022-05-03 21:00:26,801 INFO [train.py:715] (6/8) Epoch 1, batch 6850, loss[loss=0.1623, simple_loss=0.2243, pruned_loss=0.05014, over 4957.00 frames.], tot_loss[loss=0.1938, simple_loss=0.2552, pruned_loss=0.06622, over 972409.47 frames.], batch size: 24, lr: 1.02e-03 +2022-05-03 21:01:05,426 INFO [train.py:715] (6/8) Epoch 1, batch 6900, loss[loss=0.2118, simple_loss=0.2764, pruned_loss=0.0736, over 4932.00 frames.], tot_loss[loss=0.1931, simple_loss=0.2547, pruned_loss=0.06579, over 972563.86 frames.], batch size: 18, lr: 1.02e-03 +2022-05-03 21:01:44,717 INFO [train.py:715] (6/8) Epoch 1, batch 6950, loss[loss=0.1747, simple_loss=0.2403, pruned_loss=0.05457, over 4941.00 frames.], tot_loss[loss=0.1919, simple_loss=0.2537, pruned_loss=0.06505, over 972427.62 frames.], batch size: 24, lr: 1.02e-03 +2022-05-03 21:02:24,797 INFO [train.py:715] (6/8) Epoch 1, batch 7000, loss[loss=0.1692, simple_loss=0.2315, pruned_loss=0.05346, over 4848.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2538, pruned_loss=0.06533, over 972137.20 frames.], batch size: 20, lr: 1.02e-03 +2022-05-03 21:03:03,643 INFO [train.py:715] (6/8) Epoch 1, batch 7050, loss[loss=0.1594, simple_loss=0.2217, pruned_loss=0.04851, over 4915.00 frames.], tot_loss[loss=0.1925, simple_loss=0.2538, pruned_loss=0.06557, over 973004.54 frames.], batch size: 17, lr: 1.02e-03 +2022-05-03 21:03:42,609 INFO [train.py:715] (6/8) Epoch 1, batch 7100, loss[loss=0.1779, simple_loss=0.2477, pruned_loss=0.05403, over 4786.00 frames.], tot_loss[loss=0.1923, simple_loss=0.2538, pruned_loss=0.06541, over 972645.08 frames.], batch size: 18, lr: 1.02e-03 +2022-05-03 21:04:22,596 INFO [train.py:715] (6/8) Epoch 1, batch 7150, loss[loss=0.1831, simple_loss=0.2396, pruned_loss=0.06328, over 4933.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2539, pruned_loss=0.06513, over 972670.57 frames.], batch size: 29, lr: 1.02e-03 +2022-05-03 21:05:02,516 INFO [train.py:715] (6/8) Epoch 1, batch 7200, loss[loss=0.2006, simple_loss=0.2512, pruned_loss=0.07498, over 4757.00 frames.], tot_loss[loss=0.191, simple_loss=0.2528, pruned_loss=0.06463, over 971701.42 frames.], batch size: 16, lr: 1.02e-03 +2022-05-03 21:05:41,159 INFO [train.py:715] (6/8) Epoch 1, batch 7250, loss[loss=0.1871, simple_loss=0.2456, pruned_loss=0.06436, over 4771.00 frames.], tot_loss[loss=0.192, simple_loss=0.2534, pruned_loss=0.06529, over 972138.45 frames.], batch size: 19, lr: 1.02e-03 +2022-05-03 
21:06:21,089 INFO [train.py:715] (6/8) Epoch 1, batch 7300, loss[loss=0.1827, simple_loss=0.2471, pruned_loss=0.05912, over 4968.00 frames.], tot_loss[loss=0.192, simple_loss=0.2533, pruned_loss=0.06536, over 973063.01 frames.], batch size: 15, lr: 1.01e-03 +2022-05-03 21:07:00,828 INFO [train.py:715] (6/8) Epoch 1, batch 7350, loss[loss=0.1786, simple_loss=0.2437, pruned_loss=0.05679, over 4774.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2537, pruned_loss=0.0654, over 972793.59 frames.], batch size: 18, lr: 1.01e-03 +2022-05-03 21:07:39,619 INFO [train.py:715] (6/8) Epoch 1, batch 7400, loss[loss=0.1449, simple_loss=0.205, pruned_loss=0.04246, over 4764.00 frames.], tot_loss[loss=0.1919, simple_loss=0.2536, pruned_loss=0.06514, over 972737.95 frames.], batch size: 12, lr: 1.01e-03 +2022-05-03 21:08:18,533 INFO [train.py:715] (6/8) Epoch 1, batch 7450, loss[loss=0.2112, simple_loss=0.2708, pruned_loss=0.07583, over 4940.00 frames.], tot_loss[loss=0.192, simple_loss=0.2533, pruned_loss=0.0653, over 972059.29 frames.], batch size: 21, lr: 1.01e-03 +2022-05-03 21:08:58,351 INFO [train.py:715] (6/8) Epoch 1, batch 7500, loss[loss=0.2228, simple_loss=0.2901, pruned_loss=0.07775, over 4954.00 frames.], tot_loss[loss=0.1923, simple_loss=0.2537, pruned_loss=0.06542, over 972533.10 frames.], batch size: 35, lr: 1.01e-03 +2022-05-03 21:09:38,026 INFO [train.py:715] (6/8) Epoch 1, batch 7550, loss[loss=0.1673, simple_loss=0.2335, pruned_loss=0.05057, over 4705.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2526, pruned_loss=0.06491, over 972813.48 frames.], batch size: 15, lr: 1.01e-03 +2022-05-03 21:10:16,235 INFO [train.py:715] (6/8) Epoch 1, batch 7600, loss[loss=0.2216, simple_loss=0.2783, pruned_loss=0.08242, over 4849.00 frames.], tot_loss[loss=0.1904, simple_loss=0.2523, pruned_loss=0.06428, over 973044.71 frames.], batch size: 12, lr: 1.01e-03 +2022-05-03 21:10:55,973 INFO [train.py:715] (6/8) Epoch 1, batch 7650, loss[loss=0.2106, simple_loss=0.2662, pruned_loss=0.07748, over 4822.00 frames.], tot_loss[loss=0.1894, simple_loss=0.2514, pruned_loss=0.06373, over 972991.57 frames.], batch size: 21, lr: 1.01e-03 +2022-05-03 21:11:35,792 INFO [train.py:715] (6/8) Epoch 1, batch 7700, loss[loss=0.2058, simple_loss=0.2629, pruned_loss=0.0743, over 4854.00 frames.], tot_loss[loss=0.1893, simple_loss=0.2512, pruned_loss=0.06369, over 972206.71 frames.], batch size: 30, lr: 1.01e-03 +2022-05-03 21:12:14,135 INFO [train.py:715] (6/8) Epoch 1, batch 7750, loss[loss=0.158, simple_loss=0.2234, pruned_loss=0.04627, over 4919.00 frames.], tot_loss[loss=0.1889, simple_loss=0.2507, pruned_loss=0.0636, over 972052.50 frames.], batch size: 19, lr: 1.01e-03 +2022-05-03 21:12:53,241 INFO [train.py:715] (6/8) Epoch 1, batch 7800, loss[loss=0.1727, simple_loss=0.2386, pruned_loss=0.0534, over 4930.00 frames.], tot_loss[loss=0.1902, simple_loss=0.2518, pruned_loss=0.06436, over 973096.07 frames.], batch size: 29, lr: 1.01e-03 +2022-05-03 21:13:33,314 INFO [train.py:715] (6/8) Epoch 1, batch 7850, loss[loss=0.1346, simple_loss=0.2008, pruned_loss=0.03417, over 4733.00 frames.], tot_loss[loss=0.19, simple_loss=0.2515, pruned_loss=0.06424, over 972419.52 frames.], batch size: 12, lr: 1.01e-03 +2022-05-03 21:14:12,718 INFO [train.py:715] (6/8) Epoch 1, batch 7900, loss[loss=0.1779, simple_loss=0.2368, pruned_loss=0.0595, over 4974.00 frames.], tot_loss[loss=0.1909, simple_loss=0.2524, pruned_loss=0.06473, over 972459.10 frames.], batch size: 24, lr: 1.01e-03 +2022-05-03 21:14:51,152 INFO [train.py:715] (6/8) 
Epoch 1, batch 7950, loss[loss=0.1765, simple_loss=0.2421, pruned_loss=0.05543, over 4984.00 frames.], tot_loss[loss=0.1918, simple_loss=0.2533, pruned_loss=0.06517, over 972066.71 frames.], batch size: 28, lr: 1.01e-03 +2022-05-03 21:15:31,263 INFO [train.py:715] (6/8) Epoch 1, batch 8000, loss[loss=0.1484, simple_loss=0.2182, pruned_loss=0.03933, over 4707.00 frames.], tot_loss[loss=0.1911, simple_loss=0.2531, pruned_loss=0.06453, over 971921.29 frames.], batch size: 15, lr: 1.01e-03 +2022-05-03 21:16:11,053 INFO [train.py:715] (6/8) Epoch 1, batch 8050, loss[loss=0.1936, simple_loss=0.259, pruned_loss=0.06415, over 4791.00 frames.], tot_loss[loss=0.1927, simple_loss=0.2544, pruned_loss=0.06549, over 971976.71 frames.], batch size: 17, lr: 1.01e-03 +2022-05-03 21:16:50,427 INFO [train.py:715] (6/8) Epoch 1, batch 8100, loss[loss=0.1762, simple_loss=0.2328, pruned_loss=0.05979, over 4755.00 frames.], tot_loss[loss=0.1923, simple_loss=0.254, pruned_loss=0.06527, over 972418.08 frames.], batch size: 19, lr: 1.01e-03 +2022-05-03 21:17:28,627 INFO [train.py:715] (6/8) Epoch 1, batch 8150, loss[loss=0.2128, simple_loss=0.2675, pruned_loss=0.07912, over 4845.00 frames.], tot_loss[loss=0.1911, simple_loss=0.2529, pruned_loss=0.06468, over 973147.51 frames.], batch size: 15, lr: 1.00e-03 +2022-05-03 21:18:08,546 INFO [train.py:715] (6/8) Epoch 1, batch 8200, loss[loss=0.265, simple_loss=0.3038, pruned_loss=0.1131, over 4702.00 frames.], tot_loss[loss=0.1911, simple_loss=0.253, pruned_loss=0.06456, over 972774.75 frames.], batch size: 15, lr: 1.00e-03 +2022-05-03 21:18:48,018 INFO [train.py:715] (6/8) Epoch 1, batch 8250, loss[loss=0.172, simple_loss=0.2309, pruned_loss=0.0566, over 4917.00 frames.], tot_loss[loss=0.1921, simple_loss=0.254, pruned_loss=0.06514, over 973552.53 frames.], batch size: 18, lr: 1.00e-03 +2022-05-03 21:19:26,208 INFO [train.py:715] (6/8) Epoch 1, batch 8300, loss[loss=0.204, simple_loss=0.2717, pruned_loss=0.0681, over 4757.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2541, pruned_loss=0.06512, over 972422.83 frames.], batch size: 14, lr: 1.00e-03 +2022-05-03 21:20:06,148 INFO [train.py:715] (6/8) Epoch 1, batch 8350, loss[loss=0.1602, simple_loss=0.2298, pruned_loss=0.04527, over 4982.00 frames.], tot_loss[loss=0.19, simple_loss=0.2524, pruned_loss=0.06385, over 972546.00 frames.], batch size: 25, lr: 1.00e-03 +2022-05-03 21:20:45,728 INFO [train.py:715] (6/8) Epoch 1, batch 8400, loss[loss=0.2114, simple_loss=0.2589, pruned_loss=0.082, over 4851.00 frames.], tot_loss[loss=0.19, simple_loss=0.2522, pruned_loss=0.0639, over 971391.18 frames.], batch size: 32, lr: 1.00e-03 +2022-05-03 21:21:25,105 INFO [train.py:715] (6/8) Epoch 1, batch 8450, loss[loss=0.1733, simple_loss=0.2434, pruned_loss=0.05156, over 4849.00 frames.], tot_loss[loss=0.1906, simple_loss=0.2521, pruned_loss=0.06454, over 971828.25 frames.], batch size: 20, lr: 1.00e-03 +2022-05-03 21:22:03,497 INFO [train.py:715] (6/8) Epoch 1, batch 8500, loss[loss=0.1899, simple_loss=0.257, pruned_loss=0.06141, over 4974.00 frames.], tot_loss[loss=0.1917, simple_loss=0.2531, pruned_loss=0.06513, over 971841.41 frames.], batch size: 39, lr: 1.00e-03 +2022-05-03 21:22:43,396 INFO [train.py:715] (6/8) Epoch 1, batch 8550, loss[loss=0.2126, simple_loss=0.2706, pruned_loss=0.07731, over 4813.00 frames.], tot_loss[loss=0.1911, simple_loss=0.2527, pruned_loss=0.06472, over 972569.50 frames.], batch size: 25, lr: 1.00e-03 +2022-05-03 21:23:22,904 INFO [train.py:715] (6/8) Epoch 1, batch 8600, loss[loss=0.1982, 
simple_loss=0.263, pruned_loss=0.06673, over 4886.00 frames.], tot_loss[loss=0.1904, simple_loss=0.2524, pruned_loss=0.0642, over 972996.84 frames.], batch size: 16, lr: 1.00e-03 +2022-05-03 21:24:00,903 INFO [train.py:715] (6/8) Epoch 1, batch 8650, loss[loss=0.1867, simple_loss=0.2587, pruned_loss=0.05736, over 4761.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2527, pruned_loss=0.06438, over 973442.61 frames.], batch size: 17, lr: 9.99e-04 +2022-05-03 21:24:41,126 INFO [train.py:715] (6/8) Epoch 1, batch 8700, loss[loss=0.2199, simple_loss=0.2731, pruned_loss=0.0833, over 4819.00 frames.], tot_loss[loss=0.1906, simple_loss=0.2526, pruned_loss=0.06429, over 972966.57 frames.], batch size: 13, lr: 9.98e-04 +2022-05-03 21:25:21,119 INFO [train.py:715] (6/8) Epoch 1, batch 8750, loss[loss=0.1783, simple_loss=0.2477, pruned_loss=0.0545, over 4990.00 frames.], tot_loss[loss=0.1911, simple_loss=0.2531, pruned_loss=0.06457, over 972977.53 frames.], batch size: 28, lr: 9.98e-04 +2022-05-03 21:26:00,209 INFO [train.py:715] (6/8) Epoch 1, batch 8800, loss[loss=0.1956, simple_loss=0.2649, pruned_loss=0.06316, over 4874.00 frames.], tot_loss[loss=0.192, simple_loss=0.2538, pruned_loss=0.06506, over 973031.27 frames.], batch size: 20, lr: 9.97e-04 +2022-05-03 21:26:39,532 INFO [train.py:715] (6/8) Epoch 1, batch 8850, loss[loss=0.2268, simple_loss=0.2866, pruned_loss=0.08355, over 4826.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2543, pruned_loss=0.06497, over 973785.37 frames.], batch size: 27, lr: 9.97e-04 +2022-05-03 21:27:19,653 INFO [train.py:715] (6/8) Epoch 1, batch 8900, loss[loss=0.2267, simple_loss=0.2765, pruned_loss=0.08842, over 4976.00 frames.], tot_loss[loss=0.1924, simple_loss=0.2541, pruned_loss=0.06537, over 973471.35 frames.], batch size: 15, lr: 9.96e-04 +2022-05-03 21:27:59,355 INFO [train.py:715] (6/8) Epoch 1, batch 8950, loss[loss=0.1865, simple_loss=0.2562, pruned_loss=0.05844, over 4970.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2535, pruned_loss=0.06444, over 972112.14 frames.], batch size: 25, lr: 9.96e-04 +2022-05-03 21:28:37,780 INFO [train.py:715] (6/8) Epoch 1, batch 9000, loss[loss=0.1896, simple_loss=0.2712, pruned_loss=0.05398, over 4890.00 frames.], tot_loss[loss=0.1903, simple_loss=0.2527, pruned_loss=0.06391, over 971853.15 frames.], batch size: 19, lr: 9.95e-04 +2022-05-03 21:28:37,781 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 21:28:47,502 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.1253, simple_loss=0.2125, pruned_loss=0.01906, over 914524.00 frames. 
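Throughout these entries the per-batch loss is consistent with a fixed 0.5 weight on the simple loss, i.e. loss ≈ 0.5 * simple_loss + pruned_loss, and the validation line above obeys the same relation. Below is a minimal check against a few values copied from this log; the 0.5 weight is inferred from the logged numbers themselves, not read from any configuration.

```python
# Check that the logged per-batch loss matches 0.5 * simple_loss + pruned_loss.
# The triples below are copied from entries in this log; the 0.5 weight is an
# inference from the numbers, not taken from any config file.
samples = [
    # (loss, simple_loss, pruned_loss)
    (0.1896, 0.2712, 0.05398),  # Epoch 1, batch 9000
    (0.1528, 0.2256, 0.04000),  # Epoch 1, batch 9050
    (0.1253, 0.2125, 0.01906),  # Epoch 1, validation at batch 9000
]

for loss, simple, pruned in samples:
    reconstructed = 0.5 * simple + pruned
    # Logged values are rounded to ~4 significant digits, so allow a small tolerance.
    assert abs(reconstructed - loss) < 5e-4, (loss, reconstructed)
    print(f"loss={loss:.4f}  0.5*simple+pruned={reconstructed:.4f}")
```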
+2022-05-03 21:29:25,999 INFO [train.py:715] (6/8) Epoch 1, batch 9050, loss[loss=0.1528, simple_loss=0.2256, pruned_loss=0.04, over 4874.00 frames.], tot_loss[loss=0.1895, simple_loss=0.252, pruned_loss=0.06346, over 972458.91 frames.], batch size: 22, lr: 9.94e-04 +2022-05-03 21:30:06,209 INFO [train.py:715] (6/8) Epoch 1, batch 9100, loss[loss=0.1615, simple_loss=0.2315, pruned_loss=0.04579, over 4774.00 frames.], tot_loss[loss=0.1895, simple_loss=0.2519, pruned_loss=0.06352, over 972429.57 frames.], batch size: 18, lr: 9.94e-04 +2022-05-03 21:30:45,845 INFO [train.py:715] (6/8) Epoch 1, batch 9150, loss[loss=0.1891, simple_loss=0.2493, pruned_loss=0.06451, over 4943.00 frames.], tot_loss[loss=0.1888, simple_loss=0.251, pruned_loss=0.06331, over 972186.69 frames.], batch size: 21, lr: 9.93e-04 +2022-05-03 21:31:24,125 INFO [train.py:715] (6/8) Epoch 1, batch 9200, loss[loss=0.2344, simple_loss=0.2897, pruned_loss=0.08955, over 4741.00 frames.], tot_loss[loss=0.1896, simple_loss=0.252, pruned_loss=0.06361, over 972068.14 frames.], batch size: 16, lr: 9.93e-04 +2022-05-03 21:32:03,945 INFO [train.py:715] (6/8) Epoch 1, batch 9250, loss[loss=0.1835, simple_loss=0.253, pruned_loss=0.05701, over 4780.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2515, pruned_loss=0.06294, over 972996.30 frames.], batch size: 17, lr: 9.92e-04 +2022-05-03 21:32:43,824 INFO [train.py:715] (6/8) Epoch 1, batch 9300, loss[loss=0.1747, simple_loss=0.244, pruned_loss=0.05266, over 4832.00 frames.], tot_loss[loss=0.1896, simple_loss=0.252, pruned_loss=0.06357, over 972205.68 frames.], batch size: 15, lr: 9.92e-04 +2022-05-03 21:33:22,872 INFO [train.py:715] (6/8) Epoch 1, batch 9350, loss[loss=0.1701, simple_loss=0.2407, pruned_loss=0.04977, over 4963.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2515, pruned_loss=0.06334, over 972624.30 frames.], batch size: 14, lr: 9.91e-04 +2022-05-03 21:34:02,363 INFO [train.py:715] (6/8) Epoch 1, batch 9400, loss[loss=0.204, simple_loss=0.2589, pruned_loss=0.07452, over 4916.00 frames.], tot_loss[loss=0.1879, simple_loss=0.2502, pruned_loss=0.0628, over 972512.47 frames.], batch size: 23, lr: 9.91e-04 +2022-05-03 21:34:42,535 INFO [train.py:715] (6/8) Epoch 1, batch 9450, loss[loss=0.1549, simple_loss=0.2322, pruned_loss=0.0388, over 4924.00 frames.], tot_loss[loss=0.189, simple_loss=0.2512, pruned_loss=0.06337, over 972880.77 frames.], batch size: 29, lr: 9.90e-04 +2022-05-03 21:35:22,131 INFO [train.py:715] (6/8) Epoch 1, batch 9500, loss[loss=0.2279, simple_loss=0.2762, pruned_loss=0.08982, over 4835.00 frames.], tot_loss[loss=0.1899, simple_loss=0.2521, pruned_loss=0.06383, over 972827.61 frames.], batch size: 30, lr: 9.89e-04 +2022-05-03 21:36:00,390 INFO [train.py:715] (6/8) Epoch 1, batch 9550, loss[loss=0.1967, simple_loss=0.2564, pruned_loss=0.06849, over 4774.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2511, pruned_loss=0.06319, over 972881.82 frames.], batch size: 17, lr: 9.89e-04 +2022-05-03 21:36:40,621 INFO [train.py:715] (6/8) Epoch 1, batch 9600, loss[loss=0.198, simple_loss=0.2544, pruned_loss=0.07077, over 4810.00 frames.], tot_loss[loss=0.1879, simple_loss=0.2501, pruned_loss=0.06281, over 972351.18 frames.], batch size: 21, lr: 9.88e-04 +2022-05-03 21:37:20,359 INFO [train.py:715] (6/8) Epoch 1, batch 9650, loss[loss=0.1694, simple_loss=0.2435, pruned_loss=0.04767, over 4979.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2511, pruned_loss=0.06315, over 972555.20 frames.], batch size: 25, lr: 9.88e-04 +2022-05-03 21:37:58,749 INFO [train.py:715] 
(6/8) Epoch 1, batch 9700, loss[loss=0.2114, simple_loss=0.27, pruned_loss=0.07638, over 4931.00 frames.], tot_loss[loss=0.1893, simple_loss=0.2518, pruned_loss=0.06345, over 971708.57 frames.], batch size: 23, lr: 9.87e-04 +2022-05-03 21:38:38,641 INFO [train.py:715] (6/8) Epoch 1, batch 9750, loss[loss=0.1622, simple_loss=0.2216, pruned_loss=0.05145, over 4837.00 frames.], tot_loss[loss=0.1902, simple_loss=0.2526, pruned_loss=0.06388, over 972066.20 frames.], batch size: 13, lr: 9.87e-04 +2022-05-03 21:39:19,062 INFO [train.py:715] (6/8) Epoch 1, batch 9800, loss[loss=0.1855, simple_loss=0.2574, pruned_loss=0.05682, over 4977.00 frames.], tot_loss[loss=0.1893, simple_loss=0.2518, pruned_loss=0.06337, over 971974.92 frames.], batch size: 35, lr: 9.86e-04 +2022-05-03 21:39:58,300 INFO [train.py:715] (6/8) Epoch 1, batch 9850, loss[loss=0.1741, simple_loss=0.2454, pruned_loss=0.05137, over 4969.00 frames.], tot_loss[loss=0.1896, simple_loss=0.2516, pruned_loss=0.0638, over 971451.92 frames.], batch size: 28, lr: 9.86e-04 +2022-05-03 21:40:37,080 INFO [train.py:715] (6/8) Epoch 1, batch 9900, loss[loss=0.1744, simple_loss=0.24, pruned_loss=0.05444, over 4884.00 frames.], tot_loss[loss=0.1906, simple_loss=0.2526, pruned_loss=0.06427, over 972662.56 frames.], batch size: 22, lr: 9.85e-04 +2022-05-03 21:41:17,364 INFO [train.py:715] (6/8) Epoch 1, batch 9950, loss[loss=0.193, simple_loss=0.257, pruned_loss=0.06453, over 4817.00 frames.], tot_loss[loss=0.1899, simple_loss=0.2523, pruned_loss=0.06379, over 972920.96 frames.], batch size: 21, lr: 9.85e-04 +2022-05-03 21:41:57,267 INFO [train.py:715] (6/8) Epoch 1, batch 10000, loss[loss=0.1922, simple_loss=0.246, pruned_loss=0.06919, over 4950.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2517, pruned_loss=0.06326, over 972187.87 frames.], batch size: 29, lr: 9.84e-04 +2022-05-03 21:42:36,322 INFO [train.py:715] (6/8) Epoch 1, batch 10050, loss[loss=0.1539, simple_loss=0.2033, pruned_loss=0.05228, over 4801.00 frames.], tot_loss[loss=0.1886, simple_loss=0.2511, pruned_loss=0.06305, over 972023.76 frames.], batch size: 17, lr: 9.83e-04 +2022-05-03 21:43:15,956 INFO [train.py:715] (6/8) Epoch 1, batch 10100, loss[loss=0.1806, simple_loss=0.2487, pruned_loss=0.05624, over 4908.00 frames.], tot_loss[loss=0.1892, simple_loss=0.2512, pruned_loss=0.06363, over 972853.42 frames.], batch size: 17, lr: 9.83e-04 +2022-05-03 21:43:55,973 INFO [train.py:715] (6/8) Epoch 1, batch 10150, loss[loss=0.2209, simple_loss=0.297, pruned_loss=0.07244, over 4836.00 frames.], tot_loss[loss=0.1896, simple_loss=0.2523, pruned_loss=0.0635, over 973031.95 frames.], batch size: 27, lr: 9.82e-04 +2022-05-03 21:44:35,083 INFO [train.py:715] (6/8) Epoch 1, batch 10200, loss[loss=0.2102, simple_loss=0.2672, pruned_loss=0.07658, over 4888.00 frames.], tot_loss[loss=0.1914, simple_loss=0.2533, pruned_loss=0.06475, over 972886.40 frames.], batch size: 39, lr: 9.82e-04 +2022-05-03 21:45:14,036 INFO [train.py:715] (6/8) Epoch 1, batch 10250, loss[loss=0.1861, simple_loss=0.2561, pruned_loss=0.05805, over 4967.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2543, pruned_loss=0.06504, over 972280.66 frames.], batch size: 24, lr: 9.81e-04 +2022-05-03 21:45:54,206 INFO [train.py:715] (6/8) Epoch 1, batch 10300, loss[loss=0.1586, simple_loss=0.2222, pruned_loss=0.0475, over 4839.00 frames.], tot_loss[loss=0.1908, simple_loss=0.2529, pruned_loss=0.06436, over 971528.83 frames.], batch size: 26, lr: 9.81e-04 +2022-05-03 21:46:34,447 INFO [train.py:715] (6/8) Epoch 1, batch 10350, 
loss[loss=0.2053, simple_loss=0.264, pruned_loss=0.07325, over 4881.00 frames.], tot_loss[loss=0.1911, simple_loss=0.2529, pruned_loss=0.06466, over 972565.24 frames.], batch size: 22, lr: 9.80e-04 +2022-05-03 21:47:13,908 INFO [train.py:715] (6/8) Epoch 1, batch 10400, loss[loss=0.1777, simple_loss=0.2423, pruned_loss=0.05657, over 4960.00 frames.], tot_loss[loss=0.1905, simple_loss=0.2524, pruned_loss=0.06428, over 972944.50 frames.], batch size: 24, lr: 9.80e-04 +2022-05-03 21:47:53,946 INFO [train.py:715] (6/8) Epoch 1, batch 10450, loss[loss=0.1918, simple_loss=0.2431, pruned_loss=0.07025, over 4779.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2529, pruned_loss=0.06477, over 973321.13 frames.], batch size: 12, lr: 9.79e-04 +2022-05-03 21:48:34,478 INFO [train.py:715] (6/8) Epoch 1, batch 10500, loss[loss=0.1625, simple_loss=0.2336, pruned_loss=0.04572, over 4976.00 frames.], tot_loss[loss=0.1904, simple_loss=0.2523, pruned_loss=0.06431, over 973612.27 frames.], batch size: 25, lr: 9.79e-04 +2022-05-03 21:49:13,763 INFO [train.py:715] (6/8) Epoch 1, batch 10550, loss[loss=0.1856, simple_loss=0.2424, pruned_loss=0.06441, over 4891.00 frames.], tot_loss[loss=0.1902, simple_loss=0.252, pruned_loss=0.06418, over 972531.32 frames.], batch size: 19, lr: 9.78e-04 +2022-05-03 21:49:52,640 INFO [train.py:715] (6/8) Epoch 1, batch 10600, loss[loss=0.2107, simple_loss=0.273, pruned_loss=0.0742, over 4699.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2536, pruned_loss=0.06526, over 972455.55 frames.], batch size: 15, lr: 9.78e-04 +2022-05-03 21:50:33,178 INFO [train.py:715] (6/8) Epoch 1, batch 10650, loss[loss=0.1646, simple_loss=0.2349, pruned_loss=0.04711, over 4887.00 frames.], tot_loss[loss=0.1915, simple_loss=0.2535, pruned_loss=0.06471, over 972401.38 frames.], batch size: 22, lr: 9.77e-04 +2022-05-03 21:51:13,727 INFO [train.py:715] (6/8) Epoch 1, batch 10700, loss[loss=0.2114, simple_loss=0.274, pruned_loss=0.07445, over 4991.00 frames.], tot_loss[loss=0.1917, simple_loss=0.2539, pruned_loss=0.06475, over 972330.36 frames.], batch size: 27, lr: 9.76e-04 +2022-05-03 21:51:52,992 INFO [train.py:715] (6/8) Epoch 1, batch 10750, loss[loss=0.2193, simple_loss=0.2802, pruned_loss=0.07921, over 4759.00 frames.], tot_loss[loss=0.1928, simple_loss=0.2549, pruned_loss=0.06532, over 972332.27 frames.], batch size: 16, lr: 9.76e-04 +2022-05-03 21:52:32,275 INFO [train.py:715] (6/8) Epoch 1, batch 10800, loss[loss=0.1845, simple_loss=0.2414, pruned_loss=0.06383, over 4761.00 frames.], tot_loss[loss=0.1911, simple_loss=0.2532, pruned_loss=0.0645, over 971860.78 frames.], batch size: 19, lr: 9.75e-04 +2022-05-03 21:53:12,732 INFO [train.py:715] (6/8) Epoch 1, batch 10850, loss[loss=0.2394, simple_loss=0.2959, pruned_loss=0.09147, over 4812.00 frames.], tot_loss[loss=0.1896, simple_loss=0.2522, pruned_loss=0.06353, over 971396.68 frames.], batch size: 15, lr: 9.75e-04 +2022-05-03 21:53:52,222 INFO [train.py:715] (6/8) Epoch 1, batch 10900, loss[loss=0.1632, simple_loss=0.2386, pruned_loss=0.0439, over 4988.00 frames.], tot_loss[loss=0.1892, simple_loss=0.252, pruned_loss=0.06319, over 972548.14 frames.], batch size: 25, lr: 9.74e-04 +2022-05-03 21:54:30,709 INFO [train.py:715] (6/8) Epoch 1, batch 10950, loss[loss=0.1855, simple_loss=0.2421, pruned_loss=0.06442, over 4837.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2536, pruned_loss=0.06437, over 973007.62 frames.], batch size: 26, lr: 9.74e-04 +2022-05-03 21:55:10,755 INFO [train.py:715] (6/8) Epoch 1, batch 11000, loss[loss=0.1652, 
simple_loss=0.2395, pruned_loss=0.04548, over 4811.00 frames.], tot_loss[loss=0.1923, simple_loss=0.2546, pruned_loss=0.06495, over 972168.93 frames.], batch size: 25, lr: 9.73e-04 +2022-05-03 21:55:50,515 INFO [train.py:715] (6/8) Epoch 1, batch 11050, loss[loss=0.1686, simple_loss=0.2336, pruned_loss=0.05174, over 4861.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2534, pruned_loss=0.06451, over 972649.87 frames.], batch size: 32, lr: 9.73e-04 +2022-05-03 21:56:29,271 INFO [train.py:715] (6/8) Epoch 1, batch 11100, loss[loss=0.1655, simple_loss=0.2296, pruned_loss=0.05071, over 4961.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2534, pruned_loss=0.06464, over 974201.67 frames.], batch size: 39, lr: 9.72e-04 +2022-05-03 21:57:08,680 INFO [train.py:715] (6/8) Epoch 1, batch 11150, loss[loss=0.1769, simple_loss=0.2442, pruned_loss=0.05481, over 4976.00 frames.], tot_loss[loss=0.1915, simple_loss=0.2534, pruned_loss=0.06485, over 974228.31 frames.], batch size: 31, lr: 9.72e-04 +2022-05-03 21:57:48,800 INFO [train.py:715] (6/8) Epoch 1, batch 11200, loss[loss=0.2173, simple_loss=0.2747, pruned_loss=0.07993, over 4845.00 frames.], tot_loss[loss=0.1935, simple_loss=0.255, pruned_loss=0.06598, over 973464.65 frames.], batch size: 13, lr: 9.71e-04 +2022-05-03 21:58:28,397 INFO [train.py:715] (6/8) Epoch 1, batch 11250, loss[loss=0.1687, simple_loss=0.2331, pruned_loss=0.05214, over 4979.00 frames.], tot_loss[loss=0.1918, simple_loss=0.2537, pruned_loss=0.06491, over 972935.55 frames.], batch size: 25, lr: 9.71e-04 +2022-05-03 21:59:06,585 INFO [train.py:715] (6/8) Epoch 1, batch 11300, loss[loss=0.2101, simple_loss=0.2641, pruned_loss=0.07806, over 4804.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2526, pruned_loss=0.06441, over 973353.23 frames.], batch size: 14, lr: 9.70e-04 +2022-05-03 21:59:46,987 INFO [train.py:715] (6/8) Epoch 1, batch 11350, loss[loss=0.1805, simple_loss=0.2508, pruned_loss=0.05511, over 4937.00 frames.], tot_loss[loss=0.1906, simple_loss=0.2527, pruned_loss=0.06422, over 972659.07 frames.], batch size: 39, lr: 9.70e-04 +2022-05-03 22:00:26,694 INFO [train.py:715] (6/8) Epoch 1, batch 11400, loss[loss=0.1801, simple_loss=0.2314, pruned_loss=0.06442, over 4763.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2504, pruned_loss=0.06248, over 972789.95 frames.], batch size: 19, lr: 9.69e-04 +2022-05-03 22:01:04,861 INFO [train.py:715] (6/8) Epoch 1, batch 11450, loss[loss=0.1983, simple_loss=0.2563, pruned_loss=0.07015, over 4885.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2506, pruned_loss=0.0624, over 972649.12 frames.], batch size: 32, lr: 9.69e-04 +2022-05-03 22:01:44,072 INFO [train.py:715] (6/8) Epoch 1, batch 11500, loss[loss=0.2194, simple_loss=0.2721, pruned_loss=0.08338, over 4785.00 frames.], tot_loss[loss=0.1874, simple_loss=0.2504, pruned_loss=0.06222, over 972247.04 frames.], batch size: 18, lr: 9.68e-04 +2022-05-03 22:02:23,962 INFO [train.py:715] (6/8) Epoch 1, batch 11550, loss[loss=0.2434, simple_loss=0.2929, pruned_loss=0.09696, over 4892.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2506, pruned_loss=0.06215, over 972944.06 frames.], batch size: 16, lr: 9.68e-04 +2022-05-03 22:03:03,163 INFO [train.py:715] (6/8) Epoch 1, batch 11600, loss[loss=0.1881, simple_loss=0.2432, pruned_loss=0.06652, over 4686.00 frames.], tot_loss[loss=0.1879, simple_loss=0.251, pruned_loss=0.06242, over 972266.30 frames.], batch size: 15, lr: 9.67e-04 +2022-05-03 22:03:41,495 INFO [train.py:715] (6/8) Epoch 1, batch 11650, loss[loss=0.1729, simple_loss=0.2424, 
pruned_loss=0.05168, over 4762.00 frames.], tot_loss[loss=0.1867, simple_loss=0.2498, pruned_loss=0.06183, over 972603.20 frames.], batch size: 19, lr: 9.67e-04 +2022-05-03 22:04:21,436 INFO [train.py:715] (6/8) Epoch 1, batch 11700, loss[loss=0.166, simple_loss=0.2264, pruned_loss=0.05278, over 4795.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2503, pruned_loss=0.06201, over 972483.95 frames.], batch size: 24, lr: 9.66e-04 +2022-05-03 22:05:01,252 INFO [train.py:715] (6/8) Epoch 1, batch 11750, loss[loss=0.1807, simple_loss=0.2362, pruned_loss=0.06263, over 4974.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2503, pruned_loss=0.0625, over 972188.30 frames.], batch size: 14, lr: 9.66e-04 +2022-05-03 22:05:40,554 INFO [train.py:715] (6/8) Epoch 1, batch 11800, loss[loss=0.1721, simple_loss=0.2363, pruned_loss=0.0539, over 4857.00 frames.], tot_loss[loss=0.1885, simple_loss=0.251, pruned_loss=0.06297, over 973275.09 frames.], batch size: 20, lr: 9.65e-04 +2022-05-03 22:06:19,254 INFO [train.py:715] (6/8) Epoch 1, batch 11850, loss[loss=0.1934, simple_loss=0.2492, pruned_loss=0.06883, over 4989.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2513, pruned_loss=0.06346, over 973619.69 frames.], batch size: 14, lr: 9.65e-04 +2022-05-03 22:06:59,291 INFO [train.py:715] (6/8) Epoch 1, batch 11900, loss[loss=0.2393, simple_loss=0.278, pruned_loss=0.1003, over 4826.00 frames.], tot_loss[loss=0.1894, simple_loss=0.2511, pruned_loss=0.0638, over 972792.82 frames.], batch size: 26, lr: 9.64e-04 +2022-05-03 22:07:38,639 INFO [train.py:715] (6/8) Epoch 1, batch 11950, loss[loss=0.1907, simple_loss=0.2517, pruned_loss=0.06483, over 4850.00 frames.], tot_loss[loss=0.1873, simple_loss=0.2499, pruned_loss=0.06238, over 972503.96 frames.], batch size: 32, lr: 9.63e-04 +2022-05-03 22:08:17,119 INFO [train.py:715] (6/8) Epoch 1, batch 12000, loss[loss=0.1904, simple_loss=0.2529, pruned_loss=0.06395, over 4924.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2494, pruned_loss=0.06193, over 972706.28 frames.], batch size: 18, lr: 9.63e-04 +2022-05-03 22:08:17,119 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 22:08:27,631 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.1244, simple_loss=0.2116, pruned_loss=0.01858, over 914524.00 frames. 
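Per-batch lines in this format are straightforward to post-process. A small parsing sketch follows, assuming exactly the `Epoch N, batch M, loss[...], tot_loss[...], batch size: ..., lr: ...` layout shown here; `train.log` is a placeholder path, not a file named in this log.

```python
import re

# Pull (epoch, batch, tot_loss, lr) out of per-batch lines formatted like the
# ones in this log. "train.log" is a placeholder path.
LINE_RE = re.compile(
    r"Epoch (\d+), batch (\d+), .*?"
    r"tot_loss\[loss=([\d.]+).*?\], batch size: \d+, lr: ([\d.e-]+)"
)

def parse_log(path="train.log"):
    rows = []
    with open(path) as f:
        for line in f:
            m = LINE_RE.search(line)
            if m:
                epoch, batch, tot_loss, lr = m.groups()
                rows.append((int(epoch), int(batch), float(tot_loss), float(lr)))
    return rows

if __name__ == "__main__":
    # Print the last few parsed rows as a quick sanity check.
    for epoch, batch, tot_loss, lr in parse_log()[-5:]:
        print(f"epoch {epoch} batch {batch}: tot_loss={tot_loss:.4f} lr={lr:.2e}")
```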
+2022-05-03 22:09:06,364 INFO [train.py:715] (6/8) Epoch 1, batch 12050, loss[loss=0.2219, simple_loss=0.2758, pruned_loss=0.08401, over 4840.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2491, pruned_loss=0.06209, over 972079.51 frames.], batch size: 15, lr: 9.62e-04 +2022-05-03 22:09:46,989 INFO [train.py:715] (6/8) Epoch 1, batch 12100, loss[loss=0.1695, simple_loss=0.2364, pruned_loss=0.0513, over 4894.00 frames.], tot_loss[loss=0.1865, simple_loss=0.2492, pruned_loss=0.06189, over 972138.32 frames.], batch size: 17, lr: 9.62e-04 +2022-05-03 22:10:27,672 INFO [train.py:715] (6/8) Epoch 1, batch 12150, loss[loss=0.1751, simple_loss=0.2411, pruned_loss=0.05453, over 4956.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2503, pruned_loss=0.06238, over 971755.05 frames.], batch size: 24, lr: 9.61e-04 +2022-05-03 22:11:06,639 INFO [train.py:715] (6/8) Epoch 1, batch 12200, loss[loss=0.1963, simple_loss=0.2615, pruned_loss=0.06558, over 4945.00 frames.], tot_loss[loss=0.1883, simple_loss=0.2507, pruned_loss=0.06289, over 972009.40 frames.], batch size: 40, lr: 9.61e-04 +2022-05-03 22:11:46,547 INFO [train.py:715] (6/8) Epoch 1, batch 12250, loss[loss=0.184, simple_loss=0.2558, pruned_loss=0.05609, over 4814.00 frames.], tot_loss[loss=0.1883, simple_loss=0.2511, pruned_loss=0.06281, over 971514.00 frames.], batch size: 25, lr: 9.60e-04 +2022-05-03 22:12:27,160 INFO [train.py:715] (6/8) Epoch 1, batch 12300, loss[loss=0.2013, simple_loss=0.2525, pruned_loss=0.07503, over 4988.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2516, pruned_loss=0.0633, over 972407.61 frames.], batch size: 14, lr: 9.60e-04 +2022-05-03 22:13:06,775 INFO [train.py:715] (6/8) Epoch 1, batch 12350, loss[loss=0.2202, simple_loss=0.2686, pruned_loss=0.08585, over 4736.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2515, pruned_loss=0.06334, over 971917.91 frames.], batch size: 16, lr: 9.59e-04 +2022-05-03 22:13:45,540 INFO [train.py:715] (6/8) Epoch 1, batch 12400, loss[loss=0.2148, simple_loss=0.287, pruned_loss=0.07132, over 4837.00 frames.], tot_loss[loss=0.1894, simple_loss=0.2518, pruned_loss=0.0635, over 972884.01 frames.], batch size: 26, lr: 9.59e-04 +2022-05-03 22:14:25,690 INFO [train.py:715] (6/8) Epoch 1, batch 12450, loss[loss=0.1834, simple_loss=0.2574, pruned_loss=0.05473, over 4933.00 frames.], tot_loss[loss=0.188, simple_loss=0.2507, pruned_loss=0.06267, over 973043.83 frames.], batch size: 18, lr: 9.58e-04 +2022-05-03 22:15:05,671 INFO [train.py:715] (6/8) Epoch 1, batch 12500, loss[loss=0.1421, simple_loss=0.2114, pruned_loss=0.03643, over 4688.00 frames.], tot_loss[loss=0.1905, simple_loss=0.253, pruned_loss=0.06402, over 972809.43 frames.], batch size: 15, lr: 9.58e-04 +2022-05-03 22:15:44,878 INFO [train.py:715] (6/8) Epoch 1, batch 12550, loss[loss=0.1872, simple_loss=0.2543, pruned_loss=0.06007, over 4806.00 frames.], tot_loss[loss=0.1915, simple_loss=0.2537, pruned_loss=0.06468, over 972790.40 frames.], batch size: 25, lr: 9.57e-04 +2022-05-03 22:16:24,275 INFO [train.py:715] (6/8) Epoch 1, batch 12600, loss[loss=0.1416, simple_loss=0.2039, pruned_loss=0.03963, over 4766.00 frames.], tot_loss[loss=0.191, simple_loss=0.253, pruned_loss=0.06449, over 972297.45 frames.], batch size: 12, lr: 9.57e-04 +2022-05-03 22:17:04,551 INFO [train.py:715] (6/8) Epoch 1, batch 12650, loss[loss=0.1677, simple_loss=0.2258, pruned_loss=0.05486, over 4812.00 frames.], tot_loss[loss=0.1905, simple_loss=0.2528, pruned_loss=0.06407, over 972414.36 frames.], batch size: 27, lr: 9.56e-04 +2022-05-03 22:17:43,555 
INFO [train.py:715] (6/8) Epoch 1, batch 12700, loss[loss=0.1916, simple_loss=0.2567, pruned_loss=0.06324, over 4888.00 frames.], tot_loss[loss=0.1897, simple_loss=0.252, pruned_loss=0.06365, over 972104.41 frames.], batch size: 19, lr: 9.56e-04 +2022-05-03 22:18:22,953 INFO [train.py:715] (6/8) Epoch 1, batch 12750, loss[loss=0.244, simple_loss=0.2935, pruned_loss=0.09727, over 4694.00 frames.], tot_loss[loss=0.1909, simple_loss=0.2532, pruned_loss=0.06424, over 971962.67 frames.], batch size: 15, lr: 9.55e-04 +2022-05-03 22:19:03,051 INFO [train.py:715] (6/8) Epoch 1, batch 12800, loss[loss=0.1966, simple_loss=0.2603, pruned_loss=0.06647, over 4859.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2534, pruned_loss=0.06461, over 972268.35 frames.], batch size: 20, lr: 9.55e-04 +2022-05-03 22:19:42,878 INFO [train.py:715] (6/8) Epoch 1, batch 12850, loss[loss=0.2025, simple_loss=0.2533, pruned_loss=0.07583, over 4834.00 frames.], tot_loss[loss=0.1902, simple_loss=0.2521, pruned_loss=0.06417, over 972080.12 frames.], batch size: 15, lr: 9.54e-04 +2022-05-03 22:20:21,823 INFO [train.py:715] (6/8) Epoch 1, batch 12900, loss[loss=0.1891, simple_loss=0.2396, pruned_loss=0.06927, over 4927.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2525, pruned_loss=0.06447, over 971883.35 frames.], batch size: 18, lr: 9.54e-04 +2022-05-03 22:21:01,116 INFO [train.py:715] (6/8) Epoch 1, batch 12950, loss[loss=0.1988, simple_loss=0.2566, pruned_loss=0.0705, over 4785.00 frames.], tot_loss[loss=0.19, simple_loss=0.2519, pruned_loss=0.06398, over 972520.38 frames.], batch size: 14, lr: 9.53e-04 +2022-05-03 22:21:41,530 INFO [train.py:715] (6/8) Epoch 1, batch 13000, loss[loss=0.1804, simple_loss=0.247, pruned_loss=0.05691, over 4780.00 frames.], tot_loss[loss=0.1897, simple_loss=0.2516, pruned_loss=0.06396, over 972122.27 frames.], batch size: 17, lr: 9.53e-04 +2022-05-03 22:22:21,099 INFO [train.py:715] (6/8) Epoch 1, batch 13050, loss[loss=0.2025, simple_loss=0.2478, pruned_loss=0.07861, over 4994.00 frames.], tot_loss[loss=0.1888, simple_loss=0.2507, pruned_loss=0.06346, over 971971.14 frames.], batch size: 14, lr: 9.52e-04 +2022-05-03 22:23:01,176 INFO [train.py:715] (6/8) Epoch 1, batch 13100, loss[loss=0.1562, simple_loss=0.2232, pruned_loss=0.04461, over 4939.00 frames.], tot_loss[loss=0.1896, simple_loss=0.2518, pruned_loss=0.06374, over 971878.10 frames.], batch size: 29, lr: 9.52e-04 +2022-05-03 22:23:41,364 INFO [train.py:715] (6/8) Epoch 1, batch 13150, loss[loss=0.2035, simple_loss=0.2688, pruned_loss=0.06908, over 4884.00 frames.], tot_loss[loss=0.1894, simple_loss=0.2518, pruned_loss=0.06351, over 971943.34 frames.], batch size: 16, lr: 9.51e-04 +2022-05-03 22:24:23,883 INFO [train.py:715] (6/8) Epoch 1, batch 13200, loss[loss=0.174, simple_loss=0.2367, pruned_loss=0.0557, over 4783.00 frames.], tot_loss[loss=0.1889, simple_loss=0.2515, pruned_loss=0.06318, over 972454.28 frames.], batch size: 14, lr: 9.51e-04 +2022-05-03 22:25:03,007 INFO [train.py:715] (6/8) Epoch 1, batch 13250, loss[loss=0.1963, simple_loss=0.2561, pruned_loss=0.06824, over 4878.00 frames.], tot_loss[loss=0.1889, simple_loss=0.2514, pruned_loss=0.06324, over 972587.61 frames.], batch size: 32, lr: 9.51e-04 +2022-05-03 22:25:41,756 INFO [train.py:715] (6/8) Epoch 1, batch 13300, loss[loss=0.2207, simple_loss=0.2857, pruned_loss=0.07784, over 4894.00 frames.], tot_loss[loss=0.1881, simple_loss=0.2509, pruned_loss=0.06266, over 972416.68 frames.], batch size: 19, lr: 9.50e-04 +2022-05-03 22:26:21,987 INFO [train.py:715] (6/8) 
Epoch 1, batch 13350, loss[loss=0.1739, simple_loss=0.237, pruned_loss=0.05536, over 4962.00 frames.], tot_loss[loss=0.1869, simple_loss=0.2501, pruned_loss=0.06185, over 972925.79 frames.], batch size: 35, lr: 9.50e-04 +2022-05-03 22:27:01,390 INFO [train.py:715] (6/8) Epoch 1, batch 13400, loss[loss=0.185, simple_loss=0.258, pruned_loss=0.05606, over 4948.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2504, pruned_loss=0.06206, over 972286.92 frames.], batch size: 21, lr: 9.49e-04 +2022-05-03 22:27:41,359 INFO [train.py:715] (6/8) Epoch 1, batch 13450, loss[loss=0.195, simple_loss=0.2519, pruned_loss=0.06905, over 4788.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2507, pruned_loss=0.06236, over 971958.19 frames.], batch size: 18, lr: 9.49e-04 +2022-05-03 22:28:21,069 INFO [train.py:715] (6/8) Epoch 1, batch 13500, loss[loss=0.1878, simple_loss=0.2456, pruned_loss=0.06498, over 4965.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2502, pruned_loss=0.06247, over 972129.11 frames.], batch size: 24, lr: 9.48e-04 +2022-05-03 22:29:01,041 INFO [train.py:715] (6/8) Epoch 1, batch 13550, loss[loss=0.172, simple_loss=0.2366, pruned_loss=0.05372, over 4866.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2509, pruned_loss=0.06325, over 971921.95 frames.], batch size: 32, lr: 9.48e-04 +2022-05-03 22:29:39,298 INFO [train.py:715] (6/8) Epoch 1, batch 13600, loss[loss=0.1904, simple_loss=0.2384, pruned_loss=0.07114, over 4793.00 frames.], tot_loss[loss=0.188, simple_loss=0.2504, pruned_loss=0.0628, over 972051.96 frames.], batch size: 12, lr: 9.47e-04 +2022-05-03 22:30:18,510 INFO [train.py:715] (6/8) Epoch 1, batch 13650, loss[loss=0.2324, simple_loss=0.2865, pruned_loss=0.08913, over 4739.00 frames.], tot_loss[loss=0.1886, simple_loss=0.2504, pruned_loss=0.06344, over 971570.36 frames.], batch size: 16, lr: 9.47e-04 +2022-05-03 22:30:58,741 INFO [train.py:715] (6/8) Epoch 1, batch 13700, loss[loss=0.1936, simple_loss=0.2657, pruned_loss=0.06077, over 4817.00 frames.], tot_loss[loss=0.187, simple_loss=0.2493, pruned_loss=0.06235, over 971780.45 frames.], batch size: 26, lr: 9.46e-04 +2022-05-03 22:31:38,139 INFO [train.py:715] (6/8) Epoch 1, batch 13750, loss[loss=0.2485, simple_loss=0.2732, pruned_loss=0.1119, over 4965.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2495, pruned_loss=0.06245, over 972100.26 frames.], batch size: 14, lr: 9.46e-04 +2022-05-03 22:32:17,281 INFO [train.py:715] (6/8) Epoch 1, batch 13800, loss[loss=0.176, simple_loss=0.2459, pruned_loss=0.05301, over 4812.00 frames.], tot_loss[loss=0.1865, simple_loss=0.249, pruned_loss=0.06194, over 971786.40 frames.], batch size: 24, lr: 9.45e-04 +2022-05-03 22:32:56,971 INFO [train.py:715] (6/8) Epoch 1, batch 13850, loss[loss=0.1729, simple_loss=0.2301, pruned_loss=0.05783, over 4985.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2499, pruned_loss=0.06263, over 971718.60 frames.], batch size: 33, lr: 9.45e-04 +2022-05-03 22:33:36,814 INFO [train.py:715] (6/8) Epoch 1, batch 13900, loss[loss=0.2277, simple_loss=0.2788, pruned_loss=0.08834, over 4807.00 frames.], tot_loss[loss=0.1871, simple_loss=0.2497, pruned_loss=0.06225, over 972744.45 frames.], batch size: 21, lr: 9.44e-04 +2022-05-03 22:34:15,310 INFO [train.py:715] (6/8) Epoch 1, batch 13950, loss[loss=0.1682, simple_loss=0.2315, pruned_loss=0.05245, over 4808.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2481, pruned_loss=0.06161, over 972419.66 frames.], batch size: 21, lr: 9.44e-04 +2022-05-03 22:34:54,569 INFO [train.py:715] (6/8) Epoch 1, batch 14000, 
loss[loss=0.2221, simple_loss=0.2851, pruned_loss=0.07953, over 4850.00 frames.], tot_loss[loss=0.1873, simple_loss=0.2494, pruned_loss=0.06259, over 972392.80 frames.], batch size: 20, lr: 9.43e-04 +2022-05-03 22:35:34,717 INFO [train.py:715] (6/8) Epoch 1, batch 14050, loss[loss=0.1709, simple_loss=0.2348, pruned_loss=0.05352, over 4703.00 frames.], tot_loss[loss=0.1873, simple_loss=0.2492, pruned_loss=0.06274, over 971894.60 frames.], batch size: 15, lr: 9.43e-04 +2022-05-03 22:36:13,518 INFO [train.py:715] (6/8) Epoch 1, batch 14100, loss[loss=0.1795, simple_loss=0.253, pruned_loss=0.05302, over 4931.00 frames.], tot_loss[loss=0.1881, simple_loss=0.2502, pruned_loss=0.06302, over 972220.08 frames.], batch size: 23, lr: 9.42e-04 +2022-05-03 22:36:52,752 INFO [train.py:715] (6/8) Epoch 1, batch 14150, loss[loss=0.2177, simple_loss=0.2668, pruned_loss=0.0843, over 4957.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2498, pruned_loss=0.06276, over 971744.52 frames.], batch size: 24, lr: 9.42e-04 +2022-05-03 22:37:31,985 INFO [train.py:715] (6/8) Epoch 1, batch 14200, loss[loss=0.1519, simple_loss=0.2354, pruned_loss=0.0342, over 4851.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2495, pruned_loss=0.0628, over 972482.57 frames.], batch size: 20, lr: 9.41e-04 +2022-05-03 22:38:12,095 INFO [train.py:715] (6/8) Epoch 1, batch 14250, loss[loss=0.1749, simple_loss=0.2414, pruned_loss=0.05414, over 4789.00 frames.], tot_loss[loss=0.1869, simple_loss=0.2492, pruned_loss=0.06232, over 972742.83 frames.], batch size: 14, lr: 9.41e-04 +2022-05-03 22:38:50,574 INFO [train.py:715] (6/8) Epoch 1, batch 14300, loss[loss=0.2146, simple_loss=0.276, pruned_loss=0.07658, over 4711.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2498, pruned_loss=0.06275, over 972274.20 frames.], batch size: 15, lr: 9.40e-04 +2022-05-03 22:39:29,561 INFO [train.py:715] (6/8) Epoch 1, batch 14350, loss[loss=0.1843, simple_loss=0.2621, pruned_loss=0.05323, over 4977.00 frames.], tot_loss[loss=0.1886, simple_loss=0.251, pruned_loss=0.06308, over 972921.95 frames.], batch size: 15, lr: 9.40e-04 +2022-05-03 22:40:09,911 INFO [train.py:715] (6/8) Epoch 1, batch 14400, loss[loss=0.1815, simple_loss=0.2431, pruned_loss=0.0599, over 4861.00 frames.], tot_loss[loss=0.1873, simple_loss=0.2499, pruned_loss=0.0624, over 972977.13 frames.], batch size: 32, lr: 9.39e-04 +2022-05-03 22:40:48,730 INFO [train.py:715] (6/8) Epoch 1, batch 14450, loss[loss=0.1883, simple_loss=0.2471, pruned_loss=0.06471, over 4834.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2503, pruned_loss=0.06258, over 972238.07 frames.], batch size: 30, lr: 9.39e-04 +2022-05-03 22:41:28,255 INFO [train.py:715] (6/8) Epoch 1, batch 14500, loss[loss=0.1817, simple_loss=0.2446, pruned_loss=0.05935, over 4843.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2501, pruned_loss=0.06244, over 971905.15 frames.], batch size: 30, lr: 9.39e-04 +2022-05-03 22:42:08,354 INFO [train.py:715] (6/8) Epoch 1, batch 14550, loss[loss=0.189, simple_loss=0.2599, pruned_loss=0.05902, over 4701.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2503, pruned_loss=0.06247, over 971709.05 frames.], batch size: 15, lr: 9.38e-04 +2022-05-03 22:42:47,871 INFO [train.py:715] (6/8) Epoch 1, batch 14600, loss[loss=0.1911, simple_loss=0.2519, pruned_loss=0.06521, over 4771.00 frames.], tot_loss[loss=0.1883, simple_loss=0.2507, pruned_loss=0.06295, over 970823.77 frames.], batch size: 14, lr: 9.38e-04 +2022-05-03 22:43:26,827 INFO [train.py:715] (6/8) Epoch 1, batch 14650, loss[loss=0.1897, 
simple_loss=0.2659, pruned_loss=0.05676, over 4923.00 frames.], tot_loss[loss=0.1882, simple_loss=0.2504, pruned_loss=0.06303, over 970994.08 frames.], batch size: 23, lr: 9.37e-04 +2022-05-03 22:44:05,667 INFO [train.py:715] (6/8) Epoch 1, batch 14700, loss[loss=0.2008, simple_loss=0.2652, pruned_loss=0.06823, over 4762.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2501, pruned_loss=0.06265, over 971211.79 frames.], batch size: 18, lr: 9.37e-04 +2022-05-03 22:44:45,792 INFO [train.py:715] (6/8) Epoch 1, batch 14750, loss[loss=0.1624, simple_loss=0.2328, pruned_loss=0.04604, over 4902.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2499, pruned_loss=0.06256, over 970353.98 frames.], batch size: 19, lr: 9.36e-04 +2022-05-03 22:45:24,939 INFO [train.py:715] (6/8) Epoch 1, batch 14800, loss[loss=0.1623, simple_loss=0.2275, pruned_loss=0.04856, over 4904.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2498, pruned_loss=0.0623, over 970946.79 frames.], batch size: 17, lr: 9.36e-04 +2022-05-03 22:46:04,497 INFO [train.py:715] (6/8) Epoch 1, batch 14850, loss[loss=0.2517, simple_loss=0.3054, pruned_loss=0.09898, over 4939.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2501, pruned_loss=0.06245, over 971168.54 frames.], batch size: 21, lr: 9.35e-04 +2022-05-03 22:46:43,815 INFO [train.py:715] (6/8) Epoch 1, batch 14900, loss[loss=0.2055, simple_loss=0.2693, pruned_loss=0.07089, over 4986.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2504, pruned_loss=0.06236, over 971374.85 frames.], batch size: 14, lr: 9.35e-04 +2022-05-03 22:47:22,420 INFO [train.py:715] (6/8) Epoch 1, batch 14950, loss[loss=0.183, simple_loss=0.249, pruned_loss=0.05851, over 4926.00 frames.], tot_loss[loss=0.188, simple_loss=0.2506, pruned_loss=0.06269, over 971200.02 frames.], batch size: 29, lr: 9.34e-04 +2022-05-03 22:48:02,038 INFO [train.py:715] (6/8) Epoch 1, batch 15000, loss[loss=0.1625, simple_loss=0.238, pruned_loss=0.04346, over 4919.00 frames.], tot_loss[loss=0.1885, simple_loss=0.2512, pruned_loss=0.06293, over 971615.57 frames.], batch size: 18, lr: 9.34e-04 +2022-05-03 22:48:02,038 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 22:48:17,509 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.1242, simple_loss=0.2115, pruned_loss=0.01842, over 914524.00 frames. 
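The periodic `Epoch N, validation: loss=...` entries (so far at batches 9000, 12000 and 15000, each over the same 914524 frames) can be isolated the same way. A sketch under the same assumptions as above; `train.log` is again a placeholder path.

```python
import re

# Collect the periodic validation entries ("Epoch N, validation: loss=...").
VALID_RE = re.compile(r"Epoch (\d+), validation: loss=([\d.]+)")

def validation_trend(path="train.log"):
    # Returns [(epoch, validation_loss), ...] in the order they were logged.
    trend = []
    with open(path) as f:
        for line in f:
            m = VALID_RE.search(line)
            if m:
                trend.append((int(m.group(1)), float(m.group(2))))
    return trend

# For the entries visible above this would yield something like
# [(1, 0.1253), (1, 0.1244), (1, 0.1242)], i.e. a slowly improving dev loss.
```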
+2022-05-03 22:48:57,661 INFO [train.py:715] (6/8) Epoch 1, batch 15050, loss[loss=0.1989, simple_loss=0.2674, pruned_loss=0.06523, over 4933.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2503, pruned_loss=0.06254, over 971335.88 frames.], batch size: 23, lr: 9.33e-04 +2022-05-03 22:49:37,562 INFO [train.py:715] (6/8) Epoch 1, batch 15100, loss[loss=0.1538, simple_loss=0.2172, pruned_loss=0.04522, over 4831.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2506, pruned_loss=0.0625, over 971812.96 frames.], batch size: 13, lr: 9.33e-04 +2022-05-03 22:50:18,096 INFO [train.py:715] (6/8) Epoch 1, batch 15150, loss[loss=0.1983, simple_loss=0.253, pruned_loss=0.07183, over 4908.00 frames.], tot_loss[loss=0.1881, simple_loss=0.251, pruned_loss=0.0626, over 972379.89 frames.], batch size: 19, lr: 9.32e-04 +2022-05-03 22:50:57,477 INFO [train.py:715] (6/8) Epoch 1, batch 15200, loss[loss=0.1688, simple_loss=0.2296, pruned_loss=0.05403, over 4659.00 frames.], tot_loss[loss=0.1871, simple_loss=0.2503, pruned_loss=0.06195, over 972512.99 frames.], batch size: 13, lr: 9.32e-04 +2022-05-03 22:51:37,957 INFO [train.py:715] (6/8) Epoch 1, batch 15250, loss[loss=0.1564, simple_loss=0.2259, pruned_loss=0.04347, over 4957.00 frames.], tot_loss[loss=0.1863, simple_loss=0.2497, pruned_loss=0.06146, over 972109.28 frames.], batch size: 24, lr: 9.32e-04 +2022-05-03 22:52:17,874 INFO [train.py:715] (6/8) Epoch 1, batch 15300, loss[loss=0.1716, simple_loss=0.239, pruned_loss=0.0521, over 4947.00 frames.], tot_loss[loss=0.1866, simple_loss=0.25, pruned_loss=0.06164, over 972006.92 frames.], batch size: 21, lr: 9.31e-04 +2022-05-03 22:52:57,764 INFO [train.py:715] (6/8) Epoch 1, batch 15350, loss[loss=0.2146, simple_loss=0.277, pruned_loss=0.07614, over 4959.00 frames.], tot_loss[loss=0.1881, simple_loss=0.2514, pruned_loss=0.06244, over 971492.89 frames.], batch size: 24, lr: 9.31e-04 +2022-05-03 22:53:37,902 INFO [train.py:715] (6/8) Epoch 1, batch 15400, loss[loss=0.1722, simple_loss=0.2387, pruned_loss=0.05291, over 4907.00 frames.], tot_loss[loss=0.1876, simple_loss=0.251, pruned_loss=0.0621, over 971346.61 frames.], batch size: 17, lr: 9.30e-04 +2022-05-03 22:54:18,168 INFO [train.py:715] (6/8) Epoch 1, batch 15450, loss[loss=0.1697, simple_loss=0.2414, pruned_loss=0.04905, over 4758.00 frames.], tot_loss[loss=0.1864, simple_loss=0.2495, pruned_loss=0.06164, over 971564.65 frames.], batch size: 16, lr: 9.30e-04 +2022-05-03 22:54:58,644 INFO [train.py:715] (6/8) Epoch 1, batch 15500, loss[loss=0.1652, simple_loss=0.2298, pruned_loss=0.05035, over 4820.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2495, pruned_loss=0.06188, over 971771.89 frames.], batch size: 12, lr: 9.29e-04 +2022-05-03 22:55:37,739 INFO [train.py:715] (6/8) Epoch 1, batch 15550, loss[loss=0.1749, simple_loss=0.241, pruned_loss=0.05439, over 4880.00 frames.], tot_loss[loss=0.187, simple_loss=0.2498, pruned_loss=0.06206, over 971899.61 frames.], batch size: 16, lr: 9.29e-04 +2022-05-03 22:56:18,061 INFO [train.py:715] (6/8) Epoch 1, batch 15600, loss[loss=0.1832, simple_loss=0.2542, pruned_loss=0.05604, over 4891.00 frames.], tot_loss[loss=0.1874, simple_loss=0.2503, pruned_loss=0.0623, over 971975.21 frames.], batch size: 19, lr: 9.28e-04 +2022-05-03 22:56:58,356 INFO [train.py:715] (6/8) Epoch 1, batch 15650, loss[loss=0.1582, simple_loss=0.2197, pruned_loss=0.04833, over 4868.00 frames.], tot_loss[loss=0.1867, simple_loss=0.2498, pruned_loss=0.0618, over 971655.29 frames.], batch size: 32, lr: 9.28e-04 +2022-05-03 22:57:38,276 INFO 
[train.py:715] (6/8) Epoch 1, batch 15700, loss[loss=0.1777, simple_loss=0.2425, pruned_loss=0.05642, over 4983.00 frames.], tot_loss[loss=0.1858, simple_loss=0.2492, pruned_loss=0.06121, over 972113.70 frames.], batch size: 31, lr: 9.27e-04 +2022-05-03 22:58:17,911 INFO [train.py:715] (6/8) Epoch 1, batch 15750, loss[loss=0.1593, simple_loss=0.2269, pruned_loss=0.04586, over 4827.00 frames.], tot_loss[loss=0.186, simple_loss=0.2489, pruned_loss=0.06157, over 973049.97 frames.], batch size: 27, lr: 9.27e-04 +2022-05-03 22:58:58,198 INFO [train.py:715] (6/8) Epoch 1, batch 15800, loss[loss=0.2043, simple_loss=0.25, pruned_loss=0.07928, over 4856.00 frames.], tot_loss[loss=0.1875, simple_loss=0.25, pruned_loss=0.06249, over 972409.15 frames.], batch size: 32, lr: 9.27e-04 +2022-05-03 22:59:38,880 INFO [train.py:715] (6/8) Epoch 1, batch 15850, loss[loss=0.2218, simple_loss=0.2823, pruned_loss=0.08064, over 4862.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2493, pruned_loss=0.06191, over 972260.70 frames.], batch size: 16, lr: 9.26e-04 +2022-05-03 23:00:18,431 INFO [train.py:715] (6/8) Epoch 1, batch 15900, loss[loss=0.155, simple_loss=0.2313, pruned_loss=0.03937, over 4799.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2492, pruned_loss=0.06199, over 972978.66 frames.], batch size: 21, lr: 9.26e-04 +2022-05-03 23:00:58,074 INFO [train.py:715] (6/8) Epoch 1, batch 15950, loss[loss=0.232, simple_loss=0.2888, pruned_loss=0.08755, over 4815.00 frames.], tot_loss[loss=0.1865, simple_loss=0.2488, pruned_loss=0.06208, over 973287.36 frames.], batch size: 15, lr: 9.25e-04 +2022-05-03 23:01:37,506 INFO [train.py:715] (6/8) Epoch 1, batch 16000, loss[loss=0.1783, simple_loss=0.2298, pruned_loss=0.06341, over 4794.00 frames.], tot_loss[loss=0.1865, simple_loss=0.2488, pruned_loss=0.06213, over 972341.60 frames.], batch size: 12, lr: 9.25e-04 +2022-05-03 23:02:16,259 INFO [train.py:715] (6/8) Epoch 1, batch 16050, loss[loss=0.1665, simple_loss=0.2337, pruned_loss=0.04961, over 4778.00 frames.], tot_loss[loss=0.1862, simple_loss=0.2487, pruned_loss=0.0619, over 971800.40 frames.], batch size: 17, lr: 9.24e-04 +2022-05-03 23:02:55,589 INFO [train.py:715] (6/8) Epoch 1, batch 16100, loss[loss=0.2046, simple_loss=0.2699, pruned_loss=0.06964, over 4907.00 frames.], tot_loss[loss=0.1873, simple_loss=0.2496, pruned_loss=0.06254, over 971914.17 frames.], batch size: 17, lr: 9.24e-04 +2022-05-03 23:03:35,234 INFO [train.py:715] (6/8) Epoch 1, batch 16150, loss[loss=0.1834, simple_loss=0.2455, pruned_loss=0.06062, over 4904.00 frames.], tot_loss[loss=0.1882, simple_loss=0.2505, pruned_loss=0.0629, over 971598.15 frames.], batch size: 17, lr: 9.23e-04 +2022-05-03 23:04:15,423 INFO [train.py:715] (6/8) Epoch 1, batch 16200, loss[loss=0.2307, simple_loss=0.2691, pruned_loss=0.09617, over 4762.00 frames.], tot_loss[loss=0.188, simple_loss=0.2507, pruned_loss=0.06264, over 970982.97 frames.], batch size: 16, lr: 9.23e-04 +2022-05-03 23:04:53,730 INFO [train.py:715] (6/8) Epoch 1, batch 16250, loss[loss=0.1996, simple_loss=0.2571, pruned_loss=0.07103, over 4774.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2505, pruned_loss=0.06253, over 972110.67 frames.], batch size: 17, lr: 9.22e-04 +2022-05-03 23:05:33,197 INFO [train.py:715] (6/8) Epoch 1, batch 16300, loss[loss=0.2097, simple_loss=0.2737, pruned_loss=0.07285, over 4980.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2505, pruned_loss=0.0624, over 972596.59 frames.], batch size: 33, lr: 9.22e-04 +2022-05-03 23:06:12,745 INFO [train.py:715] (6/8) Epoch 1, 
batch 16350, loss[loss=0.2096, simple_loss=0.2653, pruned_loss=0.07689, over 4835.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2506, pruned_loss=0.06219, over 973006.15 frames.], batch size: 15, lr: 9.22e-04 +2022-05-03 23:06:51,403 INFO [train.py:715] (6/8) Epoch 1, batch 16400, loss[loss=0.1736, simple_loss=0.2395, pruned_loss=0.0538, over 4694.00 frames.], tot_loss[loss=0.1879, simple_loss=0.2511, pruned_loss=0.0624, over 972151.17 frames.], batch size: 15, lr: 9.21e-04 +2022-05-03 23:07:30,895 INFO [train.py:715] (6/8) Epoch 1, batch 16450, loss[loss=0.1665, simple_loss=0.2345, pruned_loss=0.04929, over 4821.00 frames.], tot_loss[loss=0.1884, simple_loss=0.2515, pruned_loss=0.06267, over 972024.85 frames.], batch size: 26, lr: 9.21e-04 +2022-05-03 23:08:10,544 INFO [train.py:715] (6/8) Epoch 1, batch 16500, loss[loss=0.1418, simple_loss=0.2064, pruned_loss=0.03857, over 4804.00 frames.], tot_loss[loss=0.1873, simple_loss=0.2501, pruned_loss=0.06219, over 972307.59 frames.], batch size: 21, lr: 9.20e-04 +2022-05-03 23:08:50,457 INFO [train.py:715] (6/8) Epoch 1, batch 16550, loss[loss=0.2047, simple_loss=0.2694, pruned_loss=0.07001, over 4928.00 frames.], tot_loss[loss=0.188, simple_loss=0.2505, pruned_loss=0.06277, over 971973.25 frames.], batch size: 39, lr: 9.20e-04 +2022-05-03 23:09:28,843 INFO [train.py:715] (6/8) Epoch 1, batch 16600, loss[loss=0.175, simple_loss=0.2395, pruned_loss=0.05529, over 4993.00 frames.], tot_loss[loss=0.1879, simple_loss=0.25, pruned_loss=0.06285, over 972163.04 frames.], batch size: 14, lr: 9.19e-04 +2022-05-03 23:10:09,007 INFO [train.py:715] (6/8) Epoch 1, batch 16650, loss[loss=0.2034, simple_loss=0.2601, pruned_loss=0.07331, over 4891.00 frames.], tot_loss[loss=0.1859, simple_loss=0.2492, pruned_loss=0.06133, over 971920.92 frames.], batch size: 19, lr: 9.19e-04 +2022-05-03 23:10:48,702 INFO [train.py:715] (6/8) Epoch 1, batch 16700, loss[loss=0.1281, simple_loss=0.1985, pruned_loss=0.02881, over 4712.00 frames.], tot_loss[loss=0.1862, simple_loss=0.2495, pruned_loss=0.06144, over 971788.56 frames.], batch size: 12, lr: 9.18e-04 +2022-05-03 23:11:28,441 INFO [train.py:715] (6/8) Epoch 1, batch 16750, loss[loss=0.2034, simple_loss=0.2603, pruned_loss=0.07327, over 4969.00 frames.], tot_loss[loss=0.186, simple_loss=0.2496, pruned_loss=0.0612, over 972327.73 frames.], batch size: 24, lr: 9.18e-04 +2022-05-03 23:12:08,275 INFO [train.py:715] (6/8) Epoch 1, batch 16800, loss[loss=0.1912, simple_loss=0.2664, pruned_loss=0.05802, over 4834.00 frames.], tot_loss[loss=0.1853, simple_loss=0.249, pruned_loss=0.06081, over 972842.96 frames.], batch size: 15, lr: 9.18e-04 +2022-05-03 23:12:47,927 INFO [train.py:715] (6/8) Epoch 1, batch 16850, loss[loss=0.2052, simple_loss=0.2671, pruned_loss=0.0716, over 4897.00 frames.], tot_loss[loss=0.1858, simple_loss=0.249, pruned_loss=0.06129, over 973411.99 frames.], batch size: 19, lr: 9.17e-04 +2022-05-03 23:13:27,911 INFO [train.py:715] (6/8) Epoch 1, batch 16900, loss[loss=0.1718, simple_loss=0.2309, pruned_loss=0.05633, over 4928.00 frames.], tot_loss[loss=0.1858, simple_loss=0.2487, pruned_loss=0.06144, over 973483.65 frames.], batch size: 18, lr: 9.17e-04 +2022-05-03 23:14:06,933 INFO [train.py:715] (6/8) Epoch 1, batch 16950, loss[loss=0.1745, simple_loss=0.2494, pruned_loss=0.04976, over 4969.00 frames.], tot_loss[loss=0.1855, simple_loss=0.2485, pruned_loss=0.0612, over 972337.46 frames.], batch size: 25, lr: 9.16e-04 +2022-05-03 23:14:46,349 INFO [train.py:715] (6/8) Epoch 1, batch 17000, loss[loss=0.1737, 
simple_loss=0.2392, pruned_loss=0.0541, over 4933.00 frames.], tot_loss[loss=0.1856, simple_loss=0.2486, pruned_loss=0.0613, over 973013.99 frames.], batch size: 29, lr: 9.16e-04 +2022-05-03 23:15:26,361 INFO [train.py:715] (6/8) Epoch 1, batch 17050, loss[loss=0.1969, simple_loss=0.2661, pruned_loss=0.06384, over 4836.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2491, pruned_loss=0.06119, over 972706.25 frames.], batch size: 13, lr: 9.15e-04 +2022-05-03 23:16:05,143 INFO [train.py:715] (6/8) Epoch 1, batch 17100, loss[loss=0.1808, simple_loss=0.255, pruned_loss=0.05327, over 4819.00 frames.], tot_loss[loss=0.1863, simple_loss=0.2495, pruned_loss=0.06151, over 973077.38 frames.], batch size: 25, lr: 9.15e-04 +2022-05-03 23:16:44,852 INFO [train.py:715] (6/8) Epoch 1, batch 17150, loss[loss=0.1757, simple_loss=0.2454, pruned_loss=0.05305, over 4805.00 frames.], tot_loss[loss=0.187, simple_loss=0.2498, pruned_loss=0.06208, over 972925.49 frames.], batch size: 14, lr: 9.15e-04 +2022-05-03 23:17:25,482 INFO [train.py:715] (6/8) Epoch 1, batch 17200, loss[loss=0.2021, simple_loss=0.2604, pruned_loss=0.07189, over 4971.00 frames.], tot_loss[loss=0.1858, simple_loss=0.2488, pruned_loss=0.06142, over 974292.91 frames.], batch size: 15, lr: 9.14e-04 +2022-05-03 23:18:05,279 INFO [train.py:715] (6/8) Epoch 1, batch 17250, loss[loss=0.2154, simple_loss=0.2685, pruned_loss=0.08119, over 4959.00 frames.], tot_loss[loss=0.1855, simple_loss=0.2484, pruned_loss=0.06128, over 973864.28 frames.], batch size: 24, lr: 9.14e-04 +2022-05-03 23:18:43,791 INFO [train.py:715] (6/8) Epoch 1, batch 17300, loss[loss=0.1478, simple_loss=0.2084, pruned_loss=0.04358, over 4774.00 frames.], tot_loss[loss=0.1864, simple_loss=0.2493, pruned_loss=0.0617, over 973111.76 frames.], batch size: 14, lr: 9.13e-04 +2022-05-03 23:19:23,817 INFO [train.py:715] (6/8) Epoch 1, batch 17350, loss[loss=0.1836, simple_loss=0.2577, pruned_loss=0.05478, over 4804.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2502, pruned_loss=0.06206, over 972850.46 frames.], batch size: 21, lr: 9.13e-04 +2022-05-03 23:20:03,643 INFO [train.py:715] (6/8) Epoch 1, batch 17400, loss[loss=0.1647, simple_loss=0.2433, pruned_loss=0.04306, over 4867.00 frames.], tot_loss[loss=0.1865, simple_loss=0.2497, pruned_loss=0.06168, over 972159.34 frames.], batch size: 20, lr: 9.12e-04 +2022-05-03 23:20:42,900 INFO [train.py:715] (6/8) Epoch 1, batch 17450, loss[loss=0.1935, simple_loss=0.2501, pruned_loss=0.06844, over 4803.00 frames.], tot_loss[loss=0.1861, simple_loss=0.2493, pruned_loss=0.06146, over 972644.64 frames.], batch size: 14, lr: 9.12e-04 +2022-05-03 23:21:23,299 INFO [train.py:715] (6/8) Epoch 1, batch 17500, loss[loss=0.2195, simple_loss=0.273, pruned_loss=0.08304, over 4973.00 frames.], tot_loss[loss=0.186, simple_loss=0.2495, pruned_loss=0.06125, over 973008.52 frames.], batch size: 24, lr: 9.11e-04 +2022-05-03 23:22:03,724 INFO [train.py:715] (6/8) Epoch 1, batch 17550, loss[loss=0.2255, simple_loss=0.2791, pruned_loss=0.08596, over 4911.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2485, pruned_loss=0.06061, over 973095.68 frames.], batch size: 17, lr: 9.11e-04 +2022-05-03 23:22:44,350 INFO [train.py:715] (6/8) Epoch 1, batch 17600, loss[loss=0.1714, simple_loss=0.245, pruned_loss=0.04895, over 4794.00 frames.], tot_loss[loss=0.1847, simple_loss=0.2483, pruned_loss=0.06049, over 972140.93 frames.], batch size: 17, lr: 9.11e-04 +2022-05-03 23:23:24,044 INFO [train.py:715] (6/8) Epoch 1, batch 17650, loss[loss=0.1917, simple_loss=0.2532, 
pruned_loss=0.06514, over 4988.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2476, pruned_loss=0.06008, over 972260.06 frames.], batch size: 31, lr: 9.10e-04 +2022-05-03 23:24:04,742 INFO [train.py:715] (6/8) Epoch 1, batch 17700, loss[loss=0.1996, simple_loss=0.2534, pruned_loss=0.07289, over 4860.00 frames.], tot_loss[loss=0.1837, simple_loss=0.2474, pruned_loss=0.05999, over 973397.77 frames.], batch size: 30, lr: 9.10e-04 +2022-05-03 23:24:44,987 INFO [train.py:715] (6/8) Epoch 1, batch 17750, loss[loss=0.1825, simple_loss=0.2403, pruned_loss=0.06232, over 4697.00 frames.], tot_loss[loss=0.1845, simple_loss=0.248, pruned_loss=0.0605, over 973413.68 frames.], batch size: 15, lr: 9.09e-04 +2022-05-03 23:25:24,520 INFO [train.py:715] (6/8) Epoch 1, batch 17800, loss[loss=0.1676, simple_loss=0.2369, pruned_loss=0.04915, over 4651.00 frames.], tot_loss[loss=0.1842, simple_loss=0.2481, pruned_loss=0.06012, over 972867.06 frames.], batch size: 13, lr: 9.09e-04 +2022-05-03 23:26:04,928 INFO [train.py:715] (6/8) Epoch 1, batch 17850, loss[loss=0.1892, simple_loss=0.255, pruned_loss=0.06173, over 4982.00 frames.], tot_loss[loss=0.1827, simple_loss=0.2469, pruned_loss=0.05924, over 972457.19 frames.], batch size: 28, lr: 9.08e-04 +2022-05-03 23:26:44,326 INFO [train.py:715] (6/8) Epoch 1, batch 17900, loss[loss=0.1502, simple_loss=0.2271, pruned_loss=0.03668, over 4791.00 frames.], tot_loss[loss=0.1827, simple_loss=0.2469, pruned_loss=0.05929, over 972234.72 frames.], batch size: 24, lr: 9.08e-04 +2022-05-03 23:27:23,565 INFO [train.py:715] (6/8) Epoch 1, batch 17950, loss[loss=0.147, simple_loss=0.2177, pruned_loss=0.03817, over 4764.00 frames.], tot_loss[loss=0.1823, simple_loss=0.2466, pruned_loss=0.05897, over 972249.28 frames.], batch size: 19, lr: 9.08e-04 +2022-05-03 23:28:02,863 INFO [train.py:715] (6/8) Epoch 1, batch 18000, loss[loss=0.1701, simple_loss=0.2487, pruned_loss=0.04572, over 4949.00 frames.], tot_loss[loss=0.1828, simple_loss=0.247, pruned_loss=0.0593, over 972036.63 frames.], batch size: 29, lr: 9.07e-04 +2022-05-03 23:28:02,864 INFO [train.py:733] (6/8) Computing validation loss +2022-05-03 23:28:17,470 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.123, simple_loss=0.21, pruned_loss=0.01804, over 914524.00 frames. 
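Across epoch 1 the dev loss moves from 0.1253 after batch 9000 to 0.1230 after batch 18000, i.e. it is still improving, but slowly. The plotting sketch below uses only values copied from the validation entries above; it assumes matplotlib is available, and the output file name is a placeholder.

```python
import matplotlib.pyplot as plt

# Validation losses copied from the entries above (epoch 1, logged right after
# batches 9000, 12000, 15000 and 18000, each over the same 914524 frames).
batches = [9000, 12000, 15000, 18000]
valid_loss = [0.1253, 0.1244, 0.1242, 0.1230]

plt.plot(batches, valid_loss, marker="o")
plt.xlabel("batch (epoch 1)")
plt.ylabel("validation loss")
plt.title("Dev loss over epoch 1")
plt.savefig("valid_loss_epoch1.png")  # placeholder output path
```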
+2022-05-03 23:28:56,685 INFO [train.py:715] (6/8) Epoch 1, batch 18050, loss[loss=0.2029, simple_loss=0.2622, pruned_loss=0.07178, over 4967.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2476, pruned_loss=0.05997, over 971553.53 frames.], batch size: 15, lr: 9.07e-04 +2022-05-03 23:29:37,120 INFO [train.py:715] (6/8) Epoch 1, batch 18100, loss[loss=0.1887, simple_loss=0.2629, pruned_loss=0.05726, over 4917.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2473, pruned_loss=0.05974, over 972431.78 frames.], batch size: 23, lr: 9.06e-04 +2022-05-03 23:30:16,934 INFO [train.py:715] (6/8) Epoch 1, batch 18150, loss[loss=0.1892, simple_loss=0.2548, pruned_loss=0.0618, over 4768.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2485, pruned_loss=0.0606, over 972047.88 frames.], batch size: 19, lr: 9.06e-04 +2022-05-03 23:30:55,305 INFO [train.py:715] (6/8) Epoch 1, batch 18200, loss[loss=0.1857, simple_loss=0.2479, pruned_loss=0.0617, over 4878.00 frames.], tot_loss[loss=0.185, simple_loss=0.2486, pruned_loss=0.0607, over 972265.52 frames.], batch size: 38, lr: 9.05e-04 +2022-05-03 23:31:34,987 INFO [train.py:715] (6/8) Epoch 1, batch 18250, loss[loss=0.13, simple_loss=0.2029, pruned_loss=0.02854, over 4847.00 frames.], tot_loss[loss=0.1855, simple_loss=0.2489, pruned_loss=0.06102, over 971764.59 frames.], batch size: 13, lr: 9.05e-04 +2022-05-03 23:32:14,617 INFO [train.py:715] (6/8) Epoch 1, batch 18300, loss[loss=0.2052, simple_loss=0.2632, pruned_loss=0.07362, over 4929.00 frames.], tot_loss[loss=0.185, simple_loss=0.2485, pruned_loss=0.06073, over 971671.64 frames.], batch size: 18, lr: 9.05e-04 +2022-05-03 23:32:53,402 INFO [train.py:715] (6/8) Epoch 1, batch 18350, loss[loss=0.1992, simple_loss=0.2903, pruned_loss=0.05402, over 4744.00 frames.], tot_loss[loss=0.1862, simple_loss=0.2497, pruned_loss=0.06137, over 971137.01 frames.], batch size: 19, lr: 9.04e-04 +2022-05-03 23:33:33,137 INFO [train.py:715] (6/8) Epoch 1, batch 18400, loss[loss=0.1986, simple_loss=0.252, pruned_loss=0.07262, over 4856.00 frames.], tot_loss[loss=0.186, simple_loss=0.2494, pruned_loss=0.06137, over 971342.37 frames.], batch size: 20, lr: 9.04e-04 +2022-05-03 23:34:13,409 INFO [train.py:715] (6/8) Epoch 1, batch 18450, loss[loss=0.1936, simple_loss=0.2705, pruned_loss=0.05835, over 4699.00 frames.], tot_loss[loss=0.186, simple_loss=0.2499, pruned_loss=0.06107, over 970924.86 frames.], batch size: 15, lr: 9.03e-04 +2022-05-03 23:34:52,240 INFO [train.py:715] (6/8) Epoch 1, batch 18500, loss[loss=0.1914, simple_loss=0.2472, pruned_loss=0.06779, over 4783.00 frames.], tot_loss[loss=0.1864, simple_loss=0.2498, pruned_loss=0.06148, over 971481.00 frames.], batch size: 18, lr: 9.03e-04 +2022-05-03 23:35:31,275 INFO [train.py:715] (6/8) Epoch 1, batch 18550, loss[loss=0.1652, simple_loss=0.2353, pruned_loss=0.04748, over 4988.00 frames.], tot_loss[loss=0.1853, simple_loss=0.249, pruned_loss=0.06082, over 971356.91 frames.], batch size: 28, lr: 9.03e-04 +2022-05-03 23:36:11,454 INFO [train.py:715] (6/8) Epoch 1, batch 18600, loss[loss=0.2062, simple_loss=0.2568, pruned_loss=0.07776, over 4982.00 frames.], tot_loss[loss=0.1852, simple_loss=0.2489, pruned_loss=0.06081, over 972158.79 frames.], batch size: 39, lr: 9.02e-04 +2022-05-03 23:36:50,768 INFO [train.py:715] (6/8) Epoch 1, batch 18650, loss[loss=0.1756, simple_loss=0.2421, pruned_loss=0.05454, over 4814.00 frames.], tot_loss[loss=0.185, simple_loss=0.2484, pruned_loss=0.06073, over 972226.32 frames.], batch size: 13, lr: 9.02e-04 +2022-05-03 23:37:29,518 INFO 
[train.py:715] (6/8) Epoch 1, batch 18700, loss[loss=0.1979, simple_loss=0.2642, pruned_loss=0.06581, over 4878.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2482, pruned_loss=0.06081, over 971577.43 frames.], batch size: 16, lr: 9.01e-04 +2022-05-03 23:38:08,765 INFO [train.py:715] (6/8) Epoch 1, batch 18750, loss[loss=0.1742, simple_loss=0.2398, pruned_loss=0.05426, over 4872.00 frames.], tot_loss[loss=0.1843, simple_loss=0.248, pruned_loss=0.06031, over 971491.40 frames.], batch size: 38, lr: 9.01e-04 +2022-05-03 23:38:48,691 INFO [train.py:715] (6/8) Epoch 1, batch 18800, loss[loss=0.1956, simple_loss=0.258, pruned_loss=0.06662, over 4783.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2475, pruned_loss=0.06008, over 971518.57 frames.], batch size: 14, lr: 9.00e-04 +2022-05-03 23:39:27,388 INFO [train.py:715] (6/8) Epoch 1, batch 18850, loss[loss=0.1843, simple_loss=0.2558, pruned_loss=0.05643, over 4979.00 frames.], tot_loss[loss=0.1851, simple_loss=0.2487, pruned_loss=0.06071, over 972021.56 frames.], batch size: 24, lr: 9.00e-04 +2022-05-03 23:40:06,877 INFO [train.py:715] (6/8) Epoch 1, batch 18900, loss[loss=0.2053, simple_loss=0.2566, pruned_loss=0.07701, over 4875.00 frames.], tot_loss[loss=0.1854, simple_loss=0.2492, pruned_loss=0.06083, over 971896.92 frames.], batch size: 20, lr: 9.00e-04 +2022-05-03 23:40:46,612 INFO [train.py:715] (6/8) Epoch 1, batch 18950, loss[loss=0.2012, simple_loss=0.26, pruned_loss=0.07123, over 4888.00 frames.], tot_loss[loss=0.1866, simple_loss=0.25, pruned_loss=0.06161, over 972625.64 frames.], batch size: 16, lr: 8.99e-04 +2022-05-03 23:41:25,998 INFO [train.py:715] (6/8) Epoch 1, batch 19000, loss[loss=0.184, simple_loss=0.2566, pruned_loss=0.0557, over 4885.00 frames.], tot_loss[loss=0.1859, simple_loss=0.2491, pruned_loss=0.06132, over 973391.69 frames.], batch size: 16, lr: 8.99e-04 +2022-05-03 23:42:05,682 INFO [train.py:715] (6/8) Epoch 1, batch 19050, loss[loss=0.2452, simple_loss=0.2959, pruned_loss=0.09724, over 4932.00 frames.], tot_loss[loss=0.1854, simple_loss=0.2488, pruned_loss=0.06101, over 973087.23 frames.], batch size: 18, lr: 8.98e-04 +2022-05-03 23:42:44,850 INFO [train.py:715] (6/8) Epoch 1, batch 19100, loss[loss=0.1635, simple_loss=0.2376, pruned_loss=0.04472, over 4987.00 frames.], tot_loss[loss=0.1854, simple_loss=0.2485, pruned_loss=0.06108, over 972699.44 frames.], batch size: 20, lr: 8.98e-04 +2022-05-03 23:43:24,776 INFO [train.py:715] (6/8) Epoch 1, batch 19150, loss[loss=0.2071, simple_loss=0.2774, pruned_loss=0.06842, over 4966.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2488, pruned_loss=0.06127, over 972780.77 frames.], batch size: 15, lr: 8.98e-04 +2022-05-03 23:44:03,417 INFO [train.py:715] (6/8) Epoch 1, batch 19200, loss[loss=0.1734, simple_loss=0.2513, pruned_loss=0.04775, over 4889.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2484, pruned_loss=0.0607, over 972452.81 frames.], batch size: 22, lr: 8.97e-04 +2022-05-03 23:44:42,700 INFO [train.py:715] (6/8) Epoch 1, batch 19250, loss[loss=0.1977, simple_loss=0.2565, pruned_loss=0.06951, over 4819.00 frames.], tot_loss[loss=0.1842, simple_loss=0.248, pruned_loss=0.06024, over 972875.31 frames.], batch size: 13, lr: 8.97e-04 +2022-05-03 23:45:23,326 INFO [train.py:715] (6/8) Epoch 1, batch 19300, loss[loss=0.2207, simple_loss=0.2658, pruned_loss=0.08784, over 4855.00 frames.], tot_loss[loss=0.1837, simple_loss=0.2474, pruned_loss=0.05998, over 973556.82 frames.], batch size: 30, lr: 8.96e-04 +2022-05-03 23:46:02,789 INFO [train.py:715] (6/8) Epoch 
1, batch 19350, loss[loss=0.2093, simple_loss=0.2646, pruned_loss=0.07699, over 4874.00 frames.], tot_loss[loss=0.1837, simple_loss=0.2469, pruned_loss=0.06024, over 974042.77 frames.], batch size: 22, lr: 8.96e-04 +2022-05-03 23:46:41,173 INFO [train.py:715] (6/8) Epoch 1, batch 19400, loss[loss=0.1746, simple_loss=0.2388, pruned_loss=0.05524, over 4882.00 frames.], tot_loss[loss=0.1832, simple_loss=0.2466, pruned_loss=0.05986, over 972898.83 frames.], batch size: 22, lr: 8.95e-04 +2022-05-03 23:47:20,598 INFO [train.py:715] (6/8) Epoch 1, batch 19450, loss[loss=0.1535, simple_loss=0.2174, pruned_loss=0.04477, over 4957.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2463, pruned_loss=0.05975, over 971369.81 frames.], batch size: 15, lr: 8.95e-04 +2022-05-03 23:48:00,485 INFO [train.py:715] (6/8) Epoch 1, batch 19500, loss[loss=0.1616, simple_loss=0.2215, pruned_loss=0.0508, over 4805.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2467, pruned_loss=0.0597, over 971397.91 frames.], batch size: 25, lr: 8.95e-04 +2022-05-03 23:48:39,205 INFO [train.py:715] (6/8) Epoch 1, batch 19550, loss[loss=0.1551, simple_loss=0.2247, pruned_loss=0.04277, over 4845.00 frames.], tot_loss[loss=0.1824, simple_loss=0.2458, pruned_loss=0.05957, over 970807.47 frames.], batch size: 15, lr: 8.94e-04 +2022-05-03 23:49:18,326 INFO [train.py:715] (6/8) Epoch 1, batch 19600, loss[loss=0.1642, simple_loss=0.2356, pruned_loss=0.04637, over 4834.00 frames.], tot_loss[loss=0.1815, simple_loss=0.245, pruned_loss=0.05895, over 971763.99 frames.], batch size: 15, lr: 8.94e-04 +2022-05-03 23:49:58,548 INFO [train.py:715] (6/8) Epoch 1, batch 19650, loss[loss=0.2284, simple_loss=0.2661, pruned_loss=0.09534, over 4880.00 frames.], tot_loss[loss=0.1822, simple_loss=0.2458, pruned_loss=0.05934, over 972750.57 frames.], batch size: 16, lr: 8.93e-04 +2022-05-03 23:50:37,449 INFO [train.py:715] (6/8) Epoch 1, batch 19700, loss[loss=0.1905, simple_loss=0.25, pruned_loss=0.06552, over 4915.00 frames.], tot_loss[loss=0.1827, simple_loss=0.2461, pruned_loss=0.05963, over 972165.42 frames.], batch size: 18, lr: 8.93e-04 +2022-05-03 23:51:16,597 INFO [train.py:715] (6/8) Epoch 1, batch 19750, loss[loss=0.2047, simple_loss=0.271, pruned_loss=0.06917, over 4880.00 frames.], tot_loss[loss=0.1841, simple_loss=0.2474, pruned_loss=0.0604, over 972052.64 frames.], batch size: 16, lr: 8.93e-04 +2022-05-03 23:51:56,241 INFO [train.py:715] (6/8) Epoch 1, batch 19800, loss[loss=0.1941, simple_loss=0.2545, pruned_loss=0.06681, over 4962.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2486, pruned_loss=0.06063, over 971453.23 frames.], batch size: 24, lr: 8.92e-04 +2022-05-03 23:52:36,509 INFO [train.py:715] (6/8) Epoch 1, batch 19850, loss[loss=0.2217, simple_loss=0.2802, pruned_loss=0.08155, over 4751.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2493, pruned_loss=0.06111, over 972035.49 frames.], batch size: 19, lr: 8.92e-04 +2022-05-03 23:53:15,889 INFO [train.py:715] (6/8) Epoch 1, batch 19900, loss[loss=0.1766, simple_loss=0.2365, pruned_loss=0.0583, over 4968.00 frames.], tot_loss[loss=0.1862, simple_loss=0.2492, pruned_loss=0.06154, over 972543.66 frames.], batch size: 35, lr: 8.91e-04 +2022-05-03 23:53:54,990 INFO [train.py:715] (6/8) Epoch 1, batch 19950, loss[loss=0.1602, simple_loss=0.2344, pruned_loss=0.04301, over 4977.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2481, pruned_loss=0.06083, over 972589.84 frames.], batch size: 16, lr: 8.91e-04 +2022-05-03 23:54:35,252 INFO [train.py:715] (6/8) Epoch 1, batch 20000, 
loss[loss=0.1903, simple_loss=0.2669, pruned_loss=0.05685, over 4974.00 frames.], tot_loss[loss=0.1843, simple_loss=0.248, pruned_loss=0.0603, over 972718.84 frames.], batch size: 24, lr: 8.91e-04 +2022-05-03 23:55:14,863 INFO [train.py:715] (6/8) Epoch 1, batch 20050, loss[loss=0.1946, simple_loss=0.2575, pruned_loss=0.06584, over 4871.00 frames.], tot_loss[loss=0.1844, simple_loss=0.2483, pruned_loss=0.06024, over 972971.12 frames.], batch size: 32, lr: 8.90e-04 +2022-05-03 23:55:54,267 INFO [train.py:715] (6/8) Epoch 1, batch 20100, loss[loss=0.1617, simple_loss=0.2297, pruned_loss=0.04691, over 4834.00 frames.], tot_loss[loss=0.1842, simple_loss=0.248, pruned_loss=0.06021, over 972483.44 frames.], batch size: 15, lr: 8.90e-04 +2022-05-03 23:56:34,304 INFO [train.py:715] (6/8) Epoch 1, batch 20150, loss[loss=0.1978, simple_loss=0.2625, pruned_loss=0.06659, over 4817.00 frames.], tot_loss[loss=0.1828, simple_loss=0.2471, pruned_loss=0.05928, over 971898.88 frames.], batch size: 26, lr: 8.89e-04 +2022-05-03 23:57:15,177 INFO [train.py:715] (6/8) Epoch 1, batch 20200, loss[loss=0.2027, simple_loss=0.2746, pruned_loss=0.06536, over 4987.00 frames.], tot_loss[loss=0.183, simple_loss=0.2468, pruned_loss=0.0596, over 971902.15 frames.], batch size: 25, lr: 8.89e-04 +2022-05-03 23:57:53,974 INFO [train.py:715] (6/8) Epoch 1, batch 20250, loss[loss=0.1493, simple_loss=0.2125, pruned_loss=0.04307, over 4918.00 frames.], tot_loss[loss=0.1829, simple_loss=0.247, pruned_loss=0.05942, over 971880.60 frames.], batch size: 23, lr: 8.89e-04 +2022-05-03 23:58:33,270 INFO [train.py:715] (6/8) Epoch 1, batch 20300, loss[loss=0.162, simple_loss=0.2378, pruned_loss=0.04311, over 4751.00 frames.], tot_loss[loss=0.182, simple_loss=0.2459, pruned_loss=0.05902, over 971943.70 frames.], batch size: 12, lr: 8.88e-04 +2022-05-03 23:59:13,202 INFO [train.py:715] (6/8) Epoch 1, batch 20350, loss[loss=0.2118, simple_loss=0.2648, pruned_loss=0.07935, over 4964.00 frames.], tot_loss[loss=0.1828, simple_loss=0.2466, pruned_loss=0.05946, over 972241.76 frames.], batch size: 24, lr: 8.88e-04 +2022-05-03 23:59:51,745 INFO [train.py:715] (6/8) Epoch 1, batch 20400, loss[loss=0.1908, simple_loss=0.2527, pruned_loss=0.06447, over 4737.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2469, pruned_loss=0.05948, over 972070.56 frames.], batch size: 16, lr: 8.87e-04 +2022-05-04 00:00:31,296 INFO [train.py:715] (6/8) Epoch 1, batch 20450, loss[loss=0.145, simple_loss=0.2174, pruned_loss=0.03628, over 4823.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2475, pruned_loss=0.06001, over 972863.60 frames.], batch size: 26, lr: 8.87e-04 +2022-05-04 00:01:10,347 INFO [train.py:715] (6/8) Epoch 1, batch 20500, loss[loss=0.2044, simple_loss=0.2672, pruned_loss=0.07081, over 4846.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2474, pruned_loss=0.05973, over 972982.90 frames.], batch size: 30, lr: 8.87e-04 +2022-05-04 00:01:50,044 INFO [train.py:715] (6/8) Epoch 1, batch 20550, loss[loss=0.2024, simple_loss=0.2652, pruned_loss=0.06981, over 4947.00 frames.], tot_loss[loss=0.1847, simple_loss=0.2486, pruned_loss=0.06037, over 973102.76 frames.], batch size: 21, lr: 8.86e-04 +2022-05-04 00:02:28,913 INFO [train.py:715] (6/8) Epoch 1, batch 20600, loss[loss=0.2208, simple_loss=0.2817, pruned_loss=0.07994, over 4928.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2479, pruned_loss=0.0599, over 973759.79 frames.], batch size: 29, lr: 8.86e-04 +2022-05-04 00:03:08,454 INFO [train.py:715] (6/8) Epoch 1, batch 20650, loss[loss=0.2006, 
simple_loss=0.26, pruned_loss=0.0706, over 4826.00 frames.], tot_loss[loss=0.184, simple_loss=0.248, pruned_loss=0.06002, over 972678.70 frames.], batch size: 30, lr: 8.85e-04 +2022-05-04 00:03:48,943 INFO [train.py:715] (6/8) Epoch 1, batch 20700, loss[loss=0.1888, simple_loss=0.2518, pruned_loss=0.06286, over 4841.00 frames.], tot_loss[loss=0.1843, simple_loss=0.2481, pruned_loss=0.06031, over 973030.56 frames.], batch size: 32, lr: 8.85e-04 +2022-05-04 00:04:28,577 INFO [train.py:715] (6/8) Epoch 1, batch 20750, loss[loss=0.222, simple_loss=0.2698, pruned_loss=0.08707, over 4798.00 frames.], tot_loss[loss=0.1841, simple_loss=0.2477, pruned_loss=0.06026, over 972952.37 frames.], batch size: 14, lr: 8.85e-04 +2022-05-04 00:05:07,897 INFO [train.py:715] (6/8) Epoch 1, batch 20800, loss[loss=0.2114, simple_loss=0.2811, pruned_loss=0.07092, over 4848.00 frames.], tot_loss[loss=0.1824, simple_loss=0.2463, pruned_loss=0.05928, over 972250.51 frames.], batch size: 15, lr: 8.84e-04 +2022-05-04 00:05:47,732 INFO [train.py:715] (6/8) Epoch 1, batch 20850, loss[loss=0.1688, simple_loss=0.2347, pruned_loss=0.05145, over 4871.00 frames.], tot_loss[loss=0.1841, simple_loss=0.2476, pruned_loss=0.06036, over 972447.55 frames.], batch size: 20, lr: 8.84e-04 +2022-05-04 00:06:27,488 INFO [train.py:715] (6/8) Epoch 1, batch 20900, loss[loss=0.2231, simple_loss=0.2793, pruned_loss=0.08344, over 4759.00 frames.], tot_loss[loss=0.1846, simple_loss=0.2481, pruned_loss=0.06057, over 972120.39 frames.], batch size: 16, lr: 8.83e-04 +2022-05-04 00:07:06,278 INFO [train.py:715] (6/8) Epoch 1, batch 20950, loss[loss=0.2196, simple_loss=0.2908, pruned_loss=0.07421, over 4895.00 frames.], tot_loss[loss=0.1833, simple_loss=0.247, pruned_loss=0.05981, over 971363.61 frames.], batch size: 19, lr: 8.83e-04 +2022-05-04 00:07:45,663 INFO [train.py:715] (6/8) Epoch 1, batch 21000, loss[loss=0.1878, simple_loss=0.2514, pruned_loss=0.06207, over 4888.00 frames.], tot_loss[loss=0.1833, simple_loss=0.2469, pruned_loss=0.0598, over 971070.02 frames.], batch size: 22, lr: 8.83e-04 +2022-05-04 00:07:45,664 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 00:08:00,762 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.1226, simple_loss=0.2094, pruned_loss=0.01784, over 914524.00 frames. 
+2022-05-04 00:08:40,112 INFO [train.py:715] (6/8) Epoch 1, batch 21050, loss[loss=0.2341, simple_loss=0.2883, pruned_loss=0.08991, over 4888.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2474, pruned_loss=0.05973, over 971287.88 frames.], batch size: 22, lr: 8.82e-04 +2022-05-04 00:09:19,952 INFO [train.py:715] (6/8) Epoch 1, batch 21100, loss[loss=0.218, simple_loss=0.2795, pruned_loss=0.07822, over 4801.00 frames.], tot_loss[loss=0.1827, simple_loss=0.2469, pruned_loss=0.05925, over 971777.11 frames.], batch size: 24, lr: 8.82e-04 +2022-05-04 00:09:58,324 INFO [train.py:715] (6/8) Epoch 1, batch 21150, loss[loss=0.1971, simple_loss=0.2613, pruned_loss=0.06649, over 4823.00 frames.], tot_loss[loss=0.1827, simple_loss=0.2465, pruned_loss=0.0594, over 971227.98 frames.], batch size: 15, lr: 8.81e-04 +2022-05-04 00:10:40,736 INFO [train.py:715] (6/8) Epoch 1, batch 21200, loss[loss=0.1659, simple_loss=0.237, pruned_loss=0.04739, over 4788.00 frames.], tot_loss[loss=0.1821, simple_loss=0.2459, pruned_loss=0.05915, over 971221.26 frames.], batch size: 24, lr: 8.81e-04 +2022-05-04 00:11:20,089 INFO [train.py:715] (6/8) Epoch 1, batch 21250, loss[loss=0.1749, simple_loss=0.2417, pruned_loss=0.05408, over 4911.00 frames.], tot_loss[loss=0.1822, simple_loss=0.2458, pruned_loss=0.05931, over 972600.56 frames.], batch size: 18, lr: 8.81e-04 +2022-05-04 00:11:59,263 INFO [train.py:715] (6/8) Epoch 1, batch 21300, loss[loss=0.1893, simple_loss=0.2581, pruned_loss=0.06024, over 4931.00 frames.], tot_loss[loss=0.1818, simple_loss=0.2454, pruned_loss=0.05912, over 972798.52 frames.], batch size: 23, lr: 8.80e-04 +2022-05-04 00:12:38,150 INFO [train.py:715] (6/8) Epoch 1, batch 21350, loss[loss=0.1406, simple_loss=0.2084, pruned_loss=0.03637, over 4813.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2453, pruned_loss=0.05864, over 972627.84 frames.], batch size: 15, lr: 8.80e-04 +2022-05-04 00:13:17,801 INFO [train.py:715] (6/8) Epoch 1, batch 21400, loss[loss=0.2263, simple_loss=0.2901, pruned_loss=0.08122, over 4974.00 frames.], tot_loss[loss=0.1821, simple_loss=0.2461, pruned_loss=0.05907, over 972996.37 frames.], batch size: 15, lr: 8.80e-04 +2022-05-04 00:13:57,971 INFO [train.py:715] (6/8) Epoch 1, batch 21450, loss[loss=0.1629, simple_loss=0.227, pruned_loss=0.04942, over 4810.00 frames.], tot_loss[loss=0.1828, simple_loss=0.2469, pruned_loss=0.05938, over 971853.39 frames.], batch size: 13, lr: 8.79e-04 +2022-05-04 00:14:36,219 INFO [train.py:715] (6/8) Epoch 1, batch 21500, loss[loss=0.1976, simple_loss=0.2663, pruned_loss=0.06444, over 4984.00 frames.], tot_loss[loss=0.1835, simple_loss=0.2471, pruned_loss=0.05997, over 971266.59 frames.], batch size: 26, lr: 8.79e-04 +2022-05-04 00:15:15,309 INFO [train.py:715] (6/8) Epoch 1, batch 21550, loss[loss=0.1547, simple_loss=0.2292, pruned_loss=0.04008, over 4814.00 frames.], tot_loss[loss=0.1835, simple_loss=0.2474, pruned_loss=0.05979, over 971993.45 frames.], batch size: 27, lr: 8.78e-04 +2022-05-04 00:15:54,611 INFO [train.py:715] (6/8) Epoch 1, batch 21600, loss[loss=0.1896, simple_loss=0.2492, pruned_loss=0.065, over 4982.00 frames.], tot_loss[loss=0.1832, simple_loss=0.2472, pruned_loss=0.0596, over 972100.45 frames.], batch size: 31, lr: 8.78e-04 +2022-05-04 00:16:33,920 INFO [train.py:715] (6/8) Epoch 1, batch 21650, loss[loss=0.1836, simple_loss=0.25, pruned_loss=0.0586, over 4897.00 frames.], tot_loss[loss=0.184, simple_loss=0.248, pruned_loss=0.06005, over 972367.26 frames.], batch size: 19, lr: 8.78e-04 +2022-05-04 00:17:12,484 INFO 
[train.py:715] (6/8) Epoch 1, batch 21700, loss[loss=0.174, simple_loss=0.2421, pruned_loss=0.05293, over 4791.00 frames.], tot_loss[loss=0.1823, simple_loss=0.2463, pruned_loss=0.05917, over 973338.60 frames.], batch size: 14, lr: 8.77e-04 +2022-05-04 00:17:52,133 INFO [train.py:715] (6/8) Epoch 1, batch 21750, loss[loss=0.1671, simple_loss=0.2326, pruned_loss=0.05086, over 4761.00 frames.], tot_loss[loss=0.1825, simple_loss=0.2466, pruned_loss=0.05922, over 973887.98 frames.], batch size: 18, lr: 8.77e-04 +2022-05-04 00:18:31,692 INFO [train.py:715] (6/8) Epoch 1, batch 21800, loss[loss=0.1793, simple_loss=0.2379, pruned_loss=0.06038, over 4935.00 frames.], tot_loss[loss=0.183, simple_loss=0.2464, pruned_loss=0.0598, over 973219.50 frames.], batch size: 35, lr: 8.76e-04 +2022-05-04 00:19:10,442 INFO [train.py:715] (6/8) Epoch 1, batch 21850, loss[loss=0.1815, simple_loss=0.2549, pruned_loss=0.05402, over 4888.00 frames.], tot_loss[loss=0.1837, simple_loss=0.2472, pruned_loss=0.06009, over 973113.69 frames.], batch size: 19, lr: 8.76e-04 +2022-05-04 00:19:50,601 INFO [train.py:715] (6/8) Epoch 1, batch 21900, loss[loss=0.188, simple_loss=0.2462, pruned_loss=0.06495, over 4776.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2466, pruned_loss=0.0596, over 972391.74 frames.], batch size: 18, lr: 8.76e-04 +2022-05-04 00:20:30,154 INFO [train.py:715] (6/8) Epoch 1, batch 21950, loss[loss=0.1979, simple_loss=0.2547, pruned_loss=0.0706, over 4826.00 frames.], tot_loss[loss=0.1832, simple_loss=0.2469, pruned_loss=0.05975, over 972642.80 frames.], batch size: 15, lr: 8.75e-04 +2022-05-04 00:21:09,937 INFO [train.py:715] (6/8) Epoch 1, batch 22000, loss[loss=0.2004, simple_loss=0.2505, pruned_loss=0.07511, over 4739.00 frames.], tot_loss[loss=0.1833, simple_loss=0.247, pruned_loss=0.05982, over 972553.99 frames.], batch size: 16, lr: 8.75e-04 +2022-05-04 00:21:48,906 INFO [train.py:715] (6/8) Epoch 1, batch 22050, loss[loss=0.1597, simple_loss=0.2389, pruned_loss=0.04025, over 4985.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2472, pruned_loss=0.05976, over 972800.65 frames.], batch size: 24, lr: 8.75e-04 +2022-05-04 00:22:28,896 INFO [train.py:715] (6/8) Epoch 1, batch 22100, loss[loss=0.1962, simple_loss=0.2658, pruned_loss=0.06326, over 4962.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2468, pruned_loss=0.06, over 971971.82 frames.], batch size: 24, lr: 8.74e-04 +2022-05-04 00:23:08,227 INFO [train.py:715] (6/8) Epoch 1, batch 22150, loss[loss=0.2094, simple_loss=0.2619, pruned_loss=0.07848, over 4905.00 frames.], tot_loss[loss=0.1824, simple_loss=0.2461, pruned_loss=0.0594, over 972453.91 frames.], batch size: 17, lr: 8.74e-04 +2022-05-04 00:23:46,651 INFO [train.py:715] (6/8) Epoch 1, batch 22200, loss[loss=0.2107, simple_loss=0.2747, pruned_loss=0.07334, over 4841.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2469, pruned_loss=0.0597, over 972740.71 frames.], batch size: 15, lr: 8.73e-04 +2022-05-04 00:24:25,887 INFO [train.py:715] (6/8) Epoch 1, batch 22250, loss[loss=0.1918, simple_loss=0.2536, pruned_loss=0.065, over 4890.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2466, pruned_loss=0.05977, over 972403.65 frames.], batch size: 19, lr: 8.73e-04 +2022-05-04 00:25:05,564 INFO [train.py:715] (6/8) Epoch 1, batch 22300, loss[loss=0.1548, simple_loss=0.2227, pruned_loss=0.04342, over 4972.00 frames.], tot_loss[loss=0.1835, simple_loss=0.2471, pruned_loss=0.05996, over 971668.94 frames.], batch size: 14, lr: 8.73e-04 +2022-05-04 00:25:45,331 INFO [train.py:715] (6/8) Epoch 1, 
batch 22350, loss[loss=0.228, simple_loss=0.2789, pruned_loss=0.08853, over 4841.00 frames.], tot_loss[loss=0.184, simple_loss=0.2476, pruned_loss=0.06025, over 971698.71 frames.], batch size: 13, lr: 8.72e-04 +2022-05-04 00:26:24,292 INFO [train.py:715] (6/8) Epoch 1, batch 22400, loss[loss=0.2143, simple_loss=0.2686, pruned_loss=0.08001, over 4982.00 frames.], tot_loss[loss=0.185, simple_loss=0.2479, pruned_loss=0.06101, over 971362.60 frames.], batch size: 15, lr: 8.72e-04 +2022-05-04 00:27:04,016 INFO [train.py:715] (6/8) Epoch 1, batch 22450, loss[loss=0.1511, simple_loss=0.2111, pruned_loss=0.04553, over 4788.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2472, pruned_loss=0.06034, over 971443.36 frames.], batch size: 17, lr: 8.72e-04 +2022-05-04 00:27:43,653 INFO [train.py:715] (6/8) Epoch 1, batch 22500, loss[loss=0.1931, simple_loss=0.2638, pruned_loss=0.06121, over 4834.00 frames.], tot_loss[loss=0.1841, simple_loss=0.2474, pruned_loss=0.06043, over 971159.00 frames.], batch size: 30, lr: 8.71e-04 +2022-05-04 00:28:22,148 INFO [train.py:715] (6/8) Epoch 1, batch 22550, loss[loss=0.1594, simple_loss=0.2304, pruned_loss=0.04419, over 4942.00 frames.], tot_loss[loss=0.183, simple_loss=0.2466, pruned_loss=0.05966, over 971320.59 frames.], batch size: 21, lr: 8.71e-04 +2022-05-04 00:29:02,215 INFO [train.py:715] (6/8) Epoch 1, batch 22600, loss[loss=0.2005, simple_loss=0.2548, pruned_loss=0.07305, over 4858.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2475, pruned_loss=0.05971, over 971107.18 frames.], batch size: 20, lr: 8.70e-04 +2022-05-04 00:29:42,689 INFO [train.py:715] (6/8) Epoch 1, batch 22650, loss[loss=0.1993, simple_loss=0.2599, pruned_loss=0.06933, over 4790.00 frames.], tot_loss[loss=0.1841, simple_loss=0.2472, pruned_loss=0.06048, over 970868.48 frames.], batch size: 17, lr: 8.70e-04 +2022-05-04 00:30:22,589 INFO [train.py:715] (6/8) Epoch 1, batch 22700, loss[loss=0.1674, simple_loss=0.2382, pruned_loss=0.04834, over 4802.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2471, pruned_loss=0.06039, over 971246.50 frames.], batch size: 25, lr: 8.70e-04 +2022-05-04 00:31:00,982 INFO [train.py:715] (6/8) Epoch 1, batch 22750, loss[loss=0.1785, simple_loss=0.2339, pruned_loss=0.06155, over 4948.00 frames.], tot_loss[loss=0.1842, simple_loss=0.2472, pruned_loss=0.06056, over 971676.15 frames.], batch size: 35, lr: 8.69e-04 +2022-05-04 00:31:41,168 INFO [train.py:715] (6/8) Epoch 1, batch 22800, loss[loss=0.1318, simple_loss=0.2132, pruned_loss=0.02524, over 4888.00 frames.], tot_loss[loss=0.1835, simple_loss=0.2464, pruned_loss=0.06032, over 971581.38 frames.], batch size: 19, lr: 8.69e-04 +2022-05-04 00:32:20,891 INFO [train.py:715] (6/8) Epoch 1, batch 22850, loss[loss=0.2012, simple_loss=0.2555, pruned_loss=0.07344, over 4770.00 frames.], tot_loss[loss=0.183, simple_loss=0.2464, pruned_loss=0.05979, over 971676.11 frames.], batch size: 16, lr: 8.68e-04 +2022-05-04 00:32:59,727 INFO [train.py:715] (6/8) Epoch 1, batch 22900, loss[loss=0.1734, simple_loss=0.2389, pruned_loss=0.054, over 4957.00 frames.], tot_loss[loss=0.1837, simple_loss=0.2471, pruned_loss=0.06019, over 972459.87 frames.], batch size: 29, lr: 8.68e-04 +2022-05-04 00:33:39,277 INFO [train.py:715] (6/8) Epoch 1, batch 22950, loss[loss=0.1746, simple_loss=0.2384, pruned_loss=0.05542, over 4836.00 frames.], tot_loss[loss=0.1823, simple_loss=0.246, pruned_loss=0.05933, over 971735.70 frames.], batch size: 32, lr: 8.68e-04 +2022-05-04 00:34:19,080 INFO [train.py:715] (6/8) Epoch 1, batch 23000, 
loss[loss=0.1977, simple_loss=0.2554, pruned_loss=0.07002, over 4801.00 frames.], tot_loss[loss=0.1808, simple_loss=0.2448, pruned_loss=0.05844, over 972214.98 frames.], batch size: 24, lr: 8.67e-04 +2022-05-04 00:34:57,985 INFO [train.py:715] (6/8) Epoch 1, batch 23050, loss[loss=0.21, simple_loss=0.2733, pruned_loss=0.07331, over 4985.00 frames.], tot_loss[loss=0.1816, simple_loss=0.2453, pruned_loss=0.05895, over 971870.17 frames.], batch size: 28, lr: 8.67e-04 +2022-05-04 00:35:37,123 INFO [train.py:715] (6/8) Epoch 1, batch 23100, loss[loss=0.1972, simple_loss=0.2622, pruned_loss=0.06607, over 4810.00 frames.], tot_loss[loss=0.1818, simple_loss=0.2455, pruned_loss=0.05905, over 971621.97 frames.], batch size: 25, lr: 8.67e-04 +2022-05-04 00:36:16,858 INFO [train.py:715] (6/8) Epoch 1, batch 23150, loss[loss=0.155, simple_loss=0.2193, pruned_loss=0.04533, over 4795.00 frames.], tot_loss[loss=0.1829, simple_loss=0.246, pruned_loss=0.05991, over 972232.40 frames.], batch size: 12, lr: 8.66e-04 +2022-05-04 00:36:56,381 INFO [train.py:715] (6/8) Epoch 1, batch 23200, loss[loss=0.1929, simple_loss=0.2539, pruned_loss=0.06597, over 4983.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2463, pruned_loss=0.06027, over 973061.27 frames.], batch size: 28, lr: 8.66e-04 +2022-05-04 00:37:34,638 INFO [train.py:715] (6/8) Epoch 1, batch 23250, loss[loss=0.1705, simple_loss=0.2376, pruned_loss=0.05171, over 4784.00 frames.], tot_loss[loss=0.1828, simple_loss=0.2458, pruned_loss=0.05984, over 972494.39 frames.], batch size: 17, lr: 8.66e-04 +2022-05-04 00:38:14,196 INFO [train.py:715] (6/8) Epoch 1, batch 23300, loss[loss=0.1639, simple_loss=0.2341, pruned_loss=0.04683, over 4963.00 frames.], tot_loss[loss=0.183, simple_loss=0.2458, pruned_loss=0.06016, over 971672.70 frames.], batch size: 21, lr: 8.65e-04 +2022-05-04 00:38:53,776 INFO [train.py:715] (6/8) Epoch 1, batch 23350, loss[loss=0.1843, simple_loss=0.2584, pruned_loss=0.05508, over 4989.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2462, pruned_loss=0.06032, over 971530.84 frames.], batch size: 16, lr: 8.65e-04 +2022-05-04 00:39:32,073 INFO [train.py:715] (6/8) Epoch 1, batch 23400, loss[loss=0.154, simple_loss=0.2173, pruned_loss=0.04538, over 4799.00 frames.], tot_loss[loss=0.1843, simple_loss=0.247, pruned_loss=0.06084, over 971442.22 frames.], batch size: 21, lr: 8.64e-04 +2022-05-04 00:40:11,310 INFO [train.py:715] (6/8) Epoch 1, batch 23450, loss[loss=0.2152, simple_loss=0.2756, pruned_loss=0.07745, over 4914.00 frames.], tot_loss[loss=0.1848, simple_loss=0.2477, pruned_loss=0.06099, over 971916.61 frames.], batch size: 18, lr: 8.64e-04 +2022-05-04 00:40:50,696 INFO [train.py:715] (6/8) Epoch 1, batch 23500, loss[loss=0.1822, simple_loss=0.2457, pruned_loss=0.05932, over 4892.00 frames.], tot_loss[loss=0.1851, simple_loss=0.2482, pruned_loss=0.06101, over 971976.40 frames.], batch size: 32, lr: 8.64e-04 +2022-05-04 00:41:29,533 INFO [train.py:715] (6/8) Epoch 1, batch 23550, loss[loss=0.2248, simple_loss=0.2714, pruned_loss=0.08911, over 4748.00 frames.], tot_loss[loss=0.184, simple_loss=0.2475, pruned_loss=0.06024, over 971060.92 frames.], batch size: 16, lr: 8.63e-04 +2022-05-04 00:42:07,730 INFO [train.py:715] (6/8) Epoch 1, batch 23600, loss[loss=0.2167, simple_loss=0.2803, pruned_loss=0.07653, over 4951.00 frames.], tot_loss[loss=0.183, simple_loss=0.2465, pruned_loss=0.05977, over 971567.43 frames.], batch size: 29, lr: 8.63e-04 +2022-05-04 00:42:47,234 INFO [train.py:715] (6/8) Epoch 1, batch 23650, loss[loss=0.1914, 
simple_loss=0.2603, pruned_loss=0.06123, over 4845.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2466, pruned_loss=0.05973, over 971968.99 frames.], batch size: 20, lr: 8.63e-04 +2022-05-04 00:43:26,753 INFO [train.py:715] (6/8) Epoch 1, batch 23700, loss[loss=0.1841, simple_loss=0.2612, pruned_loss=0.05351, over 4940.00 frames.], tot_loss[loss=0.1819, simple_loss=0.2456, pruned_loss=0.05913, over 972324.72 frames.], batch size: 23, lr: 8.62e-04 +2022-05-04 00:44:05,093 INFO [train.py:715] (6/8) Epoch 1, batch 23750, loss[loss=0.1517, simple_loss=0.2275, pruned_loss=0.03798, over 4955.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2449, pruned_loss=0.05862, over 973150.32 frames.], batch size: 24, lr: 8.62e-04 +2022-05-04 00:44:44,147 INFO [train.py:715] (6/8) Epoch 1, batch 23800, loss[loss=0.1469, simple_loss=0.2291, pruned_loss=0.03235, over 4814.00 frames.], tot_loss[loss=0.181, simple_loss=0.2449, pruned_loss=0.05856, over 973465.84 frames.], batch size: 21, lr: 8.61e-04 +2022-05-04 00:45:24,231 INFO [train.py:715] (6/8) Epoch 1, batch 23850, loss[loss=0.1669, simple_loss=0.2363, pruned_loss=0.04872, over 4935.00 frames.], tot_loss[loss=0.1804, simple_loss=0.245, pruned_loss=0.05788, over 973279.38 frames.], batch size: 23, lr: 8.61e-04 +2022-05-04 00:46:03,792 INFO [train.py:715] (6/8) Epoch 1, batch 23900, loss[loss=0.2187, simple_loss=0.2771, pruned_loss=0.08019, over 4986.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2451, pruned_loss=0.05782, over 973260.54 frames.], batch size: 25, lr: 8.61e-04 +2022-05-04 00:46:42,596 INFO [train.py:715] (6/8) Epoch 1, batch 23950, loss[loss=0.1697, simple_loss=0.2422, pruned_loss=0.04855, over 4749.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2457, pruned_loss=0.05774, over 973010.87 frames.], batch size: 19, lr: 8.60e-04 +2022-05-04 00:47:22,330 INFO [train.py:715] (6/8) Epoch 1, batch 24000, loss[loss=0.1633, simple_loss=0.2383, pruned_loss=0.04411, over 4776.00 frames.], tot_loss[loss=0.1815, simple_loss=0.2464, pruned_loss=0.05833, over 971653.50 frames.], batch size: 18, lr: 8.60e-04 +2022-05-04 00:47:22,330 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 00:47:34,529 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.1217, simple_loss=0.2087, pruned_loss=0.01736, over 914524.00 frames. 
+2022-05-04 00:48:14,355 INFO [train.py:715] (6/8) Epoch 1, batch 24050, loss[loss=0.1764, simple_loss=0.2433, pruned_loss=0.05477, over 4777.00 frames.], tot_loss[loss=0.1815, simple_loss=0.2465, pruned_loss=0.05828, over 971706.36 frames.], batch size: 14, lr: 8.60e-04 +2022-05-04 00:48:53,688 INFO [train.py:715] (6/8) Epoch 1, batch 24100, loss[loss=0.1654, simple_loss=0.249, pruned_loss=0.04088, over 4923.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2462, pruned_loss=0.05794, over 971387.42 frames.], batch size: 23, lr: 8.59e-04 +2022-05-04 00:49:32,281 INFO [train.py:715] (6/8) Epoch 1, batch 24150, loss[loss=0.178, simple_loss=0.2428, pruned_loss=0.05664, over 4813.00 frames.], tot_loss[loss=0.1819, simple_loss=0.2467, pruned_loss=0.05853, over 970806.76 frames.], batch size: 13, lr: 8.59e-04 +2022-05-04 00:50:11,575 INFO [train.py:715] (6/8) Epoch 1, batch 24200, loss[loss=0.1578, simple_loss=0.2286, pruned_loss=0.04349, over 4838.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2461, pruned_loss=0.05814, over 971010.12 frames.], batch size: 15, lr: 8.59e-04 +2022-05-04 00:50:52,255 INFO [train.py:715] (6/8) Epoch 1, batch 24250, loss[loss=0.1694, simple_loss=0.2329, pruned_loss=0.05293, over 4833.00 frames.], tot_loss[loss=0.1818, simple_loss=0.2464, pruned_loss=0.05861, over 970887.32 frames.], batch size: 15, lr: 8.58e-04 +2022-05-04 00:51:31,682 INFO [train.py:715] (6/8) Epoch 1, batch 24300, loss[loss=0.1396, simple_loss=0.2024, pruned_loss=0.0384, over 4684.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2459, pruned_loss=0.05794, over 970728.27 frames.], batch size: 15, lr: 8.58e-04 +2022-05-04 00:52:11,128 INFO [train.py:715] (6/8) Epoch 1, batch 24350, loss[loss=0.1479, simple_loss=0.2188, pruned_loss=0.03846, over 4864.00 frames.], tot_loss[loss=0.18, simple_loss=0.2448, pruned_loss=0.05762, over 971097.63 frames.], batch size: 32, lr: 8.57e-04 +2022-05-04 00:52:51,502 INFO [train.py:715] (6/8) Epoch 1, batch 24400, loss[loss=0.1883, simple_loss=0.2505, pruned_loss=0.06303, over 4862.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2448, pruned_loss=0.05786, over 971216.57 frames.], batch size: 32, lr: 8.57e-04 +2022-05-04 00:53:30,582 INFO [train.py:715] (6/8) Epoch 1, batch 24450, loss[loss=0.1595, simple_loss=0.2305, pruned_loss=0.04427, over 4950.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2447, pruned_loss=0.05797, over 970915.56 frames.], batch size: 21, lr: 8.57e-04 +2022-05-04 00:54:09,304 INFO [train.py:715] (6/8) Epoch 1, batch 24500, loss[loss=0.1922, simple_loss=0.2519, pruned_loss=0.06623, over 4915.00 frames.], tot_loss[loss=0.181, simple_loss=0.2447, pruned_loss=0.0586, over 971251.67 frames.], batch size: 17, lr: 8.56e-04 +2022-05-04 00:54:48,967 INFO [train.py:715] (6/8) Epoch 1, batch 24550, loss[loss=0.1639, simple_loss=0.2308, pruned_loss=0.04853, over 4808.00 frames.], tot_loss[loss=0.1816, simple_loss=0.2455, pruned_loss=0.05888, over 970934.47 frames.], batch size: 26, lr: 8.56e-04 +2022-05-04 00:55:29,268 INFO [train.py:715] (6/8) Epoch 1, batch 24600, loss[loss=0.1671, simple_loss=0.2268, pruned_loss=0.05374, over 4743.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2446, pruned_loss=0.05814, over 971007.33 frames.], batch size: 19, lr: 8.56e-04 +2022-05-04 00:56:08,136 INFO [train.py:715] (6/8) Epoch 1, batch 24650, loss[loss=0.1703, simple_loss=0.2356, pruned_loss=0.05247, over 4974.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2446, pruned_loss=0.05835, over 971153.45 frames.], batch size: 25, lr: 8.55e-04 +2022-05-04 00:56:47,169 
INFO [train.py:715] (6/8) Epoch 1, batch 24700, loss[loss=0.203, simple_loss=0.2643, pruned_loss=0.07086, over 4866.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2451, pruned_loss=0.05854, over 971014.42 frames.], batch size: 38, lr: 8.55e-04 +2022-05-04 00:57:27,344 INFO [train.py:715] (6/8) Epoch 1, batch 24750, loss[loss=0.186, simple_loss=0.242, pruned_loss=0.06503, over 4982.00 frames.], tot_loss[loss=0.1818, simple_loss=0.246, pruned_loss=0.05886, over 970928.12 frames.], batch size: 28, lr: 8.55e-04 +2022-05-04 00:58:06,479 INFO [train.py:715] (6/8) Epoch 1, batch 24800, loss[loss=0.2067, simple_loss=0.2595, pruned_loss=0.07701, over 4978.00 frames.], tot_loss[loss=0.1814, simple_loss=0.2456, pruned_loss=0.05862, over 972106.96 frames.], batch size: 24, lr: 8.54e-04 +2022-05-04 00:58:45,111 INFO [train.py:715] (6/8) Epoch 1, batch 24850, loss[loss=0.1361, simple_loss=0.2148, pruned_loss=0.02866, over 4823.00 frames.], tot_loss[loss=0.1807, simple_loss=0.2452, pruned_loss=0.05816, over 972071.37 frames.], batch size: 25, lr: 8.54e-04 +2022-05-04 00:59:25,592 INFO [train.py:715] (6/8) Epoch 1, batch 24900, loss[loss=0.1848, simple_loss=0.2461, pruned_loss=0.06174, over 4863.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2454, pruned_loss=0.05848, over 972492.59 frames.], batch size: 22, lr: 8.54e-04 +2022-05-04 01:00:05,523 INFO [train.py:715] (6/8) Epoch 1, batch 24950, loss[loss=0.218, simple_loss=0.279, pruned_loss=0.07848, over 4802.00 frames.], tot_loss[loss=0.1815, simple_loss=0.2457, pruned_loss=0.05862, over 972723.83 frames.], batch size: 21, lr: 8.53e-04 +2022-05-04 01:00:44,295 INFO [train.py:715] (6/8) Epoch 1, batch 25000, loss[loss=0.2123, simple_loss=0.2713, pruned_loss=0.07667, over 4876.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2465, pruned_loss=0.05846, over 973014.49 frames.], batch size: 16, lr: 8.53e-04 +2022-05-04 01:01:22,937 INFO [train.py:715] (6/8) Epoch 1, batch 25050, loss[loss=0.2262, simple_loss=0.2818, pruned_loss=0.08524, over 4849.00 frames.], tot_loss[loss=0.1818, simple_loss=0.2459, pruned_loss=0.0588, over 972567.02 frames.], batch size: 20, lr: 8.53e-04 +2022-05-04 01:02:02,859 INFO [train.py:715] (6/8) Epoch 1, batch 25100, loss[loss=0.1703, simple_loss=0.2307, pruned_loss=0.05493, over 4743.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2469, pruned_loss=0.05963, over 973019.68 frames.], batch size: 16, lr: 8.52e-04 +2022-05-04 01:02:42,031 INFO [train.py:715] (6/8) Epoch 1, batch 25150, loss[loss=0.2019, simple_loss=0.2707, pruned_loss=0.06651, over 4901.00 frames.], tot_loss[loss=0.182, simple_loss=0.2464, pruned_loss=0.05874, over 972623.51 frames.], batch size: 17, lr: 8.52e-04 +2022-05-04 01:03:20,876 INFO [train.py:715] (6/8) Epoch 1, batch 25200, loss[loss=0.1983, simple_loss=0.2574, pruned_loss=0.06956, over 4837.00 frames.], tot_loss[loss=0.183, simple_loss=0.2473, pruned_loss=0.0594, over 972088.23 frames.], batch size: 26, lr: 8.51e-04 +2022-05-04 01:04:00,097 INFO [train.py:715] (6/8) Epoch 1, batch 25250, loss[loss=0.1796, simple_loss=0.2529, pruned_loss=0.05316, over 4815.00 frames.], tot_loss[loss=0.1837, simple_loss=0.2475, pruned_loss=0.05993, over 971118.60 frames.], batch size: 21, lr: 8.51e-04 +2022-05-04 01:04:40,227 INFO [train.py:715] (6/8) Epoch 1, batch 25300, loss[loss=0.1571, simple_loss=0.2251, pruned_loss=0.04457, over 4829.00 frames.], tot_loss[loss=0.1837, simple_loss=0.2469, pruned_loss=0.06021, over 971820.94 frames.], batch size: 26, lr: 8.51e-04 +2022-05-04 01:05:18,878 INFO [train.py:715] (6/8) 
Epoch 1, batch 25350, loss[loss=0.1729, simple_loss=0.2316, pruned_loss=0.05708, over 4983.00 frames.], tot_loss[loss=0.1823, simple_loss=0.2459, pruned_loss=0.05931, over 971519.48 frames.], batch size: 27, lr: 8.50e-04 +2022-05-04 01:05:58,219 INFO [train.py:715] (6/8) Epoch 1, batch 25400, loss[loss=0.1507, simple_loss=0.221, pruned_loss=0.04022, over 4831.00 frames.], tot_loss[loss=0.1824, simple_loss=0.246, pruned_loss=0.05937, over 971471.73 frames.], batch size: 15, lr: 8.50e-04 +2022-05-04 01:06:38,482 INFO [train.py:715] (6/8) Epoch 1, batch 25450, loss[loss=0.1632, simple_loss=0.2292, pruned_loss=0.04861, over 4866.00 frames.], tot_loss[loss=0.1822, simple_loss=0.2462, pruned_loss=0.0591, over 971727.49 frames.], batch size: 32, lr: 8.50e-04 +2022-05-04 01:07:18,419 INFO [train.py:715] (6/8) Epoch 1, batch 25500, loss[loss=0.154, simple_loss=0.2283, pruned_loss=0.03992, over 4977.00 frames.], tot_loss[loss=0.181, simple_loss=0.2454, pruned_loss=0.0583, over 972171.14 frames.], batch size: 24, lr: 8.49e-04 +2022-05-04 01:07:56,850 INFO [train.py:715] (6/8) Epoch 1, batch 25550, loss[loss=0.171, simple_loss=0.2337, pruned_loss=0.05417, over 4791.00 frames.], tot_loss[loss=0.1822, simple_loss=0.246, pruned_loss=0.05921, over 972506.41 frames.], batch size: 14, lr: 8.49e-04 +2022-05-04 01:08:36,979 INFO [train.py:715] (6/8) Epoch 1, batch 25600, loss[loss=0.1987, simple_loss=0.261, pruned_loss=0.06819, over 4823.00 frames.], tot_loss[loss=0.1825, simple_loss=0.2463, pruned_loss=0.05933, over 972427.60 frames.], batch size: 26, lr: 8.49e-04 +2022-05-04 01:09:17,505 INFO [train.py:715] (6/8) Epoch 1, batch 25650, loss[loss=0.1815, simple_loss=0.2352, pruned_loss=0.06387, over 4926.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2449, pruned_loss=0.05845, over 972628.03 frames.], batch size: 18, lr: 8.48e-04 +2022-05-04 01:09:56,993 INFO [train.py:715] (6/8) Epoch 1, batch 25700, loss[loss=0.1883, simple_loss=0.2442, pruned_loss=0.06617, over 4785.00 frames.], tot_loss[loss=0.1814, simple_loss=0.2451, pruned_loss=0.05883, over 972237.74 frames.], batch size: 14, lr: 8.48e-04 +2022-05-04 01:10:36,900 INFO [train.py:715] (6/8) Epoch 1, batch 25750, loss[loss=0.1919, simple_loss=0.2653, pruned_loss=0.05927, over 4689.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2448, pruned_loss=0.05844, over 972253.38 frames.], batch size: 15, lr: 8.48e-04 +2022-05-04 01:11:17,397 INFO [train.py:715] (6/8) Epoch 1, batch 25800, loss[loss=0.1783, simple_loss=0.2495, pruned_loss=0.05361, over 4823.00 frames.], tot_loss[loss=0.1821, simple_loss=0.2458, pruned_loss=0.05925, over 971767.70 frames.], batch size: 26, lr: 8.47e-04 +2022-05-04 01:11:56,820 INFO [train.py:715] (6/8) Epoch 1, batch 25850, loss[loss=0.1683, simple_loss=0.2404, pruned_loss=0.04814, over 4968.00 frames.], tot_loss[loss=0.1823, simple_loss=0.246, pruned_loss=0.05929, over 971725.63 frames.], batch size: 24, lr: 8.47e-04 +2022-05-04 01:12:35,654 INFO [train.py:715] (6/8) Epoch 1, batch 25900, loss[loss=0.2038, simple_loss=0.2644, pruned_loss=0.07162, over 4843.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2466, pruned_loss=0.05985, over 971756.59 frames.], batch size: 20, lr: 8.47e-04 +2022-05-04 01:13:15,326 INFO [train.py:715] (6/8) Epoch 1, batch 25950, loss[loss=0.2022, simple_loss=0.2639, pruned_loss=0.07028, over 4807.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2451, pruned_loss=0.05871, over 972079.25 frames.], batch size: 25, lr: 8.46e-04 +2022-05-04 01:13:55,207 INFO [train.py:715] (6/8) Epoch 1, batch 26000, 
loss[loss=0.2242, simple_loss=0.2721, pruned_loss=0.08819, over 4751.00 frames.], tot_loss[loss=0.1807, simple_loss=0.2443, pruned_loss=0.05857, over 971055.77 frames.], batch size: 19, lr: 8.46e-04 +2022-05-04 01:14:34,097 INFO [train.py:715] (6/8) Epoch 1, batch 26050, loss[loss=0.1807, simple_loss=0.2481, pruned_loss=0.05662, over 4978.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2446, pruned_loss=0.05884, over 971172.05 frames.], batch size: 15, lr: 8.46e-04 +2022-05-04 01:15:13,492 INFO [train.py:715] (6/8) Epoch 1, batch 26100, loss[loss=0.1827, simple_loss=0.2389, pruned_loss=0.06322, over 4903.00 frames.], tot_loss[loss=0.181, simple_loss=0.245, pruned_loss=0.05854, over 972055.87 frames.], batch size: 18, lr: 8.45e-04 +2022-05-04 01:15:53,625 INFO [train.py:715] (6/8) Epoch 1, batch 26150, loss[loss=0.1709, simple_loss=0.247, pruned_loss=0.04741, over 4909.00 frames.], tot_loss[loss=0.181, simple_loss=0.2448, pruned_loss=0.05854, over 972123.55 frames.], batch size: 29, lr: 8.45e-04 +2022-05-04 01:16:32,575 INFO [train.py:715] (6/8) Epoch 1, batch 26200, loss[loss=0.2092, simple_loss=0.2598, pruned_loss=0.0793, over 4978.00 frames.], tot_loss[loss=0.1819, simple_loss=0.2459, pruned_loss=0.05892, over 972164.40 frames.], batch size: 14, lr: 8.44e-04 +2022-05-04 01:17:11,438 INFO [train.py:715] (6/8) Epoch 1, batch 26250, loss[loss=0.2063, simple_loss=0.2633, pruned_loss=0.07465, over 4914.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2449, pruned_loss=0.05821, over 972407.35 frames.], batch size: 19, lr: 8.44e-04 +2022-05-04 01:17:51,346 INFO [train.py:715] (6/8) Epoch 1, batch 26300, loss[loss=0.1452, simple_loss=0.2208, pruned_loss=0.0348, over 4745.00 frames.], tot_loss[loss=0.179, simple_loss=0.2437, pruned_loss=0.05718, over 972362.14 frames.], batch size: 19, lr: 8.44e-04 +2022-05-04 01:18:31,203 INFO [train.py:715] (6/8) Epoch 1, batch 26350, loss[loss=0.1518, simple_loss=0.2218, pruned_loss=0.04088, over 4865.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2443, pruned_loss=0.05714, over 972692.84 frames.], batch size: 32, lr: 8.43e-04 +2022-05-04 01:19:09,971 INFO [train.py:715] (6/8) Epoch 1, batch 26400, loss[loss=0.1793, simple_loss=0.2497, pruned_loss=0.05446, over 4988.00 frames.], tot_loss[loss=0.1807, simple_loss=0.2453, pruned_loss=0.05803, over 973731.16 frames.], batch size: 25, lr: 8.43e-04 +2022-05-04 01:19:49,175 INFO [train.py:715] (6/8) Epoch 1, batch 26450, loss[loss=0.1742, simple_loss=0.2367, pruned_loss=0.05583, over 4947.00 frames.], tot_loss[loss=0.182, simple_loss=0.2464, pruned_loss=0.05876, over 973650.98 frames.], batch size: 29, lr: 8.43e-04 +2022-05-04 01:20:28,909 INFO [train.py:715] (6/8) Epoch 1, batch 26500, loss[loss=0.165, simple_loss=0.2309, pruned_loss=0.04955, over 4812.00 frames.], tot_loss[loss=0.1818, simple_loss=0.246, pruned_loss=0.0588, over 973289.04 frames.], batch size: 21, lr: 8.42e-04 +2022-05-04 01:21:08,261 INFO [train.py:715] (6/8) Epoch 1, batch 26550, loss[loss=0.1898, simple_loss=0.2607, pruned_loss=0.05944, over 4949.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2445, pruned_loss=0.05763, over 972996.77 frames.], batch size: 21, lr: 8.42e-04 +2022-05-04 01:21:47,619 INFO [train.py:715] (6/8) Epoch 1, batch 26600, loss[loss=0.1895, simple_loss=0.2573, pruned_loss=0.06082, over 4892.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2442, pruned_loss=0.05732, over 972269.94 frames.], batch size: 22, lr: 8.42e-04 +2022-05-04 01:22:27,659 INFO [train.py:715] (6/8) Epoch 1, batch 26650, loss[loss=0.1703, 
simple_loss=0.23, pruned_loss=0.05529, over 4952.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2446, pruned_loss=0.05811, over 973434.80 frames.], batch size: 29, lr: 8.41e-04 +2022-05-04 01:23:07,614 INFO [train.py:715] (6/8) Epoch 1, batch 26700, loss[loss=0.2488, simple_loss=0.2945, pruned_loss=0.1015, over 4806.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2442, pruned_loss=0.05818, over 972439.36 frames.], batch size: 24, lr: 8.41e-04 +2022-05-04 01:23:46,586 INFO [train.py:715] (6/8) Epoch 1, batch 26750, loss[loss=0.1469, simple_loss=0.2132, pruned_loss=0.04029, over 4826.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2443, pruned_loss=0.05876, over 972921.69 frames.], batch size: 12, lr: 8.41e-04 +2022-05-04 01:24:26,591 INFO [train.py:715] (6/8) Epoch 1, batch 26800, loss[loss=0.2031, simple_loss=0.2674, pruned_loss=0.06943, over 4991.00 frames.], tot_loss[loss=0.1814, simple_loss=0.245, pruned_loss=0.05884, over 973096.01 frames.], batch size: 26, lr: 8.40e-04 +2022-05-04 01:25:06,141 INFO [train.py:715] (6/8) Epoch 1, batch 26850, loss[loss=0.1792, simple_loss=0.244, pruned_loss=0.05724, over 4882.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2452, pruned_loss=0.05868, over 973238.17 frames.], batch size: 32, lr: 8.40e-04 +2022-05-04 01:25:45,417 INFO [train.py:715] (6/8) Epoch 1, batch 26900, loss[loss=0.1813, simple_loss=0.2376, pruned_loss=0.06248, over 4792.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2441, pruned_loss=0.05825, over 973373.47 frames.], batch size: 21, lr: 8.40e-04 +2022-05-04 01:26:24,114 INFO [train.py:715] (6/8) Epoch 1, batch 26950, loss[loss=0.1538, simple_loss=0.2204, pruned_loss=0.04362, over 4915.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2433, pruned_loss=0.05793, over 973753.87 frames.], batch size: 23, lr: 8.39e-04 +2022-05-04 01:27:04,127 INFO [train.py:715] (6/8) Epoch 1, batch 27000, loss[loss=0.1583, simple_loss=0.2286, pruned_loss=0.04396, over 4952.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2432, pruned_loss=0.05798, over 974026.05 frames.], batch size: 24, lr: 8.39e-04 +2022-05-04 01:27:04,128 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 01:27:12,718 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.1212, simple_loss=0.2081, pruned_loss=0.01718, over 914524.00 frames. 
+2022-05-04 01:27:53,063 INFO [train.py:715] (6/8) Epoch 1, batch 27050, loss[loss=0.1577, simple_loss=0.2299, pruned_loss=0.04277, over 4866.00 frames.], tot_loss[loss=0.1802, simple_loss=0.2439, pruned_loss=0.0582, over 973927.61 frames.], batch size: 20, lr: 8.39e-04 +2022-05-04 01:28:33,376 INFO [train.py:715] (6/8) Epoch 1, batch 27100, loss[loss=0.1719, simple_loss=0.2417, pruned_loss=0.05102, over 4892.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2448, pruned_loss=0.05843, over 973910.51 frames.], batch size: 19, lr: 8.38e-04 +2022-05-04 01:29:11,781 INFO [train.py:715] (6/8) Epoch 1, batch 27150, loss[loss=0.1719, simple_loss=0.2427, pruned_loss=0.0506, over 4887.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2455, pruned_loss=0.05898, over 972537.55 frames.], batch size: 16, lr: 8.38e-04 +2022-05-04 01:29:51,726 INFO [train.py:715] (6/8) Epoch 1, batch 27200, loss[loss=0.1713, simple_loss=0.2368, pruned_loss=0.05286, over 4876.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2449, pruned_loss=0.058, over 972919.10 frames.], batch size: 13, lr: 8.38e-04 +2022-05-04 01:30:32,016 INFO [train.py:715] (6/8) Epoch 1, batch 27250, loss[loss=0.2183, simple_loss=0.2737, pruned_loss=0.08148, over 4746.00 frames.], tot_loss[loss=0.1818, simple_loss=0.2458, pruned_loss=0.05891, over 973036.21 frames.], batch size: 16, lr: 8.37e-04 +2022-05-04 01:31:11,132 INFO [train.py:715] (6/8) Epoch 1, batch 27300, loss[loss=0.1754, simple_loss=0.2404, pruned_loss=0.0552, over 4988.00 frames.], tot_loss[loss=0.181, simple_loss=0.2453, pruned_loss=0.05833, over 973678.26 frames.], batch size: 24, lr: 8.37e-04 +2022-05-04 01:31:49,675 INFO [train.py:715] (6/8) Epoch 1, batch 27350, loss[loss=0.2029, simple_loss=0.2747, pruned_loss=0.06553, over 4874.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2458, pruned_loss=0.05826, over 973189.77 frames.], batch size: 19, lr: 8.37e-04 +2022-05-04 01:32:29,601 INFO [train.py:715] (6/8) Epoch 1, batch 27400, loss[loss=0.1569, simple_loss=0.2215, pruned_loss=0.04615, over 4853.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2449, pruned_loss=0.05789, over 972443.42 frames.], batch size: 20, lr: 8.36e-04 +2022-05-04 01:33:09,599 INFO [train.py:715] (6/8) Epoch 1, batch 27450, loss[loss=0.1858, simple_loss=0.2517, pruned_loss=0.06, over 4901.00 frames.], tot_loss[loss=0.1802, simple_loss=0.2447, pruned_loss=0.05787, over 972351.99 frames.], batch size: 19, lr: 8.36e-04 +2022-05-04 01:33:48,108 INFO [train.py:715] (6/8) Epoch 1, batch 27500, loss[loss=0.1785, simple_loss=0.2466, pruned_loss=0.05521, over 4826.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2456, pruned_loss=0.05829, over 972305.44 frames.], batch size: 25, lr: 8.36e-04 +2022-05-04 01:34:27,763 INFO [train.py:715] (6/8) Epoch 1, batch 27550, loss[loss=0.1653, simple_loss=0.2294, pruned_loss=0.05064, over 4771.00 frames.], tot_loss[loss=0.1815, simple_loss=0.2458, pruned_loss=0.05862, over 971597.07 frames.], batch size: 12, lr: 8.35e-04 +2022-05-04 01:35:07,991 INFO [train.py:715] (6/8) Epoch 1, batch 27600, loss[loss=0.2001, simple_loss=0.2619, pruned_loss=0.06913, over 4836.00 frames.], tot_loss[loss=0.1815, simple_loss=0.2456, pruned_loss=0.05871, over 971986.53 frames.], batch size: 27, lr: 8.35e-04 +2022-05-04 01:35:47,296 INFO [train.py:715] (6/8) Epoch 1, batch 27650, loss[loss=0.1687, simple_loss=0.2278, pruned_loss=0.05482, over 4836.00 frames.], tot_loss[loss=0.1821, simple_loss=0.2456, pruned_loss=0.05925, over 970986.38 frames.], batch size: 12, lr: 8.35e-04 +2022-05-04 01:36:26,736 
INFO [train.py:715] (6/8) Epoch 1, batch 27700, loss[loss=0.1572, simple_loss=0.2303, pruned_loss=0.04208, over 4797.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2451, pruned_loss=0.05878, over 970878.51 frames.], batch size: 25, lr: 8.34e-04 +2022-05-04 01:37:07,285 INFO [train.py:715] (6/8) Epoch 1, batch 27750, loss[loss=0.1791, simple_loss=0.2561, pruned_loss=0.0511, over 4989.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2452, pruned_loss=0.05913, over 971758.66 frames.], batch size: 20, lr: 8.34e-04 +2022-05-04 01:37:47,072 INFO [train.py:715] (6/8) Epoch 1, batch 27800, loss[loss=0.1874, simple_loss=0.252, pruned_loss=0.06136, over 4958.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2446, pruned_loss=0.05861, over 972059.72 frames.], batch size: 21, lr: 8.34e-04 +2022-05-04 01:38:26,360 INFO [train.py:715] (6/8) Epoch 1, batch 27850, loss[loss=0.1862, simple_loss=0.2497, pruned_loss=0.06138, over 4923.00 frames.], tot_loss[loss=0.1809, simple_loss=0.245, pruned_loss=0.05846, over 972653.91 frames.], batch size: 39, lr: 8.33e-04 +2022-05-04 01:39:06,467 INFO [train.py:715] (6/8) Epoch 1, batch 27900, loss[loss=0.1957, simple_loss=0.2586, pruned_loss=0.06642, over 4905.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2444, pruned_loss=0.0582, over 972178.30 frames.], batch size: 18, lr: 8.33e-04 +2022-05-04 01:39:45,948 INFO [train.py:715] (6/8) Epoch 1, batch 27950, loss[loss=0.1386, simple_loss=0.2092, pruned_loss=0.03394, over 4893.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2443, pruned_loss=0.05827, over 973353.03 frames.], batch size: 19, lr: 8.33e-04 +2022-05-04 01:40:25,329 INFO [train.py:715] (6/8) Epoch 1, batch 28000, loss[loss=0.2139, simple_loss=0.2743, pruned_loss=0.07676, over 4821.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2447, pruned_loss=0.05858, over 972615.61 frames.], batch size: 27, lr: 8.32e-04 +2022-05-04 01:41:04,108 INFO [train.py:715] (6/8) Epoch 1, batch 28050, loss[loss=0.184, simple_loss=0.2401, pruned_loss=0.064, over 4822.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2439, pruned_loss=0.05798, over 972217.02 frames.], batch size: 26, lr: 8.32e-04 +2022-05-04 01:41:44,523 INFO [train.py:715] (6/8) Epoch 1, batch 28100, loss[loss=0.1915, simple_loss=0.2511, pruned_loss=0.06593, over 4702.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2442, pruned_loss=0.05825, over 972133.57 frames.], batch size: 15, lr: 8.32e-04 +2022-05-04 01:42:23,905 INFO [train.py:715] (6/8) Epoch 1, batch 28150, loss[loss=0.1486, simple_loss=0.2175, pruned_loss=0.03981, over 4838.00 frames.], tot_loss[loss=0.1805, simple_loss=0.2441, pruned_loss=0.05841, over 972192.87 frames.], batch size: 15, lr: 8.31e-04 +2022-05-04 01:43:03,291 INFO [train.py:715] (6/8) Epoch 1, batch 28200, loss[loss=0.1191, simple_loss=0.1843, pruned_loss=0.02693, over 4810.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2429, pruned_loss=0.05745, over 972015.08 frames.], batch size: 12, lr: 8.31e-04 +2022-05-04 01:43:43,976 INFO [train.py:715] (6/8) Epoch 1, batch 28250, loss[loss=0.1822, simple_loss=0.2478, pruned_loss=0.05824, over 4932.00 frames.], tot_loss[loss=0.1804, simple_loss=0.244, pruned_loss=0.05838, over 971944.20 frames.], batch size: 29, lr: 8.31e-04 +2022-05-04 01:44:24,421 INFO [train.py:715] (6/8) Epoch 1, batch 28300, loss[loss=0.1934, simple_loss=0.2585, pruned_loss=0.06412, over 4965.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2432, pruned_loss=0.05768, over 971604.50 frames.], batch size: 24, lr: 8.30e-04 +2022-05-04 01:45:03,753 INFO [train.py:715] (6/8) 
Epoch 1, batch 28350, loss[loss=0.1767, simple_loss=0.2359, pruned_loss=0.05875, over 4906.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2437, pruned_loss=0.05784, over 972585.51 frames.], batch size: 23, lr: 8.30e-04 +2022-05-04 01:45:42,704 INFO [train.py:715] (6/8) Epoch 1, batch 28400, loss[loss=0.1662, simple_loss=0.2394, pruned_loss=0.04648, over 4777.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2439, pruned_loss=0.05831, over 973209.38 frames.], batch size: 14, lr: 8.30e-04 +2022-05-04 01:46:23,128 INFO [train.py:715] (6/8) Epoch 1, batch 28450, loss[loss=0.1738, simple_loss=0.2477, pruned_loss=0.04999, over 4899.00 frames.], tot_loss[loss=0.1801, simple_loss=0.2443, pruned_loss=0.05799, over 972592.04 frames.], batch size: 17, lr: 8.29e-04 +2022-05-04 01:47:02,717 INFO [train.py:715] (6/8) Epoch 1, batch 28500, loss[loss=0.1481, simple_loss=0.2191, pruned_loss=0.03851, over 4967.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2438, pruned_loss=0.05765, over 972262.25 frames.], batch size: 28, lr: 8.29e-04 +2022-05-04 01:47:41,719 INFO [train.py:715] (6/8) Epoch 1, batch 28550, loss[loss=0.1828, simple_loss=0.2416, pruned_loss=0.06199, over 4756.00 frames.], tot_loss[loss=0.1796, simple_loss=0.244, pruned_loss=0.05755, over 972030.56 frames.], batch size: 16, lr: 8.29e-04 +2022-05-04 01:48:22,006 INFO [train.py:715] (6/8) Epoch 1, batch 28600, loss[loss=0.1709, simple_loss=0.236, pruned_loss=0.05287, over 4753.00 frames.], tot_loss[loss=0.179, simple_loss=0.2436, pruned_loss=0.05721, over 972253.62 frames.], batch size: 19, lr: 8.28e-04 +2022-05-04 01:49:01,952 INFO [train.py:715] (6/8) Epoch 1, batch 28650, loss[loss=0.1867, simple_loss=0.255, pruned_loss=0.05922, over 4694.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2436, pruned_loss=0.0576, over 971733.02 frames.], batch size: 15, lr: 8.28e-04 +2022-05-04 01:49:41,102 INFO [train.py:715] (6/8) Epoch 1, batch 28700, loss[loss=0.1543, simple_loss=0.2123, pruned_loss=0.04813, over 4803.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2434, pruned_loss=0.05771, over 971353.57 frames.], batch size: 12, lr: 8.28e-04 +2022-05-04 01:50:20,245 INFO [train.py:715] (6/8) Epoch 1, batch 28750, loss[loss=0.2056, simple_loss=0.2602, pruned_loss=0.07552, over 4877.00 frames.], tot_loss[loss=0.1792, simple_loss=0.2435, pruned_loss=0.05751, over 971186.07 frames.], batch size: 16, lr: 8.27e-04 +2022-05-04 01:51:00,840 INFO [train.py:715] (6/8) Epoch 1, batch 28800, loss[loss=0.171, simple_loss=0.237, pruned_loss=0.05251, over 4849.00 frames.], tot_loss[loss=0.1784, simple_loss=0.2427, pruned_loss=0.05704, over 971424.83 frames.], batch size: 20, lr: 8.27e-04 +2022-05-04 01:51:40,148 INFO [train.py:715] (6/8) Epoch 1, batch 28850, loss[loss=0.1767, simple_loss=0.2396, pruned_loss=0.05695, over 4857.00 frames.], tot_loss[loss=0.1774, simple_loss=0.2422, pruned_loss=0.05628, over 970945.84 frames.], batch size: 20, lr: 8.27e-04 +2022-05-04 01:52:19,910 INFO [train.py:715] (6/8) Epoch 1, batch 28900, loss[loss=0.2241, simple_loss=0.2759, pruned_loss=0.08613, over 4867.00 frames.], tot_loss[loss=0.1778, simple_loss=0.2428, pruned_loss=0.05642, over 971180.53 frames.], batch size: 20, lr: 8.27e-04 +2022-05-04 01:53:00,620 INFO [train.py:715] (6/8) Epoch 1, batch 28950, loss[loss=0.1911, simple_loss=0.2554, pruned_loss=0.06334, over 4689.00 frames.], tot_loss[loss=0.1778, simple_loss=0.2428, pruned_loss=0.05636, over 970840.94 frames.], batch size: 15, lr: 8.26e-04 +2022-05-04 01:53:40,741 INFO [train.py:715] (6/8) Epoch 1, batch 29000, 
loss[loss=0.1922, simple_loss=0.2474, pruned_loss=0.0685, over 4915.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2442, pruned_loss=0.05728, over 971705.88 frames.], batch size: 18, lr: 8.26e-04 +2022-05-04 01:54:19,717 INFO [train.py:715] (6/8) Epoch 1, batch 29050, loss[loss=0.187, simple_loss=0.251, pruned_loss=0.06153, over 4865.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2439, pruned_loss=0.05718, over 972500.56 frames.], batch size: 16, lr: 8.26e-04 +2022-05-04 01:54:59,589 INFO [train.py:715] (6/8) Epoch 1, batch 29100, loss[loss=0.1864, simple_loss=0.2375, pruned_loss=0.06768, over 4927.00 frames.], tot_loss[loss=0.18, simple_loss=0.2447, pruned_loss=0.05767, over 972252.49 frames.], batch size: 23, lr: 8.25e-04 +2022-05-04 01:55:40,270 INFO [train.py:715] (6/8) Epoch 1, batch 29150, loss[loss=0.1856, simple_loss=0.2582, pruned_loss=0.05654, over 4965.00 frames.], tot_loss[loss=0.18, simple_loss=0.2444, pruned_loss=0.05778, over 971525.59 frames.], batch size: 15, lr: 8.25e-04 +2022-05-04 01:56:22,369 INFO [train.py:715] (6/8) Epoch 1, batch 29200, loss[loss=0.1504, simple_loss=0.2249, pruned_loss=0.038, over 4745.00 frames.], tot_loss[loss=0.1806, simple_loss=0.245, pruned_loss=0.05808, over 971939.05 frames.], batch size: 16, lr: 8.25e-04 +2022-05-04 01:57:01,399 INFO [train.py:715] (6/8) Epoch 1, batch 29250, loss[loss=0.1653, simple_loss=0.2385, pruned_loss=0.04599, over 4800.00 frames.], tot_loss[loss=0.1802, simple_loss=0.2451, pruned_loss=0.05769, over 971640.39 frames.], batch size: 18, lr: 8.24e-04 +2022-05-04 01:57:41,947 INFO [train.py:715] (6/8) Epoch 1, batch 29300, loss[loss=0.1685, simple_loss=0.2448, pruned_loss=0.04608, over 4879.00 frames.], tot_loss[loss=0.1804, simple_loss=0.245, pruned_loss=0.05789, over 972132.15 frames.], batch size: 16, lr: 8.24e-04 +2022-05-04 01:58:22,152 INFO [train.py:715] (6/8) Epoch 1, batch 29350, loss[loss=0.1878, simple_loss=0.2494, pruned_loss=0.06306, over 4780.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2448, pruned_loss=0.05818, over 971502.04 frames.], batch size: 18, lr: 8.24e-04 +2022-05-04 01:59:00,694 INFO [train.py:715] (6/8) Epoch 1, batch 29400, loss[loss=0.1787, simple_loss=0.2529, pruned_loss=0.05228, over 4812.00 frames.], tot_loss[loss=0.1809, simple_loss=0.245, pruned_loss=0.0584, over 972464.25 frames.], batch size: 27, lr: 8.23e-04 +2022-05-04 01:59:40,304 INFO [train.py:715] (6/8) Epoch 1, batch 29450, loss[loss=0.1726, simple_loss=0.2461, pruned_loss=0.04957, over 4920.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2439, pruned_loss=0.0577, over 972974.46 frames.], batch size: 23, lr: 8.23e-04 +2022-05-04 02:00:20,006 INFO [train.py:715] (6/8) Epoch 1, batch 29500, loss[loss=0.1777, simple_loss=0.2478, pruned_loss=0.05385, over 4974.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2435, pruned_loss=0.05735, over 972982.23 frames.], batch size: 15, lr: 8.23e-04 +2022-05-04 02:00:59,407 INFO [train.py:715] (6/8) Epoch 1, batch 29550, loss[loss=0.1643, simple_loss=0.2445, pruned_loss=0.0421, over 4930.00 frames.], tot_loss[loss=0.1792, simple_loss=0.2437, pruned_loss=0.05736, over 972748.57 frames.], batch size: 29, lr: 8.22e-04 +2022-05-04 02:01:37,997 INFO [train.py:715] (6/8) Epoch 1, batch 29600, loss[loss=0.1628, simple_loss=0.2348, pruned_loss=0.0454, over 4818.00 frames.], tot_loss[loss=0.1795, simple_loss=0.2442, pruned_loss=0.05735, over 973975.30 frames.], batch size: 26, lr: 8.22e-04 +2022-05-04 02:02:18,259 INFO [train.py:715] (6/8) Epoch 1, batch 29650, loss[loss=0.1404, 
simple_loss=0.2113, pruned_loss=0.03471, over 4810.00 frames.], tot_loss[loss=0.18, simple_loss=0.2446, pruned_loss=0.05768, over 973703.98 frames.], batch size: 27, lr: 8.22e-04 +2022-05-04 02:02:58,334 INFO [train.py:715] (6/8) Epoch 1, batch 29700, loss[loss=0.1891, simple_loss=0.2559, pruned_loss=0.06116, over 4826.00 frames.], tot_loss[loss=0.1801, simple_loss=0.2451, pruned_loss=0.05752, over 973571.90 frames.], batch size: 26, lr: 8.21e-04 +2022-05-04 02:03:36,333 INFO [train.py:715] (6/8) Epoch 1, batch 29750, loss[loss=0.1484, simple_loss=0.2202, pruned_loss=0.03833, over 4865.00 frames.], tot_loss[loss=0.1784, simple_loss=0.2436, pruned_loss=0.05659, over 973066.88 frames.], batch size: 20, lr: 8.21e-04 +2022-05-04 02:04:15,634 INFO [train.py:715] (6/8) Epoch 1, batch 29800, loss[loss=0.1704, simple_loss=0.2288, pruned_loss=0.05596, over 4907.00 frames.], tot_loss[loss=0.1778, simple_loss=0.2428, pruned_loss=0.05638, over 972773.61 frames.], batch size: 17, lr: 8.21e-04 +2022-05-04 02:04:55,053 INFO [train.py:715] (6/8) Epoch 1, batch 29850, loss[loss=0.1979, simple_loss=0.2579, pruned_loss=0.06897, over 4954.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2435, pruned_loss=0.05712, over 972708.45 frames.], batch size: 35, lr: 8.20e-04 +2022-05-04 02:05:34,427 INFO [train.py:715] (6/8) Epoch 1, batch 29900, loss[loss=0.1635, simple_loss=0.2211, pruned_loss=0.0529, over 4848.00 frames.], tot_loss[loss=0.179, simple_loss=0.2438, pruned_loss=0.05707, over 973640.54 frames.], batch size: 13, lr: 8.20e-04 +2022-05-04 02:06:12,933 INFO [train.py:715] (6/8) Epoch 1, batch 29950, loss[loss=0.1466, simple_loss=0.2207, pruned_loss=0.03621, over 4851.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2438, pruned_loss=0.05672, over 974026.92 frames.], batch size: 20, lr: 8.20e-04 +2022-05-04 02:06:52,739 INFO [train.py:715] (6/8) Epoch 1, batch 30000, loss[loss=0.1801, simple_loss=0.2397, pruned_loss=0.06022, over 4944.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2439, pruned_loss=0.05668, over 973775.85 frames.], batch size: 23, lr: 8.20e-04 +2022-05-04 02:06:52,739 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 02:07:09,692 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.1207, simple_loss=0.2076, pruned_loss=0.01687, over 914524.00 frames. 
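The per-batch records above all follow one fixed shape: a loss[...] block for the current batch, a smoothed tot_loss[...] block, then the batch size and learning rate, while the periodic validation records differ only in starting with "Epoch N, validation:". A minimal Python sketch for pulling those fields out of a saved copy of the log follows; the regular expression simply mirrors the format visible here, and the file path, function name and __main__ driver are invented for the example (they are not part of train.py or icefall).

import re
from typing import Dict, Iterator

# Matches records such as the one at batch 30000 above:
#   Epoch 1, batch 30000, loss[loss=0.1801, simple_loss=0.2397,
#   pruned_loss=0.06022, over 4944.00 frames.], tot_loss[loss=0.1786, ...],
#   batch size: 23, lr: 8.20e-04
BATCH_RE = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), "
    r"loss\[loss=(?P<loss>[\d.]+), simple_loss=(?P<simple_loss>[\d.]+), "
    r"pruned_loss=(?P<pruned_loss>[\d.]+), over (?P<frames>[\d.]+) frames\.\], "
    r"tot_loss\[loss=(?P<tot_loss>[\d.]+),.*?\], "
    r"batch size: (?P<batch_size>\d+), lr: (?P<lr>[\d.e+-]+)"
)

def iter_batch_records(log_text: str) -> Iterator[Dict[str, float]]:
    """Yield one dict of numeric fields per 'Epoch N, batch M, ...' record."""
    for m in BATCH_RE.finditer(log_text):
        yield {k: float(v) for k, v in m.groupdict().items()}

if __name__ == "__main__":
    with open("train.log") as f:  # placeholder path, not the real filename
        for rec in iter_batch_records(f.read()):
            print(int(rec["epoch"]), int(rec["batch"]),
                  rec["tot_loss"], rec["lr"])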
+2022-05-04 02:07:50,184 INFO [train.py:715] (6/8) Epoch 1, batch 30050, loss[loss=0.1774, simple_loss=0.2538, pruned_loss=0.05052, over 4923.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2436, pruned_loss=0.05646, over 974173.40 frames.], batch size: 21, lr: 8.19e-04 +2022-05-04 02:08:29,662 INFO [train.py:715] (6/8) Epoch 1, batch 30100, loss[loss=0.1722, simple_loss=0.2408, pruned_loss=0.05181, over 4867.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2445, pruned_loss=0.05742, over 973956.58 frames.], batch size: 16, lr: 8.19e-04 +2022-05-04 02:09:09,062 INFO [train.py:715] (6/8) Epoch 1, batch 30150, loss[loss=0.1913, simple_loss=0.2486, pruned_loss=0.067, over 4813.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2439, pruned_loss=0.05741, over 972698.11 frames.], batch size: 27, lr: 8.19e-04 +2022-05-04 02:09:48,372 INFO [train.py:715] (6/8) Epoch 1, batch 30200, loss[loss=0.215, simple_loss=0.2801, pruned_loss=0.07495, over 4977.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2443, pruned_loss=0.0578, over 972330.37 frames.], batch size: 28, lr: 8.18e-04 +2022-05-04 02:10:28,840 INFO [train.py:715] (6/8) Epoch 1, batch 30250, loss[loss=0.1815, simple_loss=0.2455, pruned_loss=0.0587, over 4865.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2438, pruned_loss=0.05738, over 972645.52 frames.], batch size: 32, lr: 8.18e-04 +2022-05-04 02:11:08,802 INFO [train.py:715] (6/8) Epoch 1, batch 30300, loss[loss=0.1923, simple_loss=0.2518, pruned_loss=0.06642, over 4920.00 frames.], tot_loss[loss=0.179, simple_loss=0.2433, pruned_loss=0.05729, over 972839.72 frames.], batch size: 18, lr: 8.18e-04 +2022-05-04 02:11:47,712 INFO [train.py:715] (6/8) Epoch 1, batch 30350, loss[loss=0.1929, simple_loss=0.2438, pruned_loss=0.07105, over 4832.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2427, pruned_loss=0.05671, over 972707.47 frames.], batch size: 30, lr: 8.17e-04 +2022-05-04 02:12:27,778 INFO [train.py:715] (6/8) Epoch 1, batch 30400, loss[loss=0.1681, simple_loss=0.2378, pruned_loss=0.04919, over 4958.00 frames.], tot_loss[loss=0.179, simple_loss=0.2435, pruned_loss=0.05722, over 972029.87 frames.], batch size: 28, lr: 8.17e-04 +2022-05-04 02:13:07,269 INFO [train.py:715] (6/8) Epoch 1, batch 30450, loss[loss=0.1853, simple_loss=0.2567, pruned_loss=0.05692, over 4786.00 frames.], tot_loss[loss=0.1798, simple_loss=0.2439, pruned_loss=0.05789, over 972208.01 frames.], batch size: 17, lr: 8.17e-04 +2022-05-04 02:13:46,444 INFO [train.py:715] (6/8) Epoch 1, batch 30500, loss[loss=0.1444, simple_loss=0.2105, pruned_loss=0.0391, over 4781.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2439, pruned_loss=0.05764, over 971556.24 frames.], batch size: 12, lr: 8.16e-04 +2022-05-04 02:14:25,544 INFO [train.py:715] (6/8) Epoch 1, batch 30550, loss[loss=0.1602, simple_loss=0.2148, pruned_loss=0.0528, over 4787.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2434, pruned_loss=0.05724, over 972786.29 frames.], batch size: 12, lr: 8.16e-04 +2022-05-04 02:15:05,345 INFO [train.py:715] (6/8) Epoch 1, batch 30600, loss[loss=0.1665, simple_loss=0.2278, pruned_loss=0.05261, over 4905.00 frames.], tot_loss[loss=0.1787, simple_loss=0.2433, pruned_loss=0.05703, over 973212.87 frames.], batch size: 17, lr: 8.16e-04 +2022-05-04 02:15:44,808 INFO [train.py:715] (6/8) Epoch 1, batch 30650, loss[loss=0.2306, simple_loss=0.2695, pruned_loss=0.09581, over 4838.00 frames.], tot_loss[loss=0.1787, simple_loss=0.2433, pruned_loss=0.05704, over 972308.67 frames.], batch size: 30, lr: 8.15e-04 +2022-05-04 02:16:23,390 
INFO [train.py:715] (6/8) Epoch 1, batch 30700, loss[loss=0.1615, simple_loss=0.2273, pruned_loss=0.04783, over 4809.00 frames.], tot_loss[loss=0.1777, simple_loss=0.2425, pruned_loss=0.05639, over 971937.60 frames.], batch size: 21, lr: 8.15e-04 +2022-05-04 02:17:03,639 INFO [train.py:715] (6/8) Epoch 1, batch 30750, loss[loss=0.1758, simple_loss=0.2307, pruned_loss=0.0605, over 4690.00 frames.], tot_loss[loss=0.1782, simple_loss=0.2427, pruned_loss=0.05687, over 972100.52 frames.], batch size: 15, lr: 8.15e-04 +2022-05-04 02:17:43,213 INFO [train.py:715] (6/8) Epoch 1, batch 30800, loss[loss=0.141, simple_loss=0.2119, pruned_loss=0.03509, over 4932.00 frames.], tot_loss[loss=0.178, simple_loss=0.2426, pruned_loss=0.05668, over 972792.68 frames.], batch size: 23, lr: 8.15e-04 +2022-05-04 02:18:22,130 INFO [train.py:715] (6/8) Epoch 1, batch 30850, loss[loss=0.1844, simple_loss=0.2504, pruned_loss=0.05921, over 4786.00 frames.], tot_loss[loss=0.1784, simple_loss=0.2435, pruned_loss=0.05664, over 971861.50 frames.], batch size: 17, lr: 8.14e-04 +2022-05-04 02:19:01,719 INFO [train.py:715] (6/8) Epoch 1, batch 30900, loss[loss=0.155, simple_loss=0.2133, pruned_loss=0.04834, over 4928.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2433, pruned_loss=0.05646, over 972326.43 frames.], batch size: 18, lr: 8.14e-04 +2022-05-04 02:19:41,346 INFO [train.py:715] (6/8) Epoch 1, batch 30950, loss[loss=0.239, simple_loss=0.287, pruned_loss=0.09553, over 4911.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2449, pruned_loss=0.05747, over 972991.52 frames.], batch size: 39, lr: 8.14e-04 +2022-05-04 02:20:20,856 INFO [train.py:715] (6/8) Epoch 1, batch 31000, loss[loss=0.1711, simple_loss=0.2407, pruned_loss=0.05075, over 4869.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2447, pruned_loss=0.05712, over 972432.47 frames.], batch size: 20, lr: 8.13e-04 +2022-05-04 02:21:00,354 INFO [train.py:715] (6/8) Epoch 1, batch 31050, loss[loss=0.1632, simple_loss=0.2382, pruned_loss=0.04414, over 4962.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2451, pruned_loss=0.05735, over 972417.33 frames.], batch size: 24, lr: 8.13e-04 +2022-05-04 02:21:40,837 INFO [train.py:715] (6/8) Epoch 1, batch 31100, loss[loss=0.2016, simple_loss=0.2595, pruned_loss=0.0718, over 4912.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2453, pruned_loss=0.0573, over 972711.16 frames.], batch size: 23, lr: 8.13e-04 +2022-05-04 02:22:20,584 INFO [train.py:715] (6/8) Epoch 1, batch 31150, loss[loss=0.2177, simple_loss=0.2724, pruned_loss=0.08151, over 4661.00 frames.], tot_loss[loss=0.1805, simple_loss=0.2457, pruned_loss=0.05766, over 971870.75 frames.], batch size: 14, lr: 8.12e-04 +2022-05-04 02:22:59,628 INFO [train.py:715] (6/8) Epoch 1, batch 31200, loss[loss=0.1474, simple_loss=0.2149, pruned_loss=0.03995, over 4795.00 frames.], tot_loss[loss=0.1792, simple_loss=0.2445, pruned_loss=0.05698, over 971841.75 frames.], batch size: 24, lr: 8.12e-04 +2022-05-04 02:23:39,862 INFO [train.py:715] (6/8) Epoch 1, batch 31250, loss[loss=0.2443, simple_loss=0.2917, pruned_loss=0.09849, over 4876.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2447, pruned_loss=0.05735, over 971865.85 frames.], batch size: 16, lr: 8.12e-04 +2022-05-04 02:24:19,624 INFO [train.py:715] (6/8) Epoch 1, batch 31300, loss[loss=0.1653, simple_loss=0.2263, pruned_loss=0.05208, over 4801.00 frames.], tot_loss[loss=0.1794, simple_loss=0.244, pruned_loss=0.05738, over 971841.86 frames.], batch size: 13, lr: 8.11e-04 +2022-05-04 02:24:59,064 INFO [train.py:715] (6/8) 
Epoch 1, batch 31350, loss[loss=0.1868, simple_loss=0.2471, pruned_loss=0.06324, over 4772.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2435, pruned_loss=0.05709, over 971955.94 frames.], batch size: 14, lr: 8.11e-04 +2022-05-04 02:25:38,862 INFO [train.py:715] (6/8) Epoch 1, batch 31400, loss[loss=0.1423, simple_loss=0.2006, pruned_loss=0.04197, over 4785.00 frames.], tot_loss[loss=0.1779, simple_loss=0.2428, pruned_loss=0.05655, over 971692.99 frames.], batch size: 12, lr: 8.11e-04 +2022-05-04 02:26:18,868 INFO [train.py:715] (6/8) Epoch 1, batch 31450, loss[loss=0.1754, simple_loss=0.2325, pruned_loss=0.05921, over 4941.00 frames.], tot_loss[loss=0.1781, simple_loss=0.243, pruned_loss=0.05656, over 972218.20 frames.], batch size: 21, lr: 8.11e-04 +2022-05-04 02:26:58,733 INFO [train.py:715] (6/8) Epoch 1, batch 31500, loss[loss=0.1534, simple_loss=0.2298, pruned_loss=0.03855, over 4932.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2433, pruned_loss=0.05647, over 972395.30 frames.], batch size: 29, lr: 8.10e-04 +2022-05-04 02:27:37,229 INFO [train.py:715] (6/8) Epoch 1, batch 31550, loss[loss=0.1992, simple_loss=0.2736, pruned_loss=0.06242, over 4921.00 frames.], tot_loss[loss=0.1795, simple_loss=0.2443, pruned_loss=0.05733, over 972701.12 frames.], batch size: 29, lr: 8.10e-04 +2022-05-04 02:28:17,419 INFO [train.py:715] (6/8) Epoch 1, batch 31600, loss[loss=0.1488, simple_loss=0.2063, pruned_loss=0.04565, over 4822.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2443, pruned_loss=0.05712, over 972851.37 frames.], batch size: 13, lr: 8.10e-04 +2022-05-04 02:28:57,091 INFO [train.py:715] (6/8) Epoch 1, batch 31650, loss[loss=0.1805, simple_loss=0.2475, pruned_loss=0.05674, over 4972.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2445, pruned_loss=0.05712, over 972936.14 frames.], batch size: 24, lr: 8.09e-04 +2022-05-04 02:29:37,004 INFO [train.py:715] (6/8) Epoch 1, batch 31700, loss[loss=0.1942, simple_loss=0.2632, pruned_loss=0.06259, over 4746.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2445, pruned_loss=0.05687, over 973752.61 frames.], batch size: 19, lr: 8.09e-04 +2022-05-04 02:30:16,367 INFO [train.py:715] (6/8) Epoch 1, batch 31750, loss[loss=0.1704, simple_loss=0.2468, pruned_loss=0.04701, over 4954.00 frames.], tot_loss[loss=0.1788, simple_loss=0.2444, pruned_loss=0.05659, over 972490.72 frames.], batch size: 24, lr: 8.09e-04 +2022-05-04 02:30:56,484 INFO [train.py:715] (6/8) Epoch 1, batch 31800, loss[loss=0.1532, simple_loss=0.2222, pruned_loss=0.04206, over 4780.00 frames.], tot_loss[loss=0.178, simple_loss=0.2439, pruned_loss=0.0561, over 972493.63 frames.], batch size: 17, lr: 8.08e-04 +2022-05-04 02:31:36,274 INFO [train.py:715] (6/8) Epoch 1, batch 31850, loss[loss=0.1568, simple_loss=0.2297, pruned_loss=0.04189, over 4738.00 frames.], tot_loss[loss=0.1777, simple_loss=0.2432, pruned_loss=0.05613, over 972367.70 frames.], batch size: 16, lr: 8.08e-04 +2022-05-04 02:32:15,746 INFO [train.py:715] (6/8) Epoch 1, batch 31900, loss[loss=0.1804, simple_loss=0.2522, pruned_loss=0.0543, over 4907.00 frames.], tot_loss[loss=0.1779, simple_loss=0.2433, pruned_loss=0.0563, over 972629.08 frames.], batch size: 23, lr: 8.08e-04 +2022-05-04 02:32:55,110 INFO [train.py:715] (6/8) Epoch 1, batch 31950, loss[loss=0.1592, simple_loss=0.2205, pruned_loss=0.04899, over 4780.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2435, pruned_loss=0.05634, over 973017.59 frames.], batch size: 12, lr: 8.08e-04 +2022-05-04 02:33:34,640 INFO [train.py:715] (6/8) Epoch 1, batch 32000, 
loss[loss=0.1783, simple_loss=0.2349, pruned_loss=0.06084, over 4944.00 frames.], tot_loss[loss=0.1782, simple_loss=0.2435, pruned_loss=0.05648, over 972238.59 frames.], batch size: 29, lr: 8.07e-04 +2022-05-04 02:34:14,067 INFO [train.py:715] (6/8) Epoch 1, batch 32050, loss[loss=0.1619, simple_loss=0.2357, pruned_loss=0.04403, over 4812.00 frames.], tot_loss[loss=0.1782, simple_loss=0.2432, pruned_loss=0.0566, over 971576.07 frames.], batch size: 25, lr: 8.07e-04 +2022-05-04 02:34:53,320 INFO [train.py:715] (6/8) Epoch 1, batch 32100, loss[loss=0.1762, simple_loss=0.2418, pruned_loss=0.05532, over 4838.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2432, pruned_loss=0.05674, over 971932.25 frames.], batch size: 30, lr: 8.07e-04 +2022-05-04 02:35:32,943 INFO [train.py:715] (6/8) Epoch 1, batch 32150, loss[loss=0.2, simple_loss=0.2611, pruned_loss=0.06948, over 4853.00 frames.], tot_loss[loss=0.1777, simple_loss=0.2425, pruned_loss=0.05648, over 972565.94 frames.], batch size: 20, lr: 8.06e-04 +2022-05-04 02:36:12,941 INFO [train.py:715] (6/8) Epoch 1, batch 32200, loss[loss=0.1985, simple_loss=0.2697, pruned_loss=0.06369, over 4948.00 frames.], tot_loss[loss=0.1778, simple_loss=0.2425, pruned_loss=0.05654, over 971411.41 frames.], batch size: 29, lr: 8.06e-04 +2022-05-04 02:36:51,842 INFO [train.py:715] (6/8) Epoch 1, batch 32250, loss[loss=0.2152, simple_loss=0.2707, pruned_loss=0.07988, over 4871.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2428, pruned_loss=0.05674, over 971844.34 frames.], batch size: 30, lr: 8.06e-04 +2022-05-04 02:37:31,251 INFO [train.py:715] (6/8) Epoch 1, batch 32300, loss[loss=0.1583, simple_loss=0.2193, pruned_loss=0.04867, over 4797.00 frames.], tot_loss[loss=0.1783, simple_loss=0.243, pruned_loss=0.05681, over 971645.47 frames.], batch size: 24, lr: 8.05e-04 +2022-05-04 02:38:10,690 INFO [train.py:715] (6/8) Epoch 1, batch 32350, loss[loss=0.2122, simple_loss=0.2579, pruned_loss=0.08329, over 4968.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2432, pruned_loss=0.05731, over 971251.86 frames.], batch size: 24, lr: 8.05e-04 +2022-05-04 02:38:50,285 INFO [train.py:715] (6/8) Epoch 1, batch 32400, loss[loss=0.1921, simple_loss=0.2554, pruned_loss=0.06439, over 4874.00 frames.], tot_loss[loss=0.1774, simple_loss=0.2421, pruned_loss=0.0564, over 971715.86 frames.], batch size: 22, lr: 8.05e-04 +2022-05-04 02:39:29,218 INFO [train.py:715] (6/8) Epoch 1, batch 32450, loss[loss=0.1607, simple_loss=0.2275, pruned_loss=0.04695, over 4890.00 frames.], tot_loss[loss=0.1782, simple_loss=0.2432, pruned_loss=0.05659, over 971683.82 frames.], batch size: 19, lr: 8.05e-04 +2022-05-04 02:40:08,862 INFO [train.py:715] (6/8) Epoch 1, batch 32500, loss[loss=0.1999, simple_loss=0.2579, pruned_loss=0.07096, over 4750.00 frames.], tot_loss[loss=0.1787, simple_loss=0.2439, pruned_loss=0.05668, over 971215.70 frames.], batch size: 19, lr: 8.04e-04 +2022-05-04 02:40:48,377 INFO [train.py:715] (6/8) Epoch 1, batch 32550, loss[loss=0.193, simple_loss=0.253, pruned_loss=0.06655, over 4805.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2445, pruned_loss=0.05716, over 971935.31 frames.], batch size: 21, lr: 8.04e-04 +2022-05-04 02:41:27,300 INFO [train.py:715] (6/8) Epoch 1, batch 32600, loss[loss=0.1666, simple_loss=0.2365, pruned_loss=0.04839, over 4874.00 frames.], tot_loss[loss=0.178, simple_loss=0.2433, pruned_loss=0.05635, over 972165.04 frames.], batch size: 16, lr: 8.04e-04 +2022-05-04 02:42:06,692 INFO [train.py:715] (6/8) Epoch 1, batch 32650, loss[loss=0.1829, 
simple_loss=0.2589, pruned_loss=0.0535, over 4894.00 frames.], tot_loss[loss=0.1777, simple_loss=0.243, pruned_loss=0.05619, over 973215.05 frames.], batch size: 22, lr: 8.03e-04 +2022-05-04 02:42:46,236 INFO [train.py:715] (6/8) Epoch 1, batch 32700, loss[loss=0.1518, simple_loss=0.2159, pruned_loss=0.04391, over 4820.00 frames.], tot_loss[loss=0.177, simple_loss=0.242, pruned_loss=0.05603, over 973115.53 frames.], batch size: 12, lr: 8.03e-04 +2022-05-04 02:43:25,965 INFO [train.py:715] (6/8) Epoch 1, batch 32750, loss[loss=0.1693, simple_loss=0.2364, pruned_loss=0.05106, over 4947.00 frames.], tot_loss[loss=0.1769, simple_loss=0.2419, pruned_loss=0.05594, over 972754.36 frames.], batch size: 21, lr: 8.03e-04 +2022-05-04 02:44:05,921 INFO [train.py:715] (6/8) Epoch 1, batch 32800, loss[loss=0.1808, simple_loss=0.2395, pruned_loss=0.06106, over 4877.00 frames.], tot_loss[loss=0.1775, simple_loss=0.242, pruned_loss=0.05645, over 972650.55 frames.], batch size: 16, lr: 8.02e-04 +2022-05-04 02:44:45,559 INFO [train.py:715] (6/8) Epoch 1, batch 32850, loss[loss=0.1583, simple_loss=0.2305, pruned_loss=0.04304, over 4959.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2422, pruned_loss=0.05606, over 972262.15 frames.], batch size: 24, lr: 8.02e-04 +2022-05-04 02:45:24,930 INFO [train.py:715] (6/8) Epoch 1, batch 32900, loss[loss=0.1764, simple_loss=0.2452, pruned_loss=0.05385, over 4982.00 frames.], tot_loss[loss=0.1774, simple_loss=0.2423, pruned_loss=0.0563, over 972217.90 frames.], batch size: 35, lr: 8.02e-04 +2022-05-04 02:46:04,182 INFO [train.py:715] (6/8) Epoch 1, batch 32950, loss[loss=0.1924, simple_loss=0.2518, pruned_loss=0.06651, over 4798.00 frames.], tot_loss[loss=0.178, simple_loss=0.2427, pruned_loss=0.05667, over 972318.19 frames.], batch size: 21, lr: 8.02e-04 +2022-05-04 02:46:43,645 INFO [train.py:715] (6/8) Epoch 1, batch 33000, loss[loss=0.1334, simple_loss=0.2031, pruned_loss=0.03179, over 4830.00 frames.], tot_loss[loss=0.1779, simple_loss=0.2425, pruned_loss=0.0566, over 972675.55 frames.], batch size: 12, lr: 8.01e-04 +2022-05-04 02:46:43,646 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 02:46:52,425 INFO [train.py:742] (6/8) Epoch 1, validation: loss=0.1208, simple_loss=0.2074, pruned_loss=0.01714, over 914524.00 frames. 
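One detail worth noting in these records: the loss[...] block is reported over only a few thousand frames (a single batch), while tot_loss[...] is reported over roughly 970k frames, so it reads like a frame-weighted aggregate over many recent batches rather than a single-batch value. The class below is a minimal sketch of that kind of bookkeeping, purely illustrative of the weighting implied by the two "over ... frames" figures and not the actual accounting code in train.py.

class FrameWeightedLoss:
    """Frame-weighted aggregate of per-batch losses (illustrative only)."""

    def __init__(self) -> None:
        self.weighted_sum = 0.0  # sum of per-frame loss times frame count
        self.num_frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> None:
        # e.g. the batch logged above as loss=0.1334 over 4830.00 frames
        # contributes 0.1334 * 4830 to the numerator and 4830 to the denominator.
        self.weighted_sum += batch_loss * batch_frames
        self.num_frames += batch_frames

    @property
    def value(self) -> float:
        return self.weighted_sum / max(self.num_frames, 1.0)

Under this reading, a single-batch outlier such as loss=0.2241 at batch 28900 shifts the ~970k-frame aggregate only slightly, which matches how tot_loss drifts by a few thousandths per interval while the per-batch values swing much more.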
+2022-05-04 02:47:32,122 INFO [train.py:715] (6/8) Epoch 1, batch 33050, loss[loss=0.1596, simple_loss=0.2329, pruned_loss=0.0432, over 4805.00 frames.], tot_loss[loss=0.1775, simple_loss=0.2427, pruned_loss=0.0561, over 971943.78 frames.], batch size: 21, lr: 8.01e-04 +2022-05-04 02:48:12,126 INFO [train.py:715] (6/8) Epoch 1, batch 33100, loss[loss=0.206, simple_loss=0.2613, pruned_loss=0.07533, over 4786.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2428, pruned_loss=0.05589, over 972320.30 frames.], batch size: 18, lr: 8.01e-04 +2022-05-04 02:48:52,002 INFO [train.py:715] (6/8) Epoch 1, batch 33150, loss[loss=0.1669, simple_loss=0.2371, pruned_loss=0.04837, over 4963.00 frames.], tot_loss[loss=0.1784, simple_loss=0.244, pruned_loss=0.05643, over 972288.06 frames.], batch size: 35, lr: 8.00e-04 +2022-05-04 02:49:31,140 INFO [train.py:715] (6/8) Epoch 1, batch 33200, loss[loss=0.1999, simple_loss=0.2525, pruned_loss=0.07368, over 4923.00 frames.], tot_loss[loss=0.178, simple_loss=0.2434, pruned_loss=0.05627, over 972542.85 frames.], batch size: 39, lr: 8.00e-04 +2022-05-04 02:50:11,559 INFO [train.py:715] (6/8) Epoch 1, batch 33250, loss[loss=0.1759, simple_loss=0.2382, pruned_loss=0.05682, over 4895.00 frames.], tot_loss[loss=0.177, simple_loss=0.2421, pruned_loss=0.05595, over 972445.62 frames.], batch size: 16, lr: 8.00e-04 +2022-05-04 02:50:51,593 INFO [train.py:715] (6/8) Epoch 1, batch 33300, loss[loss=0.1854, simple_loss=0.2548, pruned_loss=0.058, over 4755.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2425, pruned_loss=0.05604, over 972827.78 frames.], batch size: 14, lr: 8.00e-04 +2022-05-04 02:51:31,060 INFO [train.py:715] (6/8) Epoch 1, batch 33350, loss[loss=0.1395, simple_loss=0.2184, pruned_loss=0.03034, over 4872.00 frames.], tot_loss[loss=0.1778, simple_loss=0.2432, pruned_loss=0.05617, over 973077.19 frames.], batch size: 22, lr: 7.99e-04 +2022-05-04 02:52:11,437 INFO [train.py:715] (6/8) Epoch 1, batch 33400, loss[loss=0.1805, simple_loss=0.2442, pruned_loss=0.05838, over 4826.00 frames.], tot_loss[loss=0.1768, simple_loss=0.2426, pruned_loss=0.05547, over 972491.46 frames.], batch size: 25, lr: 7.99e-04 +2022-05-04 02:52:51,301 INFO [train.py:715] (6/8) Epoch 1, batch 33450, loss[loss=0.1613, simple_loss=0.2221, pruned_loss=0.05029, over 4841.00 frames.], tot_loss[loss=0.1769, simple_loss=0.2426, pruned_loss=0.05565, over 972174.42 frames.], batch size: 30, lr: 7.99e-04 +2022-05-04 02:53:30,412 INFO [train.py:715] (6/8) Epoch 1, batch 33500, loss[loss=0.1622, simple_loss=0.2397, pruned_loss=0.04229, over 4761.00 frames.], tot_loss[loss=0.1764, simple_loss=0.242, pruned_loss=0.05541, over 972012.21 frames.], batch size: 12, lr: 7.98e-04 +2022-05-04 02:54:10,341 INFO [train.py:715] (6/8) Epoch 1, batch 33550, loss[loss=0.1737, simple_loss=0.2386, pruned_loss=0.05437, over 4965.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2428, pruned_loss=0.05594, over 972793.34 frames.], batch size: 24, lr: 7.98e-04 +2022-05-04 02:54:50,185 INFO [train.py:715] (6/8) Epoch 1, batch 33600, loss[loss=0.1643, simple_loss=0.2225, pruned_loss=0.05301, over 4917.00 frames.], tot_loss[loss=0.1776, simple_loss=0.243, pruned_loss=0.0561, over 972611.45 frames.], batch size: 18, lr: 7.98e-04 +2022-05-04 02:55:29,606 INFO [train.py:715] (6/8) Epoch 1, batch 33650, loss[loss=0.1652, simple_loss=0.2355, pruned_loss=0.04739, over 4825.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2426, pruned_loss=0.05576, over 973196.65 frames.], batch size: 15, lr: 7.97e-04 +2022-05-04 02:56:08,654 INFO 
[train.py:715] (6/8) Epoch 1, batch 33700, loss[loss=0.1712, simple_loss=0.2398, pruned_loss=0.0513, over 4818.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2425, pruned_loss=0.05532, over 973023.32 frames.], batch size: 25, lr: 7.97e-04 +2022-05-04 02:56:47,810 INFO [train.py:715] (6/8) Epoch 1, batch 33750, loss[loss=0.1808, simple_loss=0.2351, pruned_loss=0.06322, over 4827.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2413, pruned_loss=0.05494, over 972146.62 frames.], batch size: 25, lr: 7.97e-04 +2022-05-04 02:57:27,455 INFO [train.py:715] (6/8) Epoch 1, batch 33800, loss[loss=0.1422, simple_loss=0.2058, pruned_loss=0.03932, over 4984.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2409, pruned_loss=0.05465, over 971477.25 frames.], batch size: 31, lr: 7.97e-04 +2022-05-04 02:58:06,286 INFO [train.py:715] (6/8) Epoch 1, batch 33850, loss[loss=0.2208, simple_loss=0.2759, pruned_loss=0.08284, over 4884.00 frames.], tot_loss[loss=0.1761, simple_loss=0.2417, pruned_loss=0.05528, over 972437.61 frames.], batch size: 32, lr: 7.96e-04 +2022-05-04 02:58:45,805 INFO [train.py:715] (6/8) Epoch 1, batch 33900, loss[loss=0.1279, simple_loss=0.1986, pruned_loss=0.02856, over 4821.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2425, pruned_loss=0.05579, over 972142.89 frames.], batch size: 13, lr: 7.96e-04 +2022-05-04 02:59:25,368 INFO [train.py:715] (6/8) Epoch 1, batch 33950, loss[loss=0.1771, simple_loss=0.2412, pruned_loss=0.05654, over 4885.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2419, pruned_loss=0.05539, over 972168.87 frames.], batch size: 16, lr: 7.96e-04 +2022-05-04 03:00:05,094 INFO [train.py:715] (6/8) Epoch 1, batch 34000, loss[loss=0.203, simple_loss=0.2729, pruned_loss=0.06657, over 4760.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2425, pruned_loss=0.05548, over 972538.56 frames.], batch size: 16, lr: 7.95e-04 +2022-05-04 03:00:44,415 INFO [train.py:715] (6/8) Epoch 1, batch 34050, loss[loss=0.1668, simple_loss=0.2291, pruned_loss=0.05228, over 4844.00 frames.], tot_loss[loss=0.177, simple_loss=0.2423, pruned_loss=0.05585, over 973131.30 frames.], batch size: 13, lr: 7.95e-04 +2022-05-04 03:01:23,799 INFO [train.py:715] (6/8) Epoch 1, batch 34100, loss[loss=0.1559, simple_loss=0.2163, pruned_loss=0.04776, over 4990.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2434, pruned_loss=0.05639, over 973126.30 frames.], batch size: 20, lr: 7.95e-04 +2022-05-04 03:02:03,184 INFO [train.py:715] (6/8) Epoch 1, batch 34150, loss[loss=0.1518, simple_loss=0.2202, pruned_loss=0.04171, over 4983.00 frames.], tot_loss[loss=0.1772, simple_loss=0.2425, pruned_loss=0.05592, over 972670.44 frames.], batch size: 15, lr: 7.95e-04 +2022-05-04 03:02:42,209 INFO [train.py:715] (6/8) Epoch 1, batch 34200, loss[loss=0.1946, simple_loss=0.2458, pruned_loss=0.07165, over 4710.00 frames.], tot_loss[loss=0.1778, simple_loss=0.2428, pruned_loss=0.0564, over 972310.61 frames.], batch size: 15, lr: 7.94e-04 +2022-05-04 03:03:21,758 INFO [train.py:715] (6/8) Epoch 1, batch 34250, loss[loss=0.2082, simple_loss=0.2788, pruned_loss=0.06875, over 4930.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2436, pruned_loss=0.05673, over 972347.60 frames.], batch size: 21, lr: 7.94e-04 +2022-05-04 03:04:01,441 INFO [train.py:715] (6/8) Epoch 1, batch 34300, loss[loss=0.1645, simple_loss=0.2183, pruned_loss=0.05538, over 4809.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2441, pruned_loss=0.05722, over 972470.88 frames.], batch size: 12, lr: 7.94e-04 +2022-05-04 03:04:40,850 INFO [train.py:715] (6/8) 
Epoch 1, batch 34350, loss[loss=0.1706, simple_loss=0.2434, pruned_loss=0.04893, over 4964.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2441, pruned_loss=0.05699, over 972735.54 frames.], batch size: 24, lr: 7.93e-04 +2022-05-04 03:05:19,754 INFO [train.py:715] (6/8) Epoch 1, batch 34400, loss[loss=0.199, simple_loss=0.2676, pruned_loss=0.0652, over 4703.00 frames.], tot_loss[loss=0.1785, simple_loss=0.2434, pruned_loss=0.05685, over 971821.07 frames.], batch size: 15, lr: 7.93e-04 +2022-05-04 03:05:59,256 INFO [train.py:715] (6/8) Epoch 1, batch 34450, loss[loss=0.204, simple_loss=0.2509, pruned_loss=0.07854, over 4879.00 frames.], tot_loss[loss=0.1779, simple_loss=0.2427, pruned_loss=0.05655, over 972383.61 frames.], batch size: 32, lr: 7.93e-04 +2022-05-04 03:06:38,482 INFO [train.py:715] (6/8) Epoch 1, batch 34500, loss[loss=0.1446, simple_loss=0.2138, pruned_loss=0.03766, over 4925.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2432, pruned_loss=0.05701, over 971451.31 frames.], batch size: 18, lr: 7.93e-04 +2022-05-04 03:07:17,763 INFO [train.py:715] (6/8) Epoch 1, batch 34550, loss[loss=0.1605, simple_loss=0.2399, pruned_loss=0.04058, over 4811.00 frames.], tot_loss[loss=0.178, simple_loss=0.2429, pruned_loss=0.05654, over 971909.78 frames.], batch size: 25, lr: 7.92e-04 +2022-05-04 03:07:57,344 INFO [train.py:715] (6/8) Epoch 1, batch 34600, loss[loss=0.21, simple_loss=0.2601, pruned_loss=0.07999, over 4765.00 frames.], tot_loss[loss=0.1774, simple_loss=0.2422, pruned_loss=0.05632, over 971019.84 frames.], batch size: 14, lr: 7.92e-04 +2022-05-04 03:08:37,232 INFO [train.py:715] (6/8) Epoch 1, batch 34650, loss[loss=0.1471, simple_loss=0.2137, pruned_loss=0.04022, over 4982.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2421, pruned_loss=0.05622, over 970845.42 frames.], batch size: 28, lr: 7.92e-04 +2022-05-04 03:09:17,433 INFO [train.py:715] (6/8) Epoch 1, batch 34700, loss[loss=0.2097, simple_loss=0.2738, pruned_loss=0.0728, over 4958.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2434, pruned_loss=0.05689, over 970761.28 frames.], batch size: 39, lr: 7.91e-04 +2022-05-04 03:09:55,742 INFO [train.py:715] (6/8) Epoch 1, batch 34750, loss[loss=0.1593, simple_loss=0.2239, pruned_loss=0.04734, over 4846.00 frames.], tot_loss[loss=0.1805, simple_loss=0.245, pruned_loss=0.05807, over 971203.45 frames.], batch size: 15, lr: 7.91e-04 +2022-05-04 03:10:32,247 INFO [train.py:715] (6/8) Epoch 1, batch 34800, loss[loss=0.1604, simple_loss=0.2221, pruned_loss=0.0493, over 4942.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2438, pruned_loss=0.05701, over 972510.21 frames.], batch size: 21, lr: 7.91e-04 +2022-05-04 03:11:25,705 INFO [train.py:715] (6/8) Epoch 2, batch 0, loss[loss=0.1912, simple_loss=0.2452, pruned_loss=0.06857, over 4778.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2452, pruned_loss=0.06857, over 4778.00 frames.], batch size: 14, lr: 7.59e-04 +2022-05-04 03:12:05,767 INFO [train.py:715] (6/8) Epoch 2, batch 50, loss[loss=0.168, simple_loss=0.2375, pruned_loss=0.04928, over 4957.00 frames.], tot_loss[loss=0.1765, simple_loss=0.2416, pruned_loss=0.05571, over 218661.71 frames.], batch size: 24, lr: 7.59e-04 +2022-05-04 03:12:46,577 INFO [train.py:715] (6/8) Epoch 2, batch 100, loss[loss=0.1863, simple_loss=0.2526, pruned_loss=0.05994, over 4796.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2433, pruned_loss=0.05795, over 385487.29 frames.], batch size: 21, lr: 7.59e-04 +2022-05-04 03:13:27,194 INFO [train.py:715] (6/8) Epoch 2, batch 150, loss[loss=0.1846, 
simple_loss=0.2433, pruned_loss=0.063, over 4975.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2417, pruned_loss=0.05638, over 514682.13 frames.], batch size: 14, lr: 7.59e-04 +2022-05-04 03:14:07,240 INFO [train.py:715] (6/8) Epoch 2, batch 200, loss[loss=0.1822, simple_loss=0.2549, pruned_loss=0.05477, over 4802.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2401, pruned_loss=0.05549, over 616430.61 frames.], batch size: 21, lr: 7.58e-04 +2022-05-04 03:14:47,999 INFO [train.py:715] (6/8) Epoch 2, batch 250, loss[loss=0.1884, simple_loss=0.2541, pruned_loss=0.06133, over 4754.00 frames.], tot_loss[loss=0.1753, simple_loss=0.2402, pruned_loss=0.05525, over 695023.61 frames.], batch size: 19, lr: 7.58e-04 +2022-05-04 03:15:29,365 INFO [train.py:715] (6/8) Epoch 2, batch 300, loss[loss=0.1583, simple_loss=0.2303, pruned_loss=0.04315, over 4754.00 frames.], tot_loss[loss=0.176, simple_loss=0.2413, pruned_loss=0.05534, over 757069.86 frames.], batch size: 19, lr: 7.58e-04 +2022-05-04 03:16:10,299 INFO [train.py:715] (6/8) Epoch 2, batch 350, loss[loss=0.2007, simple_loss=0.2617, pruned_loss=0.06982, over 4818.00 frames.], tot_loss[loss=0.1785, simple_loss=0.243, pruned_loss=0.05698, over 804698.33 frames.], batch size: 12, lr: 7.57e-04 +2022-05-04 03:16:49,961 INFO [train.py:715] (6/8) Epoch 2, batch 400, loss[loss=0.1806, simple_loss=0.2486, pruned_loss=0.05634, over 4756.00 frames.], tot_loss[loss=0.1774, simple_loss=0.242, pruned_loss=0.05638, over 842182.84 frames.], batch size: 19, lr: 7.57e-04 +2022-05-04 03:17:30,474 INFO [train.py:715] (6/8) Epoch 2, batch 450, loss[loss=0.1933, simple_loss=0.2603, pruned_loss=0.06313, over 4965.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2427, pruned_loss=0.05678, over 871061.08 frames.], batch size: 14, lr: 7.57e-04 +2022-05-04 03:18:11,615 INFO [train.py:715] (6/8) Epoch 2, batch 500, loss[loss=0.1706, simple_loss=0.2415, pruned_loss=0.04981, over 4885.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2422, pruned_loss=0.05622, over 894458.28 frames.], batch size: 16, lr: 7.57e-04 +2022-05-04 03:18:51,547 INFO [train.py:715] (6/8) Epoch 2, batch 550, loss[loss=0.1705, simple_loss=0.2377, pruned_loss=0.05161, over 4815.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2424, pruned_loss=0.05593, over 910792.54 frames.], batch size: 25, lr: 7.56e-04 +2022-05-04 03:19:31,911 INFO [train.py:715] (6/8) Epoch 2, batch 600, loss[loss=0.1924, simple_loss=0.244, pruned_loss=0.07041, over 4966.00 frames.], tot_loss[loss=0.1776, simple_loss=0.2427, pruned_loss=0.0562, over 924581.24 frames.], batch size: 35, lr: 7.56e-04 +2022-05-04 03:20:12,750 INFO [train.py:715] (6/8) Epoch 2, batch 650, loss[loss=0.2014, simple_loss=0.2681, pruned_loss=0.06739, over 4956.00 frames.], tot_loss[loss=0.1776, simple_loss=0.2424, pruned_loss=0.05641, over 935524.69 frames.], batch size: 24, lr: 7.56e-04 +2022-05-04 03:20:53,342 INFO [train.py:715] (6/8) Epoch 2, batch 700, loss[loss=0.155, simple_loss=0.2161, pruned_loss=0.04697, over 4833.00 frames.], tot_loss[loss=0.177, simple_loss=0.242, pruned_loss=0.05603, over 943757.93 frames.], batch size: 30, lr: 7.56e-04 +2022-05-04 03:21:32,898 INFO [train.py:715] (6/8) Epoch 2, batch 750, loss[loss=0.179, simple_loss=0.2504, pruned_loss=0.05378, over 4690.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2409, pruned_loss=0.05533, over 950521.36 frames.], batch size: 15, lr: 7.55e-04 +2022-05-04 03:22:13,340 INFO [train.py:715] (6/8) Epoch 2, batch 800, loss[loss=0.1825, simple_loss=0.2475, pruned_loss=0.05876, over 4750.00 
frames.], tot_loss[loss=0.1759, simple_loss=0.2408, pruned_loss=0.05554, over 954612.71 frames.], batch size: 19, lr: 7.55e-04 +2022-05-04 03:22:53,991 INFO [train.py:715] (6/8) Epoch 2, batch 850, loss[loss=0.217, simple_loss=0.268, pruned_loss=0.08295, over 4948.00 frames.], tot_loss[loss=0.1758, simple_loss=0.241, pruned_loss=0.05535, over 958550.69 frames.], batch size: 24, lr: 7.55e-04 +2022-05-04 03:23:34,283 INFO [train.py:715] (6/8) Epoch 2, batch 900, loss[loss=0.1615, simple_loss=0.243, pruned_loss=0.03998, over 4916.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2424, pruned_loss=0.05607, over 961844.58 frames.], batch size: 23, lr: 7.55e-04 +2022-05-04 03:24:14,707 INFO [train.py:715] (6/8) Epoch 2, batch 950, loss[loss=0.1835, simple_loss=0.2578, pruned_loss=0.0546, over 4869.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2419, pruned_loss=0.05559, over 964704.87 frames.], batch size: 20, lr: 7.54e-04 +2022-05-04 03:24:55,391 INFO [train.py:715] (6/8) Epoch 2, batch 1000, loss[loss=0.1581, simple_loss=0.2377, pruned_loss=0.03922, over 4795.00 frames.], tot_loss[loss=0.1769, simple_loss=0.242, pruned_loss=0.05589, over 966870.09 frames.], batch size: 24, lr: 7.54e-04 +2022-05-04 03:25:36,194 INFO [train.py:715] (6/8) Epoch 2, batch 1050, loss[loss=0.1977, simple_loss=0.2475, pruned_loss=0.07396, over 4848.00 frames.], tot_loss[loss=0.1784, simple_loss=0.2429, pruned_loss=0.05694, over 968652.51 frames.], batch size: 34, lr: 7.54e-04 +2022-05-04 03:26:15,799 INFO [train.py:715] (6/8) Epoch 2, batch 1100, loss[loss=0.1762, simple_loss=0.2304, pruned_loss=0.06098, over 4784.00 frames.], tot_loss[loss=0.1777, simple_loss=0.2426, pruned_loss=0.05639, over 969589.47 frames.], batch size: 14, lr: 7.53e-04 +2022-05-04 03:26:56,295 INFO [train.py:715] (6/8) Epoch 2, batch 1150, loss[loss=0.175, simple_loss=0.2383, pruned_loss=0.05584, over 4801.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2432, pruned_loss=0.05671, over 970496.94 frames.], batch size: 21, lr: 7.53e-04 +2022-05-04 03:27:37,629 INFO [train.py:715] (6/8) Epoch 2, batch 1200, loss[loss=0.1851, simple_loss=0.2539, pruned_loss=0.05817, over 4839.00 frames.], tot_loss[loss=0.1774, simple_loss=0.2427, pruned_loss=0.05611, over 970696.22 frames.], batch size: 15, lr: 7.53e-04 +2022-05-04 03:28:18,248 INFO [train.py:715] (6/8) Epoch 2, batch 1250, loss[loss=0.1849, simple_loss=0.2618, pruned_loss=0.05402, over 4805.00 frames.], tot_loss[loss=0.1768, simple_loss=0.2423, pruned_loss=0.05563, over 971698.10 frames.], batch size: 21, lr: 7.53e-04 +2022-05-04 03:28:57,932 INFO [train.py:715] (6/8) Epoch 2, batch 1300, loss[loss=0.1552, simple_loss=0.217, pruned_loss=0.04674, over 4981.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2424, pruned_loss=0.05613, over 971440.57 frames.], batch size: 14, lr: 7.52e-04 +2022-05-04 03:29:38,470 INFO [train.py:715] (6/8) Epoch 2, batch 1350, loss[loss=0.1447, simple_loss=0.2163, pruned_loss=0.03649, over 4878.00 frames.], tot_loss[loss=0.1776, simple_loss=0.2427, pruned_loss=0.05622, over 972486.38 frames.], batch size: 16, lr: 7.52e-04 +2022-05-04 03:30:19,124 INFO [train.py:715] (6/8) Epoch 2, batch 1400, loss[loss=0.1843, simple_loss=0.2576, pruned_loss=0.05547, over 4774.00 frames.], tot_loss[loss=0.1772, simple_loss=0.2427, pruned_loss=0.05583, over 972581.32 frames.], batch size: 17, lr: 7.52e-04 +2022-05-04 03:30:59,094 INFO [train.py:715] (6/8) Epoch 2, batch 1450, loss[loss=0.1544, simple_loss=0.2269, pruned_loss=0.04101, over 4818.00 frames.], tot_loss[loss=0.1772, 
simple_loss=0.2425, pruned_loss=0.05591, over 973016.92 frames.], batch size: 25, lr: 7.52e-04 +2022-05-04 03:31:39,494 INFO [train.py:715] (6/8) Epoch 2, batch 1500, loss[loss=0.2065, simple_loss=0.2655, pruned_loss=0.07378, over 4812.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2422, pruned_loss=0.05604, over 972970.84 frames.], batch size: 21, lr: 7.51e-04 +2022-05-04 03:32:20,459 INFO [train.py:715] (6/8) Epoch 2, batch 1550, loss[loss=0.1839, simple_loss=0.2574, pruned_loss=0.05523, over 4750.00 frames.], tot_loss[loss=0.1772, simple_loss=0.2424, pruned_loss=0.05597, over 972857.46 frames.], batch size: 19, lr: 7.51e-04 +2022-05-04 03:33:00,534 INFO [train.py:715] (6/8) Epoch 2, batch 1600, loss[loss=0.192, simple_loss=0.2567, pruned_loss=0.06359, over 4934.00 frames.], tot_loss[loss=0.1762, simple_loss=0.2419, pruned_loss=0.05525, over 972690.36 frames.], batch size: 21, lr: 7.51e-04 +2022-05-04 03:33:40,352 INFO [train.py:715] (6/8) Epoch 2, batch 1650, loss[loss=0.2126, simple_loss=0.2589, pruned_loss=0.08318, over 4664.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2422, pruned_loss=0.05525, over 972496.81 frames.], batch size: 14, lr: 7.51e-04 +2022-05-04 03:34:21,224 INFO [train.py:715] (6/8) Epoch 2, batch 1700, loss[loss=0.1568, simple_loss=0.2249, pruned_loss=0.04429, over 4928.00 frames.], tot_loss[loss=0.1765, simple_loss=0.2422, pruned_loss=0.05541, over 973606.61 frames.], batch size: 21, lr: 7.50e-04 +2022-05-04 03:35:02,267 INFO [train.py:715] (6/8) Epoch 2, batch 1750, loss[loss=0.2172, simple_loss=0.269, pruned_loss=0.0827, over 4799.00 frames.], tot_loss[loss=0.1761, simple_loss=0.2417, pruned_loss=0.05524, over 973726.97 frames.], batch size: 17, lr: 7.50e-04 +2022-05-04 03:35:42,177 INFO [train.py:715] (6/8) Epoch 2, batch 1800, loss[loss=0.1596, simple_loss=0.2309, pruned_loss=0.0442, over 4864.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2421, pruned_loss=0.05563, over 972959.91 frames.], batch size: 20, lr: 7.50e-04 +2022-05-04 03:36:22,538 INFO [train.py:715] (6/8) Epoch 2, batch 1850, loss[loss=0.1831, simple_loss=0.2522, pruned_loss=0.05706, over 4757.00 frames.], tot_loss[loss=0.1776, simple_loss=0.2428, pruned_loss=0.05621, over 972716.52 frames.], batch size: 19, lr: 7.50e-04 +2022-05-04 03:37:03,509 INFO [train.py:715] (6/8) Epoch 2, batch 1900, loss[loss=0.1405, simple_loss=0.2113, pruned_loss=0.03488, over 4767.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2419, pruned_loss=0.05562, over 972845.11 frames.], batch size: 14, lr: 7.49e-04 +2022-05-04 03:37:44,291 INFO [train.py:715] (6/8) Epoch 2, batch 1950, loss[loss=0.1856, simple_loss=0.2498, pruned_loss=0.0607, over 4736.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2411, pruned_loss=0.05507, over 972366.49 frames.], batch size: 16, lr: 7.49e-04 +2022-05-04 03:38:24,072 INFO [train.py:715] (6/8) Epoch 2, batch 2000, loss[loss=0.1844, simple_loss=0.2599, pruned_loss=0.05446, over 4916.00 frames.], tot_loss[loss=0.1752, simple_loss=0.2412, pruned_loss=0.05464, over 972055.24 frames.], batch size: 19, lr: 7.49e-04 +2022-05-04 03:39:04,271 INFO [train.py:715] (6/8) Epoch 2, batch 2050, loss[loss=0.2169, simple_loss=0.2818, pruned_loss=0.076, over 4863.00 frames.], tot_loss[loss=0.176, simple_loss=0.2419, pruned_loss=0.05503, over 972355.43 frames.], batch size: 22, lr: 7.48e-04 +2022-05-04 03:39:45,381 INFO [train.py:715] (6/8) Epoch 2, batch 2100, loss[loss=0.179, simple_loss=0.2504, pruned_loss=0.05383, over 4794.00 frames.], tot_loss[loss=0.1762, simple_loss=0.2422, pruned_loss=0.0551, 
over 971873.58 frames.], batch size: 24, lr: 7.48e-04 +2022-05-04 03:40:25,359 INFO [train.py:715] (6/8) Epoch 2, batch 2150, loss[loss=0.1463, simple_loss=0.2164, pruned_loss=0.03808, over 4742.00 frames.], tot_loss[loss=0.1769, simple_loss=0.2428, pruned_loss=0.05552, over 972463.20 frames.], batch size: 12, lr: 7.48e-04 +2022-05-04 03:41:04,883 INFO [train.py:715] (6/8) Epoch 2, batch 2200, loss[loss=0.143, simple_loss=0.2124, pruned_loss=0.03678, over 4773.00 frames.], tot_loss[loss=0.1765, simple_loss=0.2424, pruned_loss=0.05532, over 972627.82 frames.], batch size: 12, lr: 7.48e-04 +2022-05-04 03:41:45,606 INFO [train.py:715] (6/8) Epoch 2, batch 2250, loss[loss=0.1949, simple_loss=0.2632, pruned_loss=0.06332, over 4761.00 frames.], tot_loss[loss=0.1749, simple_loss=0.241, pruned_loss=0.05445, over 972993.51 frames.], batch size: 16, lr: 7.47e-04 +2022-05-04 03:42:26,407 INFO [train.py:715] (6/8) Epoch 2, batch 2300, loss[loss=0.1525, simple_loss=0.2168, pruned_loss=0.04411, over 4819.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2405, pruned_loss=0.05486, over 972709.57 frames.], batch size: 15, lr: 7.47e-04 +2022-05-04 03:43:05,609 INFO [train.py:715] (6/8) Epoch 2, batch 2350, loss[loss=0.1701, simple_loss=0.2427, pruned_loss=0.04877, over 4815.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2402, pruned_loss=0.05456, over 971985.90 frames.], batch size: 13, lr: 7.47e-04 +2022-05-04 03:43:48,322 INFO [train.py:715] (6/8) Epoch 2, batch 2400, loss[loss=0.1892, simple_loss=0.2559, pruned_loss=0.06126, over 4963.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2402, pruned_loss=0.05436, over 971444.16 frames.], batch size: 24, lr: 7.47e-04 +2022-05-04 03:44:29,313 INFO [train.py:715] (6/8) Epoch 2, batch 2450, loss[loss=0.1368, simple_loss=0.2131, pruned_loss=0.03022, over 4801.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2402, pruned_loss=0.05455, over 971766.11 frames.], batch size: 21, lr: 7.46e-04 +2022-05-04 03:45:09,453 INFO [train.py:715] (6/8) Epoch 2, batch 2500, loss[loss=0.192, simple_loss=0.2385, pruned_loss=0.07274, over 4983.00 frames.], tot_loss[loss=0.1757, simple_loss=0.2411, pruned_loss=0.05512, over 972340.49 frames.], batch size: 33, lr: 7.46e-04 +2022-05-04 03:45:49,045 INFO [train.py:715] (6/8) Epoch 2, batch 2550, loss[loss=0.1632, simple_loss=0.2293, pruned_loss=0.04857, over 4901.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2412, pruned_loss=0.05478, over 972071.79 frames.], batch size: 17, lr: 7.46e-04 +2022-05-04 03:46:29,871 INFO [train.py:715] (6/8) Epoch 2, batch 2600, loss[loss=0.1973, simple_loss=0.2613, pruned_loss=0.06663, over 4949.00 frames.], tot_loss[loss=0.1769, simple_loss=0.2424, pruned_loss=0.05573, over 971485.26 frames.], batch size: 21, lr: 7.46e-04 +2022-05-04 03:47:10,390 INFO [train.py:715] (6/8) Epoch 2, batch 2650, loss[loss=0.1811, simple_loss=0.2425, pruned_loss=0.05985, over 4777.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2425, pruned_loss=0.05586, over 971888.03 frames.], batch size: 17, lr: 7.45e-04 +2022-05-04 03:47:49,299 INFO [train.py:715] (6/8) Epoch 2, batch 2700, loss[loss=0.144, simple_loss=0.2198, pruned_loss=0.03412, over 4904.00 frames.], tot_loss[loss=0.1761, simple_loss=0.2418, pruned_loss=0.05524, over 971190.05 frames.], batch size: 19, lr: 7.45e-04 +2022-05-04 03:48:29,305 INFO [train.py:715] (6/8) Epoch 2, batch 2750, loss[loss=0.1462, simple_loss=0.2134, pruned_loss=0.03952, over 4774.00 frames.], tot_loss[loss=0.176, simple_loss=0.2417, pruned_loss=0.05517, over 970480.35 frames.], batch size: 
14, lr: 7.45e-04 +2022-05-04 03:49:10,353 INFO [train.py:715] (6/8) Epoch 2, batch 2800, loss[loss=0.1898, simple_loss=0.2526, pruned_loss=0.0635, over 4771.00 frames.], tot_loss[loss=0.176, simple_loss=0.2416, pruned_loss=0.05519, over 971573.38 frames.], batch size: 18, lr: 7.45e-04 +2022-05-04 03:49:50,285 INFO [train.py:715] (6/8) Epoch 2, batch 2850, loss[loss=0.1491, simple_loss=0.2115, pruned_loss=0.04334, over 4817.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2406, pruned_loss=0.05445, over 971934.82 frames.], batch size: 25, lr: 7.44e-04 +2022-05-04 03:50:29,538 INFO [train.py:715] (6/8) Epoch 2, batch 2900, loss[loss=0.1967, simple_loss=0.2549, pruned_loss=0.06924, over 4857.00 frames.], tot_loss[loss=0.174, simple_loss=0.2399, pruned_loss=0.05401, over 970887.28 frames.], batch size: 20, lr: 7.44e-04 +2022-05-04 03:51:09,901 INFO [train.py:715] (6/8) Epoch 2, batch 2950, loss[loss=0.1782, simple_loss=0.2422, pruned_loss=0.05705, over 4843.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2401, pruned_loss=0.05428, over 971136.28 frames.], batch size: 20, lr: 7.44e-04 +2022-05-04 03:51:50,588 INFO [train.py:715] (6/8) Epoch 2, batch 3000, loss[loss=0.2149, simple_loss=0.294, pruned_loss=0.06786, over 4761.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2401, pruned_loss=0.05424, over 971180.40 frames.], batch size: 19, lr: 7.44e-04 +2022-05-04 03:51:50,590 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 03:52:00,002 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1191, simple_loss=0.2058, pruned_loss=0.01615, over 914524.00 frames. +2022-05-04 03:52:40,622 INFO [train.py:715] (6/8) Epoch 2, batch 3050, loss[loss=0.1746, simple_loss=0.2448, pruned_loss=0.05225, over 4710.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2408, pruned_loss=0.05501, over 971664.65 frames.], batch size: 15, lr: 7.43e-04 +2022-05-04 03:53:19,874 INFO [train.py:715] (6/8) Epoch 2, batch 3100, loss[loss=0.1958, simple_loss=0.2546, pruned_loss=0.06845, over 4860.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2399, pruned_loss=0.0544, over 972339.91 frames.], batch size: 20, lr: 7.43e-04 +2022-05-04 03:53:59,881 INFO [train.py:715] (6/8) Epoch 2, batch 3150, loss[loss=0.1887, simple_loss=0.2583, pruned_loss=0.0595, over 4892.00 frames.], tot_loss[loss=0.1742, simple_loss=0.24, pruned_loss=0.05423, over 972432.46 frames.], batch size: 22, lr: 7.43e-04 +2022-05-04 03:54:40,145 INFO [train.py:715] (6/8) Epoch 2, batch 3200, loss[loss=0.1992, simple_loss=0.2667, pruned_loss=0.06589, over 4919.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2413, pruned_loss=0.05525, over 971404.86 frames.], batch size: 18, lr: 7.43e-04 +2022-05-04 03:55:19,788 INFO [train.py:715] (6/8) Epoch 2, batch 3250, loss[loss=0.1503, simple_loss=0.2252, pruned_loss=0.03769, over 4785.00 frames.], tot_loss[loss=0.1761, simple_loss=0.2415, pruned_loss=0.05532, over 971018.35 frames.], batch size: 17, lr: 7.42e-04 +2022-05-04 03:55:59,348 INFO [train.py:715] (6/8) Epoch 2, batch 3300, loss[loss=0.1479, simple_loss=0.2164, pruned_loss=0.03968, over 4785.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2403, pruned_loss=0.05495, over 970347.11 frames.], batch size: 17, lr: 7.42e-04 +2022-05-04 03:56:39,592 INFO [train.py:715] (6/8) Epoch 2, batch 3350, loss[loss=0.198, simple_loss=0.251, pruned_loss=0.0725, over 4908.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2401, pruned_loss=0.0547, over 970831.89 frames.], batch size: 17, lr: 7.42e-04 +2022-05-04 03:57:20,092 INFO [train.py:715] (6/8) Epoch 2, batch 3400, 
loss[loss=0.17, simple_loss=0.2414, pruned_loss=0.04934, over 4792.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2399, pruned_loss=0.05483, over 971033.95 frames.], batch size: 17, lr: 7.42e-04 +2022-05-04 03:57:58,924 INFO [train.py:715] (6/8) Epoch 2, batch 3450, loss[loss=0.185, simple_loss=0.2451, pruned_loss=0.06243, over 4977.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2402, pruned_loss=0.05477, over 971412.87 frames.], batch size: 15, lr: 7.41e-04 +2022-05-04 03:58:38,952 INFO [train.py:715] (6/8) Epoch 2, batch 3500, loss[loss=0.1513, simple_loss=0.2219, pruned_loss=0.04038, over 4765.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2401, pruned_loss=0.05457, over 971246.26 frames.], batch size: 17, lr: 7.41e-04 +2022-05-04 03:59:19,007 INFO [train.py:715] (6/8) Epoch 2, batch 3550, loss[loss=0.182, simple_loss=0.2383, pruned_loss=0.0629, over 4882.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2407, pruned_loss=0.05505, over 971669.88 frames.], batch size: 22, lr: 7.41e-04 +2022-05-04 03:59:58,774 INFO [train.py:715] (6/8) Epoch 2, batch 3600, loss[loss=0.1618, simple_loss=0.2344, pruned_loss=0.04461, over 4789.00 frames.], tot_loss[loss=0.175, simple_loss=0.2406, pruned_loss=0.05469, over 971866.21 frames.], batch size: 24, lr: 7.41e-04 +2022-05-04 04:00:37,774 INFO [train.py:715] (6/8) Epoch 2, batch 3650, loss[loss=0.1749, simple_loss=0.245, pruned_loss=0.05245, over 4842.00 frames.], tot_loss[loss=0.1755, simple_loss=0.2412, pruned_loss=0.05492, over 971260.68 frames.], batch size: 15, lr: 7.40e-04 +2022-05-04 04:01:18,178 INFO [train.py:715] (6/8) Epoch 2, batch 3700, loss[loss=0.1662, simple_loss=0.236, pruned_loss=0.04818, over 4881.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2402, pruned_loss=0.05449, over 971364.71 frames.], batch size: 22, lr: 7.40e-04 +2022-05-04 04:01:58,348 INFO [train.py:715] (6/8) Epoch 2, batch 3750, loss[loss=0.1913, simple_loss=0.2534, pruned_loss=0.06455, over 4937.00 frames.], tot_loss[loss=0.174, simple_loss=0.2397, pruned_loss=0.05409, over 971014.52 frames.], batch size: 18, lr: 7.40e-04 +2022-05-04 04:02:37,085 INFO [train.py:715] (6/8) Epoch 2, batch 3800, loss[loss=0.1453, simple_loss=0.2109, pruned_loss=0.03983, over 4931.00 frames.], tot_loss[loss=0.1755, simple_loss=0.2408, pruned_loss=0.05506, over 971061.27 frames.], batch size: 29, lr: 7.40e-04 +2022-05-04 04:03:17,275 INFO [train.py:715] (6/8) Epoch 2, batch 3850, loss[loss=0.1974, simple_loss=0.257, pruned_loss=0.06893, over 4832.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2409, pruned_loss=0.05468, over 971688.89 frames.], batch size: 25, lr: 7.39e-04 +2022-05-04 04:03:57,622 INFO [train.py:715] (6/8) Epoch 2, batch 3900, loss[loss=0.1772, simple_loss=0.2442, pruned_loss=0.0551, over 4906.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2406, pruned_loss=0.05453, over 971448.21 frames.], batch size: 17, lr: 7.39e-04 +2022-05-04 04:04:36,855 INFO [train.py:715] (6/8) Epoch 2, batch 3950, loss[loss=0.1741, simple_loss=0.2419, pruned_loss=0.05317, over 4868.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2408, pruned_loss=0.05498, over 971956.51 frames.], batch size: 39, lr: 7.39e-04 +2022-05-04 04:05:16,462 INFO [train.py:715] (6/8) Epoch 2, batch 4000, loss[loss=0.17, simple_loss=0.2361, pruned_loss=0.05194, over 4813.00 frames.], tot_loss[loss=0.1753, simple_loss=0.2412, pruned_loss=0.05472, over 971886.72 frames.], batch size: 27, lr: 7.39e-04 +2022-05-04 04:05:57,031 INFO [train.py:715] (6/8) Epoch 2, batch 4050, loss[loss=0.1726, simple_loss=0.2243, 
pruned_loss=0.06039, over 4842.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2409, pruned_loss=0.0546, over 972313.24 frames.], batch size: 12, lr: 7.38e-04 +2022-05-04 04:06:37,526 INFO [train.py:715] (6/8) Epoch 2, batch 4100, loss[loss=0.1762, simple_loss=0.2434, pruned_loss=0.0545, over 4939.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2408, pruned_loss=0.05429, over 972510.53 frames.], batch size: 21, lr: 7.38e-04 +2022-05-04 04:07:16,027 INFO [train.py:715] (6/8) Epoch 2, batch 4150, loss[loss=0.1468, simple_loss=0.2146, pruned_loss=0.03944, over 4968.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2411, pruned_loss=0.05429, over 972439.20 frames.], batch size: 24, lr: 7.38e-04 +2022-05-04 04:07:55,388 INFO [train.py:715] (6/8) Epoch 2, batch 4200, loss[loss=0.1786, simple_loss=0.247, pruned_loss=0.05511, over 4757.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2407, pruned_loss=0.0545, over 972814.29 frames.], batch size: 19, lr: 7.38e-04 +2022-05-04 04:08:35,833 INFO [train.py:715] (6/8) Epoch 2, batch 4250, loss[loss=0.159, simple_loss=0.2283, pruned_loss=0.04484, over 4881.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2397, pruned_loss=0.05421, over 973008.38 frames.], batch size: 22, lr: 7.37e-04 +2022-05-04 04:09:15,084 INFO [train.py:715] (6/8) Epoch 2, batch 4300, loss[loss=0.1358, simple_loss=0.211, pruned_loss=0.03024, over 4818.00 frames.], tot_loss[loss=0.175, simple_loss=0.2407, pruned_loss=0.05463, over 972781.49 frames.], batch size: 26, lr: 7.37e-04 +2022-05-04 04:09:54,871 INFO [train.py:715] (6/8) Epoch 2, batch 4350, loss[loss=0.1851, simple_loss=0.2471, pruned_loss=0.06152, over 4937.00 frames.], tot_loss[loss=0.1752, simple_loss=0.241, pruned_loss=0.05465, over 972693.72 frames.], batch size: 39, lr: 7.37e-04 +2022-05-04 04:10:34,719 INFO [train.py:715] (6/8) Epoch 2, batch 4400, loss[loss=0.1577, simple_loss=0.2192, pruned_loss=0.04811, over 4817.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2415, pruned_loss=0.05506, over 972806.72 frames.], batch size: 15, lr: 7.37e-04 +2022-05-04 04:11:14,736 INFO [train.py:715] (6/8) Epoch 2, batch 4450, loss[loss=0.1926, simple_loss=0.2585, pruned_loss=0.06331, over 4746.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2411, pruned_loss=0.05455, over 972294.00 frames.], batch size: 19, lr: 7.36e-04 +2022-05-04 04:11:53,880 INFO [train.py:715] (6/8) Epoch 2, batch 4500, loss[loss=0.1633, simple_loss=0.2222, pruned_loss=0.05222, over 4864.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2412, pruned_loss=0.0543, over 972235.55 frames.], batch size: 32, lr: 7.36e-04 +2022-05-04 04:12:33,895 INFO [train.py:715] (6/8) Epoch 2, batch 4550, loss[loss=0.1496, simple_loss=0.2157, pruned_loss=0.04179, over 4766.00 frames.], tot_loss[loss=0.174, simple_loss=0.24, pruned_loss=0.05399, over 972048.12 frames.], batch size: 12, lr: 7.36e-04 +2022-05-04 04:13:14,640 INFO [train.py:715] (6/8) Epoch 2, batch 4600, loss[loss=0.1674, simple_loss=0.2354, pruned_loss=0.04967, over 4881.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2401, pruned_loss=0.05422, over 971682.76 frames.], batch size: 16, lr: 7.36e-04 +2022-05-04 04:13:53,695 INFO [train.py:715] (6/8) Epoch 2, batch 4650, loss[loss=0.1867, simple_loss=0.2532, pruned_loss=0.06015, over 4897.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2394, pruned_loss=0.05403, over 971645.74 frames.], batch size: 19, lr: 7.35e-04 +2022-05-04 04:14:33,001 INFO [train.py:715] (6/8) Epoch 2, batch 4700, loss[loss=0.1876, simple_loss=0.2587, pruned_loss=0.05825, over 4972.00 frames.], 
tot_loss[loss=0.1733, simple_loss=0.2389, pruned_loss=0.05385, over 971088.05 frames.], batch size: 39, lr: 7.35e-04 +2022-05-04 04:15:13,201 INFO [train.py:715] (6/8) Epoch 2, batch 4750, loss[loss=0.1622, simple_loss=0.2279, pruned_loss=0.04827, over 4817.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2396, pruned_loss=0.05403, over 971558.35 frames.], batch size: 25, lr: 7.35e-04 +2022-05-04 04:15:53,743 INFO [train.py:715] (6/8) Epoch 2, batch 4800, loss[loss=0.1555, simple_loss=0.2192, pruned_loss=0.04591, over 4823.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2405, pruned_loss=0.05413, over 972473.31 frames.], batch size: 26, lr: 7.35e-04 +2022-05-04 04:16:33,019 INFO [train.py:715] (6/8) Epoch 2, batch 4850, loss[loss=0.1896, simple_loss=0.2523, pruned_loss=0.06345, over 4945.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2403, pruned_loss=0.05395, over 972989.48 frames.], batch size: 21, lr: 7.34e-04 +2022-05-04 04:17:12,478 INFO [train.py:715] (6/8) Epoch 2, batch 4900, loss[loss=0.1804, simple_loss=0.2548, pruned_loss=0.053, over 4764.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2403, pruned_loss=0.05404, over 974133.94 frames.], batch size: 18, lr: 7.34e-04 +2022-05-04 04:17:52,935 INFO [train.py:715] (6/8) Epoch 2, batch 4950, loss[loss=0.1765, simple_loss=0.2274, pruned_loss=0.06279, over 4703.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2407, pruned_loss=0.05412, over 973395.14 frames.], batch size: 15, lr: 7.34e-04 +2022-05-04 04:18:32,534 INFO [train.py:715] (6/8) Epoch 2, batch 5000, loss[loss=0.1636, simple_loss=0.2246, pruned_loss=0.05135, over 4936.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2405, pruned_loss=0.05405, over 973740.14 frames.], batch size: 35, lr: 7.34e-04 +2022-05-04 04:19:12,115 INFO [train.py:715] (6/8) Epoch 2, batch 5050, loss[loss=0.2349, simple_loss=0.2942, pruned_loss=0.08784, over 4783.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2416, pruned_loss=0.05497, over 973481.53 frames.], batch size: 14, lr: 7.33e-04 +2022-05-04 04:19:53,166 INFO [train.py:715] (6/8) Epoch 2, batch 5100, loss[loss=0.1988, simple_loss=0.2461, pruned_loss=0.07578, over 4841.00 frames.], tot_loss[loss=0.176, simple_loss=0.2417, pruned_loss=0.05516, over 973211.35 frames.], batch size: 15, lr: 7.33e-04 +2022-05-04 04:20:34,130 INFO [train.py:715] (6/8) Epoch 2, batch 5150, loss[loss=0.1879, simple_loss=0.2485, pruned_loss=0.06366, over 4789.00 frames.], tot_loss[loss=0.1763, simple_loss=0.242, pruned_loss=0.05531, over 972479.35 frames.], batch size: 24, lr: 7.33e-04 +2022-05-04 04:21:13,068 INFO [train.py:715] (6/8) Epoch 2, batch 5200, loss[loss=0.1891, simple_loss=0.2459, pruned_loss=0.06614, over 4788.00 frames.], tot_loss[loss=0.1761, simple_loss=0.2418, pruned_loss=0.05518, over 973405.77 frames.], batch size: 21, lr: 7.33e-04 +2022-05-04 04:21:52,853 INFO [train.py:715] (6/8) Epoch 2, batch 5250, loss[loss=0.142, simple_loss=0.2164, pruned_loss=0.03382, over 4942.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2414, pruned_loss=0.05515, over 972549.33 frames.], batch size: 35, lr: 7.32e-04 +2022-05-04 04:22:33,066 INFO [train.py:715] (6/8) Epoch 2, batch 5300, loss[loss=0.2082, simple_loss=0.2702, pruned_loss=0.07314, over 4985.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2408, pruned_loss=0.05438, over 972899.73 frames.], batch size: 31, lr: 7.32e-04 +2022-05-04 04:23:12,243 INFO [train.py:715] (6/8) Epoch 2, batch 5350, loss[loss=0.1833, simple_loss=0.2528, pruned_loss=0.05688, over 4897.00 frames.], tot_loss[loss=0.1751, 
simple_loss=0.241, pruned_loss=0.05457, over 972380.57 frames.], batch size: 17, lr: 7.32e-04 +2022-05-04 04:23:51,605 INFO [train.py:715] (6/8) Epoch 2, batch 5400, loss[loss=0.1857, simple_loss=0.2412, pruned_loss=0.06508, over 4958.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2413, pruned_loss=0.05493, over 972148.03 frames.], batch size: 14, lr: 7.32e-04 +2022-05-04 04:24:32,283 INFO [train.py:715] (6/8) Epoch 2, batch 5450, loss[loss=0.1771, simple_loss=0.2387, pruned_loss=0.05778, over 4763.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2419, pruned_loss=0.05536, over 972156.88 frames.], batch size: 14, lr: 7.31e-04 +2022-05-04 04:25:12,072 INFO [train.py:715] (6/8) Epoch 2, batch 5500, loss[loss=0.1543, simple_loss=0.2208, pruned_loss=0.04383, over 4792.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2414, pruned_loss=0.05505, over 971666.67 frames.], batch size: 14, lr: 7.31e-04 +2022-05-04 04:25:51,708 INFO [train.py:715] (6/8) Epoch 2, batch 5550, loss[loss=0.1684, simple_loss=0.2425, pruned_loss=0.04712, over 4947.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2415, pruned_loss=0.055, over 972102.32 frames.], batch size: 21, lr: 7.31e-04 +2022-05-04 04:26:32,206 INFO [train.py:715] (6/8) Epoch 2, batch 5600, loss[loss=0.1627, simple_loss=0.2169, pruned_loss=0.05423, over 4850.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2404, pruned_loss=0.05452, over 971962.98 frames.], batch size: 15, lr: 7.31e-04 +2022-05-04 04:27:13,265 INFO [train.py:715] (6/8) Epoch 2, batch 5650, loss[loss=0.1823, simple_loss=0.2417, pruned_loss=0.06143, over 4990.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2404, pruned_loss=0.05433, over 972248.37 frames.], batch size: 14, lr: 7.30e-04 +2022-05-04 04:27:53,173 INFO [train.py:715] (6/8) Epoch 2, batch 5700, loss[loss=0.1655, simple_loss=0.2271, pruned_loss=0.05202, over 4972.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2401, pruned_loss=0.05414, over 972746.77 frames.], batch size: 15, lr: 7.30e-04 +2022-05-04 04:28:33,028 INFO [train.py:715] (6/8) Epoch 2, batch 5750, loss[loss=0.2193, simple_loss=0.2823, pruned_loss=0.0781, over 4792.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2405, pruned_loss=0.05401, over 972191.03 frames.], batch size: 21, lr: 7.30e-04 +2022-05-04 04:29:13,947 INFO [train.py:715] (6/8) Epoch 2, batch 5800, loss[loss=0.1538, simple_loss=0.2213, pruned_loss=0.04313, over 4867.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2401, pruned_loss=0.0536, over 972361.27 frames.], batch size: 13, lr: 7.30e-04 +2022-05-04 04:29:55,098 INFO [train.py:715] (6/8) Epoch 2, batch 5850, loss[loss=0.227, simple_loss=0.2712, pruned_loss=0.09135, over 4970.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2402, pruned_loss=0.05379, over 972167.46 frames.], batch size: 35, lr: 7.29e-04 +2022-05-04 04:30:34,548 INFO [train.py:715] (6/8) Epoch 2, batch 5900, loss[loss=0.1744, simple_loss=0.2473, pruned_loss=0.0508, over 4820.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2404, pruned_loss=0.05428, over 972235.33 frames.], batch size: 25, lr: 7.29e-04 +2022-05-04 04:31:15,144 INFO [train.py:715] (6/8) Epoch 2, batch 5950, loss[loss=0.16, simple_loss=0.2237, pruned_loss=0.04811, over 4862.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2404, pruned_loss=0.05441, over 972057.20 frames.], batch size: 32, lr: 7.29e-04 +2022-05-04 04:31:56,171 INFO [train.py:715] (6/8) Epoch 2, batch 6000, loss[loss=0.1374, simple_loss=0.2122, pruned_loss=0.03133, over 4912.00 frames.], tot_loss[loss=0.1738, simple_loss=0.24, pruned_loss=0.05385, 
over 972255.49 frames.], batch size: 17, lr: 7.29e-04 +2022-05-04 04:31:56,172 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 04:32:04,807 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1188, simple_loss=0.2054, pruned_loss=0.01614, over 914524.00 frames. +2022-05-04 04:32:46,139 INFO [train.py:715] (6/8) Epoch 2, batch 6050, loss[loss=0.1701, simple_loss=0.2401, pruned_loss=0.05001, over 4828.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2399, pruned_loss=0.0538, over 971913.39 frames.], batch size: 13, lr: 7.29e-04 +2022-05-04 04:33:25,851 INFO [train.py:715] (6/8) Epoch 2, batch 6100, loss[loss=0.2034, simple_loss=0.2659, pruned_loss=0.07049, over 4700.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2398, pruned_loss=0.05428, over 971374.78 frames.], batch size: 15, lr: 7.28e-04 +2022-05-04 04:34:05,819 INFO [train.py:715] (6/8) Epoch 2, batch 6150, loss[loss=0.1832, simple_loss=0.2579, pruned_loss=0.05426, over 4939.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2406, pruned_loss=0.0548, over 971745.74 frames.], batch size: 21, lr: 7.28e-04 +2022-05-04 04:34:46,185 INFO [train.py:715] (6/8) Epoch 2, batch 6200, loss[loss=0.1968, simple_loss=0.2633, pruned_loss=0.06517, over 4940.00 frames.], tot_loss[loss=0.1742, simple_loss=0.24, pruned_loss=0.05419, over 972124.25 frames.], batch size: 23, lr: 7.28e-04 +2022-05-04 04:35:26,608 INFO [train.py:715] (6/8) Epoch 2, batch 6250, loss[loss=0.1472, simple_loss=0.2202, pruned_loss=0.03708, over 4835.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2383, pruned_loss=0.05339, over 972623.34 frames.], batch size: 13, lr: 7.28e-04 +2022-05-04 04:36:05,782 INFO [train.py:715] (6/8) Epoch 2, batch 6300, loss[loss=0.2182, simple_loss=0.2826, pruned_loss=0.07692, over 4953.00 frames.], tot_loss[loss=0.1731, simple_loss=0.2391, pruned_loss=0.05358, over 973197.68 frames.], batch size: 39, lr: 7.27e-04 +2022-05-04 04:36:46,024 INFO [train.py:715] (6/8) Epoch 2, batch 6350, loss[loss=0.2135, simple_loss=0.28, pruned_loss=0.07355, over 4849.00 frames.], tot_loss[loss=0.173, simple_loss=0.2391, pruned_loss=0.05346, over 973338.32 frames.], batch size: 32, lr: 7.27e-04 +2022-05-04 04:37:26,525 INFO [train.py:715] (6/8) Epoch 2, batch 6400, loss[loss=0.1662, simple_loss=0.234, pruned_loss=0.04918, over 4779.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2395, pruned_loss=0.05373, over 972685.67 frames.], batch size: 12, lr: 7.27e-04 +2022-05-04 04:38:05,327 INFO [train.py:715] (6/8) Epoch 2, batch 6450, loss[loss=0.1744, simple_loss=0.2378, pruned_loss=0.05555, over 4799.00 frames.], tot_loss[loss=0.1731, simple_loss=0.239, pruned_loss=0.0536, over 972556.17 frames.], batch size: 21, lr: 7.27e-04 +2022-05-04 04:38:44,594 INFO [train.py:715] (6/8) Epoch 2, batch 6500, loss[loss=0.1822, simple_loss=0.2588, pruned_loss=0.05283, over 4859.00 frames.], tot_loss[loss=0.1734, simple_loss=0.239, pruned_loss=0.05396, over 973717.86 frames.], batch size: 20, lr: 7.26e-04 +2022-05-04 04:39:24,834 INFO [train.py:715] (6/8) Epoch 2, batch 6550, loss[loss=0.1858, simple_loss=0.2406, pruned_loss=0.06551, over 4844.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2394, pruned_loss=0.05396, over 972462.59 frames.], batch size: 32, lr: 7.26e-04 +2022-05-04 04:40:04,768 INFO [train.py:715] (6/8) Epoch 2, batch 6600, loss[loss=0.1732, simple_loss=0.2471, pruned_loss=0.04963, over 4764.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2395, pruned_loss=0.05379, over 972237.80 frames.], batch size: 19, lr: 7.26e-04 +2022-05-04 04:40:43,855 INFO 
[train.py:715] (6/8) Epoch 2, batch 6650, loss[loss=0.1896, simple_loss=0.2481, pruned_loss=0.06558, over 4728.00 frames.], tot_loss[loss=0.1731, simple_loss=0.2391, pruned_loss=0.0536, over 972572.34 frames.], batch size: 16, lr: 7.26e-04 +2022-05-04 04:41:23,372 INFO [train.py:715] (6/8) Epoch 2, batch 6700, loss[loss=0.2201, simple_loss=0.2682, pruned_loss=0.08605, over 4788.00 frames.], tot_loss[loss=0.1733, simple_loss=0.239, pruned_loss=0.0538, over 972720.73 frames.], batch size: 18, lr: 7.25e-04 +2022-05-04 04:42:03,555 INFO [train.py:715] (6/8) Epoch 2, batch 6750, loss[loss=0.1861, simple_loss=0.2534, pruned_loss=0.05933, over 4710.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2398, pruned_loss=0.05433, over 971774.22 frames.], batch size: 15, lr: 7.25e-04 +2022-05-04 04:42:41,719 INFO [train.py:715] (6/8) Epoch 2, batch 6800, loss[loss=0.1674, simple_loss=0.2304, pruned_loss=0.05216, over 4979.00 frames.], tot_loss[loss=0.174, simple_loss=0.2399, pruned_loss=0.05409, over 971431.75 frames.], batch size: 35, lr: 7.25e-04 +2022-05-04 04:43:20,948 INFO [train.py:715] (6/8) Epoch 2, batch 6850, loss[loss=0.1509, simple_loss=0.2202, pruned_loss=0.0408, over 4899.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2399, pruned_loss=0.05369, over 971496.08 frames.], batch size: 39, lr: 7.25e-04 +2022-05-04 04:44:01,041 INFO [train.py:715] (6/8) Epoch 2, batch 6900, loss[loss=0.1797, simple_loss=0.2421, pruned_loss=0.05863, over 4923.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2406, pruned_loss=0.05418, over 973023.34 frames.], batch size: 18, lr: 7.24e-04 +2022-05-04 04:44:41,209 INFO [train.py:715] (6/8) Epoch 2, batch 6950, loss[loss=0.1683, simple_loss=0.2378, pruned_loss=0.04939, over 4858.00 frames.], tot_loss[loss=0.1731, simple_loss=0.2395, pruned_loss=0.05337, over 973675.27 frames.], batch size: 16, lr: 7.24e-04 +2022-05-04 04:45:19,413 INFO [train.py:715] (6/8) Epoch 2, batch 7000, loss[loss=0.1984, simple_loss=0.2492, pruned_loss=0.0738, over 4888.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2397, pruned_loss=0.05348, over 972352.78 frames.], batch size: 22, lr: 7.24e-04 +2022-05-04 04:45:59,981 INFO [train.py:715] (6/8) Epoch 2, batch 7050, loss[loss=0.1817, simple_loss=0.24, pruned_loss=0.06166, over 4942.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2396, pruned_loss=0.0537, over 972421.25 frames.], batch size: 18, lr: 7.24e-04 +2022-05-04 04:46:40,405 INFO [train.py:715] (6/8) Epoch 2, batch 7100, loss[loss=0.1406, simple_loss=0.2108, pruned_loss=0.03522, over 4966.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2394, pruned_loss=0.0539, over 972752.34 frames.], batch size: 14, lr: 7.24e-04 +2022-05-04 04:47:19,803 INFO [train.py:715] (6/8) Epoch 2, batch 7150, loss[loss=0.1629, simple_loss=0.2223, pruned_loss=0.05178, over 4926.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2383, pruned_loss=0.05372, over 972519.76 frames.], batch size: 18, lr: 7.23e-04 +2022-05-04 04:48:00,094 INFO [train.py:715] (6/8) Epoch 2, batch 7200, loss[loss=0.1757, simple_loss=0.2403, pruned_loss=0.0556, over 4743.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2378, pruned_loss=0.05315, over 972963.72 frames.], batch size: 16, lr: 7.23e-04 +2022-05-04 04:48:41,286 INFO [train.py:715] (6/8) Epoch 2, batch 7250, loss[loss=0.1933, simple_loss=0.2582, pruned_loss=0.06413, over 4810.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2381, pruned_loss=0.05324, over 973489.56 frames.], batch size: 27, lr: 7.23e-04 +2022-05-04 04:49:21,912 INFO [train.py:715] (6/8) Epoch 2, batch 7300, 
loss[loss=0.1857, simple_loss=0.2507, pruned_loss=0.06036, over 4834.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2399, pruned_loss=0.05453, over 972656.69 frames.], batch size: 30, lr: 7.23e-04 +2022-05-04 04:50:01,613 INFO [train.py:715] (6/8) Epoch 2, batch 7350, loss[loss=0.1654, simple_loss=0.2315, pruned_loss=0.04963, over 4749.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2397, pruned_loss=0.05401, over 972940.58 frames.], batch size: 19, lr: 7.22e-04 +2022-05-04 04:50:42,537 INFO [train.py:715] (6/8) Epoch 2, batch 7400, loss[loss=0.152, simple_loss=0.2222, pruned_loss=0.04089, over 4849.00 frames.], tot_loss[loss=0.174, simple_loss=0.2398, pruned_loss=0.05408, over 972347.50 frames.], batch size: 20, lr: 7.22e-04 +2022-05-04 04:51:24,330 INFO [train.py:715] (6/8) Epoch 2, batch 7450, loss[loss=0.1835, simple_loss=0.2492, pruned_loss=0.05889, over 4889.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2402, pruned_loss=0.05423, over 972752.28 frames.], batch size: 22, lr: 7.22e-04 +2022-05-04 04:52:04,716 INFO [train.py:715] (6/8) Epoch 2, batch 7500, loss[loss=0.1392, simple_loss=0.2181, pruned_loss=0.03012, over 4790.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2402, pruned_loss=0.05422, over 972222.80 frames.], batch size: 24, lr: 7.22e-04 +2022-05-04 04:52:45,163 INFO [train.py:715] (6/8) Epoch 2, batch 7550, loss[loss=0.1581, simple_loss=0.2201, pruned_loss=0.04802, over 4823.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2406, pruned_loss=0.05436, over 971833.89 frames.], batch size: 26, lr: 7.21e-04 +2022-05-04 04:53:26,940 INFO [train.py:715] (6/8) Epoch 2, batch 7600, loss[loss=0.1628, simple_loss=0.2357, pruned_loss=0.04495, over 4780.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2405, pruned_loss=0.05453, over 971824.89 frames.], batch size: 18, lr: 7.21e-04 +2022-05-04 04:54:08,328 INFO [train.py:715] (6/8) Epoch 2, batch 7650, loss[loss=0.1432, simple_loss=0.2087, pruned_loss=0.0388, over 4938.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2405, pruned_loss=0.05408, over 972255.19 frames.], batch size: 29, lr: 7.21e-04 +2022-05-04 04:54:48,391 INFO [train.py:715] (6/8) Epoch 2, batch 7700, loss[loss=0.1475, simple_loss=0.2212, pruned_loss=0.03692, over 4933.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2406, pruned_loss=0.05403, over 972621.87 frames.], batch size: 29, lr: 7.21e-04 +2022-05-04 04:55:29,841 INFO [train.py:715] (6/8) Epoch 2, batch 7750, loss[loss=0.1326, simple_loss=0.205, pruned_loss=0.0301, over 4771.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2395, pruned_loss=0.05343, over 972464.73 frames.], batch size: 14, lr: 7.21e-04 +2022-05-04 04:56:11,501 INFO [train.py:715] (6/8) Epoch 2, batch 7800, loss[loss=0.19, simple_loss=0.2514, pruned_loss=0.06434, over 4937.00 frames.], tot_loss[loss=0.1731, simple_loss=0.2395, pruned_loss=0.05331, over 971922.27 frames.], batch size: 29, lr: 7.20e-04 +2022-05-04 04:56:52,013 INFO [train.py:715] (6/8) Epoch 2, batch 7850, loss[loss=0.1623, simple_loss=0.2312, pruned_loss=0.04669, over 4701.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2391, pruned_loss=0.05264, over 970892.46 frames.], batch size: 15, lr: 7.20e-04 +2022-05-04 04:57:33,356 INFO [train.py:715] (6/8) Epoch 2, batch 7900, loss[loss=0.2068, simple_loss=0.2625, pruned_loss=0.07553, over 4977.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2388, pruned_loss=0.05281, over 971835.12 frames.], batch size: 24, lr: 7.20e-04 +2022-05-04 04:58:15,554 INFO [train.py:715] (6/8) Epoch 2, batch 7950, loss[loss=0.1649, simple_loss=0.2371, 
pruned_loss=0.04631, over 4940.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2398, pruned_loss=0.05364, over 972602.18 frames.], batch size: 23, lr: 7.20e-04 +2022-05-04 04:58:57,046 INFO [train.py:715] (6/8) Epoch 2, batch 8000, loss[loss=0.16, simple_loss=0.2194, pruned_loss=0.05027, over 4894.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2399, pruned_loss=0.05409, over 972683.20 frames.], batch size: 19, lr: 7.19e-04 +2022-05-04 04:59:37,247 INFO [train.py:715] (6/8) Epoch 2, batch 8050, loss[loss=0.1698, simple_loss=0.2329, pruned_loss=0.05332, over 4844.00 frames.], tot_loss[loss=0.1745, simple_loss=0.24, pruned_loss=0.05455, over 972416.63 frames.], batch size: 13, lr: 7.19e-04 +2022-05-04 05:00:18,972 INFO [train.py:715] (6/8) Epoch 2, batch 8100, loss[loss=0.1657, simple_loss=0.2317, pruned_loss=0.04988, over 4927.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2394, pruned_loss=0.05403, over 972000.29 frames.], batch size: 23, lr: 7.19e-04 +2022-05-04 05:01:00,839 INFO [train.py:715] (6/8) Epoch 2, batch 8150, loss[loss=0.1325, simple_loss=0.1984, pruned_loss=0.03327, over 4887.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2392, pruned_loss=0.05433, over 971862.16 frames.], batch size: 32, lr: 7.19e-04 +2022-05-04 05:01:41,277 INFO [train.py:715] (6/8) Epoch 2, batch 8200, loss[loss=0.1368, simple_loss=0.2019, pruned_loss=0.03581, over 4775.00 frames.], tot_loss[loss=0.1731, simple_loss=0.2387, pruned_loss=0.05379, over 972119.47 frames.], batch size: 18, lr: 7.18e-04 +2022-05-04 05:02:22,251 INFO [train.py:715] (6/8) Epoch 2, batch 8250, loss[loss=0.1485, simple_loss=0.2103, pruned_loss=0.04339, over 4846.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2396, pruned_loss=0.05389, over 972200.16 frames.], batch size: 13, lr: 7.18e-04 +2022-05-04 05:03:04,365 INFO [train.py:715] (6/8) Epoch 2, batch 8300, loss[loss=0.1723, simple_loss=0.2453, pruned_loss=0.04966, over 4786.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2405, pruned_loss=0.05462, over 972953.10 frames.], batch size: 21, lr: 7.18e-04 +2022-05-04 05:03:46,078 INFO [train.py:715] (6/8) Epoch 2, batch 8350, loss[loss=0.1689, simple_loss=0.2335, pruned_loss=0.05211, over 4979.00 frames.], tot_loss[loss=0.1754, simple_loss=0.241, pruned_loss=0.05492, over 972526.30 frames.], batch size: 31, lr: 7.18e-04 +2022-05-04 05:04:26,346 INFO [train.py:715] (6/8) Epoch 2, batch 8400, loss[loss=0.1897, simple_loss=0.2635, pruned_loss=0.05793, over 4848.00 frames.], tot_loss[loss=0.1752, simple_loss=0.2411, pruned_loss=0.0547, over 972270.70 frames.], batch size: 30, lr: 7.18e-04 +2022-05-04 05:05:07,479 INFO [train.py:715] (6/8) Epoch 2, batch 8450, loss[loss=0.2045, simple_loss=0.2656, pruned_loss=0.07173, over 4929.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2406, pruned_loss=0.05398, over 971994.27 frames.], batch size: 23, lr: 7.17e-04 +2022-05-04 05:05:49,592 INFO [train.py:715] (6/8) Epoch 2, batch 8500, loss[loss=0.1767, simple_loss=0.24, pruned_loss=0.05673, over 4881.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2395, pruned_loss=0.0536, over 972372.40 frames.], batch size: 22, lr: 7.17e-04 +2022-05-04 05:06:29,765 INFO [train.py:715] (6/8) Epoch 2, batch 8550, loss[loss=0.1708, simple_loss=0.2358, pruned_loss=0.0529, over 4896.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2394, pruned_loss=0.05384, over 972293.02 frames.], batch size: 22, lr: 7.17e-04 +2022-05-04 05:07:10,978 INFO [train.py:715] (6/8) Epoch 2, batch 8600, loss[loss=0.1719, simple_loss=0.2505, pruned_loss=0.04669, over 4971.00 
frames.], tot_loss[loss=0.1741, simple_loss=0.24, pruned_loss=0.05412, over 972676.92 frames.], batch size: 29, lr: 7.17e-04 +2022-05-04 05:07:52,998 INFO [train.py:715] (6/8) Epoch 2, batch 8650, loss[loss=0.1559, simple_loss=0.2228, pruned_loss=0.0445, over 4817.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2405, pruned_loss=0.05444, over 972240.37 frames.], batch size: 27, lr: 7.16e-04 +2022-05-04 05:08:34,297 INFO [train.py:715] (6/8) Epoch 2, batch 8700, loss[loss=0.1655, simple_loss=0.2393, pruned_loss=0.04584, over 4932.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2407, pruned_loss=0.05501, over 972617.74 frames.], batch size: 21, lr: 7.16e-04 +2022-05-04 05:09:14,834 INFO [train.py:715] (6/8) Epoch 2, batch 8750, loss[loss=0.2027, simple_loss=0.2679, pruned_loss=0.06876, over 4889.00 frames.], tot_loss[loss=0.1734, simple_loss=0.2391, pruned_loss=0.05381, over 971955.02 frames.], batch size: 22, lr: 7.16e-04 +2022-05-04 05:09:56,632 INFO [train.py:715] (6/8) Epoch 2, batch 8800, loss[loss=0.171, simple_loss=0.2495, pruned_loss=0.0462, over 4704.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2388, pruned_loss=0.05344, over 972611.53 frames.], batch size: 15, lr: 7.16e-04 +2022-05-04 05:10:38,736 INFO [train.py:715] (6/8) Epoch 2, batch 8850, loss[loss=0.1697, simple_loss=0.2299, pruned_loss=0.05473, over 4910.00 frames.], tot_loss[loss=0.173, simple_loss=0.2396, pruned_loss=0.05322, over 973388.72 frames.], batch size: 18, lr: 7.15e-04 +2022-05-04 05:11:18,695 INFO [train.py:715] (6/8) Epoch 2, batch 8900, loss[loss=0.175, simple_loss=0.2404, pruned_loss=0.0548, over 4948.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2389, pruned_loss=0.05309, over 973111.80 frames.], batch size: 39, lr: 7.15e-04 +2022-05-04 05:12:00,189 INFO [train.py:715] (6/8) Epoch 2, batch 8950, loss[loss=0.1537, simple_loss=0.2251, pruned_loss=0.04116, over 4924.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2379, pruned_loss=0.05286, over 971765.32 frames.], batch size: 21, lr: 7.15e-04 +2022-05-04 05:12:42,403 INFO [train.py:715] (6/8) Epoch 2, batch 9000, loss[loss=0.1667, simple_loss=0.2282, pruned_loss=0.05262, over 4815.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2389, pruned_loss=0.0538, over 972473.88 frames.], batch size: 26, lr: 7.15e-04 +2022-05-04 05:12:42,403 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 05:12:58,992 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1181, simple_loss=0.2047, pruned_loss=0.01572, over 914524.00 frames. 
+2022-05-04 05:13:41,071 INFO [train.py:715] (6/8) Epoch 2, batch 9050, loss[loss=0.2094, simple_loss=0.2693, pruned_loss=0.07471, over 4971.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2388, pruned_loss=0.05376, over 972449.22 frames.], batch size: 39, lr: 7.15e-04 +2022-05-04 05:14:21,245 INFO [train.py:715] (6/8) Epoch 2, batch 9100, loss[loss=0.1687, simple_loss=0.2381, pruned_loss=0.04969, over 4760.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2395, pruned_loss=0.0539, over 971675.86 frames.], batch size: 16, lr: 7.14e-04 +2022-05-04 05:15:02,343 INFO [train.py:715] (6/8) Epoch 2, batch 9150, loss[loss=0.1757, simple_loss=0.2374, pruned_loss=0.05701, over 4893.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2401, pruned_loss=0.05457, over 971265.88 frames.], batch size: 19, lr: 7.14e-04 +2022-05-04 05:15:43,582 INFO [train.py:715] (6/8) Epoch 2, batch 9200, loss[loss=0.1605, simple_loss=0.2244, pruned_loss=0.0483, over 4649.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2399, pruned_loss=0.05412, over 971588.75 frames.], batch size: 13, lr: 7.14e-04 +2022-05-04 05:16:25,090 INFO [train.py:715] (6/8) Epoch 2, batch 9250, loss[loss=0.1585, simple_loss=0.2205, pruned_loss=0.0482, over 4806.00 frames.], tot_loss[loss=0.1741, simple_loss=0.24, pruned_loss=0.05413, over 970941.41 frames.], batch size: 12, lr: 7.14e-04 +2022-05-04 05:17:05,074 INFO [train.py:715] (6/8) Epoch 2, batch 9300, loss[loss=0.1659, simple_loss=0.2273, pruned_loss=0.05229, over 4899.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2395, pruned_loss=0.05395, over 972227.08 frames.], batch size: 29, lr: 7.13e-04 +2022-05-04 05:17:46,767 INFO [train.py:715] (6/8) Epoch 2, batch 9350, loss[loss=0.1654, simple_loss=0.2294, pruned_loss=0.05077, over 4932.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2389, pruned_loss=0.05373, over 972003.21 frames.], batch size: 23, lr: 7.13e-04 +2022-05-04 05:18:28,861 INFO [train.py:715] (6/8) Epoch 2, batch 9400, loss[loss=0.1726, simple_loss=0.2462, pruned_loss=0.04951, over 4865.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2396, pruned_loss=0.05405, over 972264.78 frames.], batch size: 20, lr: 7.13e-04 +2022-05-04 05:19:08,502 INFO [train.py:715] (6/8) Epoch 2, batch 9450, loss[loss=0.1754, simple_loss=0.2363, pruned_loss=0.05729, over 4750.00 frames.], tot_loss[loss=0.1742, simple_loss=0.24, pruned_loss=0.05425, over 972473.67 frames.], batch size: 16, lr: 7.13e-04 +2022-05-04 05:19:48,362 INFO [train.py:715] (6/8) Epoch 2, batch 9500, loss[loss=0.2013, simple_loss=0.267, pruned_loss=0.06781, over 4820.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2394, pruned_loss=0.05381, over 972480.94 frames.], batch size: 26, lr: 7.13e-04 +2022-05-04 05:20:28,635 INFO [train.py:715] (6/8) Epoch 2, batch 9550, loss[loss=0.1914, simple_loss=0.256, pruned_loss=0.0634, over 4754.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2394, pruned_loss=0.05387, over 972415.47 frames.], batch size: 19, lr: 7.12e-04 +2022-05-04 05:21:08,642 INFO [train.py:715] (6/8) Epoch 2, batch 9600, loss[loss=0.1337, simple_loss=0.1905, pruned_loss=0.03849, over 4750.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2383, pruned_loss=0.05338, over 971716.65 frames.], batch size: 12, lr: 7.12e-04 +2022-05-04 05:21:47,538 INFO [train.py:715] (6/8) Epoch 2, batch 9650, loss[loss=0.1575, simple_loss=0.2286, pruned_loss=0.0432, over 4984.00 frames.], tot_loss[loss=0.1724, simple_loss=0.238, pruned_loss=0.05337, over 971631.94 frames.], batch size: 28, lr: 7.12e-04 +2022-05-04 05:22:27,781 INFO 
[train.py:715] (6/8) Epoch 2, batch 9700, loss[loss=0.1675, simple_loss=0.2371, pruned_loss=0.04898, over 4956.00 frames.], tot_loss[loss=0.1714, simple_loss=0.2371, pruned_loss=0.05285, over 971397.72 frames.], batch size: 15, lr: 7.12e-04 +2022-05-04 05:23:08,413 INFO [train.py:715] (6/8) Epoch 2, batch 9750, loss[loss=0.1702, simple_loss=0.2369, pruned_loss=0.05175, over 4929.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2376, pruned_loss=0.05295, over 971713.78 frames.], batch size: 29, lr: 7.11e-04 +2022-05-04 05:23:47,697 INFO [train.py:715] (6/8) Epoch 2, batch 9800, loss[loss=0.2021, simple_loss=0.2709, pruned_loss=0.06668, over 4876.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2397, pruned_loss=0.05369, over 971763.49 frames.], batch size: 20, lr: 7.11e-04 +2022-05-04 05:24:26,798 INFO [train.py:715] (6/8) Epoch 2, batch 9850, loss[loss=0.1617, simple_loss=0.2199, pruned_loss=0.05177, over 4974.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2403, pruned_loss=0.05425, over 972401.85 frames.], batch size: 14, lr: 7.11e-04 +2022-05-04 05:25:06,820 INFO [train.py:715] (6/8) Epoch 2, batch 9900, loss[loss=0.169, simple_loss=0.2288, pruned_loss=0.0546, over 4858.00 frames.], tot_loss[loss=0.1746, simple_loss=0.241, pruned_loss=0.05412, over 972233.08 frames.], batch size: 30, lr: 7.11e-04 +2022-05-04 05:25:46,412 INFO [train.py:715] (6/8) Epoch 2, batch 9950, loss[loss=0.1558, simple_loss=0.2206, pruned_loss=0.04551, over 4935.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2402, pruned_loss=0.05343, over 972031.41 frames.], batch size: 21, lr: 7.11e-04 +2022-05-04 05:26:25,431 INFO [train.py:715] (6/8) Epoch 2, batch 10000, loss[loss=0.1547, simple_loss=0.2329, pruned_loss=0.03822, over 4743.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2395, pruned_loss=0.05318, over 971477.62 frames.], batch size: 16, lr: 7.10e-04 +2022-05-04 05:27:06,103 INFO [train.py:715] (6/8) Epoch 2, batch 10050, loss[loss=0.1947, simple_loss=0.2565, pruned_loss=0.06646, over 4924.00 frames.], tot_loss[loss=0.1734, simple_loss=0.2396, pruned_loss=0.05357, over 970946.46 frames.], batch size: 39, lr: 7.10e-04 +2022-05-04 05:27:45,914 INFO [train.py:715] (6/8) Epoch 2, batch 10100, loss[loss=0.1774, simple_loss=0.2408, pruned_loss=0.05701, over 4805.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2394, pruned_loss=0.05309, over 971035.06 frames.], batch size: 14, lr: 7.10e-04 +2022-05-04 05:28:25,923 INFO [train.py:715] (6/8) Epoch 2, batch 10150, loss[loss=0.1686, simple_loss=0.2288, pruned_loss=0.05421, over 4863.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2387, pruned_loss=0.05274, over 971456.69 frames.], batch size: 20, lr: 7.10e-04 +2022-05-04 05:29:06,179 INFO [train.py:715] (6/8) Epoch 2, batch 10200, loss[loss=0.1853, simple_loss=0.2422, pruned_loss=0.06423, over 4940.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2385, pruned_loss=0.05256, over 972258.08 frames.], batch size: 39, lr: 7.09e-04 +2022-05-04 05:29:47,604 INFO [train.py:715] (6/8) Epoch 2, batch 10250, loss[loss=0.1436, simple_loss=0.2179, pruned_loss=0.03462, over 4829.00 frames.], tot_loss[loss=0.172, simple_loss=0.2387, pruned_loss=0.05265, over 972267.09 frames.], batch size: 25, lr: 7.09e-04 +2022-05-04 05:30:27,425 INFO [train.py:715] (6/8) Epoch 2, batch 10300, loss[loss=0.179, simple_loss=0.2504, pruned_loss=0.05382, over 4933.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2391, pruned_loss=0.05269, over 971294.23 frames.], batch size: 18, lr: 7.09e-04 +2022-05-04 05:31:07,041 INFO [train.py:715] (6/8) Epoch 2, 
batch 10350, loss[loss=0.1968, simple_loss=0.2607, pruned_loss=0.06651, over 4954.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2401, pruned_loss=0.05359, over 971364.91 frames.], batch size: 39, lr: 7.09e-04 +2022-05-04 05:31:49,859 INFO [train.py:715] (6/8) Epoch 2, batch 10400, loss[loss=0.1569, simple_loss=0.2335, pruned_loss=0.0401, over 4986.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2407, pruned_loss=0.05427, over 971860.27 frames.], batch size: 25, lr: 7.09e-04 +2022-05-04 05:32:31,025 INFO [train.py:715] (6/8) Epoch 2, batch 10450, loss[loss=0.1879, simple_loss=0.2552, pruned_loss=0.06031, over 4739.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2409, pruned_loss=0.05431, over 972371.15 frames.], batch size: 16, lr: 7.08e-04 +2022-05-04 05:33:11,282 INFO [train.py:715] (6/8) Epoch 2, batch 10500, loss[loss=0.192, simple_loss=0.2449, pruned_loss=0.06958, over 4795.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2401, pruned_loss=0.05363, over 971643.67 frames.], batch size: 24, lr: 7.08e-04 +2022-05-04 05:33:50,626 INFO [train.py:715] (6/8) Epoch 2, batch 10550, loss[loss=0.1684, simple_loss=0.2288, pruned_loss=0.05407, over 4913.00 frames.], tot_loss[loss=0.1734, simple_loss=0.2399, pruned_loss=0.05346, over 972315.44 frames.], batch size: 17, lr: 7.08e-04 +2022-05-04 05:34:31,852 INFO [train.py:715] (6/8) Epoch 2, batch 10600, loss[loss=0.1821, simple_loss=0.2432, pruned_loss=0.0605, over 4950.00 frames.], tot_loss[loss=0.173, simple_loss=0.2392, pruned_loss=0.05337, over 972941.41 frames.], batch size: 35, lr: 7.08e-04 +2022-05-04 05:35:12,043 INFO [train.py:715] (6/8) Epoch 2, batch 10650, loss[loss=0.1847, simple_loss=0.248, pruned_loss=0.06069, over 4837.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2382, pruned_loss=0.0526, over 972838.63 frames.], batch size: 15, lr: 7.07e-04 +2022-05-04 05:35:51,939 INFO [train.py:715] (6/8) Epoch 2, batch 10700, loss[loss=0.2034, simple_loss=0.266, pruned_loss=0.07042, over 4940.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2388, pruned_loss=0.053, over 972905.80 frames.], batch size: 29, lr: 7.07e-04 +2022-05-04 05:36:32,503 INFO [train.py:715] (6/8) Epoch 2, batch 10750, loss[loss=0.1455, simple_loss=0.2261, pruned_loss=0.0325, over 4756.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2397, pruned_loss=0.05345, over 972876.41 frames.], batch size: 19, lr: 7.07e-04 +2022-05-04 05:37:13,633 INFO [train.py:715] (6/8) Epoch 2, batch 10800, loss[loss=0.1602, simple_loss=0.2419, pruned_loss=0.03927, over 4888.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2392, pruned_loss=0.05294, over 973311.44 frames.], batch size: 22, lr: 7.07e-04 +2022-05-04 05:37:53,813 INFO [train.py:715] (6/8) Epoch 2, batch 10850, loss[loss=0.23, simple_loss=0.2878, pruned_loss=0.08609, over 4898.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2387, pruned_loss=0.05248, over 972794.01 frames.], batch size: 19, lr: 7.07e-04 +2022-05-04 05:38:33,327 INFO [train.py:715] (6/8) Epoch 2, batch 10900, loss[loss=0.1576, simple_loss=0.2304, pruned_loss=0.0424, over 4965.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2383, pruned_loss=0.05264, over 973251.07 frames.], batch size: 35, lr: 7.06e-04 +2022-05-04 05:39:14,361 INFO [train.py:715] (6/8) Epoch 2, batch 10950, loss[loss=0.1854, simple_loss=0.2574, pruned_loss=0.0567, over 4973.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2395, pruned_loss=0.05342, over 973298.65 frames.], batch size: 28, lr: 7.06e-04 +2022-05-04 05:39:54,167 INFO [train.py:715] (6/8) Epoch 2, batch 11000, loss[loss=0.178, 
simple_loss=0.2581, pruned_loss=0.04898, over 4780.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2381, pruned_loss=0.0525, over 972767.37 frames.], batch size: 18, lr: 7.06e-04 +2022-05-04 05:40:33,760 INFO [train.py:715] (6/8) Epoch 2, batch 11050, loss[loss=0.1531, simple_loss=0.2231, pruned_loss=0.04157, over 4819.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2387, pruned_loss=0.05273, over 973137.03 frames.], batch size: 26, lr: 7.06e-04 +2022-05-04 05:41:14,439 INFO [train.py:715] (6/8) Epoch 2, batch 11100, loss[loss=0.1673, simple_loss=0.2419, pruned_loss=0.04628, over 4792.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2391, pruned_loss=0.05323, over 972682.78 frames.], batch size: 14, lr: 7.05e-04 +2022-05-04 05:41:54,874 INFO [train.py:715] (6/8) Epoch 2, batch 11150, loss[loss=0.1856, simple_loss=0.2621, pruned_loss=0.05452, over 4799.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2389, pruned_loss=0.05328, over 972856.91 frames.], batch size: 25, lr: 7.05e-04 +2022-05-04 05:42:35,629 INFO [train.py:715] (6/8) Epoch 2, batch 11200, loss[loss=0.1487, simple_loss=0.2267, pruned_loss=0.03533, over 4971.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2387, pruned_loss=0.05309, over 972389.02 frames.], batch size: 24, lr: 7.05e-04 +2022-05-04 05:43:15,662 INFO [train.py:715] (6/8) Epoch 2, batch 11250, loss[loss=0.1688, simple_loss=0.2264, pruned_loss=0.05561, over 4797.00 frames.], tot_loss[loss=0.173, simple_loss=0.2391, pruned_loss=0.05342, over 972558.51 frames.], batch size: 21, lr: 7.05e-04 +2022-05-04 05:43:56,717 INFO [train.py:715] (6/8) Epoch 2, batch 11300, loss[loss=0.1572, simple_loss=0.2171, pruned_loss=0.04868, over 4725.00 frames.], tot_loss[loss=0.172, simple_loss=0.2382, pruned_loss=0.05288, over 971862.02 frames.], batch size: 12, lr: 7.05e-04 +2022-05-04 05:44:37,063 INFO [train.py:715] (6/8) Epoch 2, batch 11350, loss[loss=0.1718, simple_loss=0.2477, pruned_loss=0.04797, over 4940.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2387, pruned_loss=0.05277, over 972301.32 frames.], batch size: 23, lr: 7.04e-04 +2022-05-04 05:45:16,686 INFO [train.py:715] (6/8) Epoch 2, batch 11400, loss[loss=0.1825, simple_loss=0.2502, pruned_loss=0.05738, over 4932.00 frames.], tot_loss[loss=0.1729, simple_loss=0.239, pruned_loss=0.05346, over 972624.81 frames.], batch size: 23, lr: 7.04e-04 +2022-05-04 05:45:56,741 INFO [train.py:715] (6/8) Epoch 2, batch 11450, loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.03543, over 4815.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2388, pruned_loss=0.05332, over 971646.73 frames.], batch size: 21, lr: 7.04e-04 +2022-05-04 05:46:37,334 INFO [train.py:715] (6/8) Epoch 2, batch 11500, loss[loss=0.1967, simple_loss=0.2515, pruned_loss=0.07094, over 4983.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2394, pruned_loss=0.05362, over 972302.26 frames.], batch size: 24, lr: 7.04e-04 +2022-05-04 05:47:18,056 INFO [train.py:715] (6/8) Epoch 2, batch 11550, loss[loss=0.1542, simple_loss=0.2299, pruned_loss=0.03928, over 4905.00 frames.], tot_loss[loss=0.174, simple_loss=0.24, pruned_loss=0.05404, over 972349.39 frames.], batch size: 17, lr: 7.04e-04 +2022-05-04 05:47:58,031 INFO [train.py:715] (6/8) Epoch 2, batch 11600, loss[loss=0.1825, simple_loss=0.2484, pruned_loss=0.05832, over 4947.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2394, pruned_loss=0.05388, over 972566.20 frames.], batch size: 14, lr: 7.03e-04 +2022-05-04 05:48:39,184 INFO [train.py:715] (6/8) Epoch 2, batch 11650, loss[loss=0.153, simple_loss=0.2258, 
pruned_loss=0.04008, over 4879.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2381, pruned_loss=0.05325, over 972029.91 frames.], batch size: 32, lr: 7.03e-04 +2022-05-04 05:49:19,428 INFO [train.py:715] (6/8) Epoch 2, batch 11700, loss[loss=0.1714, simple_loss=0.2296, pruned_loss=0.05664, over 4889.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2382, pruned_loss=0.05328, over 971779.31 frames.], batch size: 19, lr: 7.03e-04 +2022-05-04 05:49:59,622 INFO [train.py:715] (6/8) Epoch 2, batch 11750, loss[loss=0.1862, simple_loss=0.2525, pruned_loss=0.05995, over 4775.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2384, pruned_loss=0.05326, over 971508.02 frames.], batch size: 19, lr: 7.03e-04 +2022-05-04 05:50:40,403 INFO [train.py:715] (6/8) Epoch 2, batch 11800, loss[loss=0.1604, simple_loss=0.2278, pruned_loss=0.04649, over 4992.00 frames.], tot_loss[loss=0.172, simple_loss=0.2382, pruned_loss=0.05287, over 972319.70 frames.], batch size: 16, lr: 7.02e-04 +2022-05-04 05:51:20,992 INFO [train.py:715] (6/8) Epoch 2, batch 11850, loss[loss=0.1974, simple_loss=0.2796, pruned_loss=0.05759, over 4937.00 frames.], tot_loss[loss=0.172, simple_loss=0.2382, pruned_loss=0.05287, over 973030.14 frames.], batch size: 21, lr: 7.02e-04 +2022-05-04 05:52:00,408 INFO [train.py:715] (6/8) Epoch 2, batch 11900, loss[loss=0.1673, simple_loss=0.2355, pruned_loss=0.04957, over 4977.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2386, pruned_loss=0.05293, over 973227.50 frames.], batch size: 35, lr: 7.02e-04 +2022-05-04 05:52:40,340 INFO [train.py:715] (6/8) Epoch 2, batch 11950, loss[loss=0.1668, simple_loss=0.2285, pruned_loss=0.05255, over 4892.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2379, pruned_loss=0.05286, over 972830.66 frames.], batch size: 32, lr: 7.02e-04 +2022-05-04 05:53:21,663 INFO [train.py:715] (6/8) Epoch 2, batch 12000, loss[loss=0.1484, simple_loss=0.2166, pruned_loss=0.04012, over 4817.00 frames.], tot_loss[loss=0.1726, simple_loss=0.239, pruned_loss=0.05304, over 973443.44 frames.], batch size: 15, lr: 7.02e-04 +2022-05-04 05:53:21,664 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 05:53:45,624 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1181, simple_loss=0.2049, pruned_loss=0.01568, over 914524.00 frames. 
+2022-05-04 05:54:27,029 INFO [train.py:715] (6/8) Epoch 2, batch 12050, loss[loss=0.1737, simple_loss=0.2314, pruned_loss=0.05803, over 4869.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2388, pruned_loss=0.05281, over 972407.27 frames.], batch size: 32, lr: 7.01e-04 +2022-05-04 05:55:07,120 INFO [train.py:715] (6/8) Epoch 2, batch 12100, loss[loss=0.2136, simple_loss=0.2699, pruned_loss=0.07867, over 4938.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2391, pruned_loss=0.05295, over 972602.74 frames.], batch size: 21, lr: 7.01e-04 +2022-05-04 05:55:47,109 INFO [train.py:715] (6/8) Epoch 2, batch 12150, loss[loss=0.2074, simple_loss=0.272, pruned_loss=0.0714, over 4778.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2398, pruned_loss=0.05359, over 972271.22 frames.], batch size: 17, lr: 7.01e-04 +2022-05-04 05:56:27,812 INFO [train.py:715] (6/8) Epoch 2, batch 12200, loss[loss=0.157, simple_loss=0.219, pruned_loss=0.0475, over 4825.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2394, pruned_loss=0.05348, over 972324.58 frames.], batch size: 12, lr: 7.01e-04 +2022-05-04 05:57:07,983 INFO [train.py:715] (6/8) Epoch 2, batch 12250, loss[loss=0.1933, simple_loss=0.2786, pruned_loss=0.05401, over 4823.00 frames.], tot_loss[loss=0.173, simple_loss=0.2392, pruned_loss=0.05346, over 973114.25 frames.], batch size: 25, lr: 7.01e-04 +2022-05-04 05:57:48,416 INFO [train.py:715] (6/8) Epoch 2, batch 12300, loss[loss=0.153, simple_loss=0.2188, pruned_loss=0.04362, over 4747.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2385, pruned_loss=0.05336, over 973633.80 frames.], batch size: 16, lr: 7.00e-04 +2022-05-04 05:58:28,543 INFO [train.py:715] (6/8) Epoch 2, batch 12350, loss[loss=0.1911, simple_loss=0.2663, pruned_loss=0.05797, over 4979.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2396, pruned_loss=0.05363, over 973574.92 frames.], batch size: 24, lr: 7.00e-04 +2022-05-04 05:59:09,759 INFO [train.py:715] (6/8) Epoch 2, batch 12400, loss[loss=0.1591, simple_loss=0.2248, pruned_loss=0.04674, over 4851.00 frames.], tot_loss[loss=0.173, simple_loss=0.239, pruned_loss=0.05348, over 973736.30 frames.], batch size: 32, lr: 7.00e-04 +2022-05-04 05:59:50,019 INFO [train.py:715] (6/8) Epoch 2, batch 12450, loss[loss=0.1702, simple_loss=0.2389, pruned_loss=0.05075, over 4968.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2386, pruned_loss=0.05337, over 973746.47 frames.], batch size: 24, lr: 7.00e-04 +2022-05-04 06:00:29,875 INFO [train.py:715] (6/8) Epoch 2, batch 12500, loss[loss=0.1605, simple_loss=0.2321, pruned_loss=0.04442, over 4767.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2384, pruned_loss=0.05339, over 973551.37 frames.], batch size: 16, lr: 6.99e-04 +2022-05-04 06:01:10,539 INFO [train.py:715] (6/8) Epoch 2, batch 12550, loss[loss=0.1753, simple_loss=0.2433, pruned_loss=0.0536, over 4823.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2385, pruned_loss=0.05365, over 972532.27 frames.], batch size: 26, lr: 6.99e-04 +2022-05-04 06:01:50,875 INFO [train.py:715] (6/8) Epoch 2, batch 12600, loss[loss=0.1243, simple_loss=0.181, pruned_loss=0.03376, over 4757.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2381, pruned_loss=0.05309, over 972569.12 frames.], batch size: 12, lr: 6.99e-04 +2022-05-04 06:02:30,897 INFO [train.py:715] (6/8) Epoch 2, batch 12650, loss[loss=0.1539, simple_loss=0.2107, pruned_loss=0.0485, over 4767.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2374, pruned_loss=0.05253, over 971776.13 frames.], batch size: 19, lr: 6.99e-04 +2022-05-04 06:03:11,020 INFO 
[train.py:715] (6/8) Epoch 2, batch 12700, loss[loss=0.1842, simple_loss=0.251, pruned_loss=0.05869, over 4764.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2387, pruned_loss=0.05344, over 971159.46 frames.], batch size: 19, lr: 6.99e-04 +2022-05-04 06:03:51,756 INFO [train.py:715] (6/8) Epoch 2, batch 12750, loss[loss=0.1768, simple_loss=0.2439, pruned_loss=0.05484, over 4699.00 frames.], tot_loss[loss=0.1721, simple_loss=0.238, pruned_loss=0.05308, over 971684.09 frames.], batch size: 15, lr: 6.98e-04 +2022-05-04 06:04:31,920 INFO [train.py:715] (6/8) Epoch 2, batch 12800, loss[loss=0.1689, simple_loss=0.239, pruned_loss=0.04934, over 4812.00 frames.], tot_loss[loss=0.1735, simple_loss=0.239, pruned_loss=0.05397, over 972675.43 frames.], batch size: 25, lr: 6.98e-04 +2022-05-04 06:05:11,612 INFO [train.py:715] (6/8) Epoch 2, batch 12850, loss[loss=0.1492, simple_loss=0.2253, pruned_loss=0.03657, over 4877.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2391, pruned_loss=0.05421, over 971992.08 frames.], batch size: 16, lr: 6.98e-04 +2022-05-04 06:05:52,438 INFO [train.py:715] (6/8) Epoch 2, batch 12900, loss[loss=0.1854, simple_loss=0.2449, pruned_loss=0.06301, over 4909.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2386, pruned_loss=0.05335, over 972780.35 frames.], batch size: 23, lr: 6.98e-04 +2022-05-04 06:06:32,858 INFO [train.py:715] (6/8) Epoch 2, batch 12950, loss[loss=0.156, simple_loss=0.2267, pruned_loss=0.04268, over 4798.00 frames.], tot_loss[loss=0.1719, simple_loss=0.238, pruned_loss=0.0529, over 972284.71 frames.], batch size: 25, lr: 6.98e-04 +2022-05-04 06:07:12,808 INFO [train.py:715] (6/8) Epoch 2, batch 13000, loss[loss=0.1907, simple_loss=0.2633, pruned_loss=0.05908, over 4778.00 frames.], tot_loss[loss=0.1726, simple_loss=0.239, pruned_loss=0.05312, over 971881.04 frames.], batch size: 14, lr: 6.97e-04 +2022-05-04 06:07:53,250 INFO [train.py:715] (6/8) Epoch 2, batch 13050, loss[loss=0.1443, simple_loss=0.2169, pruned_loss=0.03589, over 4772.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2387, pruned_loss=0.05314, over 971584.53 frames.], batch size: 18, lr: 6.97e-04 +2022-05-04 06:08:34,493 INFO [train.py:715] (6/8) Epoch 2, batch 13100, loss[loss=0.157, simple_loss=0.2202, pruned_loss=0.04688, over 4960.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2389, pruned_loss=0.05317, over 972230.33 frames.], batch size: 35, lr: 6.97e-04 +2022-05-04 06:09:14,676 INFO [train.py:715] (6/8) Epoch 2, batch 13150, loss[loss=0.1709, simple_loss=0.2352, pruned_loss=0.05326, over 4968.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2386, pruned_loss=0.05295, over 971975.52 frames.], batch size: 14, lr: 6.97e-04 +2022-05-04 06:09:54,441 INFO [train.py:715] (6/8) Epoch 2, batch 13200, loss[loss=0.19, simple_loss=0.257, pruned_loss=0.06148, over 4910.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2388, pruned_loss=0.05298, over 971955.48 frames.], batch size: 19, lr: 6.96e-04 +2022-05-04 06:10:35,329 INFO [train.py:715] (6/8) Epoch 2, batch 13250, loss[loss=0.1894, simple_loss=0.267, pruned_loss=0.05593, over 4961.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2392, pruned_loss=0.05311, over 972241.61 frames.], batch size: 24, lr: 6.96e-04 +2022-05-04 06:11:15,870 INFO [train.py:715] (6/8) Epoch 2, batch 13300, loss[loss=0.1444, simple_loss=0.2163, pruned_loss=0.0362, over 4794.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2398, pruned_loss=0.05362, over 971781.79 frames.], batch size: 17, lr: 6.96e-04 +2022-05-04 06:11:55,900 INFO [train.py:715] (6/8) Epoch 2, 
batch 13350, loss[loss=0.2308, simple_loss=0.2885, pruned_loss=0.08654, over 4844.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2403, pruned_loss=0.05377, over 971162.93 frames.], batch size: 32, lr: 6.96e-04 +2022-05-04 06:12:36,499 INFO [train.py:715] (6/8) Epoch 2, batch 13400, loss[loss=0.134, simple_loss=0.2076, pruned_loss=0.03018, over 4803.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2386, pruned_loss=0.05246, over 972156.09 frames.], batch size: 21, lr: 6.96e-04 +2022-05-04 06:13:17,584 INFO [train.py:715] (6/8) Epoch 2, batch 13450, loss[loss=0.2171, simple_loss=0.2801, pruned_loss=0.0771, over 4935.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2392, pruned_loss=0.05317, over 972674.85 frames.], batch size: 21, lr: 6.95e-04 +2022-05-04 06:13:57,535 INFO [train.py:715] (6/8) Epoch 2, batch 13500, loss[loss=0.2126, simple_loss=0.2621, pruned_loss=0.08159, over 4908.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2386, pruned_loss=0.05341, over 972919.80 frames.], batch size: 17, lr: 6.95e-04 +2022-05-04 06:14:37,542 INFO [train.py:715] (6/8) Epoch 2, batch 13550, loss[loss=0.1609, simple_loss=0.2262, pruned_loss=0.04777, over 4861.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2388, pruned_loss=0.05297, over 972874.20 frames.], batch size: 30, lr: 6.95e-04 +2022-05-04 06:15:18,687 INFO [train.py:715] (6/8) Epoch 2, batch 13600, loss[loss=0.1993, simple_loss=0.268, pruned_loss=0.06533, over 4792.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2387, pruned_loss=0.05298, over 972259.33 frames.], batch size: 17, lr: 6.95e-04 +2022-05-04 06:15:59,132 INFO [train.py:715] (6/8) Epoch 2, batch 13650, loss[loss=0.2189, simple_loss=0.2624, pruned_loss=0.08771, over 4857.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2389, pruned_loss=0.05288, over 972694.17 frames.], batch size: 20, lr: 6.95e-04 +2022-05-04 06:16:38,701 INFO [train.py:715] (6/8) Epoch 2, batch 13700, loss[loss=0.1827, simple_loss=0.2567, pruned_loss=0.05437, over 4819.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2389, pruned_loss=0.05301, over 973409.32 frames.], batch size: 25, lr: 6.94e-04 +2022-05-04 06:17:19,964 INFO [train.py:715] (6/8) Epoch 2, batch 13750, loss[loss=0.1713, simple_loss=0.2216, pruned_loss=0.06057, over 4774.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2381, pruned_loss=0.05255, over 972997.92 frames.], batch size: 18, lr: 6.94e-04 +2022-05-04 06:18:00,041 INFO [train.py:715] (6/8) Epoch 2, batch 13800, loss[loss=0.1578, simple_loss=0.2352, pruned_loss=0.04017, over 4761.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2388, pruned_loss=0.05295, over 972871.99 frames.], batch size: 14, lr: 6.94e-04 +2022-05-04 06:18:39,732 INFO [train.py:715] (6/8) Epoch 2, batch 13850, loss[loss=0.1672, simple_loss=0.2491, pruned_loss=0.04263, over 4949.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2391, pruned_loss=0.05277, over 971982.61 frames.], batch size: 21, lr: 6.94e-04 +2022-05-04 06:19:19,323 INFO [train.py:715] (6/8) Epoch 2, batch 13900, loss[loss=0.1819, simple_loss=0.2485, pruned_loss=0.05762, over 4933.00 frames.], tot_loss[loss=0.172, simple_loss=0.2388, pruned_loss=0.05264, over 971327.77 frames.], batch size: 29, lr: 6.94e-04 +2022-05-04 06:20:00,089 INFO [train.py:715] (6/8) Epoch 2, batch 13950, loss[loss=0.1669, simple_loss=0.2428, pruned_loss=0.04552, over 4859.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2386, pruned_loss=0.05217, over 971664.40 frames.], batch size: 32, lr: 6.93e-04 +2022-05-04 06:20:40,300 INFO [train.py:715] (6/8) Epoch 2, batch 14000, 
loss[loss=0.1585, simple_loss=0.2278, pruned_loss=0.04463, over 4867.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2402, pruned_loss=0.0531, over 971535.63 frames.], batch size: 32, lr: 6.93e-04 +2022-05-04 06:21:19,551 INFO [train.py:715] (6/8) Epoch 2, batch 14050, loss[loss=0.2192, simple_loss=0.2657, pruned_loss=0.08638, over 4899.00 frames.], tot_loss[loss=0.1734, simple_loss=0.2402, pruned_loss=0.05332, over 971748.88 frames.], batch size: 19, lr: 6.93e-04 +2022-05-04 06:22:01,055 INFO [train.py:715] (6/8) Epoch 2, batch 14100, loss[loss=0.1609, simple_loss=0.2217, pruned_loss=0.05009, over 4917.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2393, pruned_loss=0.05282, over 972259.60 frames.], batch size: 18, lr: 6.93e-04 +2022-05-04 06:22:41,697 INFO [train.py:715] (6/8) Epoch 2, batch 14150, loss[loss=0.2032, simple_loss=0.2543, pruned_loss=0.07602, over 4853.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2393, pruned_loss=0.05296, over 972782.87 frames.], batch size: 30, lr: 6.93e-04 +2022-05-04 06:23:21,642 INFO [train.py:715] (6/8) Epoch 2, batch 14200, loss[loss=0.145, simple_loss=0.2175, pruned_loss=0.03624, over 4749.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2395, pruned_loss=0.05381, over 971670.12 frames.], batch size: 19, lr: 6.92e-04 +2022-05-04 06:24:01,486 INFO [train.py:715] (6/8) Epoch 2, batch 14250, loss[loss=0.1566, simple_loss=0.2253, pruned_loss=0.04392, over 4807.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2389, pruned_loss=0.05336, over 971345.32 frames.], batch size: 21, lr: 6.92e-04 +2022-05-04 06:24:42,099 INFO [train.py:715] (6/8) Epoch 2, batch 14300, loss[loss=0.1768, simple_loss=0.2436, pruned_loss=0.05499, over 4778.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2375, pruned_loss=0.05196, over 971846.95 frames.], batch size: 14, lr: 6.92e-04 +2022-05-04 06:25:21,664 INFO [train.py:715] (6/8) Epoch 2, batch 14350, loss[loss=0.2034, simple_loss=0.265, pruned_loss=0.07095, over 4863.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2377, pruned_loss=0.05209, over 971482.67 frames.], batch size: 13, lr: 6.92e-04 +2022-05-04 06:26:01,523 INFO [train.py:715] (6/8) Epoch 2, batch 14400, loss[loss=0.1734, simple_loss=0.2309, pruned_loss=0.05795, over 4980.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2374, pruned_loss=0.05261, over 971292.94 frames.], batch size: 33, lr: 6.92e-04 +2022-05-04 06:26:41,865 INFO [train.py:715] (6/8) Epoch 2, batch 14450, loss[loss=0.175, simple_loss=0.2398, pruned_loss=0.05509, over 4895.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2372, pruned_loss=0.05224, over 971682.86 frames.], batch size: 17, lr: 6.91e-04 +2022-05-04 06:27:22,108 INFO [train.py:715] (6/8) Epoch 2, batch 14500, loss[loss=0.1319, simple_loss=0.2086, pruned_loss=0.02761, over 4986.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2369, pruned_loss=0.05192, over 971782.00 frames.], batch size: 28, lr: 6.91e-04 +2022-05-04 06:28:01,692 INFO [train.py:715] (6/8) Epoch 2, batch 14550, loss[loss=0.1783, simple_loss=0.2377, pruned_loss=0.05948, over 4697.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2379, pruned_loss=0.05285, over 971968.38 frames.], batch size: 15, lr: 6.91e-04 +2022-05-04 06:28:42,172 INFO [train.py:715] (6/8) Epoch 2, batch 14600, loss[loss=0.1605, simple_loss=0.2299, pruned_loss=0.04556, over 4900.00 frames.], tot_loss[loss=0.172, simple_loss=0.2386, pruned_loss=0.05267, over 972030.49 frames.], batch size: 32, lr: 6.91e-04 +2022-05-04 06:29:22,666 INFO [train.py:715] (6/8) Epoch 2, batch 14650, loss[loss=0.1801, 
simple_loss=0.2458, pruned_loss=0.05726, over 4748.00 frames.], tot_loss[loss=0.172, simple_loss=0.2385, pruned_loss=0.05277, over 972131.41 frames.], batch size: 16, lr: 6.90e-04 +2022-05-04 06:30:01,962 INFO [train.py:715] (6/8) Epoch 2, batch 14700, loss[loss=0.1676, simple_loss=0.2386, pruned_loss=0.04828, over 4756.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2384, pruned_loss=0.05295, over 971573.66 frames.], batch size: 19, lr: 6.90e-04 +2022-05-04 06:30:41,284 INFO [train.py:715] (6/8) Epoch 2, batch 14750, loss[loss=0.1868, simple_loss=0.2592, pruned_loss=0.05718, over 4815.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2371, pruned_loss=0.05205, over 970994.24 frames.], batch size: 27, lr: 6.90e-04 +2022-05-04 06:31:21,775 INFO [train.py:715] (6/8) Epoch 2, batch 14800, loss[loss=0.168, simple_loss=0.234, pruned_loss=0.05097, over 4970.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2369, pruned_loss=0.05188, over 971247.66 frames.], batch size: 15, lr: 6.90e-04 +2022-05-04 06:32:01,275 INFO [train.py:715] (6/8) Epoch 2, batch 14850, loss[loss=0.149, simple_loss=0.224, pruned_loss=0.03704, over 4887.00 frames.], tot_loss[loss=0.171, simple_loss=0.2376, pruned_loss=0.05225, over 970894.22 frames.], batch size: 22, lr: 6.90e-04 +2022-05-04 06:32:40,952 INFO [train.py:715] (6/8) Epoch 2, batch 14900, loss[loss=0.171, simple_loss=0.2433, pruned_loss=0.04932, over 4877.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2388, pruned_loss=0.05291, over 971753.81 frames.], batch size: 16, lr: 6.89e-04 +2022-05-04 06:33:21,122 INFO [train.py:715] (6/8) Epoch 2, batch 14950, loss[loss=0.2659, simple_loss=0.3331, pruned_loss=0.09931, over 4942.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2398, pruned_loss=0.05333, over 972608.95 frames.], batch size: 21, lr: 6.89e-04 +2022-05-04 06:34:01,762 INFO [train.py:715] (6/8) Epoch 2, batch 15000, loss[loss=0.1801, simple_loss=0.2365, pruned_loss=0.06182, over 4943.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2389, pruned_loss=0.0527, over 973287.94 frames.], batch size: 21, lr: 6.89e-04 +2022-05-04 06:34:01,762 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 06:34:11,142 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1176, simple_loss=0.2043, pruned_loss=0.01548, over 914524.00 frames. 
+2022-05-04 06:34:52,069 INFO [train.py:715] (6/8) Epoch 2, batch 15050, loss[loss=0.2215, simple_loss=0.2789, pruned_loss=0.08201, over 4917.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2392, pruned_loss=0.05302, over 973536.26 frames.], batch size: 18, lr: 6.89e-04 +2022-05-04 06:35:31,188 INFO [train.py:715] (6/8) Epoch 2, batch 15100, loss[loss=0.1733, simple_loss=0.2371, pruned_loss=0.05479, over 4944.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2389, pruned_loss=0.0524, over 973667.21 frames.], batch size: 21, lr: 6.89e-04 +2022-05-04 06:36:11,675 INFO [train.py:715] (6/8) Epoch 2, batch 15150, loss[loss=0.1915, simple_loss=0.262, pruned_loss=0.06046, over 4968.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2385, pruned_loss=0.05223, over 973194.27 frames.], batch size: 15, lr: 6.88e-04 +2022-05-04 06:36:52,160 INFO [train.py:715] (6/8) Epoch 2, batch 15200, loss[loss=0.146, simple_loss=0.2227, pruned_loss=0.0347, over 4940.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2396, pruned_loss=0.0528, over 973502.99 frames.], batch size: 23, lr: 6.88e-04 +2022-05-04 06:37:31,909 INFO [train.py:715] (6/8) Epoch 2, batch 15250, loss[loss=0.1653, simple_loss=0.2308, pruned_loss=0.04994, over 4874.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2395, pruned_loss=0.05297, over 973142.78 frames.], batch size: 32, lr: 6.88e-04 +2022-05-04 06:38:11,352 INFO [train.py:715] (6/8) Epoch 2, batch 15300, loss[loss=0.1859, simple_loss=0.254, pruned_loss=0.05892, over 4756.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2393, pruned_loss=0.05302, over 972680.98 frames.], batch size: 19, lr: 6.88e-04 +2022-05-04 06:38:51,805 INFO [train.py:715] (6/8) Epoch 2, batch 15350, loss[loss=0.2109, simple_loss=0.2528, pruned_loss=0.08455, over 4840.00 frames.], tot_loss[loss=0.173, simple_loss=0.2397, pruned_loss=0.05319, over 971982.79 frames.], batch size: 13, lr: 6.88e-04 +2022-05-04 06:39:32,696 INFO [train.py:715] (6/8) Epoch 2, batch 15400, loss[loss=0.1826, simple_loss=0.247, pruned_loss=0.05904, over 4925.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2402, pruned_loss=0.05363, over 972567.40 frames.], batch size: 23, lr: 6.87e-04 +2022-05-04 06:40:11,873 INFO [train.py:715] (6/8) Epoch 2, batch 15450, loss[loss=0.1521, simple_loss=0.2297, pruned_loss=0.03723, over 4832.00 frames.], tot_loss[loss=0.1736, simple_loss=0.24, pruned_loss=0.05361, over 972965.47 frames.], batch size: 26, lr: 6.87e-04 +2022-05-04 06:40:52,379 INFO [train.py:715] (6/8) Epoch 2, batch 15500, loss[loss=0.1944, simple_loss=0.2548, pruned_loss=0.06698, over 4779.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2403, pruned_loss=0.0535, over 972788.33 frames.], batch size: 14, lr: 6.87e-04 +2022-05-04 06:41:32,627 INFO [train.py:715] (6/8) Epoch 2, batch 15550, loss[loss=0.1556, simple_loss=0.2174, pruned_loss=0.04683, over 4900.00 frames.], tot_loss[loss=0.173, simple_loss=0.2393, pruned_loss=0.05334, over 973197.57 frames.], batch size: 19, lr: 6.87e-04 +2022-05-04 06:42:12,567 INFO [train.py:715] (6/8) Epoch 2, batch 15600, loss[loss=0.2098, simple_loss=0.2741, pruned_loss=0.07272, over 4982.00 frames.], tot_loss[loss=0.172, simple_loss=0.2386, pruned_loss=0.05274, over 973014.90 frames.], batch size: 15, lr: 6.87e-04 +2022-05-04 06:42:52,377 INFO [train.py:715] (6/8) Epoch 2, batch 15650, loss[loss=0.1361, simple_loss=0.2085, pruned_loss=0.03184, over 4943.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2385, pruned_loss=0.05256, over 973188.82 frames.], batch size: 29, lr: 6.86e-04 +2022-05-04 06:43:33,102 INFO 
[train.py:715] (6/8) Epoch 2, batch 15700, loss[loss=0.1436, simple_loss=0.2221, pruned_loss=0.03256, over 4957.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2382, pruned_loss=0.0522, over 973190.87 frames.], batch size: 15, lr: 6.86e-04 +2022-05-04 06:44:13,633 INFO [train.py:715] (6/8) Epoch 2, batch 15750, loss[loss=0.2343, simple_loss=0.2865, pruned_loss=0.09101, over 4807.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2385, pruned_loss=0.05266, over 972838.42 frames.], batch size: 21, lr: 6.86e-04 +2022-05-04 06:44:52,977 INFO [train.py:715] (6/8) Epoch 2, batch 15800, loss[loss=0.1383, simple_loss=0.2186, pruned_loss=0.02897, over 4920.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2374, pruned_loss=0.05221, over 972603.04 frames.], batch size: 18, lr: 6.86e-04 +2022-05-04 06:45:33,635 INFO [train.py:715] (6/8) Epoch 2, batch 15850, loss[loss=0.1376, simple_loss=0.1997, pruned_loss=0.03781, over 4803.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2375, pruned_loss=0.0522, over 972326.66 frames.], batch size: 21, lr: 6.86e-04 +2022-05-04 06:46:14,117 INFO [train.py:715] (6/8) Epoch 2, batch 15900, loss[loss=0.1689, simple_loss=0.2323, pruned_loss=0.05275, over 4970.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2368, pruned_loss=0.05177, over 972251.00 frames.], batch size: 15, lr: 6.85e-04 +2022-05-04 06:46:53,883 INFO [train.py:715] (6/8) Epoch 2, batch 15950, loss[loss=0.1579, simple_loss=0.2181, pruned_loss=0.04885, over 4968.00 frames.], tot_loss[loss=0.1692, simple_loss=0.236, pruned_loss=0.05125, over 972517.72 frames.], batch size: 15, lr: 6.85e-04 +2022-05-04 06:47:34,109 INFO [train.py:715] (6/8) Epoch 2, batch 16000, loss[loss=0.1809, simple_loss=0.254, pruned_loss=0.05391, over 4814.00 frames.], tot_loss[loss=0.1705, simple_loss=0.237, pruned_loss=0.05201, over 973288.63 frames.], batch size: 27, lr: 6.85e-04 +2022-05-04 06:48:14,448 INFO [train.py:715] (6/8) Epoch 2, batch 16050, loss[loss=0.1608, simple_loss=0.2247, pruned_loss=0.04841, over 4907.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2369, pruned_loss=0.05234, over 973676.89 frames.], batch size: 18, lr: 6.85e-04 +2022-05-04 06:48:54,895 INFO [train.py:715] (6/8) Epoch 2, batch 16100, loss[loss=0.1545, simple_loss=0.2131, pruned_loss=0.04795, over 4797.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2367, pruned_loss=0.05225, over 973382.16 frames.], batch size: 12, lr: 6.85e-04 +2022-05-04 06:49:34,161 INFO [train.py:715] (6/8) Epoch 2, batch 16150, loss[loss=0.1686, simple_loss=0.2277, pruned_loss=0.05478, over 4853.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2364, pruned_loss=0.05239, over 972963.48 frames.], batch size: 32, lr: 6.84e-04 +2022-05-04 06:50:14,550 INFO [train.py:715] (6/8) Epoch 2, batch 16200, loss[loss=0.206, simple_loss=0.2738, pruned_loss=0.0691, over 4959.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2372, pruned_loss=0.05232, over 973066.57 frames.], batch size: 35, lr: 6.84e-04 +2022-05-04 06:50:54,956 INFO [train.py:715] (6/8) Epoch 2, batch 16250, loss[loss=0.168, simple_loss=0.2387, pruned_loss=0.04864, over 4980.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2382, pruned_loss=0.05263, over 972700.21 frames.], batch size: 15, lr: 6.84e-04 +2022-05-04 06:51:34,798 INFO [train.py:715] (6/8) Epoch 2, batch 16300, loss[loss=0.1759, simple_loss=0.2416, pruned_loss=0.05511, over 4743.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2381, pruned_loss=0.0526, over 972692.03 frames.], batch size: 16, lr: 6.84e-04 +2022-05-04 06:52:14,670 INFO [train.py:715] (6/8) Epoch 
2, batch 16350, loss[loss=0.1293, simple_loss=0.2062, pruned_loss=0.02618, over 4849.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2383, pruned_loss=0.05281, over 973150.39 frames.], batch size: 13, lr: 6.84e-04 +2022-05-04 06:52:55,174 INFO [train.py:715] (6/8) Epoch 2, batch 16400, loss[loss=0.1522, simple_loss=0.2318, pruned_loss=0.03633, over 4823.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2386, pruned_loss=0.05276, over 972654.60 frames.], batch size: 26, lr: 6.83e-04 +2022-05-04 06:53:35,564 INFO [train.py:715] (6/8) Epoch 2, batch 16450, loss[loss=0.178, simple_loss=0.2339, pruned_loss=0.06106, over 4785.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2388, pruned_loss=0.05265, over 972866.83 frames.], batch size: 21, lr: 6.83e-04 +2022-05-04 06:54:15,150 INFO [train.py:715] (6/8) Epoch 2, batch 16500, loss[loss=0.1866, simple_loss=0.2465, pruned_loss=0.06337, over 4807.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2373, pruned_loss=0.05178, over 972085.95 frames.], batch size: 24, lr: 6.83e-04 +2022-05-04 06:54:56,133 INFO [train.py:715] (6/8) Epoch 2, batch 16550, loss[loss=0.2197, simple_loss=0.2688, pruned_loss=0.08529, over 4986.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2393, pruned_loss=0.05323, over 972924.05 frames.], batch size: 15, lr: 6.83e-04 +2022-05-04 06:55:36,866 INFO [train.py:715] (6/8) Epoch 2, batch 16600, loss[loss=0.1954, simple_loss=0.2628, pruned_loss=0.06405, over 4967.00 frames.], tot_loss[loss=0.1734, simple_loss=0.2396, pruned_loss=0.05359, over 972391.75 frames.], batch size: 24, lr: 6.83e-04 +2022-05-04 06:56:16,719 INFO [train.py:715] (6/8) Epoch 2, batch 16650, loss[loss=0.1631, simple_loss=0.2341, pruned_loss=0.04602, over 4826.00 frames.], tot_loss[loss=0.172, simple_loss=0.2387, pruned_loss=0.05265, over 972085.35 frames.], batch size: 15, lr: 6.82e-04 +2022-05-04 06:56:57,164 INFO [train.py:715] (6/8) Epoch 2, batch 16700, loss[loss=0.1729, simple_loss=0.2342, pruned_loss=0.05579, over 4918.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2389, pruned_loss=0.0532, over 972793.90 frames.], batch size: 29, lr: 6.82e-04 +2022-05-04 06:57:37,924 INFO [train.py:715] (6/8) Epoch 2, batch 16750, loss[loss=0.1687, simple_loss=0.2437, pruned_loss=0.04688, over 4990.00 frames.], tot_loss[loss=0.1728, simple_loss=0.239, pruned_loss=0.05332, over 973606.74 frames.], batch size: 16, lr: 6.82e-04 +2022-05-04 06:58:18,624 INFO [train.py:715] (6/8) Epoch 2, batch 16800, loss[loss=0.1394, simple_loss=0.2226, pruned_loss=0.02809, over 4957.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2386, pruned_loss=0.05301, over 973116.13 frames.], batch size: 21, lr: 6.82e-04 +2022-05-04 06:58:58,048 INFO [train.py:715] (6/8) Epoch 2, batch 16850, loss[loss=0.1636, simple_loss=0.2313, pruned_loss=0.04792, over 4886.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2378, pruned_loss=0.05227, over 971701.39 frames.], batch size: 22, lr: 6.82e-04 +2022-05-04 06:59:39,314 INFO [train.py:715] (6/8) Epoch 2, batch 16900, loss[loss=0.1409, simple_loss=0.2108, pruned_loss=0.03553, over 4941.00 frames.], tot_loss[loss=0.1707, simple_loss=0.237, pruned_loss=0.05215, over 972515.90 frames.], batch size: 24, lr: 6.81e-04 +2022-05-04 07:00:20,141 INFO [train.py:715] (6/8) Epoch 2, batch 16950, loss[loss=0.1866, simple_loss=0.249, pruned_loss=0.06214, over 4927.00 frames.], tot_loss[loss=0.1714, simple_loss=0.2378, pruned_loss=0.05249, over 972897.00 frames.], batch size: 39, lr: 6.81e-04 +2022-05-04 07:00:59,947 INFO [train.py:715] (6/8) Epoch 2, batch 17000, 
loss[loss=0.194, simple_loss=0.2401, pruned_loss=0.07393, over 4887.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2386, pruned_loss=0.053, over 972410.44 frames.], batch size: 19, lr: 6.81e-04 +2022-05-04 07:01:40,377 INFO [train.py:715] (6/8) Epoch 2, batch 17050, loss[loss=0.1521, simple_loss=0.2257, pruned_loss=0.03927, over 4690.00 frames.], tot_loss[loss=0.1715, simple_loss=0.238, pruned_loss=0.0525, over 972498.63 frames.], batch size: 15, lr: 6.81e-04 +2022-05-04 07:02:20,966 INFO [train.py:715] (6/8) Epoch 2, batch 17100, loss[loss=0.1818, simple_loss=0.2486, pruned_loss=0.05752, over 4970.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2385, pruned_loss=0.0524, over 973554.41 frames.], batch size: 15, lr: 6.81e-04 +2022-05-04 07:03:01,199 INFO [train.py:715] (6/8) Epoch 2, batch 17150, loss[loss=0.1717, simple_loss=0.2433, pruned_loss=0.05006, over 4871.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2386, pruned_loss=0.05198, over 973641.01 frames.], batch size: 20, lr: 6.81e-04 +2022-05-04 07:03:40,480 INFO [train.py:715] (6/8) Epoch 2, batch 17200, loss[loss=0.1861, simple_loss=0.2574, pruned_loss=0.05737, over 4896.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2389, pruned_loss=0.05241, over 973249.47 frames.], batch size: 22, lr: 6.80e-04 +2022-05-04 07:04:20,885 INFO [train.py:715] (6/8) Epoch 2, batch 17250, loss[loss=0.1993, simple_loss=0.2541, pruned_loss=0.0722, over 4874.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2388, pruned_loss=0.05216, over 972936.08 frames.], batch size: 30, lr: 6.80e-04 +2022-05-04 07:05:01,346 INFO [train.py:715] (6/8) Epoch 2, batch 17300, loss[loss=0.1941, simple_loss=0.2538, pruned_loss=0.06724, over 4805.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2387, pruned_loss=0.05229, over 972743.36 frames.], batch size: 21, lr: 6.80e-04 +2022-05-04 07:05:40,927 INFO [train.py:715] (6/8) Epoch 2, batch 17350, loss[loss=0.1409, simple_loss=0.2117, pruned_loss=0.03505, over 4823.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2382, pruned_loss=0.05218, over 972443.82 frames.], batch size: 12, lr: 6.80e-04 +2022-05-04 07:06:20,387 INFO [train.py:715] (6/8) Epoch 2, batch 17400, loss[loss=0.1444, simple_loss=0.2171, pruned_loss=0.03589, over 4891.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2378, pruned_loss=0.05183, over 972679.31 frames.], batch size: 19, lr: 6.80e-04 +2022-05-04 07:07:00,343 INFO [train.py:715] (6/8) Epoch 2, batch 17450, loss[loss=0.1679, simple_loss=0.2319, pruned_loss=0.05194, over 4933.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2381, pruned_loss=0.05271, over 972492.71 frames.], batch size: 21, lr: 6.79e-04 +2022-05-04 07:07:40,090 INFO [train.py:715] (6/8) Epoch 2, batch 17500, loss[loss=0.1479, simple_loss=0.2129, pruned_loss=0.04143, over 4697.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2371, pruned_loss=0.05193, over 973175.38 frames.], batch size: 15, lr: 6.79e-04 +2022-05-04 07:08:18,854 INFO [train.py:715] (6/8) Epoch 2, batch 17550, loss[loss=0.1923, simple_loss=0.248, pruned_loss=0.06833, over 4923.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2378, pruned_loss=0.05261, over 973016.75 frames.], batch size: 18, lr: 6.79e-04 +2022-05-04 07:08:58,973 INFO [train.py:715] (6/8) Epoch 2, batch 17600, loss[loss=0.1978, simple_loss=0.2613, pruned_loss=0.06719, over 4784.00 frames.], tot_loss[loss=0.1714, simple_loss=0.2377, pruned_loss=0.05259, over 973661.71 frames.], batch size: 14, lr: 6.79e-04 +2022-05-04 07:09:38,388 INFO [train.py:715] (6/8) Epoch 2, batch 17650, loss[loss=0.2184, 
simple_loss=0.2789, pruned_loss=0.07896, over 4930.00 frames.], tot_loss[loss=0.171, simple_loss=0.2378, pruned_loss=0.0521, over 973603.83 frames.], batch size: 21, lr: 6.79e-04 +2022-05-04 07:10:17,890 INFO [train.py:715] (6/8) Epoch 2, batch 17700, loss[loss=0.1661, simple_loss=0.2434, pruned_loss=0.04436, over 4889.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2372, pruned_loss=0.05189, over 972608.52 frames.], batch size: 17, lr: 6.78e-04 +2022-05-04 07:10:57,827 INFO [train.py:715] (6/8) Epoch 2, batch 17750, loss[loss=0.1641, simple_loss=0.2247, pruned_loss=0.05177, over 4795.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2368, pruned_loss=0.05185, over 972602.86 frames.], batch size: 24, lr: 6.78e-04 +2022-05-04 07:11:37,692 INFO [train.py:715] (6/8) Epoch 2, batch 17800, loss[loss=0.1838, simple_loss=0.2394, pruned_loss=0.06408, over 4957.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2363, pruned_loss=0.05138, over 973422.90 frames.], batch size: 15, lr: 6.78e-04 +2022-05-04 07:12:17,978 INFO [train.py:715] (6/8) Epoch 2, batch 17850, loss[loss=0.1985, simple_loss=0.2522, pruned_loss=0.07242, over 4853.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2363, pruned_loss=0.0515, over 972542.86 frames.], batch size: 34, lr: 6.78e-04 +2022-05-04 07:12:56,817 INFO [train.py:715] (6/8) Epoch 2, batch 17900, loss[loss=0.2124, simple_loss=0.2798, pruned_loss=0.07253, over 4873.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2369, pruned_loss=0.05197, over 972746.76 frames.], batch size: 38, lr: 6.78e-04 +2022-05-04 07:13:36,740 INFO [train.py:715] (6/8) Epoch 2, batch 17950, loss[loss=0.1725, simple_loss=0.2402, pruned_loss=0.05237, over 4840.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2378, pruned_loss=0.05271, over 972713.01 frames.], batch size: 13, lr: 6.77e-04 +2022-05-04 07:14:16,908 INFO [train.py:715] (6/8) Epoch 2, batch 18000, loss[loss=0.1633, simple_loss=0.2225, pruned_loss=0.05203, over 4916.00 frames.], tot_loss[loss=0.1715, simple_loss=0.238, pruned_loss=0.05253, over 972928.20 frames.], batch size: 18, lr: 6.77e-04 +2022-05-04 07:14:16,909 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 07:14:26,628 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1173, simple_loss=0.2039, pruned_loss=0.01538, over 914524.00 frames. 
+2022-05-04 07:15:07,353 INFO [train.py:715] (6/8) Epoch 2, batch 18050, loss[loss=0.2386, simple_loss=0.2863, pruned_loss=0.09541, over 4760.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2383, pruned_loss=0.0527, over 972637.16 frames.], batch size: 19, lr: 6.77e-04 +2022-05-04 07:15:46,526 INFO [train.py:715] (6/8) Epoch 2, batch 18100, loss[loss=0.1716, simple_loss=0.2329, pruned_loss=0.05515, over 4978.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2384, pruned_loss=0.05251, over 972891.27 frames.], batch size: 31, lr: 6.77e-04 +2022-05-04 07:16:27,419 INFO [train.py:715] (6/8) Epoch 2, batch 18150, loss[loss=0.1637, simple_loss=0.2413, pruned_loss=0.04307, over 4932.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2379, pruned_loss=0.05219, over 972910.57 frames.], batch size: 29, lr: 6.77e-04 +2022-05-04 07:17:08,366 INFO [train.py:715] (6/8) Epoch 2, batch 18200, loss[loss=0.1604, simple_loss=0.2197, pruned_loss=0.05058, over 4988.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2384, pruned_loss=0.05266, over 973814.51 frames.], batch size: 25, lr: 6.76e-04 +2022-05-04 07:17:49,832 INFO [train.py:715] (6/8) Epoch 2, batch 18250, loss[loss=0.169, simple_loss=0.228, pruned_loss=0.05496, over 4966.00 frames.], tot_loss[loss=0.172, simple_loss=0.2386, pruned_loss=0.05268, over 973577.40 frames.], batch size: 15, lr: 6.76e-04 +2022-05-04 07:18:30,276 INFO [train.py:715] (6/8) Epoch 2, batch 18300, loss[loss=0.1786, simple_loss=0.2419, pruned_loss=0.05763, over 4941.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2381, pruned_loss=0.05214, over 972945.91 frames.], batch size: 39, lr: 6.76e-04 +2022-05-04 07:19:12,146 INFO [train.py:715] (6/8) Epoch 2, batch 18350, loss[loss=0.1703, simple_loss=0.2373, pruned_loss=0.05166, over 4773.00 frames.], tot_loss[loss=0.1701, simple_loss=0.2373, pruned_loss=0.05144, over 972460.07 frames.], batch size: 14, lr: 6.76e-04 +2022-05-04 07:19:56,505 INFO [train.py:715] (6/8) Epoch 2, batch 18400, loss[loss=0.1842, simple_loss=0.2522, pruned_loss=0.05805, over 4779.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2381, pruned_loss=0.05219, over 972225.23 frames.], batch size: 17, lr: 6.76e-04 +2022-05-04 07:20:36,600 INFO [train.py:715] (6/8) Epoch 2, batch 18450, loss[loss=0.1804, simple_loss=0.244, pruned_loss=0.0584, over 4952.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2379, pruned_loss=0.05173, over 971905.68 frames.], batch size: 23, lr: 6.75e-04 +2022-05-04 07:21:18,115 INFO [train.py:715] (6/8) Epoch 2, batch 18500, loss[loss=0.1787, simple_loss=0.2427, pruned_loss=0.05732, over 4979.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2375, pruned_loss=0.05177, over 972193.32 frames.], batch size: 28, lr: 6.75e-04 +2022-05-04 07:21:59,817 INFO [train.py:715] (6/8) Epoch 2, batch 18550, loss[loss=0.1634, simple_loss=0.2264, pruned_loss=0.05016, over 4948.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2374, pruned_loss=0.05178, over 972106.80 frames.], batch size: 35, lr: 6.75e-04 +2022-05-04 07:22:41,518 INFO [train.py:715] (6/8) Epoch 2, batch 18600, loss[loss=0.1716, simple_loss=0.2288, pruned_loss=0.05722, over 4742.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2372, pruned_loss=0.05167, over 972197.34 frames.], batch size: 16, lr: 6.75e-04 +2022-05-04 07:23:21,837 INFO [train.py:715] (6/8) Epoch 2, batch 18650, loss[loss=0.1674, simple_loss=0.2397, pruned_loss=0.04758, over 4912.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2373, pruned_loss=0.05209, over 971828.74 frames.], batch size: 17, lr: 6.75e-04 +2022-05-04 07:24:03,489 
INFO [train.py:715] (6/8) Epoch 2, batch 18700, loss[loss=0.1885, simple_loss=0.2571, pruned_loss=0.05995, over 4779.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2372, pruned_loss=0.05185, over 971837.29 frames.], batch size: 18, lr: 6.75e-04 +2022-05-04 07:24:45,188 INFO [train.py:715] (6/8) Epoch 2, batch 18750, loss[loss=0.1713, simple_loss=0.2356, pruned_loss=0.05347, over 4835.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2372, pruned_loss=0.05191, over 971754.92 frames.], batch size: 27, lr: 6.74e-04 +2022-05-04 07:25:25,723 INFO [train.py:715] (6/8) Epoch 2, batch 18800, loss[loss=0.1514, simple_loss=0.2133, pruned_loss=0.04479, over 4759.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2363, pruned_loss=0.05162, over 971911.89 frames.], batch size: 16, lr: 6.74e-04 +2022-05-04 07:26:06,675 INFO [train.py:715] (6/8) Epoch 2, batch 18850, loss[loss=0.1727, simple_loss=0.2395, pruned_loss=0.05294, over 4744.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2359, pruned_loss=0.05121, over 971365.36 frames.], batch size: 19, lr: 6.74e-04 +2022-05-04 07:26:48,084 INFO [train.py:715] (6/8) Epoch 2, batch 18900, loss[loss=0.1584, simple_loss=0.2326, pruned_loss=0.0421, over 4986.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2372, pruned_loss=0.0517, over 972342.59 frames.], batch size: 25, lr: 6.74e-04 +2022-05-04 07:27:29,076 INFO [train.py:715] (6/8) Epoch 2, batch 18950, loss[loss=0.1524, simple_loss=0.2223, pruned_loss=0.04129, over 4894.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2362, pruned_loss=0.05139, over 973428.16 frames.], batch size: 22, lr: 6.74e-04 +2022-05-04 07:28:09,472 INFO [train.py:715] (6/8) Epoch 2, batch 19000, loss[loss=0.1534, simple_loss=0.2296, pruned_loss=0.0386, over 4928.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2364, pruned_loss=0.0514, over 972610.11 frames.], batch size: 29, lr: 6.73e-04 +2022-05-04 07:28:51,006 INFO [train.py:715] (6/8) Epoch 2, batch 19050, loss[loss=0.1591, simple_loss=0.2347, pruned_loss=0.04174, over 4780.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2355, pruned_loss=0.05081, over 972465.71 frames.], batch size: 18, lr: 6.73e-04 +2022-05-04 07:29:32,584 INFO [train.py:715] (6/8) Epoch 2, batch 19100, loss[loss=0.1582, simple_loss=0.2237, pruned_loss=0.04634, over 4824.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2356, pruned_loss=0.05126, over 971641.89 frames.], batch size: 27, lr: 6.73e-04 +2022-05-04 07:30:13,197 INFO [train.py:715] (6/8) Epoch 2, batch 19150, loss[loss=0.1908, simple_loss=0.2667, pruned_loss=0.05752, over 4989.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2356, pruned_loss=0.05108, over 972259.40 frames.], batch size: 24, lr: 6.73e-04 +2022-05-04 07:30:53,906 INFO [train.py:715] (6/8) Epoch 2, batch 19200, loss[loss=0.1428, simple_loss=0.2113, pruned_loss=0.03711, over 4838.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2362, pruned_loss=0.05109, over 973202.16 frames.], batch size: 30, lr: 6.73e-04 +2022-05-04 07:31:35,011 INFO [train.py:715] (6/8) Epoch 2, batch 19250, loss[loss=0.1468, simple_loss=0.2191, pruned_loss=0.03726, over 4888.00 frames.], tot_loss[loss=0.169, simple_loss=0.2359, pruned_loss=0.05108, over 974321.16 frames.], batch size: 16, lr: 6.72e-04 +2022-05-04 07:32:15,462 INFO [train.py:715] (6/8) Epoch 2, batch 19300, loss[loss=0.1761, simple_loss=0.2538, pruned_loss=0.04921, over 4967.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2364, pruned_loss=0.05144, over 973383.91 frames.], batch size: 24, lr: 6.72e-04 +2022-05-04 07:32:55,615 INFO [train.py:715] 
(6/8) Epoch 2, batch 19350, loss[loss=0.1803, simple_loss=0.2558, pruned_loss=0.05236, over 4918.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2368, pruned_loss=0.05148, over 973069.71 frames.], batch size: 18, lr: 6.72e-04 +2022-05-04 07:33:36,562 INFO [train.py:715] (6/8) Epoch 2, batch 19400, loss[loss=0.1883, simple_loss=0.25, pruned_loss=0.06329, over 4767.00 frames.], tot_loss[loss=0.1702, simple_loss=0.237, pruned_loss=0.05171, over 972503.45 frames.], batch size: 17, lr: 6.72e-04 +2022-05-04 07:34:18,487 INFO [train.py:715] (6/8) Epoch 2, batch 19450, loss[loss=0.21, simple_loss=0.2869, pruned_loss=0.06659, over 4905.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2386, pruned_loss=0.05247, over 973268.00 frames.], batch size: 19, lr: 6.72e-04 +2022-05-04 07:34:58,703 INFO [train.py:715] (6/8) Epoch 2, batch 19500, loss[loss=0.1783, simple_loss=0.242, pruned_loss=0.05732, over 4641.00 frames.], tot_loss[loss=0.1721, simple_loss=0.239, pruned_loss=0.05262, over 972180.37 frames.], batch size: 13, lr: 6.72e-04 +2022-05-04 07:35:38,986 INFO [train.py:715] (6/8) Epoch 2, batch 19550, loss[loss=0.2048, simple_loss=0.2557, pruned_loss=0.07701, over 4943.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2392, pruned_loss=0.05317, over 972411.16 frames.], batch size: 21, lr: 6.71e-04 +2022-05-04 07:36:20,457 INFO [train.py:715] (6/8) Epoch 2, batch 19600, loss[loss=0.1763, simple_loss=0.2418, pruned_loss=0.05546, over 4750.00 frames.], tot_loss[loss=0.173, simple_loss=0.2396, pruned_loss=0.05317, over 972411.77 frames.], batch size: 19, lr: 6.71e-04 +2022-05-04 07:37:01,116 INFO [train.py:715] (6/8) Epoch 2, batch 19650, loss[loss=0.2072, simple_loss=0.2635, pruned_loss=0.07541, over 4944.00 frames.], tot_loss[loss=0.1724, simple_loss=0.239, pruned_loss=0.05293, over 972646.06 frames.], batch size: 39, lr: 6.71e-04 +2022-05-04 07:37:40,954 INFO [train.py:715] (6/8) Epoch 2, batch 19700, loss[loss=0.1425, simple_loss=0.2108, pruned_loss=0.03707, over 4863.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2386, pruned_loss=0.05312, over 971945.93 frames.], batch size: 30, lr: 6.71e-04 +2022-05-04 07:38:21,844 INFO [train.py:715] (6/8) Epoch 2, batch 19750, loss[loss=0.1744, simple_loss=0.2477, pruned_loss=0.05053, over 4819.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2388, pruned_loss=0.05286, over 972459.70 frames.], batch size: 26, lr: 6.71e-04 +2022-05-04 07:39:02,980 INFO [train.py:715] (6/8) Epoch 2, batch 19800, loss[loss=0.182, simple_loss=0.255, pruned_loss=0.05453, over 4929.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2392, pruned_loss=0.05277, over 972685.19 frames.], batch size: 29, lr: 6.70e-04 +2022-05-04 07:39:42,766 INFO [train.py:715] (6/8) Epoch 2, batch 19850, loss[loss=0.129, simple_loss=0.2041, pruned_loss=0.02693, over 4798.00 frames.], tot_loss[loss=0.172, simple_loss=0.2387, pruned_loss=0.05266, over 972524.33 frames.], batch size: 14, lr: 6.70e-04 +2022-05-04 07:40:23,486 INFO [train.py:715] (6/8) Epoch 2, batch 19900, loss[loss=0.1692, simple_loss=0.2366, pruned_loss=0.05088, over 4957.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2375, pruned_loss=0.05241, over 972395.01 frames.], batch size: 35, lr: 6.70e-04 +2022-05-04 07:41:04,472 INFO [train.py:715] (6/8) Epoch 2, batch 19950, loss[loss=0.159, simple_loss=0.2266, pruned_loss=0.04575, over 4918.00 frames.], tot_loss[loss=0.171, simple_loss=0.2374, pruned_loss=0.05232, over 972455.59 frames.], batch size: 29, lr: 6.70e-04 +2022-05-04 07:41:44,808 INFO [train.py:715] (6/8) Epoch 2, batch 20000, 
loss[loss=0.1752, simple_loss=0.2292, pruned_loss=0.06062, over 4782.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2361, pruned_loss=0.05213, over 972843.70 frames.], batch size: 17, lr: 6.70e-04 +2022-05-04 07:42:25,568 INFO [train.py:715] (6/8) Epoch 2, batch 20050, loss[loss=0.1682, simple_loss=0.2301, pruned_loss=0.0532, over 4893.00 frames.], tot_loss[loss=0.171, simple_loss=0.2367, pruned_loss=0.05264, over 972607.15 frames.], batch size: 16, lr: 6.69e-04 +2022-05-04 07:43:06,881 INFO [train.py:715] (6/8) Epoch 2, batch 20100, loss[loss=0.1698, simple_loss=0.2499, pruned_loss=0.04486, over 4954.00 frames.], tot_loss[loss=0.172, simple_loss=0.2377, pruned_loss=0.05314, over 973238.72 frames.], batch size: 21, lr: 6.69e-04 +2022-05-04 07:43:48,585 INFO [train.py:715] (6/8) Epoch 2, batch 20150, loss[loss=0.1751, simple_loss=0.2548, pruned_loss=0.04771, over 4886.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2384, pruned_loss=0.05353, over 973060.11 frames.], batch size: 19, lr: 6.69e-04 +2022-05-04 07:44:28,881 INFO [train.py:715] (6/8) Epoch 2, batch 20200, loss[loss=0.1913, simple_loss=0.2567, pruned_loss=0.06301, over 4857.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2388, pruned_loss=0.0533, over 972744.24 frames.], batch size: 20, lr: 6.69e-04 +2022-05-04 07:45:10,321 INFO [train.py:715] (6/8) Epoch 2, batch 20250, loss[loss=0.1508, simple_loss=0.2256, pruned_loss=0.03798, over 4808.00 frames.], tot_loss[loss=0.1729, simple_loss=0.239, pruned_loss=0.05339, over 972017.80 frames.], batch size: 21, lr: 6.69e-04 +2022-05-04 07:45:52,277 INFO [train.py:715] (6/8) Epoch 2, batch 20300, loss[loss=0.1815, simple_loss=0.238, pruned_loss=0.06253, over 4838.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2389, pruned_loss=0.05288, over 971856.89 frames.], batch size: 32, lr: 6.69e-04 +2022-05-04 07:46:33,096 INFO [train.py:715] (6/8) Epoch 2, batch 20350, loss[loss=0.1662, simple_loss=0.2357, pruned_loss=0.04838, over 4964.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2382, pruned_loss=0.05241, over 973217.29 frames.], batch size: 24, lr: 6.68e-04 +2022-05-04 07:47:14,067 INFO [train.py:715] (6/8) Epoch 2, batch 20400, loss[loss=0.1674, simple_loss=0.2225, pruned_loss=0.05616, over 4927.00 frames.], tot_loss[loss=0.1712, simple_loss=0.238, pruned_loss=0.05218, over 973227.08 frames.], batch size: 35, lr: 6.68e-04 +2022-05-04 07:47:56,154 INFO [train.py:715] (6/8) Epoch 2, batch 20450, loss[loss=0.1816, simple_loss=0.244, pruned_loss=0.05956, over 4844.00 frames.], tot_loss[loss=0.172, simple_loss=0.2388, pruned_loss=0.05265, over 972727.91 frames.], batch size: 15, lr: 6.68e-04 +2022-05-04 07:48:37,714 INFO [train.py:715] (6/8) Epoch 2, batch 20500, loss[loss=0.1468, simple_loss=0.2218, pruned_loss=0.03587, over 4781.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2377, pruned_loss=0.05194, over 972635.23 frames.], batch size: 18, lr: 6.68e-04 +2022-05-04 07:49:18,508 INFO [train.py:715] (6/8) Epoch 2, batch 20550, loss[loss=0.1802, simple_loss=0.2398, pruned_loss=0.06032, over 4717.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2381, pruned_loss=0.05244, over 971981.43 frames.], batch size: 15, lr: 6.68e-04 +2022-05-04 07:49:59,712 INFO [train.py:715] (6/8) Epoch 2, batch 20600, loss[loss=0.1706, simple_loss=0.2436, pruned_loss=0.04876, over 4987.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2384, pruned_loss=0.05263, over 972691.63 frames.], batch size: 25, lr: 6.67e-04 +2022-05-04 07:50:41,273 INFO [train.py:715] (6/8) Epoch 2, batch 20650, loss[loss=0.2091, 
simple_loss=0.2711, pruned_loss=0.07358, over 4841.00 frames.], tot_loss[loss=0.172, simple_loss=0.2385, pruned_loss=0.0528, over 972267.59 frames.], batch size: 30, lr: 6.67e-04 +2022-05-04 07:51:22,532 INFO [train.py:715] (6/8) Epoch 2, batch 20700, loss[loss=0.1584, simple_loss=0.22, pruned_loss=0.04838, over 4819.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2381, pruned_loss=0.05251, over 972095.08 frames.], batch size: 15, lr: 6.67e-04 +2022-05-04 07:52:03,045 INFO [train.py:715] (6/8) Epoch 2, batch 20750, loss[loss=0.2025, simple_loss=0.2691, pruned_loss=0.06792, over 4915.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2385, pruned_loss=0.05297, over 972919.33 frames.], batch size: 17, lr: 6.67e-04 +2022-05-04 07:52:44,289 INFO [train.py:715] (6/8) Epoch 2, batch 20800, loss[loss=0.2128, simple_loss=0.292, pruned_loss=0.0668, over 4816.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2385, pruned_loss=0.0531, over 972846.42 frames.], batch size: 25, lr: 6.67e-04 +2022-05-04 07:53:25,485 INFO [train.py:715] (6/8) Epoch 2, batch 20850, loss[loss=0.1577, simple_loss=0.2289, pruned_loss=0.0432, over 4799.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2382, pruned_loss=0.05274, over 972851.06 frames.], batch size: 24, lr: 6.66e-04 +2022-05-04 07:54:06,140 INFO [train.py:715] (6/8) Epoch 2, batch 20900, loss[loss=0.1528, simple_loss=0.2264, pruned_loss=0.03956, over 4934.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2386, pruned_loss=0.05333, over 972878.22 frames.], batch size: 23, lr: 6.66e-04 +2022-05-04 07:54:47,197 INFO [train.py:715] (6/8) Epoch 2, batch 20950, loss[loss=0.1859, simple_loss=0.2463, pruned_loss=0.06275, over 4776.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2389, pruned_loss=0.05372, over 973437.76 frames.], batch size: 18, lr: 6.66e-04 +2022-05-04 07:55:28,392 INFO [train.py:715] (6/8) Epoch 2, batch 21000, loss[loss=0.2126, simple_loss=0.2791, pruned_loss=0.07305, over 4911.00 frames.], tot_loss[loss=0.1727, simple_loss=0.239, pruned_loss=0.05319, over 973388.69 frames.], batch size: 39, lr: 6.66e-04 +2022-05-04 07:55:28,393 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 07:55:39,045 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1174, simple_loss=0.2036, pruned_loss=0.01562, over 914524.00 frames. 
+2022-05-04 07:56:20,523 INFO [train.py:715] (6/8) Epoch 2, batch 21050, loss[loss=0.2227, simple_loss=0.2805, pruned_loss=0.08248, over 4884.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2393, pruned_loss=0.0532, over 973292.25 frames.], batch size: 38, lr: 6.66e-04 +2022-05-04 07:57:00,997 INFO [train.py:715] (6/8) Epoch 2, batch 21100, loss[loss=0.1725, simple_loss=0.2382, pruned_loss=0.05335, over 4916.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2397, pruned_loss=0.05334, over 973597.00 frames.], batch size: 18, lr: 6.66e-04 +2022-05-04 07:57:41,499 INFO [train.py:715] (6/8) Epoch 2, batch 21150, loss[loss=0.1187, simple_loss=0.1915, pruned_loss=0.02299, over 4683.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2382, pruned_loss=0.05209, over 974033.82 frames.], batch size: 15, lr: 6.65e-04 +2022-05-04 07:58:22,039 INFO [train.py:715] (6/8) Epoch 2, batch 21200, loss[loss=0.1794, simple_loss=0.2566, pruned_loss=0.05112, over 4966.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2393, pruned_loss=0.05247, over 973826.79 frames.], batch size: 24, lr: 6.65e-04 +2022-05-04 07:59:02,139 INFO [train.py:715] (6/8) Epoch 2, batch 21250, loss[loss=0.1578, simple_loss=0.2236, pruned_loss=0.04598, over 4931.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2395, pruned_loss=0.05312, over 973856.52 frames.], batch size: 29, lr: 6.65e-04 +2022-05-04 07:59:42,851 INFO [train.py:715] (6/8) Epoch 2, batch 21300, loss[loss=0.1611, simple_loss=0.235, pruned_loss=0.04357, over 4770.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2391, pruned_loss=0.0526, over 973359.25 frames.], batch size: 14, lr: 6.65e-04 +2022-05-04 08:00:23,561 INFO [train.py:715] (6/8) Epoch 2, batch 21350, loss[loss=0.1328, simple_loss=0.2047, pruned_loss=0.03049, over 4845.00 frames.], tot_loss[loss=0.172, simple_loss=0.2391, pruned_loss=0.05251, over 972750.65 frames.], batch size: 13, lr: 6.65e-04 +2022-05-04 08:01:04,868 INFO [train.py:715] (6/8) Epoch 2, batch 21400, loss[loss=0.1895, simple_loss=0.259, pruned_loss=0.05998, over 4808.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2379, pruned_loss=0.05217, over 972545.33 frames.], batch size: 25, lr: 6.64e-04 +2022-05-04 08:01:45,138 INFO [train.py:715] (6/8) Epoch 2, batch 21450, loss[loss=0.1438, simple_loss=0.2086, pruned_loss=0.03949, over 4830.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2373, pruned_loss=0.0519, over 973063.05 frames.], batch size: 12, lr: 6.64e-04 +2022-05-04 08:02:26,069 INFO [train.py:715] (6/8) Epoch 2, batch 21500, loss[loss=0.1527, simple_loss=0.2243, pruned_loss=0.04058, over 4957.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2376, pruned_loss=0.05158, over 972136.50 frames.], batch size: 24, lr: 6.64e-04 +2022-05-04 08:03:07,359 INFO [train.py:715] (6/8) Epoch 2, batch 21550, loss[loss=0.2054, simple_loss=0.277, pruned_loss=0.06689, over 4929.00 frames.], tot_loss[loss=0.1713, simple_loss=0.238, pruned_loss=0.05228, over 972249.51 frames.], batch size: 23, lr: 6.64e-04 +2022-05-04 08:03:47,361 INFO [train.py:715] (6/8) Epoch 2, batch 21600, loss[loss=0.2009, simple_loss=0.2659, pruned_loss=0.06793, over 4977.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2375, pruned_loss=0.05185, over 971705.44 frames.], batch size: 15, lr: 6.64e-04 +2022-05-04 08:04:28,569 INFO [train.py:715] (6/8) Epoch 2, batch 21650, loss[loss=0.1607, simple_loss=0.2193, pruned_loss=0.05108, over 4759.00 frames.], tot_loss[loss=0.171, simple_loss=0.2381, pruned_loss=0.05198, over 970899.74 frames.], batch size: 16, lr: 6.64e-04 +2022-05-04 08:05:10,117 
INFO [train.py:715] (6/8) Epoch 2, batch 21700, loss[loss=0.1166, simple_loss=0.1859, pruned_loss=0.02364, over 4731.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2378, pruned_loss=0.05168, over 971784.42 frames.], batch size: 12, lr: 6.63e-04 +2022-05-04 08:05:50,687 INFO [train.py:715] (6/8) Epoch 2, batch 21750, loss[loss=0.1689, simple_loss=0.2308, pruned_loss=0.05348, over 4904.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2376, pruned_loss=0.05153, over 971316.87 frames.], batch size: 19, lr: 6.63e-04 +2022-05-04 08:06:31,763 INFO [train.py:715] (6/8) Epoch 2, batch 21800, loss[loss=0.1597, simple_loss=0.2262, pruned_loss=0.04654, over 4704.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2362, pruned_loss=0.05097, over 971027.25 frames.], batch size: 15, lr: 6.63e-04 +2022-05-04 08:07:12,182 INFO [train.py:715] (6/8) Epoch 2, batch 21850, loss[loss=0.1949, simple_loss=0.2547, pruned_loss=0.06751, over 4850.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2359, pruned_loss=0.05088, over 971892.97 frames.], batch size: 20, lr: 6.63e-04 +2022-05-04 08:07:53,272 INFO [train.py:715] (6/8) Epoch 2, batch 21900, loss[loss=0.1652, simple_loss=0.2346, pruned_loss=0.0479, over 4939.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2357, pruned_loss=0.05101, over 972259.12 frames.], batch size: 23, lr: 6.63e-04 +2022-05-04 08:08:33,968 INFO [train.py:715] (6/8) Epoch 2, batch 21950, loss[loss=0.1881, simple_loss=0.2547, pruned_loss=0.06074, over 4845.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2355, pruned_loss=0.05096, over 972422.84 frames.], batch size: 20, lr: 6.62e-04 +2022-05-04 08:09:15,719 INFO [train.py:715] (6/8) Epoch 2, batch 22000, loss[loss=0.1816, simple_loss=0.2535, pruned_loss=0.0548, over 4873.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2355, pruned_loss=0.05112, over 973230.23 frames.], batch size: 30, lr: 6.62e-04 +2022-05-04 08:09:57,817 INFO [train.py:715] (6/8) Epoch 2, batch 22050, loss[loss=0.1555, simple_loss=0.2272, pruned_loss=0.04186, over 4921.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2355, pruned_loss=0.05102, over 973381.55 frames.], batch size: 29, lr: 6.62e-04 +2022-05-04 08:10:38,629 INFO [train.py:715] (6/8) Epoch 2, batch 22100, loss[loss=0.1637, simple_loss=0.2271, pruned_loss=0.05015, over 4910.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2363, pruned_loss=0.05135, over 973526.58 frames.], batch size: 29, lr: 6.62e-04 +2022-05-04 08:11:20,102 INFO [train.py:715] (6/8) Epoch 2, batch 22150, loss[loss=0.1589, simple_loss=0.238, pruned_loss=0.03997, over 4704.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2365, pruned_loss=0.05128, over 973959.40 frames.], batch size: 15, lr: 6.62e-04 +2022-05-04 08:12:01,871 INFO [train.py:715] (6/8) Epoch 2, batch 22200, loss[loss=0.1636, simple_loss=0.2257, pruned_loss=0.05074, over 4883.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2364, pruned_loss=0.05106, over 973222.42 frames.], batch size: 19, lr: 6.62e-04 +2022-05-04 08:12:43,329 INFO [train.py:715] (6/8) Epoch 2, batch 22250, loss[loss=0.19, simple_loss=0.2556, pruned_loss=0.06226, over 4758.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2366, pruned_loss=0.05128, over 973057.55 frames.], batch size: 19, lr: 6.61e-04 +2022-05-04 08:13:24,130 INFO [train.py:715] (6/8) Epoch 2, batch 22300, loss[loss=0.2, simple_loss=0.2733, pruned_loss=0.06338, over 4976.00 frames.], tot_loss[loss=0.1711, simple_loss=0.238, pruned_loss=0.05211, over 972590.22 frames.], batch size: 14, lr: 6.61e-04 +2022-05-04 08:14:05,209 INFO [train.py:715] (6/8) 
Epoch 2, batch 22350, loss[loss=0.1311, simple_loss=0.2125, pruned_loss=0.02486, over 4830.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2373, pruned_loss=0.05168, over 972927.35 frames.], batch size: 25, lr: 6.61e-04 +2022-05-04 08:14:46,097 INFO [train.py:715] (6/8) Epoch 2, batch 22400, loss[loss=0.1675, simple_loss=0.233, pruned_loss=0.05102, over 4898.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2376, pruned_loss=0.05187, over 972741.21 frames.], batch size: 16, lr: 6.61e-04 +2022-05-04 08:15:26,452 INFO [train.py:715] (6/8) Epoch 2, batch 22450, loss[loss=0.1242, simple_loss=0.1936, pruned_loss=0.02745, over 4956.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2375, pruned_loss=0.05188, over 972497.10 frames.], batch size: 15, lr: 6.61e-04 +2022-05-04 08:16:07,662 INFO [train.py:715] (6/8) Epoch 2, batch 22500, loss[loss=0.1662, simple_loss=0.2396, pruned_loss=0.04634, over 4922.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2382, pruned_loss=0.05243, over 973782.61 frames.], batch size: 18, lr: 6.61e-04 +2022-05-04 08:16:48,526 INFO [train.py:715] (6/8) Epoch 2, batch 22550, loss[loss=0.1828, simple_loss=0.2611, pruned_loss=0.05225, over 4886.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2385, pruned_loss=0.0524, over 974322.62 frames.], batch size: 22, lr: 6.60e-04 +2022-05-04 08:17:29,229 INFO [train.py:715] (6/8) Epoch 2, batch 22600, loss[loss=0.146, simple_loss=0.2111, pruned_loss=0.04044, over 4874.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2375, pruned_loss=0.05204, over 974489.88 frames.], batch size: 16, lr: 6.60e-04 +2022-05-04 08:18:09,997 INFO [train.py:715] (6/8) Epoch 2, batch 22650, loss[loss=0.1826, simple_loss=0.2598, pruned_loss=0.05266, over 4857.00 frames.], tot_loss[loss=0.1701, simple_loss=0.237, pruned_loss=0.05159, over 974245.68 frames.], batch size: 20, lr: 6.60e-04 +2022-05-04 08:18:50,677 INFO [train.py:715] (6/8) Epoch 2, batch 22700, loss[loss=0.1864, simple_loss=0.2503, pruned_loss=0.06123, over 4948.00 frames.], tot_loss[loss=0.1712, simple_loss=0.238, pruned_loss=0.05222, over 974653.87 frames.], batch size: 21, lr: 6.60e-04 +2022-05-04 08:19:31,398 INFO [train.py:715] (6/8) Epoch 2, batch 22750, loss[loss=0.1551, simple_loss=0.2276, pruned_loss=0.04127, over 4787.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2379, pruned_loss=0.0519, over 973858.39 frames.], batch size: 23, lr: 6.60e-04 +2022-05-04 08:20:12,251 INFO [train.py:715] (6/8) Epoch 2, batch 22800, loss[loss=0.1844, simple_loss=0.2469, pruned_loss=0.06095, over 4976.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2372, pruned_loss=0.0518, over 973560.70 frames.], batch size: 24, lr: 6.59e-04 +2022-05-04 08:20:53,306 INFO [train.py:715] (6/8) Epoch 2, batch 22850, loss[loss=0.1561, simple_loss=0.2137, pruned_loss=0.0493, over 4763.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2372, pruned_loss=0.05185, over 972679.01 frames.], batch size: 19, lr: 6.59e-04 +2022-05-04 08:21:34,655 INFO [train.py:715] (6/8) Epoch 2, batch 22900, loss[loss=0.1409, simple_loss=0.2218, pruned_loss=0.03005, over 4950.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2364, pruned_loss=0.05101, over 973304.60 frames.], batch size: 29, lr: 6.59e-04 +2022-05-04 08:22:15,457 INFO [train.py:715] (6/8) Epoch 2, batch 22950, loss[loss=0.1739, simple_loss=0.2382, pruned_loss=0.05478, over 4858.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2365, pruned_loss=0.05098, over 972900.26 frames.], batch size: 30, lr: 6.59e-04 +2022-05-04 08:22:56,060 INFO [train.py:715] (6/8) Epoch 2, batch 23000, 
loss[loss=0.1937, simple_loss=0.254, pruned_loss=0.06667, over 4883.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2362, pruned_loss=0.05085, over 972911.50 frames.], batch size: 32, lr: 6.59e-04 +2022-05-04 08:23:37,063 INFO [train.py:715] (6/8) Epoch 2, batch 23050, loss[loss=0.1731, simple_loss=0.2474, pruned_loss=0.04939, over 4912.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2368, pruned_loss=0.05101, over 972819.67 frames.], batch size: 18, lr: 6.59e-04 +2022-05-04 08:24:17,902 INFO [train.py:715] (6/8) Epoch 2, batch 23100, loss[loss=0.2232, simple_loss=0.2706, pruned_loss=0.08792, over 4779.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2372, pruned_loss=0.05166, over 972752.94 frames.], batch size: 17, lr: 6.58e-04 +2022-05-04 08:24:58,407 INFO [train.py:715] (6/8) Epoch 2, batch 23150, loss[loss=0.1686, simple_loss=0.2321, pruned_loss=0.05255, over 4823.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2364, pruned_loss=0.05129, over 973019.97 frames.], batch size: 13, lr: 6.58e-04 +2022-05-04 08:25:39,721 INFO [train.py:715] (6/8) Epoch 2, batch 23200, loss[loss=0.176, simple_loss=0.2551, pruned_loss=0.04849, over 4777.00 frames.], tot_loss[loss=0.17, simple_loss=0.237, pruned_loss=0.05153, over 972903.77 frames.], batch size: 18, lr: 6.58e-04 +2022-05-04 08:26:20,400 INFO [train.py:715] (6/8) Epoch 2, batch 23250, loss[loss=0.1808, simple_loss=0.2499, pruned_loss=0.05586, over 4977.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2378, pruned_loss=0.05222, over 972610.28 frames.], batch size: 15, lr: 6.58e-04 +2022-05-04 08:27:00,753 INFO [train.py:715] (6/8) Epoch 2, batch 23300, loss[loss=0.154, simple_loss=0.2247, pruned_loss=0.0417, over 4825.00 frames.], tot_loss[loss=0.1705, simple_loss=0.237, pruned_loss=0.05198, over 972399.58 frames.], batch size: 30, lr: 6.58e-04 +2022-05-04 08:27:41,451 INFO [train.py:715] (6/8) Epoch 2, batch 23350, loss[loss=0.163, simple_loss=0.2366, pruned_loss=0.04471, over 4865.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2368, pruned_loss=0.05184, over 972383.74 frames.], batch size: 16, lr: 6.57e-04 +2022-05-04 08:28:22,395 INFO [train.py:715] (6/8) Epoch 2, batch 23400, loss[loss=0.1962, simple_loss=0.2578, pruned_loss=0.06726, over 4913.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2356, pruned_loss=0.0509, over 973080.18 frames.], batch size: 29, lr: 6.57e-04 +2022-05-04 08:29:03,335 INFO [train.py:715] (6/8) Epoch 2, batch 23450, loss[loss=0.1761, simple_loss=0.2469, pruned_loss=0.05263, over 4855.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2355, pruned_loss=0.05059, over 972298.62 frames.], batch size: 20, lr: 6.57e-04 +2022-05-04 08:29:43,631 INFO [train.py:715] (6/8) Epoch 2, batch 23500, loss[loss=0.1268, simple_loss=0.1957, pruned_loss=0.02891, over 4930.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2352, pruned_loss=0.05021, over 971322.88 frames.], batch size: 18, lr: 6.57e-04 +2022-05-04 08:30:24,816 INFO [train.py:715] (6/8) Epoch 2, batch 23550, loss[loss=0.181, simple_loss=0.2394, pruned_loss=0.06132, over 4957.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2356, pruned_loss=0.05058, over 972213.88 frames.], batch size: 21, lr: 6.57e-04 +2022-05-04 08:31:05,707 INFO [train.py:715] (6/8) Epoch 2, batch 23600, loss[loss=0.1884, simple_loss=0.255, pruned_loss=0.06092, over 4692.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2356, pruned_loss=0.05045, over 972189.06 frames.], batch size: 15, lr: 6.57e-04 +2022-05-04 08:31:45,452 INFO [train.py:715] (6/8) Epoch 2, batch 23650, loss[loss=0.1994, 
simple_loss=0.2497, pruned_loss=0.07457, over 4985.00 frames.], tot_loss[loss=0.169, simple_loss=0.2361, pruned_loss=0.05099, over 971990.03 frames.], batch size: 15, lr: 6.56e-04 +2022-05-04 08:32:27,526 INFO [train.py:715] (6/8) Epoch 2, batch 23700, loss[loss=0.1555, simple_loss=0.2249, pruned_loss=0.04303, over 4832.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2371, pruned_loss=0.05177, over 971586.31 frames.], batch size: 15, lr: 6.56e-04 +2022-05-04 08:33:07,931 INFO [train.py:715] (6/8) Epoch 2, batch 23750, loss[loss=0.1568, simple_loss=0.2353, pruned_loss=0.03916, over 4966.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2357, pruned_loss=0.05102, over 971914.25 frames.], batch size: 24, lr: 6.56e-04 +2022-05-04 08:33:48,796 INFO [train.py:715] (6/8) Epoch 2, batch 23800, loss[loss=0.1812, simple_loss=0.2413, pruned_loss=0.06055, over 4958.00 frames.], tot_loss[loss=0.169, simple_loss=0.2361, pruned_loss=0.05096, over 972635.45 frames.], batch size: 35, lr: 6.56e-04 +2022-05-04 08:34:29,267 INFO [train.py:715] (6/8) Epoch 2, batch 23850, loss[loss=0.1255, simple_loss=0.1961, pruned_loss=0.02745, over 4924.00 frames.], tot_loss[loss=0.169, simple_loss=0.2362, pruned_loss=0.05093, over 971444.81 frames.], batch size: 29, lr: 6.56e-04 +2022-05-04 08:35:10,717 INFO [train.py:715] (6/8) Epoch 2, batch 23900, loss[loss=0.1614, simple_loss=0.2289, pruned_loss=0.04694, over 4903.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2358, pruned_loss=0.05049, over 971131.49 frames.], batch size: 17, lr: 6.56e-04 +2022-05-04 08:35:51,722 INFO [train.py:715] (6/8) Epoch 2, batch 23950, loss[loss=0.1543, simple_loss=0.2239, pruned_loss=0.04238, over 4782.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2347, pruned_loss=0.05029, over 970540.26 frames.], batch size: 18, lr: 6.55e-04 +2022-05-04 08:36:31,653 INFO [train.py:715] (6/8) Epoch 2, batch 24000, loss[loss=0.1752, simple_loss=0.2388, pruned_loss=0.05577, over 4992.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2346, pruned_loss=0.05029, over 970679.95 frames.], batch size: 16, lr: 6.55e-04 +2022-05-04 08:36:31,654 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 08:36:40,333 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1168, simple_loss=0.2032, pruned_loss=0.01518, over 914524.00 frames. 
+2022-05-04 08:37:20,463 INFO [train.py:715] (6/8) Epoch 2, batch 24050, loss[loss=0.1463, simple_loss=0.2188, pruned_loss=0.03693, over 4979.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2345, pruned_loss=0.05042, over 971221.91 frames.], batch size: 28, lr: 6.55e-04 +2022-05-04 08:38:01,994 INFO [train.py:715] (6/8) Epoch 2, batch 24100, loss[loss=0.1844, simple_loss=0.2512, pruned_loss=0.05882, over 4945.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2355, pruned_loss=0.05096, over 971065.16 frames.], batch size: 23, lr: 6.55e-04 +2022-05-04 08:38:42,999 INFO [train.py:715] (6/8) Epoch 2, batch 24150, loss[loss=0.2361, simple_loss=0.2892, pruned_loss=0.09148, over 4983.00 frames.], tot_loss[loss=0.1683, simple_loss=0.235, pruned_loss=0.05084, over 970722.63 frames.], batch size: 31, lr: 6.55e-04 +2022-05-04 08:39:24,319 INFO [train.py:715] (6/8) Epoch 2, batch 24200, loss[loss=0.1746, simple_loss=0.2437, pruned_loss=0.05275, over 4876.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2359, pruned_loss=0.05121, over 971356.77 frames.], batch size: 22, lr: 6.55e-04 +2022-05-04 08:40:05,202 INFO [train.py:715] (6/8) Epoch 2, batch 24250, loss[loss=0.1754, simple_loss=0.2438, pruned_loss=0.05349, over 4743.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2357, pruned_loss=0.0506, over 971036.92 frames.], batch size: 16, lr: 6.54e-04 +2022-05-04 08:40:46,099 INFO [train.py:715] (6/8) Epoch 2, batch 24300, loss[loss=0.1613, simple_loss=0.2347, pruned_loss=0.04392, over 4859.00 frames.], tot_loss[loss=0.1683, simple_loss=0.236, pruned_loss=0.05035, over 971000.46 frames.], batch size: 20, lr: 6.54e-04 +2022-05-04 08:41:26,665 INFO [train.py:715] (6/8) Epoch 2, batch 24350, loss[loss=0.1616, simple_loss=0.2317, pruned_loss=0.04578, over 4815.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2359, pruned_loss=0.05036, over 971679.41 frames.], batch size: 15, lr: 6.54e-04 +2022-05-04 08:42:06,530 INFO [train.py:715] (6/8) Epoch 2, batch 24400, loss[loss=0.1464, simple_loss=0.1998, pruned_loss=0.04649, over 4845.00 frames.], tot_loss[loss=0.1686, simple_loss=0.236, pruned_loss=0.05055, over 972469.40 frames.], batch size: 30, lr: 6.54e-04 +2022-05-04 08:42:47,548 INFO [train.py:715] (6/8) Epoch 2, batch 24450, loss[loss=0.1325, simple_loss=0.2005, pruned_loss=0.03222, over 4810.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2363, pruned_loss=0.05106, over 972106.91 frames.], batch size: 13, lr: 6.54e-04 +2022-05-04 08:43:27,487 INFO [train.py:715] (6/8) Epoch 2, batch 24500, loss[loss=0.1451, simple_loss=0.2125, pruned_loss=0.03881, over 4936.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2365, pruned_loss=0.05132, over 971430.63 frames.], batch size: 35, lr: 6.53e-04 +2022-05-04 08:44:07,371 INFO [train.py:715] (6/8) Epoch 2, batch 24550, loss[loss=0.1527, simple_loss=0.234, pruned_loss=0.03572, over 4829.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2365, pruned_loss=0.05107, over 971366.21 frames.], batch size: 13, lr: 6.53e-04 +2022-05-04 08:44:46,892 INFO [train.py:715] (6/8) Epoch 2, batch 24600, loss[loss=0.1693, simple_loss=0.2376, pruned_loss=0.05055, over 4967.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2367, pruned_loss=0.05147, over 970786.40 frames.], batch size: 24, lr: 6.53e-04 +2022-05-04 08:45:27,059 INFO [train.py:715] (6/8) Epoch 2, batch 24650, loss[loss=0.146, simple_loss=0.2128, pruned_loss=0.03961, over 4860.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2368, pruned_loss=0.05135, over 971563.98 frames.], batch size: 20, lr: 6.53e-04 +2022-05-04 08:46:06,411 
INFO [train.py:715] (6/8) Epoch 2, batch 24700, loss[loss=0.1609, simple_loss=0.2197, pruned_loss=0.05108, over 4905.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2365, pruned_loss=0.05124, over 971655.54 frames.], batch size: 17, lr: 6.53e-04 +2022-05-04 08:46:45,153 INFO [train.py:715] (6/8) Epoch 2, batch 24750, loss[loss=0.15, simple_loss=0.2176, pruned_loss=0.04119, over 4748.00 frames.], tot_loss[loss=0.17, simple_loss=0.2373, pruned_loss=0.05138, over 971999.37 frames.], batch size: 12, lr: 6.53e-04 +2022-05-04 08:47:24,976 INFO [train.py:715] (6/8) Epoch 2, batch 24800, loss[loss=0.1736, simple_loss=0.2474, pruned_loss=0.04994, over 4951.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2376, pruned_loss=0.05151, over 971432.19 frames.], batch size: 15, lr: 6.52e-04 +2022-05-04 08:48:04,567 INFO [train.py:715] (6/8) Epoch 2, batch 24850, loss[loss=0.1762, simple_loss=0.2295, pruned_loss=0.06141, over 4792.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2376, pruned_loss=0.05193, over 972020.35 frames.], batch size: 18, lr: 6.52e-04 +2022-05-04 08:48:43,458 INFO [train.py:715] (6/8) Epoch 2, batch 24900, loss[loss=0.1958, simple_loss=0.2521, pruned_loss=0.06979, over 4986.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2382, pruned_loss=0.05225, over 972887.02 frames.], batch size: 31, lr: 6.52e-04 +2022-05-04 08:49:22,927 INFO [train.py:715] (6/8) Epoch 2, batch 24950, loss[loss=0.1722, simple_loss=0.2346, pruned_loss=0.05486, over 4934.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2383, pruned_loss=0.05257, over 973623.98 frames.], batch size: 18, lr: 6.52e-04 +2022-05-04 08:50:02,459 INFO [train.py:715] (6/8) Epoch 2, batch 25000, loss[loss=0.1807, simple_loss=0.2458, pruned_loss=0.05778, over 4964.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2374, pruned_loss=0.05199, over 973431.29 frames.], batch size: 15, lr: 6.52e-04 +2022-05-04 08:50:41,259 INFO [train.py:715] (6/8) Epoch 2, batch 25050, loss[loss=0.1676, simple_loss=0.2481, pruned_loss=0.04361, over 4972.00 frames.], tot_loss[loss=0.17, simple_loss=0.237, pruned_loss=0.05149, over 973294.92 frames.], batch size: 25, lr: 6.52e-04 +2022-05-04 08:51:19,783 INFO [train.py:715] (6/8) Epoch 2, batch 25100, loss[loss=0.1726, simple_loss=0.2346, pruned_loss=0.05532, over 4908.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2368, pruned_loss=0.05154, over 972826.88 frames.], batch size: 17, lr: 6.51e-04 +2022-05-04 08:51:59,036 INFO [train.py:715] (6/8) Epoch 2, batch 25150, loss[loss=0.1454, simple_loss=0.2098, pruned_loss=0.04055, over 4817.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2359, pruned_loss=0.05151, over 972381.75 frames.], batch size: 26, lr: 6.51e-04 +2022-05-04 08:52:37,851 INFO [train.py:715] (6/8) Epoch 2, batch 25200, loss[loss=0.1808, simple_loss=0.2376, pruned_loss=0.06203, over 4874.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2358, pruned_loss=0.05117, over 971943.15 frames.], batch size: 22, lr: 6.51e-04 +2022-05-04 08:53:16,882 INFO [train.py:715] (6/8) Epoch 2, batch 25250, loss[loss=0.1787, simple_loss=0.2401, pruned_loss=0.05864, over 4781.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2355, pruned_loss=0.0509, over 970989.76 frames.], batch size: 18, lr: 6.51e-04 +2022-05-04 08:53:55,854 INFO [train.py:715] (6/8) Epoch 2, batch 25300, loss[loss=0.2161, simple_loss=0.2628, pruned_loss=0.08468, over 4830.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2359, pruned_loss=0.05133, over 971774.19 frames.], batch size: 15, lr: 6.51e-04 +2022-05-04 08:54:35,072 INFO [train.py:715] (6/8) 
Epoch 2, batch 25350, loss[loss=0.1553, simple_loss=0.222, pruned_loss=0.04429, over 4701.00 frames.], tot_loss[loss=0.169, simple_loss=0.2356, pruned_loss=0.05125, over 971866.56 frames.], batch size: 15, lr: 6.51e-04 +2022-05-04 08:55:14,142 INFO [train.py:715] (6/8) Epoch 2, batch 25400, loss[loss=0.1678, simple_loss=0.2332, pruned_loss=0.05117, over 4888.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2353, pruned_loss=0.0509, over 972536.43 frames.], batch size: 19, lr: 6.50e-04 +2022-05-04 08:55:52,996 INFO [train.py:715] (6/8) Epoch 2, batch 25450, loss[loss=0.1842, simple_loss=0.2539, pruned_loss=0.0572, over 4985.00 frames.], tot_loss[loss=0.1689, simple_loss=0.236, pruned_loss=0.05093, over 972470.28 frames.], batch size: 28, lr: 6.50e-04 +2022-05-04 08:56:32,022 INFO [train.py:715] (6/8) Epoch 2, batch 25500, loss[loss=0.1764, simple_loss=0.2414, pruned_loss=0.05568, over 4975.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2369, pruned_loss=0.05122, over 972795.26 frames.], batch size: 35, lr: 6.50e-04 +2022-05-04 08:57:11,299 INFO [train.py:715] (6/8) Epoch 2, batch 25550, loss[loss=0.1561, simple_loss=0.2298, pruned_loss=0.04124, over 4895.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2359, pruned_loss=0.05093, over 972247.97 frames.], batch size: 22, lr: 6.50e-04 +2022-05-04 08:57:50,305 INFO [train.py:715] (6/8) Epoch 2, batch 25600, loss[loss=0.1547, simple_loss=0.232, pruned_loss=0.03867, over 4930.00 frames.], tot_loss[loss=0.167, simple_loss=0.2344, pruned_loss=0.04985, over 972854.31 frames.], batch size: 29, lr: 6.50e-04 +2022-05-04 08:58:29,661 INFO [train.py:715] (6/8) Epoch 2, batch 25650, loss[loss=0.1633, simple_loss=0.229, pruned_loss=0.04875, over 4914.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2346, pruned_loss=0.04991, over 972470.55 frames.], batch size: 17, lr: 6.50e-04 +2022-05-04 08:59:09,568 INFO [train.py:715] (6/8) Epoch 2, batch 25700, loss[loss=0.1687, simple_loss=0.2422, pruned_loss=0.04758, over 4951.00 frames.], tot_loss[loss=0.1681, simple_loss=0.2353, pruned_loss=0.05041, over 971526.03 frames.], batch size: 35, lr: 6.49e-04 +2022-05-04 08:59:48,689 INFO [train.py:715] (6/8) Epoch 2, batch 25750, loss[loss=0.171, simple_loss=0.2421, pruned_loss=0.04993, over 4901.00 frames.], tot_loss[loss=0.169, simple_loss=0.236, pruned_loss=0.05099, over 972213.73 frames.], batch size: 22, lr: 6.49e-04 +2022-05-04 09:00:27,441 INFO [train.py:715] (6/8) Epoch 2, batch 25800, loss[loss=0.1581, simple_loss=0.2265, pruned_loss=0.04487, over 4958.00 frames.], tot_loss[loss=0.1681, simple_loss=0.2351, pruned_loss=0.05055, over 972902.56 frames.], batch size: 31, lr: 6.49e-04 +2022-05-04 09:01:06,417 INFO [train.py:715] (6/8) Epoch 2, batch 25850, loss[loss=0.162, simple_loss=0.2307, pruned_loss=0.04663, over 4969.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2342, pruned_loss=0.05018, over 973577.41 frames.], batch size: 39, lr: 6.49e-04 +2022-05-04 09:01:46,179 INFO [train.py:715] (6/8) Epoch 2, batch 25900, loss[loss=0.1588, simple_loss=0.2229, pruned_loss=0.04734, over 4821.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2343, pruned_loss=0.04977, over 973631.10 frames.], batch size: 15, lr: 6.49e-04 +2022-05-04 09:02:25,987 INFO [train.py:715] (6/8) Epoch 2, batch 25950, loss[loss=0.1397, simple_loss=0.2066, pruned_loss=0.03638, over 4841.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2347, pruned_loss=0.0504, over 973066.90 frames.], batch size: 13, lr: 6.49e-04 +2022-05-04 09:03:05,063 INFO [train.py:715] (6/8) Epoch 2, batch 26000, 
loss[loss=0.177, simple_loss=0.2487, pruned_loss=0.05264, over 4945.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2352, pruned_loss=0.05068, over 973311.93 frames.], batch size: 21, lr: 6.48e-04 +2022-05-04 09:03:44,736 INFO [train.py:715] (6/8) Epoch 2, batch 26050, loss[loss=0.1877, simple_loss=0.2417, pruned_loss=0.06681, over 4643.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2352, pruned_loss=0.05082, over 972488.86 frames.], batch size: 13, lr: 6.48e-04 +2022-05-04 09:04:24,307 INFO [train.py:715] (6/8) Epoch 2, batch 26100, loss[loss=0.2021, simple_loss=0.2686, pruned_loss=0.06785, over 4946.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2357, pruned_loss=0.05127, over 972362.90 frames.], batch size: 39, lr: 6.48e-04 +2022-05-04 09:05:03,481 INFO [train.py:715] (6/8) Epoch 2, batch 26150, loss[loss=0.1703, simple_loss=0.2382, pruned_loss=0.05123, over 4956.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2348, pruned_loss=0.05074, over 972951.77 frames.], batch size: 24, lr: 6.48e-04 +2022-05-04 09:05:42,982 INFO [train.py:715] (6/8) Epoch 2, batch 26200, loss[loss=0.1693, simple_loss=0.2405, pruned_loss=0.04898, over 4980.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2347, pruned_loss=0.051, over 972711.99 frames.], batch size: 28, lr: 6.48e-04 +2022-05-04 09:06:22,734 INFO [train.py:715] (6/8) Epoch 2, batch 26250, loss[loss=0.1516, simple_loss=0.2065, pruned_loss=0.04834, over 4785.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2352, pruned_loss=0.05107, over 972053.75 frames.], batch size: 12, lr: 6.48e-04 +2022-05-04 09:07:02,322 INFO [train.py:715] (6/8) Epoch 2, batch 26300, loss[loss=0.1315, simple_loss=0.2095, pruned_loss=0.02673, over 4939.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2351, pruned_loss=0.05081, over 972215.47 frames.], batch size: 23, lr: 6.47e-04 +2022-05-04 09:07:40,826 INFO [train.py:715] (6/8) Epoch 2, batch 26350, loss[loss=0.1601, simple_loss=0.2331, pruned_loss=0.04354, over 4786.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2346, pruned_loss=0.05098, over 971584.80 frames.], batch size: 17, lr: 6.47e-04 +2022-05-04 09:08:23,930 INFO [train.py:715] (6/8) Epoch 2, batch 26400, loss[loss=0.1884, simple_loss=0.2622, pruned_loss=0.05726, over 4922.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2352, pruned_loss=0.05097, over 971203.24 frames.], batch size: 18, lr: 6.47e-04 +2022-05-04 09:09:03,684 INFO [train.py:715] (6/8) Epoch 2, batch 26450, loss[loss=0.1917, simple_loss=0.2568, pruned_loss=0.06333, over 4734.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2346, pruned_loss=0.0506, over 972193.54 frames.], batch size: 16, lr: 6.47e-04 +2022-05-04 09:09:42,583 INFO [train.py:715] (6/8) Epoch 2, batch 26500, loss[loss=0.1402, simple_loss=0.2042, pruned_loss=0.03806, over 4782.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2353, pruned_loss=0.05057, over 972670.21 frames.], batch size: 17, lr: 6.47e-04 +2022-05-04 09:10:22,394 INFO [train.py:715] (6/8) Epoch 2, batch 26550, loss[loss=0.161, simple_loss=0.226, pruned_loss=0.04795, over 4948.00 frames.], tot_loss[loss=0.1669, simple_loss=0.234, pruned_loss=0.04994, over 972899.44 frames.], batch size: 21, lr: 6.46e-04 +2022-05-04 09:11:02,375 INFO [train.py:715] (6/8) Epoch 2, batch 26600, loss[loss=0.1817, simple_loss=0.2534, pruned_loss=0.05505, over 4885.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2345, pruned_loss=0.0498, over 973207.02 frames.], batch size: 19, lr: 6.46e-04 +2022-05-04 09:11:41,995 INFO [train.py:715] (6/8) Epoch 2, batch 26650, loss[loss=0.2018, 
simple_loss=0.266, pruned_loss=0.06875, over 4868.00 frames.], tot_loss[loss=0.1675, simple_loss=0.235, pruned_loss=0.05, over 972584.18 frames.], batch size: 39, lr: 6.46e-04 +2022-05-04 09:12:21,006 INFO [train.py:715] (6/8) Epoch 2, batch 26700, loss[loss=0.1295, simple_loss=0.1986, pruned_loss=0.03015, over 4828.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2342, pruned_loss=0.04966, over 972951.54 frames.], batch size: 13, lr: 6.46e-04 +2022-05-04 09:13:00,969 INFO [train.py:715] (6/8) Epoch 2, batch 26750, loss[loss=0.1309, simple_loss=0.1981, pruned_loss=0.03186, over 4969.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2344, pruned_loss=0.0499, over 971859.84 frames.], batch size: 28, lr: 6.46e-04 +2022-05-04 09:13:40,195 INFO [train.py:715] (6/8) Epoch 2, batch 26800, loss[loss=0.1443, simple_loss=0.2095, pruned_loss=0.03955, over 4795.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2348, pruned_loss=0.04969, over 971086.94 frames.], batch size: 13, lr: 6.46e-04 +2022-05-04 09:14:19,185 INFO [train.py:715] (6/8) Epoch 2, batch 26850, loss[loss=0.1937, simple_loss=0.2589, pruned_loss=0.06429, over 4978.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2352, pruned_loss=0.05005, over 971065.59 frames.], batch size: 24, lr: 6.45e-04 +2022-05-04 09:14:58,117 INFO [train.py:715] (6/8) Epoch 2, batch 26900, loss[loss=0.1667, simple_loss=0.2432, pruned_loss=0.04513, over 4879.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2344, pruned_loss=0.04967, over 970478.12 frames.], batch size: 16, lr: 6.45e-04 +2022-05-04 09:15:37,582 INFO [train.py:715] (6/8) Epoch 2, batch 26950, loss[loss=0.1543, simple_loss=0.2247, pruned_loss=0.04197, over 4901.00 frames.], tot_loss[loss=0.167, simple_loss=0.2346, pruned_loss=0.04964, over 971634.13 frames.], batch size: 19, lr: 6.45e-04 +2022-05-04 09:16:16,468 INFO [train.py:715] (6/8) Epoch 2, batch 27000, loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.03247, over 4814.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2352, pruned_loss=0.05005, over 971771.58 frames.], batch size: 21, lr: 6.45e-04 +2022-05-04 09:16:16,468 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 09:16:25,254 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1164, simple_loss=0.2027, pruned_loss=0.01502, over 914524.00 frames. 
+2022-05-04 09:17:03,623 INFO [train.py:715] (6/8) Epoch 2, batch 27050, loss[loss=0.1491, simple_loss=0.2075, pruned_loss=0.04531, over 4760.00 frames.], tot_loss[loss=0.1692, simple_loss=0.236, pruned_loss=0.05115, over 972527.80 frames.], batch size: 12, lr: 6.45e-04 +2022-05-04 09:17:42,886 INFO [train.py:715] (6/8) Epoch 2, batch 27100, loss[loss=0.1426, simple_loss=0.212, pruned_loss=0.03662, over 4757.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2363, pruned_loss=0.0511, over 971396.60 frames.], batch size: 12, lr: 6.45e-04 +2022-05-04 09:18:22,888 INFO [train.py:715] (6/8) Epoch 2, batch 27150, loss[loss=0.1599, simple_loss=0.2291, pruned_loss=0.04534, over 4967.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2357, pruned_loss=0.05062, over 972444.53 frames.], batch size: 24, lr: 6.44e-04 +2022-05-04 09:19:02,270 INFO [train.py:715] (6/8) Epoch 2, batch 27200, loss[loss=0.1731, simple_loss=0.2415, pruned_loss=0.05238, over 4864.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2362, pruned_loss=0.05075, over 972746.42 frames.], batch size: 20, lr: 6.44e-04 +2022-05-04 09:19:41,125 INFO [train.py:715] (6/8) Epoch 2, batch 27250, loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.03234, over 4860.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2356, pruned_loss=0.05063, over 973846.08 frames.], batch size: 22, lr: 6.44e-04 +2022-05-04 09:20:20,689 INFO [train.py:715] (6/8) Epoch 2, batch 27300, loss[loss=0.2123, simple_loss=0.2778, pruned_loss=0.07341, over 4891.00 frames.], tot_loss[loss=0.169, simple_loss=0.2359, pruned_loss=0.05104, over 973277.43 frames.], batch size: 22, lr: 6.44e-04 +2022-05-04 09:20:59,725 INFO [train.py:715] (6/8) Epoch 2, batch 27350, loss[loss=0.1403, simple_loss=0.2166, pruned_loss=0.03196, over 4691.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2361, pruned_loss=0.05134, over 972876.99 frames.], batch size: 15, lr: 6.44e-04 +2022-05-04 09:21:38,801 INFO [train.py:715] (6/8) Epoch 2, batch 27400, loss[loss=0.2009, simple_loss=0.2726, pruned_loss=0.06464, over 4784.00 frames.], tot_loss[loss=0.169, simple_loss=0.2363, pruned_loss=0.05088, over 972789.08 frames.], batch size: 18, lr: 6.44e-04 +2022-05-04 09:22:17,484 INFO [train.py:715] (6/8) Epoch 2, batch 27450, loss[loss=0.178, simple_loss=0.2403, pruned_loss=0.05787, over 4772.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2359, pruned_loss=0.05071, over 972660.32 frames.], batch size: 17, lr: 6.44e-04 +2022-05-04 09:22:57,212 INFO [train.py:715] (6/8) Epoch 2, batch 27500, loss[loss=0.1373, simple_loss=0.212, pruned_loss=0.03134, over 4910.00 frames.], tot_loss[loss=0.1686, simple_loss=0.236, pruned_loss=0.05055, over 972646.73 frames.], batch size: 17, lr: 6.43e-04 +2022-05-04 09:23:37,098 INFO [train.py:715] (6/8) Epoch 2, batch 27550, loss[loss=0.1456, simple_loss=0.2223, pruned_loss=0.03445, over 4698.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2359, pruned_loss=0.05068, over 971956.17 frames.], batch size: 15, lr: 6.43e-04 +2022-05-04 09:24:16,426 INFO [train.py:715] (6/8) Epoch 2, batch 27600, loss[loss=0.1247, simple_loss=0.1921, pruned_loss=0.02871, over 4778.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2356, pruned_loss=0.05066, over 971187.15 frames.], batch size: 14, lr: 6.43e-04 +2022-05-04 09:24:55,998 INFO [train.py:715] (6/8) Epoch 2, batch 27650, loss[loss=0.197, simple_loss=0.2555, pruned_loss=0.0692, over 4836.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2358, pruned_loss=0.05086, over 970024.00 frames.], batch size: 15, lr: 6.43e-04 +2022-05-04 09:25:36,611 
INFO [train.py:715] (6/8) Epoch 2, batch 27700, loss[loss=0.1633, simple_loss=0.2325, pruned_loss=0.04708, over 4980.00 frames.], tot_loss[loss=0.1677, simple_loss=0.235, pruned_loss=0.05018, over 971158.43 frames.], batch size: 15, lr: 6.43e-04 +2022-05-04 09:26:16,939 INFO [train.py:715] (6/8) Epoch 2, batch 27750, loss[loss=0.1348, simple_loss=0.2095, pruned_loss=0.03007, over 4870.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2341, pruned_loss=0.04967, over 971422.21 frames.], batch size: 16, lr: 6.43e-04 +2022-05-04 09:26:56,323 INFO [train.py:715] (6/8) Epoch 2, batch 27800, loss[loss=0.1493, simple_loss=0.2235, pruned_loss=0.0376, over 4706.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2337, pruned_loss=0.04924, over 971208.79 frames.], batch size: 15, lr: 6.42e-04 +2022-05-04 09:27:36,615 INFO [train.py:715] (6/8) Epoch 2, batch 27850, loss[loss=0.1598, simple_loss=0.2205, pruned_loss=0.04958, over 4847.00 frames.], tot_loss[loss=0.1667, simple_loss=0.234, pruned_loss=0.04968, over 971259.34 frames.], batch size: 12, lr: 6.42e-04 +2022-05-04 09:28:15,905 INFO [train.py:715] (6/8) Epoch 2, batch 27900, loss[loss=0.1547, simple_loss=0.2275, pruned_loss=0.04098, over 4834.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2342, pruned_loss=0.04979, over 971704.75 frames.], batch size: 27, lr: 6.42e-04 +2022-05-04 09:28:55,088 INFO [train.py:715] (6/8) Epoch 2, batch 27950, loss[loss=0.1912, simple_loss=0.2535, pruned_loss=0.06451, over 4848.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2354, pruned_loss=0.051, over 972185.29 frames.], batch size: 30, lr: 6.42e-04 +2022-05-04 09:29:34,668 INFO [train.py:715] (6/8) Epoch 2, batch 28000, loss[loss=0.1828, simple_loss=0.2429, pruned_loss=0.06137, over 4993.00 frames.], tot_loss[loss=0.1677, simple_loss=0.235, pruned_loss=0.05019, over 972884.49 frames.], batch size: 14, lr: 6.42e-04 +2022-05-04 09:30:15,047 INFO [train.py:715] (6/8) Epoch 2, batch 28050, loss[loss=0.172, simple_loss=0.2422, pruned_loss=0.05092, over 4843.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2346, pruned_loss=0.04998, over 973171.49 frames.], batch size: 13, lr: 6.42e-04 +2022-05-04 09:30:54,023 INFO [train.py:715] (6/8) Epoch 2, batch 28100, loss[loss=0.1405, simple_loss=0.2217, pruned_loss=0.02963, over 4801.00 frames.], tot_loss[loss=0.1663, simple_loss=0.234, pruned_loss=0.04928, over 972497.15 frames.], batch size: 21, lr: 6.41e-04 +2022-05-04 09:31:33,562 INFO [train.py:715] (6/8) Epoch 2, batch 28150, loss[loss=0.1333, simple_loss=0.2027, pruned_loss=0.03194, over 4769.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2346, pruned_loss=0.0498, over 973611.99 frames.], batch size: 14, lr: 6.41e-04 +2022-05-04 09:32:13,300 INFO [train.py:715] (6/8) Epoch 2, batch 28200, loss[loss=0.1638, simple_loss=0.2373, pruned_loss=0.0451, over 4700.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2348, pruned_loss=0.05002, over 972551.14 frames.], batch size: 15, lr: 6.41e-04 +2022-05-04 09:32:52,904 INFO [train.py:715] (6/8) Epoch 2, batch 28250, loss[loss=0.1474, simple_loss=0.2164, pruned_loss=0.03922, over 4819.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2352, pruned_loss=0.0502, over 972388.03 frames.], batch size: 26, lr: 6.41e-04 +2022-05-04 09:33:31,982 INFO [train.py:715] (6/8) Epoch 2, batch 28300, loss[loss=0.1462, simple_loss=0.2065, pruned_loss=0.04289, over 4819.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2356, pruned_loss=0.0508, over 972900.74 frames.], batch size: 12, lr: 6.41e-04 +2022-05-04 09:34:11,323 INFO [train.py:715] (6/8) 
Epoch 2, batch 28350, loss[loss=0.172, simple_loss=0.2337, pruned_loss=0.05513, over 4828.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2358, pruned_loss=0.05071, over 973507.67 frames.], batch size: 13, lr: 6.41e-04 +2022-05-04 09:34:51,512 INFO [train.py:715] (6/8) Epoch 2, batch 28400, loss[loss=0.1669, simple_loss=0.2426, pruned_loss=0.04561, over 4934.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2365, pruned_loss=0.05152, over 973708.01 frames.], batch size: 21, lr: 6.40e-04 +2022-05-04 09:35:30,763 INFO [train.py:715] (6/8) Epoch 2, batch 28450, loss[loss=0.163, simple_loss=0.2357, pruned_loss=0.04513, over 4746.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2369, pruned_loss=0.05146, over 973067.01 frames.], batch size: 19, lr: 6.40e-04 +2022-05-04 09:36:10,163 INFO [train.py:715] (6/8) Epoch 2, batch 28500, loss[loss=0.1799, simple_loss=0.2417, pruned_loss=0.05903, over 4963.00 frames.], tot_loss[loss=0.1687, simple_loss=0.236, pruned_loss=0.05071, over 973598.56 frames.], batch size: 35, lr: 6.40e-04 +2022-05-04 09:36:50,113 INFO [train.py:715] (6/8) Epoch 2, batch 28550, loss[loss=0.1815, simple_loss=0.2592, pruned_loss=0.05189, over 4922.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2361, pruned_loss=0.0508, over 973704.36 frames.], batch size: 19, lr: 6.40e-04 +2022-05-04 09:37:30,240 INFO [train.py:715] (6/8) Epoch 2, batch 28600, loss[loss=0.1625, simple_loss=0.2324, pruned_loss=0.04636, over 4962.00 frames.], tot_loss[loss=0.168, simple_loss=0.2355, pruned_loss=0.05029, over 973435.27 frames.], batch size: 15, lr: 6.40e-04 +2022-05-04 09:38:09,275 INFO [train.py:715] (6/8) Epoch 2, batch 28650, loss[loss=0.1976, simple_loss=0.2696, pruned_loss=0.06278, over 4770.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2366, pruned_loss=0.05078, over 973190.11 frames.], batch size: 18, lr: 6.40e-04 +2022-05-04 09:38:49,124 INFO [train.py:715] (6/8) Epoch 2, batch 28700, loss[loss=0.175, simple_loss=0.2378, pruned_loss=0.05613, over 4742.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2356, pruned_loss=0.05003, over 973657.91 frames.], batch size: 16, lr: 6.39e-04 +2022-05-04 09:39:29,590 INFO [train.py:715] (6/8) Epoch 2, batch 28750, loss[loss=0.1515, simple_loss=0.213, pruned_loss=0.04495, over 4956.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2355, pruned_loss=0.05059, over 973597.77 frames.], batch size: 35, lr: 6.39e-04 +2022-05-04 09:40:08,513 INFO [train.py:715] (6/8) Epoch 2, batch 28800, loss[loss=0.1677, simple_loss=0.2344, pruned_loss=0.05047, over 4751.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2367, pruned_loss=0.0512, over 973175.68 frames.], batch size: 16, lr: 6.39e-04 +2022-05-04 09:40:48,105 INFO [train.py:715] (6/8) Epoch 2, batch 28850, loss[loss=0.1715, simple_loss=0.242, pruned_loss=0.05048, over 4889.00 frames.], tot_loss[loss=0.1698, simple_loss=0.237, pruned_loss=0.05135, over 973408.49 frames.], batch size: 16, lr: 6.39e-04 +2022-05-04 09:41:28,112 INFO [train.py:715] (6/8) Epoch 2, batch 28900, loss[loss=0.1863, simple_loss=0.2689, pruned_loss=0.05186, over 4965.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2367, pruned_loss=0.05128, over 974145.41 frames.], batch size: 24, lr: 6.39e-04 +2022-05-04 09:42:07,489 INFO [train.py:715] (6/8) Epoch 2, batch 28950, loss[loss=0.167, simple_loss=0.2274, pruned_loss=0.05333, over 4894.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2355, pruned_loss=0.05069, over 974231.52 frames.], batch size: 19, lr: 6.39e-04 +2022-05-04 09:42:46,860 INFO [train.py:715] (6/8) Epoch 2, batch 29000, 
loss[loss=0.1891, simple_loss=0.2371, pruned_loss=0.07049, over 4731.00 frames.], tot_loss[loss=0.1691, simple_loss=0.236, pruned_loss=0.05108, over 973132.16 frames.], batch size: 12, lr: 6.38e-04 +2022-05-04 09:43:26,613 INFO [train.py:715] (6/8) Epoch 2, batch 29050, loss[loss=0.1425, simple_loss=0.2136, pruned_loss=0.03573, over 4912.00 frames.], tot_loss[loss=0.1681, simple_loss=0.2353, pruned_loss=0.05047, over 972965.97 frames.], batch size: 18, lr: 6.38e-04 +2022-05-04 09:44:06,293 INFO [train.py:715] (6/8) Epoch 2, batch 29100, loss[loss=0.1909, simple_loss=0.2391, pruned_loss=0.07139, over 4834.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2359, pruned_loss=0.05091, over 972347.34 frames.], batch size: 30, lr: 6.38e-04 +2022-05-04 09:44:45,464 INFO [train.py:715] (6/8) Epoch 2, batch 29150, loss[loss=0.1461, simple_loss=0.2127, pruned_loss=0.0397, over 4983.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2355, pruned_loss=0.05064, over 972661.14 frames.], batch size: 14, lr: 6.38e-04 +2022-05-04 09:45:24,949 INFO [train.py:715] (6/8) Epoch 2, batch 29200, loss[loss=0.1621, simple_loss=0.2374, pruned_loss=0.04344, over 4892.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2348, pruned_loss=0.0498, over 973872.18 frames.], batch size: 22, lr: 6.38e-04 +2022-05-04 09:46:05,378 INFO [train.py:715] (6/8) Epoch 2, batch 29250, loss[loss=0.1499, simple_loss=0.2212, pruned_loss=0.03931, over 4812.00 frames.], tot_loss[loss=0.1666, simple_loss=0.234, pruned_loss=0.04956, over 973502.82 frames.], batch size: 26, lr: 6.38e-04 +2022-05-04 09:46:44,481 INFO [train.py:715] (6/8) Epoch 2, batch 29300, loss[loss=0.1544, simple_loss=0.2359, pruned_loss=0.03645, over 4975.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2337, pruned_loss=0.04937, over 973326.54 frames.], batch size: 25, lr: 6.37e-04 +2022-05-04 09:47:23,250 INFO [train.py:715] (6/8) Epoch 2, batch 29350, loss[loss=0.145, simple_loss=0.217, pruned_loss=0.03652, over 4854.00 frames.], tot_loss[loss=0.1663, simple_loss=0.234, pruned_loss=0.04933, over 972649.97 frames.], batch size: 30, lr: 6.37e-04 +2022-05-04 09:48:02,467 INFO [train.py:715] (6/8) Epoch 2, batch 29400, loss[loss=0.1773, simple_loss=0.2381, pruned_loss=0.05827, over 4851.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2346, pruned_loss=0.04942, over 972014.06 frames.], batch size: 30, lr: 6.37e-04 +2022-05-04 09:48:41,889 INFO [train.py:715] (6/8) Epoch 2, batch 29450, loss[loss=0.1586, simple_loss=0.2166, pruned_loss=0.05026, over 4853.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2346, pruned_loss=0.04955, over 972109.81 frames.], batch size: 32, lr: 6.37e-04 +2022-05-04 09:49:20,753 INFO [train.py:715] (6/8) Epoch 2, batch 29500, loss[loss=0.1215, simple_loss=0.1949, pruned_loss=0.02406, over 4896.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2349, pruned_loss=0.04973, over 972394.31 frames.], batch size: 22, lr: 6.37e-04 +2022-05-04 09:49:59,769 INFO [train.py:715] (6/8) Epoch 2, batch 29550, loss[loss=0.1839, simple_loss=0.2539, pruned_loss=0.05692, over 4937.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2345, pruned_loss=0.04966, over 972308.88 frames.], batch size: 23, lr: 6.37e-04 +2022-05-04 09:50:39,183 INFO [train.py:715] (6/8) Epoch 2, batch 29600, loss[loss=0.1756, simple_loss=0.2485, pruned_loss=0.05135, over 4979.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2341, pruned_loss=0.0496, over 972815.49 frames.], batch size: 24, lr: 6.37e-04 +2022-05-04 09:51:18,369 INFO [train.py:715] (6/8) Epoch 2, batch 29650, loss[loss=0.1546, 
simple_loss=0.2271, pruned_loss=0.04111, over 4795.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2338, pruned_loss=0.04925, over 972989.54 frames.], batch size: 14, lr: 6.36e-04 +2022-05-04 09:51:57,130 INFO [train.py:715] (6/8) Epoch 2, batch 29700, loss[loss=0.1915, simple_loss=0.243, pruned_loss=0.07002, over 4973.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2338, pruned_loss=0.04969, over 972759.58 frames.], batch size: 15, lr: 6.36e-04 +2022-05-04 09:52:36,255 INFO [train.py:715] (6/8) Epoch 2, batch 29750, loss[loss=0.1913, simple_loss=0.2543, pruned_loss=0.06413, over 4807.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2337, pruned_loss=0.04939, over 971822.90 frames.], batch size: 25, lr: 6.36e-04 +2022-05-04 09:53:15,367 INFO [train.py:715] (6/8) Epoch 2, batch 29800, loss[loss=0.1542, simple_loss=0.2269, pruned_loss=0.04077, over 4984.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2343, pruned_loss=0.0496, over 972203.12 frames.], batch size: 28, lr: 6.36e-04 +2022-05-04 09:53:53,997 INFO [train.py:715] (6/8) Epoch 2, batch 29850, loss[loss=0.1706, simple_loss=0.2256, pruned_loss=0.05781, over 4850.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2335, pruned_loss=0.04955, over 971907.08 frames.], batch size: 32, lr: 6.36e-04 +2022-05-04 09:54:33,011 INFO [train.py:715] (6/8) Epoch 2, batch 29900, loss[loss=0.1466, simple_loss=0.2207, pruned_loss=0.0362, over 4981.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2341, pruned_loss=0.04974, over 972884.36 frames.], batch size: 25, lr: 6.36e-04 +2022-05-04 09:55:12,831 INFO [train.py:715] (6/8) Epoch 2, batch 29950, loss[loss=0.1617, simple_loss=0.2227, pruned_loss=0.05032, over 4824.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2336, pruned_loss=0.04938, over 972868.84 frames.], batch size: 15, lr: 6.35e-04 +2022-05-04 09:55:51,638 INFO [train.py:715] (6/8) Epoch 2, batch 30000, loss[loss=0.188, simple_loss=0.2525, pruned_loss=0.0618, over 4976.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2346, pruned_loss=0.05006, over 973334.16 frames.], batch size: 24, lr: 6.35e-04 +2022-05-04 09:55:51,639 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 09:56:00,454 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1166, simple_loss=0.2028, pruned_loss=0.01515, over 914524.00 frames. 
+2022-05-04 09:56:39,121 INFO [train.py:715] (6/8) Epoch 2, batch 30050, loss[loss=0.1751, simple_loss=0.2339, pruned_loss=0.05817, over 4681.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2348, pruned_loss=0.04972, over 972653.44 frames.], batch size: 15, lr: 6.35e-04 +2022-05-04 09:57:18,481 INFO [train.py:715] (6/8) Epoch 2, batch 30100, loss[loss=0.2002, simple_loss=0.2563, pruned_loss=0.072, over 4867.00 frames.], tot_loss[loss=0.168, simple_loss=0.2354, pruned_loss=0.0503, over 971982.57 frames.], batch size: 32, lr: 6.35e-04 +2022-05-04 09:57:57,552 INFO [train.py:715] (6/8) Epoch 2, batch 30150, loss[loss=0.2015, simple_loss=0.2676, pruned_loss=0.06771, over 4873.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2338, pruned_loss=0.04943, over 972411.02 frames.], batch size: 16, lr: 6.35e-04 +2022-05-04 09:58:37,030 INFO [train.py:715] (6/8) Epoch 2, batch 30200, loss[loss=0.1768, simple_loss=0.2417, pruned_loss=0.05593, over 4780.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2355, pruned_loss=0.05056, over 971786.92 frames.], batch size: 18, lr: 6.35e-04 +2022-05-04 09:59:15,777 INFO [train.py:715] (6/8) Epoch 2, batch 30250, loss[loss=0.1791, simple_loss=0.2401, pruned_loss=0.05903, over 4958.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2367, pruned_loss=0.05091, over 972014.16 frames.], batch size: 15, lr: 6.34e-04 +2022-05-04 09:59:55,026 INFO [train.py:715] (6/8) Epoch 2, batch 30300, loss[loss=0.1424, simple_loss=0.2122, pruned_loss=0.0363, over 4787.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2361, pruned_loss=0.05087, over 971886.19 frames.], batch size: 12, lr: 6.34e-04 +2022-05-04 10:00:35,004 INFO [train.py:715] (6/8) Epoch 2, batch 30350, loss[loss=0.1646, simple_loss=0.227, pruned_loss=0.05115, over 4746.00 frames.], tot_loss[loss=0.169, simple_loss=0.2358, pruned_loss=0.05114, over 972050.72 frames.], batch size: 19, lr: 6.34e-04 +2022-05-04 10:01:14,092 INFO [train.py:715] (6/8) Epoch 2, batch 30400, loss[loss=0.1556, simple_loss=0.232, pruned_loss=0.03959, over 4896.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2352, pruned_loss=0.05072, over 971721.01 frames.], batch size: 17, lr: 6.34e-04 +2022-05-04 10:01:53,196 INFO [train.py:715] (6/8) Epoch 2, batch 30450, loss[loss=0.1531, simple_loss=0.226, pruned_loss=0.04006, over 4887.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2343, pruned_loss=0.05, over 971764.90 frames.], batch size: 22, lr: 6.34e-04 +2022-05-04 10:02:32,966 INFO [train.py:715] (6/8) Epoch 2, batch 30500, loss[loss=0.1579, simple_loss=0.2201, pruned_loss=0.04788, over 4933.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2352, pruned_loss=0.05062, over 971454.70 frames.], batch size: 35, lr: 6.34e-04 +2022-05-04 10:03:12,637 INFO [train.py:715] (6/8) Epoch 2, batch 30550, loss[loss=0.1478, simple_loss=0.2195, pruned_loss=0.03807, over 4966.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2349, pruned_loss=0.04996, over 971809.81 frames.], batch size: 29, lr: 6.33e-04 +2022-05-04 10:03:51,368 INFO [train.py:715] (6/8) Epoch 2, batch 30600, loss[loss=0.1626, simple_loss=0.2404, pruned_loss=0.04235, over 4710.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2351, pruned_loss=0.04994, over 971994.83 frames.], batch size: 15, lr: 6.33e-04 +2022-05-04 10:04:31,221 INFO [train.py:715] (6/8) Epoch 2, batch 30650, loss[loss=0.1417, simple_loss=0.2114, pruned_loss=0.03602, over 4928.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2349, pruned_loss=0.0499, over 972676.96 frames.], batch size: 18, lr: 6.33e-04 +2022-05-04 10:05:11,277 INFO 
[train.py:715] (6/8) Epoch 2, batch 30700, loss[loss=0.1642, simple_loss=0.2316, pruned_loss=0.04844, over 4940.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2347, pruned_loss=0.04985, over 972901.74 frames.], batch size: 23, lr: 6.33e-04 +2022-05-04 10:05:51,107 INFO [train.py:715] (6/8) Epoch 2, batch 30750, loss[loss=0.1844, simple_loss=0.2581, pruned_loss=0.05536, over 4984.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2346, pruned_loss=0.04936, over 972188.90 frames.], batch size: 27, lr: 6.33e-04 +2022-05-04 10:06:30,175 INFO [train.py:715] (6/8) Epoch 2, batch 30800, loss[loss=0.1678, simple_loss=0.2331, pruned_loss=0.05123, over 4820.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2352, pruned_loss=0.04963, over 972442.08 frames.], batch size: 12, lr: 6.33e-04 +2022-05-04 10:07:09,686 INFO [train.py:715] (6/8) Epoch 2, batch 30850, loss[loss=0.1842, simple_loss=0.2598, pruned_loss=0.05435, over 4953.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2341, pruned_loss=0.0487, over 971982.69 frames.], batch size: 39, lr: 6.33e-04 +2022-05-04 10:07:49,326 INFO [train.py:715] (6/8) Epoch 2, batch 30900, loss[loss=0.1514, simple_loss=0.223, pruned_loss=0.03991, over 4860.00 frames.], tot_loss[loss=0.1659, simple_loss=0.234, pruned_loss=0.0489, over 972787.25 frames.], batch size: 34, lr: 6.32e-04 +2022-05-04 10:08:27,848 INFO [train.py:715] (6/8) Epoch 2, batch 30950, loss[loss=0.1707, simple_loss=0.2381, pruned_loss=0.05158, over 4987.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2352, pruned_loss=0.04989, over 972537.10 frames.], batch size: 28, lr: 6.32e-04 +2022-05-04 10:09:07,774 INFO [train.py:715] (6/8) Epoch 2, batch 31000, loss[loss=0.1732, simple_loss=0.2436, pruned_loss=0.05135, over 4789.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2352, pruned_loss=0.04971, over 972910.36 frames.], batch size: 24, lr: 6.32e-04 +2022-05-04 10:09:48,215 INFO [train.py:715] (6/8) Epoch 2, batch 31050, loss[loss=0.175, simple_loss=0.2495, pruned_loss=0.05027, over 4884.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2359, pruned_loss=0.05051, over 973087.72 frames.], batch size: 22, lr: 6.32e-04 +2022-05-04 10:10:27,691 INFO [train.py:715] (6/8) Epoch 2, batch 31100, loss[loss=0.1825, simple_loss=0.251, pruned_loss=0.05694, over 4967.00 frames.], tot_loss[loss=0.1688, simple_loss=0.236, pruned_loss=0.05078, over 973067.80 frames.], batch size: 24, lr: 6.32e-04 +2022-05-04 10:11:07,483 INFO [train.py:715] (6/8) Epoch 2, batch 31150, loss[loss=0.1683, simple_loss=0.2378, pruned_loss=0.0494, over 4788.00 frames.], tot_loss[loss=0.169, simple_loss=0.2364, pruned_loss=0.0508, over 973896.12 frames.], batch size: 17, lr: 6.32e-04 +2022-05-04 10:11:47,660 INFO [train.py:715] (6/8) Epoch 2, batch 31200, loss[loss=0.1521, simple_loss=0.2324, pruned_loss=0.03588, over 4921.00 frames.], tot_loss[loss=0.1687, simple_loss=0.236, pruned_loss=0.0507, over 974040.35 frames.], batch size: 29, lr: 6.31e-04 +2022-05-04 10:12:27,439 INFO [train.py:715] (6/8) Epoch 2, batch 31250, loss[loss=0.1454, simple_loss=0.2183, pruned_loss=0.03628, over 4937.00 frames.], tot_loss[loss=0.169, simple_loss=0.2361, pruned_loss=0.05093, over 974132.47 frames.], batch size: 29, lr: 6.31e-04 +2022-05-04 10:13:06,638 INFO [train.py:715] (6/8) Epoch 2, batch 31300, loss[loss=0.2113, simple_loss=0.2635, pruned_loss=0.07954, over 4798.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2352, pruned_loss=0.05017, over 973041.18 frames.], batch size: 17, lr: 6.31e-04 +2022-05-04 10:13:46,589 INFO [train.py:715] (6/8) Epoch 2, 
batch 31350, loss[loss=0.2427, simple_loss=0.3057, pruned_loss=0.08978, over 4734.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2368, pruned_loss=0.05113, over 972969.31 frames.], batch size: 16, lr: 6.31e-04 +2022-05-04 10:14:26,957 INFO [train.py:715] (6/8) Epoch 2, batch 31400, loss[loss=0.1437, simple_loss=0.22, pruned_loss=0.03372, over 4752.00 frames.], tot_loss[loss=0.1681, simple_loss=0.2354, pruned_loss=0.05036, over 973218.46 frames.], batch size: 16, lr: 6.31e-04 +2022-05-04 10:15:06,596 INFO [train.py:715] (6/8) Epoch 2, batch 31450, loss[loss=0.1633, simple_loss=0.2344, pruned_loss=0.0461, over 4751.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2346, pruned_loss=0.05017, over 972780.03 frames.], batch size: 16, lr: 6.31e-04 +2022-05-04 10:15:46,242 INFO [train.py:715] (6/8) Epoch 2, batch 31500, loss[loss=0.1563, simple_loss=0.2285, pruned_loss=0.04211, over 4916.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2357, pruned_loss=0.05057, over 973129.77 frames.], batch size: 18, lr: 6.31e-04 +2022-05-04 10:16:26,034 INFO [train.py:715] (6/8) Epoch 2, batch 31550, loss[loss=0.196, simple_loss=0.2669, pruned_loss=0.06257, over 4926.00 frames.], tot_loss[loss=0.168, simple_loss=0.2355, pruned_loss=0.05029, over 973570.90 frames.], batch size: 17, lr: 6.30e-04 +2022-05-04 10:17:05,443 INFO [train.py:715] (6/8) Epoch 2, batch 31600, loss[loss=0.1546, simple_loss=0.2166, pruned_loss=0.0463, over 4868.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2355, pruned_loss=0.04999, over 973588.18 frames.], batch size: 16, lr: 6.30e-04 +2022-05-04 10:17:44,226 INFO [train.py:715] (6/8) Epoch 2, batch 31650, loss[loss=0.1417, simple_loss=0.2132, pruned_loss=0.03504, over 4857.00 frames.], tot_loss[loss=0.168, simple_loss=0.2354, pruned_loss=0.05028, over 973320.02 frames.], batch size: 30, lr: 6.30e-04 +2022-05-04 10:18:24,072 INFO [train.py:715] (6/8) Epoch 2, batch 31700, loss[loss=0.197, simple_loss=0.2677, pruned_loss=0.06311, over 4682.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2354, pruned_loss=0.05057, over 973260.96 frames.], batch size: 15, lr: 6.30e-04 +2022-05-04 10:19:04,310 INFO [train.py:715] (6/8) Epoch 2, batch 31750, loss[loss=0.1794, simple_loss=0.2564, pruned_loss=0.05121, over 4795.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2347, pruned_loss=0.0503, over 972448.75 frames.], batch size: 24, lr: 6.30e-04 +2022-05-04 10:19:44,145 INFO [train.py:715] (6/8) Epoch 2, batch 31800, loss[loss=0.2282, simple_loss=0.2775, pruned_loss=0.0895, over 4782.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2341, pruned_loss=0.05012, over 972590.05 frames.], batch size: 17, lr: 6.30e-04 +2022-05-04 10:20:23,468 INFO [train.py:715] (6/8) Epoch 2, batch 31850, loss[loss=0.187, simple_loss=0.2487, pruned_loss=0.06265, over 4700.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2348, pruned_loss=0.05025, over 972414.15 frames.], batch size: 15, lr: 6.29e-04 +2022-05-04 10:21:02,957 INFO [train.py:715] (6/8) Epoch 2, batch 31900, loss[loss=0.1745, simple_loss=0.2372, pruned_loss=0.05593, over 4899.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2345, pruned_loss=0.05044, over 973023.77 frames.], batch size: 39, lr: 6.29e-04 +2022-05-04 10:21:42,549 INFO [train.py:715] (6/8) Epoch 2, batch 31950, loss[loss=0.1753, simple_loss=0.2444, pruned_loss=0.05309, over 4692.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2347, pruned_loss=0.05042, over 973587.11 frames.], batch size: 15, lr: 6.29e-04 +2022-05-04 10:22:21,472 INFO [train.py:715] (6/8) Epoch 2, batch 32000, 
loss[loss=0.1535, simple_loss=0.2238, pruned_loss=0.04156, over 4937.00 frames.], tot_loss[loss=0.167, simple_loss=0.2337, pruned_loss=0.05012, over 973098.19 frames.], batch size: 23, lr: 6.29e-04 +2022-05-04 10:23:01,105 INFO [train.py:715] (6/8) Epoch 2, batch 32050, loss[loss=0.1695, simple_loss=0.2313, pruned_loss=0.05388, over 4877.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2342, pruned_loss=0.05024, over 973212.86 frames.], batch size: 19, lr: 6.29e-04 +2022-05-04 10:23:41,018 INFO [train.py:715] (6/8) Epoch 2, batch 32100, loss[loss=0.1727, simple_loss=0.2314, pruned_loss=0.05695, over 4978.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2344, pruned_loss=0.05015, over 973081.53 frames.], batch size: 35, lr: 6.29e-04 +2022-05-04 10:24:20,304 INFO [train.py:715] (6/8) Epoch 2, batch 32150, loss[loss=0.1919, simple_loss=0.2501, pruned_loss=0.06691, over 4776.00 frames.], tot_loss[loss=0.166, simple_loss=0.2336, pruned_loss=0.04916, over 972787.37 frames.], batch size: 14, lr: 6.29e-04 +2022-05-04 10:24:59,278 INFO [train.py:715] (6/8) Epoch 2, batch 32200, loss[loss=0.1488, simple_loss=0.2224, pruned_loss=0.03758, over 4787.00 frames.], tot_loss[loss=0.1658, simple_loss=0.234, pruned_loss=0.04882, over 972168.69 frames.], batch size: 17, lr: 6.28e-04 +2022-05-04 10:25:39,140 INFO [train.py:715] (6/8) Epoch 2, batch 32250, loss[loss=0.1475, simple_loss=0.2093, pruned_loss=0.04283, over 4945.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2339, pruned_loss=0.04897, over 971965.78 frames.], batch size: 21, lr: 6.28e-04 +2022-05-04 10:26:18,497 INFO [train.py:715] (6/8) Epoch 2, batch 32300, loss[loss=0.1977, simple_loss=0.2607, pruned_loss=0.06737, over 4967.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2336, pruned_loss=0.04907, over 972031.34 frames.], batch size: 14, lr: 6.28e-04 +2022-05-04 10:26:57,487 INFO [train.py:715] (6/8) Epoch 2, batch 32350, loss[loss=0.1573, simple_loss=0.2233, pruned_loss=0.04564, over 4839.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2345, pruned_loss=0.04982, over 972397.68 frames.], batch size: 32, lr: 6.28e-04 +2022-05-04 10:27:37,327 INFO [train.py:715] (6/8) Epoch 2, batch 32400, loss[loss=0.1681, simple_loss=0.2442, pruned_loss=0.04596, over 4983.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2345, pruned_loss=0.04987, over 972596.30 frames.], batch size: 27, lr: 6.28e-04 +2022-05-04 10:28:17,095 INFO [train.py:715] (6/8) Epoch 2, batch 32450, loss[loss=0.1717, simple_loss=0.2369, pruned_loss=0.05324, over 4689.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2354, pruned_loss=0.04992, over 971877.27 frames.], batch size: 15, lr: 6.28e-04 +2022-05-04 10:28:56,080 INFO [train.py:715] (6/8) Epoch 2, batch 32500, loss[loss=0.1784, simple_loss=0.2384, pruned_loss=0.05916, over 4777.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2352, pruned_loss=0.04999, over 971190.50 frames.], batch size: 17, lr: 6.27e-04 +2022-05-04 10:29:35,595 INFO [train.py:715] (6/8) Epoch 2, batch 32550, loss[loss=0.1564, simple_loss=0.2211, pruned_loss=0.04585, over 4898.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2349, pruned_loss=0.0503, over 970992.42 frames.], batch size: 29, lr: 6.27e-04 +2022-05-04 10:30:15,650 INFO [train.py:715] (6/8) Epoch 2, batch 32600, loss[loss=0.1559, simple_loss=0.2251, pruned_loss=0.04333, over 4807.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2349, pruned_loss=0.04988, over 970989.37 frames.], batch size: 25, lr: 6.27e-04 +2022-05-04 10:30:54,903 INFO [train.py:715] (6/8) Epoch 2, batch 32650, loss[loss=0.1695, 
simple_loss=0.2301, pruned_loss=0.05446, over 4920.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2349, pruned_loss=0.05016, over 971732.43 frames.], batch size: 17, lr: 6.27e-04 +2022-05-04 10:31:33,745 INFO [train.py:715] (6/8) Epoch 2, batch 32700, loss[loss=0.1392, simple_loss=0.2073, pruned_loss=0.03558, over 4945.00 frames.], tot_loss[loss=0.168, simple_loss=0.2354, pruned_loss=0.05025, over 972127.99 frames.], batch size: 29, lr: 6.27e-04 +2022-05-04 10:32:13,537 INFO [train.py:715] (6/8) Epoch 2, batch 32750, loss[loss=0.2124, simple_loss=0.2656, pruned_loss=0.0796, over 4812.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2353, pruned_loss=0.05003, over 972546.13 frames.], batch size: 13, lr: 6.27e-04 +2022-05-04 10:32:53,523 INFO [train.py:715] (6/8) Epoch 2, batch 32800, loss[loss=0.1759, simple_loss=0.2309, pruned_loss=0.06043, over 4742.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2351, pruned_loss=0.05012, over 972638.22 frames.], batch size: 16, lr: 6.27e-04 +2022-05-04 10:33:32,252 INFO [train.py:715] (6/8) Epoch 2, batch 32850, loss[loss=0.1688, simple_loss=0.2337, pruned_loss=0.0519, over 4844.00 frames.], tot_loss[loss=0.1674, simple_loss=0.235, pruned_loss=0.04988, over 972826.67 frames.], batch size: 30, lr: 6.26e-04 +2022-05-04 10:34:11,598 INFO [train.py:715] (6/8) Epoch 2, batch 32900, loss[loss=0.1902, simple_loss=0.2466, pruned_loss=0.06686, over 4871.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2354, pruned_loss=0.05001, over 972269.62 frames.], batch size: 32, lr: 6.26e-04 +2022-05-04 10:34:51,517 INFO [train.py:715] (6/8) Epoch 2, batch 32950, loss[loss=0.2173, simple_loss=0.273, pruned_loss=0.08082, over 4771.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2344, pruned_loss=0.04972, over 972259.19 frames.], batch size: 18, lr: 6.26e-04 +2022-05-04 10:35:30,098 INFO [train.py:715] (6/8) Epoch 2, batch 33000, loss[loss=0.1796, simple_loss=0.245, pruned_loss=0.05713, over 4922.00 frames.], tot_loss[loss=0.1679, simple_loss=0.235, pruned_loss=0.05033, over 972610.05 frames.], batch size: 18, lr: 6.26e-04 +2022-05-04 10:35:30,098 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 10:35:38,852 INFO [train.py:742] (6/8) Epoch 2, validation: loss=0.1163, simple_loss=0.2025, pruned_loss=0.01504, over 914524.00 frames. 
+2022-05-04 10:36:17,843 INFO [train.py:715] (6/8) Epoch 2, batch 33050, loss[loss=0.1464, simple_loss=0.2193, pruned_loss=0.0367, over 4875.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2353, pruned_loss=0.05068, over 971749.67 frames.], batch size: 16, lr: 6.26e-04 +2022-05-04 10:36:57,381 INFO [train.py:715] (6/8) Epoch 2, batch 33100, loss[loss=0.1632, simple_loss=0.2247, pruned_loss=0.05088, over 4921.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2355, pruned_loss=0.0506, over 971419.71 frames.], batch size: 23, lr: 6.26e-04 +2022-05-04 10:37:37,180 INFO [train.py:715] (6/8) Epoch 2, batch 33150, loss[loss=0.1852, simple_loss=0.2447, pruned_loss=0.0629, over 4964.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2355, pruned_loss=0.05067, over 971680.16 frames.], batch size: 24, lr: 6.25e-04 +2022-05-04 10:38:16,780 INFO [train.py:715] (6/8) Epoch 2, batch 33200, loss[loss=0.1619, simple_loss=0.2279, pruned_loss=0.04795, over 4903.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2355, pruned_loss=0.05055, over 972742.67 frames.], batch size: 17, lr: 6.25e-04 +2022-05-04 10:38:56,320 INFO [train.py:715] (6/8) Epoch 2, batch 33250, loss[loss=0.1755, simple_loss=0.2355, pruned_loss=0.05779, over 4974.00 frames.], tot_loss[loss=0.169, simple_loss=0.2357, pruned_loss=0.05115, over 973202.50 frames.], batch size: 35, lr: 6.25e-04 +2022-05-04 10:39:35,524 INFO [train.py:715] (6/8) Epoch 2, batch 33300, loss[loss=0.1425, simple_loss=0.2126, pruned_loss=0.03622, over 4844.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2341, pruned_loss=0.05013, over 972891.86 frames.], batch size: 13, lr: 6.25e-04 +2022-05-04 10:40:14,696 INFO [train.py:715] (6/8) Epoch 2, batch 33350, loss[loss=0.148, simple_loss=0.2218, pruned_loss=0.03714, over 4758.00 frames.], tot_loss[loss=0.1665, simple_loss=0.2333, pruned_loss=0.04985, over 971812.83 frames.], batch size: 16, lr: 6.25e-04 +2022-05-04 10:40:53,961 INFO [train.py:715] (6/8) Epoch 2, batch 33400, loss[loss=0.2035, simple_loss=0.2643, pruned_loss=0.07131, over 4770.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2335, pruned_loss=0.04991, over 971643.64 frames.], batch size: 17, lr: 6.25e-04 +2022-05-04 10:41:33,184 INFO [train.py:715] (6/8) Epoch 2, batch 33450, loss[loss=0.1823, simple_loss=0.2429, pruned_loss=0.06082, over 4979.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2338, pruned_loss=0.05044, over 972149.70 frames.], batch size: 25, lr: 6.25e-04 +2022-05-04 10:42:13,248 INFO [train.py:715] (6/8) Epoch 2, batch 33500, loss[loss=0.1658, simple_loss=0.2412, pruned_loss=0.04515, over 4960.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2349, pruned_loss=0.0509, over 972629.48 frames.], batch size: 24, lr: 6.24e-04 +2022-05-04 10:42:52,011 INFO [train.py:715] (6/8) Epoch 2, batch 33550, loss[loss=0.1611, simple_loss=0.2231, pruned_loss=0.04954, over 4994.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2343, pruned_loss=0.05052, over 972195.30 frames.], batch size: 16, lr: 6.24e-04 +2022-05-04 10:43:31,507 INFO [train.py:715] (6/8) Epoch 2, batch 33600, loss[loss=0.1488, simple_loss=0.2288, pruned_loss=0.03435, over 4868.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2347, pruned_loss=0.05085, over 972888.41 frames.], batch size: 20, lr: 6.24e-04 +2022-05-04 10:44:11,049 INFO [train.py:715] (6/8) Epoch 2, batch 33650, loss[loss=0.1582, simple_loss=0.2338, pruned_loss=0.04129, over 4773.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2354, pruned_loss=0.05116, over 973185.54 frames.], batch size: 18, lr: 6.24e-04 +2022-05-04 10:44:50,489 
INFO [train.py:715] (6/8) Epoch 2, batch 33700, loss[loss=0.1918, simple_loss=0.273, pruned_loss=0.05528, over 4885.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2353, pruned_loss=0.05085, over 973126.06 frames.], batch size: 19, lr: 6.24e-04 +2022-05-04 10:45:29,908 INFO [train.py:715] (6/8) Epoch 2, batch 33750, loss[loss=0.1729, simple_loss=0.2393, pruned_loss=0.05324, over 4936.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2346, pruned_loss=0.05023, over 973619.96 frames.], batch size: 21, lr: 6.24e-04 +2022-05-04 10:46:09,313 INFO [train.py:715] (6/8) Epoch 2, batch 33800, loss[loss=0.1551, simple_loss=0.2194, pruned_loss=0.04542, over 4818.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2344, pruned_loss=0.04954, over 973229.42 frames.], batch size: 25, lr: 6.23e-04 +2022-05-04 10:46:49,492 INFO [train.py:715] (6/8) Epoch 2, batch 33850, loss[loss=0.174, simple_loss=0.2366, pruned_loss=0.05568, over 4961.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2344, pruned_loss=0.04956, over 972535.04 frames.], batch size: 24, lr: 6.23e-04 +2022-05-04 10:47:28,888 INFO [train.py:715] (6/8) Epoch 2, batch 33900, loss[loss=0.1646, simple_loss=0.2436, pruned_loss=0.04281, over 4789.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2332, pruned_loss=0.04931, over 972452.00 frames.], batch size: 17, lr: 6.23e-04 +2022-05-04 10:48:08,032 INFO [train.py:715] (6/8) Epoch 2, batch 33950, loss[loss=0.1846, simple_loss=0.2417, pruned_loss=0.06375, over 4927.00 frames.], tot_loss[loss=0.1665, simple_loss=0.2341, pruned_loss=0.04944, over 971979.51 frames.], batch size: 35, lr: 6.23e-04 +2022-05-04 10:48:47,956 INFO [train.py:715] (6/8) Epoch 2, batch 34000, loss[loss=0.1748, simple_loss=0.2405, pruned_loss=0.0546, over 4933.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2346, pruned_loss=0.04975, over 972389.98 frames.], batch size: 29, lr: 6.23e-04 +2022-05-04 10:49:27,584 INFO [train.py:715] (6/8) Epoch 2, batch 34050, loss[loss=0.162, simple_loss=0.2347, pruned_loss=0.04466, over 4772.00 frames.], tot_loss[loss=0.167, simple_loss=0.2349, pruned_loss=0.04956, over 971887.48 frames.], batch size: 14, lr: 6.23e-04 +2022-05-04 10:50:07,050 INFO [train.py:715] (6/8) Epoch 2, batch 34100, loss[loss=0.1927, simple_loss=0.2563, pruned_loss=0.06457, over 4921.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2355, pruned_loss=0.04996, over 972242.90 frames.], batch size: 23, lr: 6.23e-04 +2022-05-04 10:50:46,463 INFO [train.py:715] (6/8) Epoch 2, batch 34150, loss[loss=0.1774, simple_loss=0.251, pruned_loss=0.05187, over 4867.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2345, pruned_loss=0.04966, over 971479.95 frames.], batch size: 16, lr: 6.22e-04 +2022-05-04 10:51:26,748 INFO [train.py:715] (6/8) Epoch 2, batch 34200, loss[loss=0.1831, simple_loss=0.2419, pruned_loss=0.06219, over 4786.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2343, pruned_loss=0.04949, over 971421.89 frames.], batch size: 14, lr: 6.22e-04 +2022-05-04 10:52:06,317 INFO [train.py:715] (6/8) Epoch 2, batch 34250, loss[loss=0.2193, simple_loss=0.2979, pruned_loss=0.07032, over 4861.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2353, pruned_loss=0.05052, over 970771.76 frames.], batch size: 20, lr: 6.22e-04 +2022-05-04 10:52:45,484 INFO [train.py:715] (6/8) Epoch 2, batch 34300, loss[loss=0.194, simple_loss=0.2568, pruned_loss=0.06563, over 4779.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2346, pruned_loss=0.05032, over 971034.69 frames.], batch size: 18, lr: 6.22e-04 +2022-05-04 10:53:25,370 INFO [train.py:715] (6/8) 
Epoch 2, batch 34350, loss[loss=0.1837, simple_loss=0.257, pruned_loss=0.05518, over 4780.00 frames.], tot_loss[loss=0.1667, simple_loss=0.234, pruned_loss=0.04969, over 972103.18 frames.], batch size: 17, lr: 6.22e-04 +2022-05-04 10:54:07,394 INFO [train.py:715] (6/8) Epoch 2, batch 34400, loss[loss=0.2029, simple_loss=0.2535, pruned_loss=0.07614, over 4769.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2344, pruned_loss=0.05041, over 971596.39 frames.], batch size: 14, lr: 6.22e-04 +2022-05-04 10:54:46,516 INFO [train.py:715] (6/8) Epoch 2, batch 34450, loss[loss=0.1641, simple_loss=0.2342, pruned_loss=0.04698, over 4964.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2346, pruned_loss=0.05015, over 972285.86 frames.], batch size: 35, lr: 6.22e-04 +2022-05-04 10:55:25,437 INFO [train.py:715] (6/8) Epoch 2, batch 34500, loss[loss=0.1895, simple_loss=0.2685, pruned_loss=0.05521, over 4824.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2361, pruned_loss=0.05048, over 972371.98 frames.], batch size: 15, lr: 6.21e-04 +2022-05-04 10:56:05,345 INFO [train.py:715] (6/8) Epoch 2, batch 34550, loss[loss=0.1363, simple_loss=0.2043, pruned_loss=0.03415, over 4864.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2352, pruned_loss=0.05024, over 972836.60 frames.], batch size: 13, lr: 6.21e-04 +2022-05-04 10:56:44,135 INFO [train.py:715] (6/8) Epoch 2, batch 34600, loss[loss=0.1751, simple_loss=0.2261, pruned_loss=0.06206, over 4746.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2351, pruned_loss=0.04991, over 972906.17 frames.], batch size: 16, lr: 6.21e-04 +2022-05-04 10:57:23,177 INFO [train.py:715] (6/8) Epoch 2, batch 34650, loss[loss=0.1457, simple_loss=0.2169, pruned_loss=0.03718, over 4673.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2349, pruned_loss=0.04985, over 972401.54 frames.], batch size: 14, lr: 6.21e-04 +2022-05-04 10:58:02,541 INFO [train.py:715] (6/8) Epoch 2, batch 34700, loss[loss=0.1653, simple_loss=0.2453, pruned_loss=0.04269, over 4929.00 frames.], tot_loss[loss=0.167, simple_loss=0.2344, pruned_loss=0.04977, over 972728.05 frames.], batch size: 23, lr: 6.21e-04 +2022-05-04 10:58:40,566 INFO [train.py:715] (6/8) Epoch 2, batch 34750, loss[loss=0.1705, simple_loss=0.2465, pruned_loss=0.04728, over 4819.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2341, pruned_loss=0.04954, over 972574.28 frames.], batch size: 21, lr: 6.21e-04 +2022-05-04 10:59:17,104 INFO [train.py:715] (6/8) Epoch 2, batch 34800, loss[loss=0.1156, simple_loss=0.1845, pruned_loss=0.02337, over 4780.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2336, pruned_loss=0.04933, over 971842.68 frames.], batch size: 12, lr: 6.20e-04 +2022-05-04 11:00:07,069 INFO [train.py:715] (6/8) Epoch 3, batch 0, loss[loss=0.1698, simple_loss=0.2334, pruned_loss=0.05308, over 4772.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2334, pruned_loss=0.05308, over 4772.00 frames.], batch size: 17, lr: 5.87e-04 +2022-05-04 11:00:45,741 INFO [train.py:715] (6/8) Epoch 3, batch 50, loss[loss=0.1732, simple_loss=0.2316, pruned_loss=0.05745, over 4831.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2319, pruned_loss=0.04918, over 219655.71 frames.], batch size: 30, lr: 5.87e-04 +2022-05-04 11:01:25,681 INFO [train.py:715] (6/8) Epoch 3, batch 100, loss[loss=0.1538, simple_loss=0.2269, pruned_loss=0.04039, over 4784.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2306, pruned_loss=0.04799, over 386861.25 frames.], batch size: 18, lr: 5.87e-04 +2022-05-04 11:02:05,241 INFO [train.py:715] (6/8) Epoch 3, batch 150, 
loss[loss=0.1757, simple_loss=0.2391, pruned_loss=0.05613, over 4929.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2325, pruned_loss=0.0492, over 516586.50 frames.], batch size: 39, lr: 5.86e-04 +2022-05-04 11:02:44,388 INFO [train.py:715] (6/8) Epoch 3, batch 200, loss[loss=0.1624, simple_loss=0.2262, pruned_loss=0.04925, over 4940.00 frames.], tot_loss[loss=0.1665, simple_loss=0.2333, pruned_loss=0.04982, over 617839.35 frames.], batch size: 29, lr: 5.86e-04 +2022-05-04 11:03:23,629 INFO [train.py:715] (6/8) Epoch 3, batch 250, loss[loss=0.1346, simple_loss=0.2006, pruned_loss=0.03433, over 4863.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2356, pruned_loss=0.05099, over 697778.54 frames.], batch size: 32, lr: 5.86e-04 +2022-05-04 11:04:03,636 INFO [train.py:715] (6/8) Epoch 3, batch 300, loss[loss=0.1513, simple_loss=0.219, pruned_loss=0.04179, over 4751.00 frames.], tot_loss[loss=0.167, simple_loss=0.2344, pruned_loss=0.04983, over 758578.84 frames.], batch size: 16, lr: 5.86e-04 +2022-05-04 11:04:42,647 INFO [train.py:715] (6/8) Epoch 3, batch 350, loss[loss=0.2229, simple_loss=0.2866, pruned_loss=0.07957, over 4919.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2346, pruned_loss=0.04988, over 806408.81 frames.], batch size: 18, lr: 5.86e-04 +2022-05-04 11:05:21,845 INFO [train.py:715] (6/8) Epoch 3, batch 400, loss[loss=0.171, simple_loss=0.2405, pruned_loss=0.05073, over 4982.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2346, pruned_loss=0.05008, over 842538.76 frames.], batch size: 39, lr: 5.86e-04 +2022-05-04 11:06:01,613 INFO [train.py:715] (6/8) Epoch 3, batch 450, loss[loss=0.2064, simple_loss=0.2571, pruned_loss=0.07787, over 4857.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2353, pruned_loss=0.0503, over 871117.23 frames.], batch size: 32, lr: 5.86e-04 +2022-05-04 11:06:41,127 INFO [train.py:715] (6/8) Epoch 3, batch 500, loss[loss=0.1335, simple_loss=0.1954, pruned_loss=0.03574, over 4851.00 frames.], tot_loss[loss=0.1666, simple_loss=0.234, pruned_loss=0.0496, over 893347.71 frames.], batch size: 12, lr: 5.85e-04 +2022-05-04 11:07:20,471 INFO [train.py:715] (6/8) Epoch 3, batch 550, loss[loss=0.1508, simple_loss=0.216, pruned_loss=0.04279, over 4740.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2342, pruned_loss=0.04978, over 911930.21 frames.], batch size: 16, lr: 5.85e-04 +2022-05-04 11:07:59,342 INFO [train.py:715] (6/8) Epoch 3, batch 600, loss[loss=0.1417, simple_loss=0.2134, pruned_loss=0.03499, over 4966.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2333, pruned_loss=0.0495, over 925580.17 frames.], batch size: 35, lr: 5.85e-04 +2022-05-04 11:08:39,295 INFO [train.py:715] (6/8) Epoch 3, batch 650, loss[loss=0.1632, simple_loss=0.2356, pruned_loss=0.04541, over 4918.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2334, pruned_loss=0.04941, over 935936.56 frames.], batch size: 29, lr: 5.85e-04 +2022-05-04 11:09:18,641 INFO [train.py:715] (6/8) Epoch 3, batch 700, loss[loss=0.1765, simple_loss=0.246, pruned_loss=0.05353, over 4863.00 frames.], tot_loss[loss=0.1665, simple_loss=0.2336, pruned_loss=0.04974, over 944926.21 frames.], batch size: 20, lr: 5.85e-04 +2022-05-04 11:09:57,742 INFO [train.py:715] (6/8) Epoch 3, batch 750, loss[loss=0.1443, simple_loss=0.2161, pruned_loss=0.03622, over 4954.00 frames.], tot_loss[loss=0.1681, simple_loss=0.2347, pruned_loss=0.05077, over 950449.29 frames.], batch size: 21, lr: 5.85e-04 +2022-05-04 11:10:37,309 INFO [train.py:715] (6/8) Epoch 3, batch 800, loss[loss=0.1343, simple_loss=0.2006, 
pruned_loss=0.03394, over 4849.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2344, pruned_loss=0.05072, over 955116.64 frames.], batch size: 13, lr: 5.85e-04 +2022-05-04 11:11:17,445 INFO [train.py:715] (6/8) Epoch 3, batch 850, loss[loss=0.2028, simple_loss=0.2503, pruned_loss=0.07767, over 4838.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2351, pruned_loss=0.05075, over 959950.97 frames.], batch size: 13, lr: 5.84e-04 +2022-05-04 11:11:56,828 INFO [train.py:715] (6/8) Epoch 3, batch 900, loss[loss=0.1573, simple_loss=0.2319, pruned_loss=0.04132, over 4827.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2342, pruned_loss=0.05027, over 962363.57 frames.], batch size: 13, lr: 5.84e-04 +2022-05-04 11:12:35,442 INFO [train.py:715] (6/8) Epoch 3, batch 950, loss[loss=0.2123, simple_loss=0.2637, pruned_loss=0.08046, over 4855.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2346, pruned_loss=0.05105, over 964292.67 frames.], batch size: 32, lr: 5.84e-04 +2022-05-04 11:13:15,428 INFO [train.py:715] (6/8) Epoch 3, batch 1000, loss[loss=0.1532, simple_loss=0.2194, pruned_loss=0.04355, over 4739.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2339, pruned_loss=0.05043, over 966003.87 frames.], batch size: 16, lr: 5.84e-04 +2022-05-04 11:13:55,096 INFO [train.py:715] (6/8) Epoch 3, batch 1050, loss[loss=0.1511, simple_loss=0.2202, pruned_loss=0.04103, over 4835.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2342, pruned_loss=0.05079, over 967853.93 frames.], batch size: 30, lr: 5.84e-04 +2022-05-04 11:14:34,008 INFO [train.py:715] (6/8) Epoch 3, batch 1100, loss[loss=0.1522, simple_loss=0.21, pruned_loss=0.04722, over 4834.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2339, pruned_loss=0.05019, over 968880.70 frames.], batch size: 15, lr: 5.84e-04 +2022-05-04 11:15:12,879 INFO [train.py:715] (6/8) Epoch 3, batch 1150, loss[loss=0.2324, simple_loss=0.2638, pruned_loss=0.1005, over 4787.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2345, pruned_loss=0.05052, over 968949.14 frames.], batch size: 12, lr: 5.84e-04 +2022-05-04 11:15:52,685 INFO [train.py:715] (6/8) Epoch 3, batch 1200, loss[loss=0.1773, simple_loss=0.237, pruned_loss=0.05877, over 4825.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2336, pruned_loss=0.05005, over 969593.50 frames.], batch size: 15, lr: 5.83e-04 +2022-05-04 11:16:31,656 INFO [train.py:715] (6/8) Epoch 3, batch 1250, loss[loss=0.2018, simple_loss=0.2882, pruned_loss=0.05776, over 4967.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2337, pruned_loss=0.04978, over 970525.03 frames.], batch size: 15, lr: 5.83e-04 +2022-05-04 11:17:10,172 INFO [train.py:715] (6/8) Epoch 3, batch 1300, loss[loss=0.1626, simple_loss=0.2342, pruned_loss=0.04555, over 4878.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2336, pruned_loss=0.04979, over 970908.42 frames.], batch size: 16, lr: 5.83e-04 +2022-05-04 11:17:49,722 INFO [train.py:715] (6/8) Epoch 3, batch 1350, loss[loss=0.1579, simple_loss=0.2308, pruned_loss=0.04248, over 4952.00 frames.], tot_loss[loss=0.1659, simple_loss=0.233, pruned_loss=0.04936, over 971846.84 frames.], batch size: 21, lr: 5.83e-04 +2022-05-04 11:18:29,002 INFO [train.py:715] (6/8) Epoch 3, batch 1400, loss[loss=0.1475, simple_loss=0.22, pruned_loss=0.03744, over 4817.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2329, pruned_loss=0.04935, over 972507.85 frames.], batch size: 26, lr: 5.83e-04 +2022-05-04 11:19:07,868 INFO [train.py:715] (6/8) Epoch 3, batch 1450, loss[loss=0.1716, simple_loss=0.2439, pruned_loss=0.0497, over 4798.00 
frames.], tot_loss[loss=0.1655, simple_loss=0.2327, pruned_loss=0.0491, over 972240.02 frames.], batch size: 25, lr: 5.83e-04 +2022-05-04 11:19:46,424 INFO [train.py:715] (6/8) Epoch 3, batch 1500, loss[loss=0.1328, simple_loss=0.2036, pruned_loss=0.03105, over 4824.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2316, pruned_loss=0.0484, over 972689.15 frames.], batch size: 27, lr: 5.83e-04 +2022-05-04 11:20:26,148 INFO [train.py:715] (6/8) Epoch 3, batch 1550, loss[loss=0.1715, simple_loss=0.2452, pruned_loss=0.04884, over 4984.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2326, pruned_loss=0.04912, over 973479.27 frames.], batch size: 24, lr: 5.83e-04 +2022-05-04 11:21:05,412 INFO [train.py:715] (6/8) Epoch 3, batch 1600, loss[loss=0.1712, simple_loss=0.2346, pruned_loss=0.0539, over 4867.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2325, pruned_loss=0.04906, over 972607.99 frames.], batch size: 16, lr: 5.82e-04 +2022-05-04 11:21:43,534 INFO [train.py:715] (6/8) Epoch 3, batch 1650, loss[loss=0.129, simple_loss=0.2076, pruned_loss=0.02514, over 4836.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2321, pruned_loss=0.04884, over 972557.48 frames.], batch size: 20, lr: 5.82e-04 +2022-05-04 11:22:22,784 INFO [train.py:715] (6/8) Epoch 3, batch 1700, loss[loss=0.1633, simple_loss=0.2421, pruned_loss=0.04228, over 4864.00 frames.], tot_loss[loss=0.166, simple_loss=0.2332, pruned_loss=0.04936, over 972256.58 frames.], batch size: 20, lr: 5.82e-04 +2022-05-04 11:23:02,321 INFO [train.py:715] (6/8) Epoch 3, batch 1750, loss[loss=0.1717, simple_loss=0.2262, pruned_loss=0.05859, over 4772.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2338, pruned_loss=0.04968, over 971300.29 frames.], batch size: 18, lr: 5.82e-04 +2022-05-04 11:23:41,625 INFO [train.py:715] (6/8) Epoch 3, batch 1800, loss[loss=0.1902, simple_loss=0.2562, pruned_loss=0.06215, over 4939.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2339, pruned_loss=0.04996, over 971547.73 frames.], batch size: 21, lr: 5.82e-04 +2022-05-04 11:24:20,324 INFO [train.py:715] (6/8) Epoch 3, batch 1850, loss[loss=0.1501, simple_loss=0.2306, pruned_loss=0.0348, over 4910.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2343, pruned_loss=0.05018, over 972139.75 frames.], batch size: 23, lr: 5.82e-04 +2022-05-04 11:25:00,294 INFO [train.py:715] (6/8) Epoch 3, batch 1900, loss[loss=0.1644, simple_loss=0.2322, pruned_loss=0.04835, over 4697.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2334, pruned_loss=0.04952, over 972226.98 frames.], batch size: 15, lr: 5.82e-04 +2022-05-04 11:25:39,890 INFO [train.py:715] (6/8) Epoch 3, batch 1950, loss[loss=0.1585, simple_loss=0.2283, pruned_loss=0.04433, over 4781.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2335, pruned_loss=0.04945, over 972625.75 frames.], batch size: 18, lr: 5.81e-04 +2022-05-04 11:26:18,807 INFO [train.py:715] (6/8) Epoch 3, batch 2000, loss[loss=0.183, simple_loss=0.2572, pruned_loss=0.05446, over 4854.00 frames.], tot_loss[loss=0.1665, simple_loss=0.2338, pruned_loss=0.04965, over 972723.24 frames.], batch size: 32, lr: 5.81e-04 +2022-05-04 11:26:58,015 INFO [train.py:715] (6/8) Epoch 3, batch 2050, loss[loss=0.1657, simple_loss=0.2357, pruned_loss=0.04779, over 4887.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2352, pruned_loss=0.05026, over 972143.83 frames.], batch size: 22, lr: 5.81e-04 +2022-05-04 11:27:37,800 INFO [train.py:715] (6/8) Epoch 3, batch 2100, loss[loss=0.1536, simple_loss=0.2359, pruned_loss=0.03563, over 4794.00 frames.], tot_loss[loss=0.1673, 
simple_loss=0.2345, pruned_loss=0.04999, over 971750.13 frames.], batch size: 24, lr: 5.81e-04 +2022-05-04 11:28:17,053 INFO [train.py:715] (6/8) Epoch 3, batch 2150, loss[loss=0.1667, simple_loss=0.235, pruned_loss=0.04921, over 4810.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2348, pruned_loss=0.04983, over 972347.69 frames.], batch size: 25, lr: 5.81e-04 +2022-05-04 11:28:55,725 INFO [train.py:715] (6/8) Epoch 3, batch 2200, loss[loss=0.1236, simple_loss=0.1947, pruned_loss=0.02622, over 4833.00 frames.], tot_loss[loss=0.1677, simple_loss=0.235, pruned_loss=0.05021, over 972176.57 frames.], batch size: 13, lr: 5.81e-04 +2022-05-04 11:29:35,107 INFO [train.py:715] (6/8) Epoch 3, batch 2250, loss[loss=0.1645, simple_loss=0.2355, pruned_loss=0.0467, over 4899.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2349, pruned_loss=0.05003, over 972841.98 frames.], batch size: 19, lr: 5.81e-04 +2022-05-04 11:30:14,524 INFO [train.py:715] (6/8) Epoch 3, batch 2300, loss[loss=0.1464, simple_loss=0.2167, pruned_loss=0.03808, over 4910.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2345, pruned_loss=0.04962, over 972255.45 frames.], batch size: 19, lr: 5.80e-04 +2022-05-04 11:30:53,582 INFO [train.py:715] (6/8) Epoch 3, batch 2350, loss[loss=0.1472, simple_loss=0.2113, pruned_loss=0.04154, over 4834.00 frames.], tot_loss[loss=0.1665, simple_loss=0.2338, pruned_loss=0.04963, over 973019.29 frames.], batch size: 30, lr: 5.80e-04 +2022-05-04 11:31:32,376 INFO [train.py:715] (6/8) Epoch 3, batch 2400, loss[loss=0.1462, simple_loss=0.216, pruned_loss=0.03822, over 4889.00 frames.], tot_loss[loss=0.166, simple_loss=0.2333, pruned_loss=0.04932, over 972179.57 frames.], batch size: 16, lr: 5.80e-04 +2022-05-04 11:32:12,611 INFO [train.py:715] (6/8) Epoch 3, batch 2450, loss[loss=0.1655, simple_loss=0.241, pruned_loss=0.04498, over 4699.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2332, pruned_loss=0.04946, over 972040.18 frames.], batch size: 15, lr: 5.80e-04 +2022-05-04 11:32:51,968 INFO [train.py:715] (6/8) Epoch 3, batch 2500, loss[loss=0.1802, simple_loss=0.2467, pruned_loss=0.05683, over 4840.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2338, pruned_loss=0.04938, over 972569.54 frames.], batch size: 27, lr: 5.80e-04 +2022-05-04 11:33:30,792 INFO [train.py:715] (6/8) Epoch 3, batch 2550, loss[loss=0.151, simple_loss=0.2306, pruned_loss=0.03564, over 4922.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2335, pruned_loss=0.04902, over 972497.76 frames.], batch size: 23, lr: 5.80e-04 +2022-05-04 11:34:11,450 INFO [train.py:715] (6/8) Epoch 3, batch 2600, loss[loss=0.1809, simple_loss=0.2487, pruned_loss=0.05654, over 4896.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2328, pruned_loss=0.04898, over 972291.77 frames.], batch size: 19, lr: 5.80e-04 +2022-05-04 11:34:51,565 INFO [train.py:715] (6/8) Epoch 3, batch 2650, loss[loss=0.1737, simple_loss=0.2385, pruned_loss=0.05444, over 4812.00 frames.], tot_loss[loss=0.1669, simple_loss=0.234, pruned_loss=0.04992, over 973281.91 frames.], batch size: 13, lr: 5.80e-04 +2022-05-04 11:35:30,759 INFO [train.py:715] (6/8) Epoch 3, batch 2700, loss[loss=0.2006, simple_loss=0.2739, pruned_loss=0.0637, over 4890.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2349, pruned_loss=0.05023, over 972678.06 frames.], batch size: 39, lr: 5.79e-04 +2022-05-04 11:36:10,255 INFO [train.py:715] (6/8) Epoch 3, batch 2750, loss[loss=0.1446, simple_loss=0.217, pruned_loss=0.03606, over 4886.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2335, pruned_loss=0.04941, 
over 973059.09 frames.], batch size: 19, lr: 5.79e-04 +2022-05-04 11:36:50,509 INFO [train.py:715] (6/8) Epoch 3, batch 2800, loss[loss=0.1538, simple_loss=0.2317, pruned_loss=0.0379, over 4747.00 frames.], tot_loss[loss=0.1655, simple_loss=0.233, pruned_loss=0.04898, over 972740.05 frames.], batch size: 16, lr: 5.79e-04 +2022-05-04 11:37:29,795 INFO [train.py:715] (6/8) Epoch 3, batch 2850, loss[loss=0.178, simple_loss=0.2226, pruned_loss=0.06668, over 4994.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2333, pruned_loss=0.049, over 973010.21 frames.], batch size: 14, lr: 5.79e-04 +2022-05-04 11:38:08,473 INFO [train.py:715] (6/8) Epoch 3, batch 2900, loss[loss=0.1241, simple_loss=0.1886, pruned_loss=0.02981, over 4839.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2331, pruned_loss=0.04913, over 972852.20 frames.], batch size: 12, lr: 5.79e-04 +2022-05-04 11:38:48,428 INFO [train.py:715] (6/8) Epoch 3, batch 2950, loss[loss=0.1762, simple_loss=0.2444, pruned_loss=0.05401, over 4786.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2333, pruned_loss=0.04864, over 972849.10 frames.], batch size: 18, lr: 5.79e-04 +2022-05-04 11:39:28,055 INFO [train.py:715] (6/8) Epoch 3, batch 3000, loss[loss=0.1815, simple_loss=0.2422, pruned_loss=0.06041, over 4820.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2337, pruned_loss=0.04872, over 972557.16 frames.], batch size: 26, lr: 5.79e-04 +2022-05-04 11:39:28,055 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 11:39:36,790 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1153, simple_loss=0.2015, pruned_loss=0.0146, over 914524.00 frames. +2022-05-04 11:40:16,888 INFO [train.py:715] (6/8) Epoch 3, batch 3050, loss[loss=0.1702, simple_loss=0.2415, pruned_loss=0.04947, over 4903.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2334, pruned_loss=0.04876, over 973951.72 frames.], batch size: 17, lr: 5.78e-04 +2022-05-04 11:40:55,671 INFO [train.py:715] (6/8) Epoch 3, batch 3100, loss[loss=0.1651, simple_loss=0.2356, pruned_loss=0.04735, over 4910.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2329, pruned_loss=0.04882, over 973825.82 frames.], batch size: 17, lr: 5.78e-04 +2022-05-04 11:41:35,059 INFO [train.py:715] (6/8) Epoch 3, batch 3150, loss[loss=0.1557, simple_loss=0.2262, pruned_loss=0.04261, over 4813.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2328, pruned_loss=0.0487, over 972852.77 frames.], batch size: 21, lr: 5.78e-04 +2022-05-04 11:42:14,859 INFO [train.py:715] (6/8) Epoch 3, batch 3200, loss[loss=0.1545, simple_loss=0.2227, pruned_loss=0.04318, over 4829.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2327, pruned_loss=0.04848, over 972431.58 frames.], batch size: 13, lr: 5.78e-04 +2022-05-04 11:42:54,658 INFO [train.py:715] (6/8) Epoch 3, batch 3250, loss[loss=0.1832, simple_loss=0.2596, pruned_loss=0.05338, over 4978.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2315, pruned_loss=0.04796, over 972673.61 frames.], batch size: 24, lr: 5.78e-04 +2022-05-04 11:43:33,197 INFO [train.py:715] (6/8) Epoch 3, batch 3300, loss[loss=0.2045, simple_loss=0.2766, pruned_loss=0.06619, over 4755.00 frames.], tot_loss[loss=0.1642, simple_loss=0.232, pruned_loss=0.04819, over 972237.83 frames.], batch size: 19, lr: 5.78e-04 +2022-05-04 11:44:13,011 INFO [train.py:715] (6/8) Epoch 3, batch 3350, loss[loss=0.1788, simple_loss=0.2522, pruned_loss=0.05264, over 4954.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2322, pruned_loss=0.04821, over 972741.51 frames.], batch size: 15, lr: 5.78e-04 +2022-05-04 11:44:52,488 INFO 
[train.py:715] (6/8) Epoch 3, batch 3400, loss[loss=0.1414, simple_loss=0.2142, pruned_loss=0.03435, over 4816.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2316, pruned_loss=0.04754, over 972958.54 frames.], batch size: 27, lr: 5.77e-04 +2022-05-04 11:45:31,176 INFO [train.py:715] (6/8) Epoch 3, batch 3450, loss[loss=0.1442, simple_loss=0.2296, pruned_loss=0.02941, over 4794.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2328, pruned_loss=0.04799, over 973107.92 frames.], batch size: 21, lr: 5.77e-04 +2022-05-04 11:46:10,509 INFO [train.py:715] (6/8) Epoch 3, batch 3500, loss[loss=0.1657, simple_loss=0.2287, pruned_loss=0.05129, over 4934.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2336, pruned_loss=0.04874, over 972858.94 frames.], batch size: 18, lr: 5.77e-04 +2022-05-04 11:46:50,813 INFO [train.py:715] (6/8) Epoch 3, batch 3550, loss[loss=0.1864, simple_loss=0.258, pruned_loss=0.05738, over 4692.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2333, pruned_loss=0.0486, over 973138.70 frames.], batch size: 15, lr: 5.77e-04 +2022-05-04 11:47:30,667 INFO [train.py:715] (6/8) Epoch 3, batch 3600, loss[loss=0.168, simple_loss=0.2437, pruned_loss=0.04616, over 4986.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2336, pruned_loss=0.04862, over 972944.59 frames.], batch size: 28, lr: 5.77e-04 +2022-05-04 11:48:09,905 INFO [train.py:715] (6/8) Epoch 3, batch 3650, loss[loss=0.1674, simple_loss=0.2337, pruned_loss=0.05057, over 4984.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2336, pruned_loss=0.04859, over 972988.24 frames.], batch size: 15, lr: 5.77e-04 +2022-05-04 11:48:49,628 INFO [train.py:715] (6/8) Epoch 3, batch 3700, loss[loss=0.1746, simple_loss=0.2402, pruned_loss=0.05451, over 4925.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2338, pruned_loss=0.0488, over 972993.34 frames.], batch size: 17, lr: 5.77e-04 +2022-05-04 11:49:29,647 INFO [train.py:715] (6/8) Epoch 3, batch 3750, loss[loss=0.1535, simple_loss=0.227, pruned_loss=0.03999, over 4813.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2334, pruned_loss=0.0485, over 972424.71 frames.], batch size: 25, lr: 5.77e-04 +2022-05-04 11:50:09,333 INFO [train.py:715] (6/8) Epoch 3, batch 3800, loss[loss=0.1255, simple_loss=0.1935, pruned_loss=0.02874, over 4842.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2335, pruned_loss=0.04898, over 972951.70 frames.], batch size: 13, lr: 5.76e-04 +2022-05-04 11:50:48,711 INFO [train.py:715] (6/8) Epoch 3, batch 3850, loss[loss=0.1925, simple_loss=0.2552, pruned_loss=0.06493, over 4834.00 frames.], tot_loss[loss=0.165, simple_loss=0.2326, pruned_loss=0.04875, over 971903.73 frames.], batch size: 15, lr: 5.76e-04 +2022-05-04 11:51:28,565 INFO [train.py:715] (6/8) Epoch 3, batch 3900, loss[loss=0.1585, simple_loss=0.2312, pruned_loss=0.04294, over 4873.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2335, pruned_loss=0.04916, over 971549.31 frames.], batch size: 22, lr: 5.76e-04 +2022-05-04 11:52:08,066 INFO [train.py:715] (6/8) Epoch 3, batch 3950, loss[loss=0.1676, simple_loss=0.2389, pruned_loss=0.04812, over 4826.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2337, pruned_loss=0.04882, over 971418.08 frames.], batch size: 25, lr: 5.76e-04 +2022-05-04 11:52:47,083 INFO [train.py:715] (6/8) Epoch 3, batch 4000, loss[loss=0.1648, simple_loss=0.2435, pruned_loss=0.04305, over 4762.00 frames.], tot_loss[loss=0.1662, simple_loss=0.234, pruned_loss=0.04923, over 971193.20 frames.], batch size: 18, lr: 5.76e-04 +2022-05-04 11:53:26,530 INFO [train.py:715] (6/8) Epoch 3, batch 
4050, loss[loss=0.1426, simple_loss=0.2142, pruned_loss=0.03548, over 4794.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2339, pruned_loss=0.04898, over 970431.36 frames.], batch size: 21, lr: 5.76e-04 +2022-05-04 11:54:06,706 INFO [train.py:715] (6/8) Epoch 3, batch 4100, loss[loss=0.1948, simple_loss=0.2609, pruned_loss=0.06436, over 4870.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2341, pruned_loss=0.04924, over 970736.94 frames.], batch size: 39, lr: 5.76e-04 +2022-05-04 11:54:45,659 INFO [train.py:715] (6/8) Epoch 3, batch 4150, loss[loss=0.1648, simple_loss=0.23, pruned_loss=0.04984, over 4839.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2334, pruned_loss=0.0486, over 971018.49 frames.], batch size: 30, lr: 5.76e-04 +2022-05-04 11:55:24,492 INFO [train.py:715] (6/8) Epoch 3, batch 4200, loss[loss=0.2021, simple_loss=0.2667, pruned_loss=0.06871, over 4873.00 frames.], tot_loss[loss=0.165, simple_loss=0.2329, pruned_loss=0.04848, over 971308.39 frames.], batch size: 16, lr: 5.75e-04 +2022-05-04 11:56:04,946 INFO [train.py:715] (6/8) Epoch 3, batch 4250, loss[loss=0.1256, simple_loss=0.1991, pruned_loss=0.02609, over 4786.00 frames.], tot_loss[loss=0.165, simple_loss=0.2333, pruned_loss=0.04837, over 971812.30 frames.], batch size: 21, lr: 5.75e-04 +2022-05-04 11:56:44,321 INFO [train.py:715] (6/8) Epoch 3, batch 4300, loss[loss=0.1697, simple_loss=0.2365, pruned_loss=0.0514, over 4855.00 frames.], tot_loss[loss=0.1657, simple_loss=0.234, pruned_loss=0.04869, over 972527.69 frames.], batch size: 20, lr: 5.75e-04 +2022-05-04 11:57:23,799 INFO [train.py:715] (6/8) Epoch 3, batch 4350, loss[loss=0.1588, simple_loss=0.2236, pruned_loss=0.04702, over 4989.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2341, pruned_loss=0.04889, over 972585.55 frames.], batch size: 31, lr: 5.75e-04 +2022-05-04 11:58:03,484 INFO [train.py:715] (6/8) Epoch 3, batch 4400, loss[loss=0.1593, simple_loss=0.228, pruned_loss=0.04528, over 4818.00 frames.], tot_loss[loss=0.1657, simple_loss=0.234, pruned_loss=0.04869, over 972383.04 frames.], batch size: 25, lr: 5.75e-04 +2022-05-04 11:58:43,524 INFO [train.py:715] (6/8) Epoch 3, batch 4450, loss[loss=0.1485, simple_loss=0.2244, pruned_loss=0.03631, over 4928.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2329, pruned_loss=0.04839, over 972995.14 frames.], batch size: 29, lr: 5.75e-04 +2022-05-04 11:59:22,568 INFO [train.py:715] (6/8) Epoch 3, batch 4500, loss[loss=0.1738, simple_loss=0.2391, pruned_loss=0.05422, over 4693.00 frames.], tot_loss[loss=0.166, simple_loss=0.234, pruned_loss=0.04893, over 973288.48 frames.], batch size: 15, lr: 5.75e-04 +2022-05-04 12:00:01,995 INFO [train.py:715] (6/8) Epoch 3, batch 4550, loss[loss=0.1355, simple_loss=0.2082, pruned_loss=0.0314, over 4768.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2341, pruned_loss=0.04855, over 973169.90 frames.], batch size: 18, lr: 5.74e-04 +2022-05-04 12:00:41,749 INFO [train.py:715] (6/8) Epoch 3, batch 4600, loss[loss=0.1742, simple_loss=0.241, pruned_loss=0.05373, over 4969.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2339, pruned_loss=0.04852, over 972408.07 frames.], batch size: 15, lr: 5.74e-04 +2022-05-04 12:01:21,005 INFO [train.py:715] (6/8) Epoch 3, batch 4650, loss[loss=0.1533, simple_loss=0.2247, pruned_loss=0.04101, over 4885.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2325, pruned_loss=0.04783, over 971673.44 frames.], batch size: 22, lr: 5.74e-04 +2022-05-04 12:01:59,935 INFO [train.py:715] (6/8) Epoch 3, batch 4700, loss[loss=0.1334, simple_loss=0.2022, 
pruned_loss=0.03228, over 4829.00 frames.], tot_loss[loss=0.164, simple_loss=0.2321, pruned_loss=0.04799, over 971904.64 frames.], batch size: 13, lr: 5.74e-04 +2022-05-04 12:02:39,138 INFO [train.py:715] (6/8) Epoch 3, batch 4750, loss[loss=0.1746, simple_loss=0.2448, pruned_loss=0.05219, over 4834.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2317, pruned_loss=0.04792, over 972060.85 frames.], batch size: 30, lr: 5.74e-04 +2022-05-04 12:03:18,742 INFO [train.py:715] (6/8) Epoch 3, batch 4800, loss[loss=0.1477, simple_loss=0.2229, pruned_loss=0.03618, over 4794.00 frames.], tot_loss[loss=0.1642, simple_loss=0.232, pruned_loss=0.04821, over 972148.97 frames.], batch size: 14, lr: 5.74e-04 +2022-05-04 12:03:58,126 INFO [train.py:715] (6/8) Epoch 3, batch 4850, loss[loss=0.1528, simple_loss=0.2234, pruned_loss=0.0411, over 4753.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2315, pruned_loss=0.04782, over 972435.49 frames.], batch size: 19, lr: 5.74e-04 +2022-05-04 12:04:36,948 INFO [train.py:715] (6/8) Epoch 3, batch 4900, loss[loss=0.1569, simple_loss=0.2155, pruned_loss=0.04921, over 4846.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2313, pruned_loss=0.04793, over 972347.25 frames.], batch size: 30, lr: 5.74e-04 +2022-05-04 12:05:16,866 INFO [train.py:715] (6/8) Epoch 3, batch 4950, loss[loss=0.1843, simple_loss=0.2518, pruned_loss=0.05841, over 4874.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2313, pruned_loss=0.04805, over 972618.69 frames.], batch size: 16, lr: 5.73e-04 +2022-05-04 12:05:56,317 INFO [train.py:715] (6/8) Epoch 3, batch 5000, loss[loss=0.1636, simple_loss=0.2392, pruned_loss=0.044, over 4783.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2315, pruned_loss=0.04805, over 972387.34 frames.], batch size: 18, lr: 5.73e-04 +2022-05-04 12:06:35,123 INFO [train.py:715] (6/8) Epoch 3, batch 5050, loss[loss=0.1696, simple_loss=0.2404, pruned_loss=0.04942, over 4935.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2315, pruned_loss=0.04837, over 972175.74 frames.], batch size: 29, lr: 5.73e-04 +2022-05-04 12:07:14,489 INFO [train.py:715] (6/8) Epoch 3, batch 5100, loss[loss=0.1649, simple_loss=0.2451, pruned_loss=0.04237, over 4828.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2323, pruned_loss=0.04865, over 972578.85 frames.], batch size: 15, lr: 5.73e-04 +2022-05-04 12:07:54,250 INFO [train.py:715] (6/8) Epoch 3, batch 5150, loss[loss=0.1759, simple_loss=0.2397, pruned_loss=0.05601, over 4938.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2319, pruned_loss=0.04856, over 971926.02 frames.], batch size: 29, lr: 5.73e-04 +2022-05-04 12:08:32,993 INFO [train.py:715] (6/8) Epoch 3, batch 5200, loss[loss=0.1847, simple_loss=0.2459, pruned_loss=0.06182, over 4705.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2329, pruned_loss=0.04923, over 971193.96 frames.], batch size: 15, lr: 5.73e-04 +2022-05-04 12:09:12,110 INFO [train.py:715] (6/8) Epoch 3, batch 5250, loss[loss=0.1912, simple_loss=0.2577, pruned_loss=0.06238, over 4880.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2333, pruned_loss=0.04954, over 971384.61 frames.], batch size: 22, lr: 5.73e-04 +2022-05-04 12:09:52,199 INFO [train.py:715] (6/8) Epoch 3, batch 5300, loss[loss=0.1402, simple_loss=0.2166, pruned_loss=0.03189, over 4833.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2321, pruned_loss=0.04876, over 971224.27 frames.], batch size: 13, lr: 5.72e-04 +2022-05-04 12:10:31,380 INFO [train.py:715] (6/8) Epoch 3, batch 5350, loss[loss=0.1568, simple_loss=0.2245, pruned_loss=0.04457, over 4935.00 
frames.], tot_loss[loss=0.1651, simple_loss=0.2327, pruned_loss=0.04873, over 972691.42 frames.], batch size: 23, lr: 5.72e-04 +2022-05-04 12:11:10,308 INFO [train.py:715] (6/8) Epoch 3, batch 5400, loss[loss=0.1749, simple_loss=0.2534, pruned_loss=0.0482, over 4820.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2332, pruned_loss=0.04896, over 972053.80 frames.], batch size: 13, lr: 5.72e-04 +2022-05-04 12:11:49,951 INFO [train.py:715] (6/8) Epoch 3, batch 5450, loss[loss=0.1586, simple_loss=0.2365, pruned_loss=0.04037, over 4818.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2335, pruned_loss=0.04906, over 970904.02 frames.], batch size: 15, lr: 5.72e-04 +2022-05-04 12:12:30,204 INFO [train.py:715] (6/8) Epoch 3, batch 5500, loss[loss=0.1495, simple_loss=0.2254, pruned_loss=0.03674, over 4845.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2335, pruned_loss=0.04936, over 970765.97 frames.], batch size: 20, lr: 5.72e-04 +2022-05-04 12:13:09,482 INFO [train.py:715] (6/8) Epoch 3, batch 5550, loss[loss=0.1407, simple_loss=0.1957, pruned_loss=0.04289, over 4842.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2333, pruned_loss=0.04947, over 971249.60 frames.], batch size: 12, lr: 5.72e-04 +2022-05-04 12:13:49,882 INFO [train.py:715] (6/8) Epoch 3, batch 5600, loss[loss=0.1958, simple_loss=0.2657, pruned_loss=0.063, over 4887.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2326, pruned_loss=0.0492, over 971468.07 frames.], batch size: 22, lr: 5.72e-04 +2022-05-04 12:14:29,652 INFO [train.py:715] (6/8) Epoch 3, batch 5650, loss[loss=0.1735, simple_loss=0.245, pruned_loss=0.05098, over 4816.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2321, pruned_loss=0.0484, over 971781.53 frames.], batch size: 27, lr: 5.72e-04 +2022-05-04 12:15:08,738 INFO [train.py:715] (6/8) Epoch 3, batch 5700, loss[loss=0.118, simple_loss=0.1828, pruned_loss=0.02662, over 4756.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2311, pruned_loss=0.04791, over 972274.52 frames.], batch size: 19, lr: 5.71e-04 +2022-05-04 12:15:48,075 INFO [train.py:715] (6/8) Epoch 3, batch 5750, loss[loss=0.186, simple_loss=0.2547, pruned_loss=0.0586, over 4845.00 frames.], tot_loss[loss=0.165, simple_loss=0.2323, pruned_loss=0.04878, over 972433.43 frames.], batch size: 20, lr: 5.71e-04 +2022-05-04 12:16:27,889 INFO [train.py:715] (6/8) Epoch 3, batch 5800, loss[loss=0.1721, simple_loss=0.2269, pruned_loss=0.05865, over 4785.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2312, pruned_loss=0.04813, over 972972.18 frames.], batch size: 17, lr: 5.71e-04 +2022-05-04 12:17:07,630 INFO [train.py:715] (6/8) Epoch 3, batch 5850, loss[loss=0.1648, simple_loss=0.2402, pruned_loss=0.04469, over 4757.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2312, pruned_loss=0.04817, over 971359.63 frames.], batch size: 19, lr: 5.71e-04 +2022-05-04 12:17:46,988 INFO [train.py:715] (6/8) Epoch 3, batch 5900, loss[loss=0.1761, simple_loss=0.242, pruned_loss=0.05511, over 4821.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2313, pruned_loss=0.04807, over 970855.20 frames.], batch size: 26, lr: 5.71e-04 +2022-05-04 12:18:26,965 INFO [train.py:715] (6/8) Epoch 3, batch 5950, loss[loss=0.1199, simple_loss=0.1974, pruned_loss=0.02115, over 4815.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2307, pruned_loss=0.04782, over 970591.08 frames.], batch size: 13, lr: 5.71e-04 +2022-05-04 12:19:06,648 INFO [train.py:715] (6/8) Epoch 3, batch 6000, loss[loss=0.165, simple_loss=0.2246, pruned_loss=0.05273, over 4820.00 frames.], tot_loss[loss=0.1623, 
simple_loss=0.2298, pruned_loss=0.04738, over 971406.18 frames.], batch size: 15, lr: 5.71e-04 +2022-05-04 12:19:06,648 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 12:19:15,396 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1149, simple_loss=0.2013, pruned_loss=0.01424, over 914524.00 frames. +2022-05-04 12:19:55,211 INFO [train.py:715] (6/8) Epoch 3, batch 6050, loss[loss=0.1558, simple_loss=0.2385, pruned_loss=0.03652, over 4878.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2302, pruned_loss=0.04721, over 971131.93 frames.], batch size: 16, lr: 5.71e-04 +2022-05-04 12:20:34,637 INFO [train.py:715] (6/8) Epoch 3, batch 6100, loss[loss=0.2155, simple_loss=0.2743, pruned_loss=0.07837, over 4980.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2311, pruned_loss=0.04809, over 970626.73 frames.], batch size: 14, lr: 5.70e-04 +2022-05-04 12:21:13,564 INFO [train.py:715] (6/8) Epoch 3, batch 6150, loss[loss=0.156, simple_loss=0.2204, pruned_loss=0.0458, over 4817.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2325, pruned_loss=0.04852, over 970332.92 frames.], batch size: 25, lr: 5.70e-04 +2022-05-04 12:21:53,162 INFO [train.py:715] (6/8) Epoch 3, batch 6200, loss[loss=0.1351, simple_loss=0.2036, pruned_loss=0.03336, over 4803.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2329, pruned_loss=0.04863, over 970547.40 frames.], batch size: 12, lr: 5.70e-04 +2022-05-04 12:22:33,155 INFO [train.py:715] (6/8) Epoch 3, batch 6250, loss[loss=0.1298, simple_loss=0.2017, pruned_loss=0.02898, over 4990.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2323, pruned_loss=0.04835, over 971187.54 frames.], batch size: 28, lr: 5.70e-04 +2022-05-04 12:23:12,507 INFO [train.py:715] (6/8) Epoch 3, batch 6300, loss[loss=0.173, simple_loss=0.2372, pruned_loss=0.0544, over 4909.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2325, pruned_loss=0.04838, over 971223.83 frames.], batch size: 39, lr: 5.70e-04 +2022-05-04 12:23:51,736 INFO [train.py:715] (6/8) Epoch 3, batch 6350, loss[loss=0.1468, simple_loss=0.221, pruned_loss=0.03631, over 4776.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2327, pruned_loss=0.04875, over 972167.37 frames.], batch size: 18, lr: 5.70e-04 +2022-05-04 12:24:31,950 INFO [train.py:715] (6/8) Epoch 3, batch 6400, loss[loss=0.1627, simple_loss=0.2452, pruned_loss=0.04012, over 4894.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2315, pruned_loss=0.04818, over 971712.46 frames.], batch size: 19, lr: 5.70e-04 +2022-05-04 12:25:11,505 INFO [train.py:715] (6/8) Epoch 3, batch 6450, loss[loss=0.2083, simple_loss=0.2822, pruned_loss=0.06722, over 4690.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2333, pruned_loss=0.04886, over 971842.74 frames.], batch size: 15, lr: 5.70e-04 +2022-05-04 12:25:50,484 INFO [train.py:715] (6/8) Epoch 3, batch 6500, loss[loss=0.1621, simple_loss=0.2217, pruned_loss=0.05128, over 4805.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2322, pruned_loss=0.04834, over 972320.83 frames.], batch size: 12, lr: 5.69e-04 +2022-05-04 12:26:30,137 INFO [train.py:715] (6/8) Epoch 3, batch 6550, loss[loss=0.1677, simple_loss=0.2357, pruned_loss=0.04986, over 4849.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2334, pruned_loss=0.04912, over 972099.81 frames.], batch size: 30, lr: 5.69e-04 +2022-05-04 12:27:09,933 INFO [train.py:715] (6/8) Epoch 3, batch 6600, loss[loss=0.1503, simple_loss=0.2104, pruned_loss=0.04509, over 4859.00 frames.], tot_loss[loss=0.165, simple_loss=0.2326, pruned_loss=0.04867, over 971983.67 frames.], batch size: 32, 
lr: 5.69e-04 +2022-05-04 12:27:49,189 INFO [train.py:715] (6/8) Epoch 3, batch 6650, loss[loss=0.1697, simple_loss=0.2415, pruned_loss=0.04895, over 4876.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2327, pruned_loss=0.04883, over 972164.59 frames.], batch size: 22, lr: 5.69e-04 +2022-05-04 12:28:28,360 INFO [train.py:715] (6/8) Epoch 3, batch 6700, loss[loss=0.1606, simple_loss=0.2335, pruned_loss=0.04388, over 4896.00 frames.], tot_loss[loss=0.1655, simple_loss=0.233, pruned_loss=0.04906, over 972193.02 frames.], batch size: 19, lr: 5.69e-04 +2022-05-04 12:29:08,705 INFO [train.py:715] (6/8) Epoch 3, batch 6750, loss[loss=0.2027, simple_loss=0.261, pruned_loss=0.0722, over 4710.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2333, pruned_loss=0.04941, over 972490.93 frames.], batch size: 15, lr: 5.69e-04 +2022-05-04 12:29:47,746 INFO [train.py:715] (6/8) Epoch 3, batch 6800, loss[loss=0.1819, simple_loss=0.2451, pruned_loss=0.05933, over 4939.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2336, pruned_loss=0.04942, over 973210.65 frames.], batch size: 29, lr: 5.69e-04 +2022-05-04 12:30:27,120 INFO [train.py:715] (6/8) Epoch 3, batch 6850, loss[loss=0.1681, simple_loss=0.2309, pruned_loss=0.05264, over 4856.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2328, pruned_loss=0.04842, over 973289.00 frames.], batch size: 30, lr: 5.68e-04 +2022-05-04 12:31:06,822 INFO [train.py:715] (6/8) Epoch 3, batch 6900, loss[loss=0.1694, simple_loss=0.2344, pruned_loss=0.05222, over 4687.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2327, pruned_loss=0.04844, over 972539.12 frames.], batch size: 15, lr: 5.68e-04 +2022-05-04 12:31:46,655 INFO [train.py:715] (6/8) Epoch 3, batch 6950, loss[loss=0.166, simple_loss=0.2293, pruned_loss=0.05138, over 4799.00 frames.], tot_loss[loss=0.1652, simple_loss=0.233, pruned_loss=0.04869, over 971605.96 frames.], batch size: 25, lr: 5.68e-04 +2022-05-04 12:32:25,811 INFO [train.py:715] (6/8) Epoch 3, batch 7000, loss[loss=0.1545, simple_loss=0.2213, pruned_loss=0.04388, over 4847.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2335, pruned_loss=0.04874, over 972408.19 frames.], batch size: 30, lr: 5.68e-04 +2022-05-04 12:33:05,833 INFO [train.py:715] (6/8) Epoch 3, batch 7050, loss[loss=0.1672, simple_loss=0.2398, pruned_loss=0.04725, over 4907.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2327, pruned_loss=0.04809, over 971430.33 frames.], batch size: 19, lr: 5.68e-04 +2022-05-04 12:33:45,724 INFO [train.py:715] (6/8) Epoch 3, batch 7100, loss[loss=0.1672, simple_loss=0.2341, pruned_loss=0.05013, over 4863.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2327, pruned_loss=0.04817, over 972051.14 frames.], batch size: 20, lr: 5.68e-04 +2022-05-04 12:34:24,808 INFO [train.py:715] (6/8) Epoch 3, batch 7150, loss[loss=0.1643, simple_loss=0.2375, pruned_loss=0.04561, over 4776.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2324, pruned_loss=0.04789, over 971218.74 frames.], batch size: 14, lr: 5.68e-04 +2022-05-04 12:35:04,380 INFO [train.py:715] (6/8) Epoch 3, batch 7200, loss[loss=0.1409, simple_loss=0.2115, pruned_loss=0.03517, over 4917.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2319, pruned_loss=0.04786, over 971059.15 frames.], batch size: 18, lr: 5.68e-04 +2022-05-04 12:35:44,152 INFO [train.py:715] (6/8) Epoch 3, batch 7250, loss[loss=0.1875, simple_loss=0.2431, pruned_loss=0.06591, over 4914.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2322, pruned_loss=0.048, over 970722.36 frames.], batch size: 17, lr: 5.67e-04 +2022-05-04 12:36:23,546 
INFO [train.py:715] (6/8) Epoch 3, batch 7300, loss[loss=0.185, simple_loss=0.2491, pruned_loss=0.06047, over 4968.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2327, pruned_loss=0.04834, over 970867.76 frames.], batch size: 33, lr: 5.67e-04 +2022-05-04 12:37:03,016 INFO [train.py:715] (6/8) Epoch 3, batch 7350, loss[loss=0.1806, simple_loss=0.2351, pruned_loss=0.06304, over 4911.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2335, pruned_loss=0.0488, over 970875.32 frames.], batch size: 17, lr: 5.67e-04 +2022-05-04 12:37:42,379 INFO [train.py:715] (6/8) Epoch 3, batch 7400, loss[loss=0.1613, simple_loss=0.225, pruned_loss=0.04876, over 4952.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2337, pruned_loss=0.04882, over 971193.76 frames.], batch size: 21, lr: 5.67e-04 +2022-05-04 12:38:22,636 INFO [train.py:715] (6/8) Epoch 3, batch 7450, loss[loss=0.1701, simple_loss=0.2347, pruned_loss=0.05275, over 4773.00 frames.], tot_loss[loss=0.1658, simple_loss=0.234, pruned_loss=0.04885, over 971064.12 frames.], batch size: 17, lr: 5.67e-04 +2022-05-04 12:39:01,782 INFO [train.py:715] (6/8) Epoch 3, batch 7500, loss[loss=0.1924, simple_loss=0.2628, pruned_loss=0.06103, over 4954.00 frames.], tot_loss[loss=0.1664, simple_loss=0.2346, pruned_loss=0.04912, over 971127.77 frames.], batch size: 35, lr: 5.67e-04 +2022-05-04 12:39:41,047 INFO [train.py:715] (6/8) Epoch 3, batch 7550, loss[loss=0.1773, simple_loss=0.2457, pruned_loss=0.05441, over 4789.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2342, pruned_loss=0.049, over 970898.60 frames.], batch size: 18, lr: 5.67e-04 +2022-05-04 12:40:22,800 INFO [train.py:715] (6/8) Epoch 3, batch 7600, loss[loss=0.2303, simple_loss=0.2789, pruned_loss=0.09085, over 4870.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2339, pruned_loss=0.04882, over 970548.28 frames.], batch size: 16, lr: 5.67e-04 +2022-05-04 12:41:02,153 INFO [train.py:715] (6/8) Epoch 3, batch 7650, loss[loss=0.1842, simple_loss=0.25, pruned_loss=0.05919, over 4904.00 frames.], tot_loss[loss=0.1661, simple_loss=0.234, pruned_loss=0.04912, over 971170.90 frames.], batch size: 17, lr: 5.66e-04 +2022-05-04 12:41:41,417 INFO [train.py:715] (6/8) Epoch 3, batch 7700, loss[loss=0.162, simple_loss=0.238, pruned_loss=0.04303, over 4817.00 frames.], tot_loss[loss=0.166, simple_loss=0.2339, pruned_loss=0.04903, over 971475.08 frames.], batch size: 25, lr: 5.66e-04 +2022-05-04 12:42:20,885 INFO [train.py:715] (6/8) Epoch 3, batch 7750, loss[loss=0.1757, simple_loss=0.2444, pruned_loss=0.0535, over 4798.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2337, pruned_loss=0.04871, over 971891.22 frames.], batch size: 24, lr: 5.66e-04 +2022-05-04 12:43:00,218 INFO [train.py:715] (6/8) Epoch 3, batch 7800, loss[loss=0.1595, simple_loss=0.2319, pruned_loss=0.04358, over 4783.00 frames.], tot_loss[loss=0.1643, simple_loss=0.233, pruned_loss=0.04779, over 971726.00 frames.], batch size: 18, lr: 5.66e-04 +2022-05-04 12:43:38,790 INFO [train.py:715] (6/8) Epoch 3, batch 7850, loss[loss=0.1557, simple_loss=0.2205, pruned_loss=0.04544, over 4835.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2322, pruned_loss=0.04731, over 972263.25 frames.], batch size: 13, lr: 5.66e-04 +2022-05-04 12:44:18,371 INFO [train.py:715] (6/8) Epoch 3, batch 7900, loss[loss=0.1741, simple_loss=0.2385, pruned_loss=0.05483, over 4935.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2321, pruned_loss=0.04747, over 972752.08 frames.], batch size: 23, lr: 5.66e-04 +2022-05-04 12:44:58,146 INFO [train.py:715] (6/8) Epoch 3, batch 
7950, loss[loss=0.1547, simple_loss=0.2247, pruned_loss=0.0423, over 4795.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2326, pruned_loss=0.04838, over 972179.24 frames.], batch size: 21, lr: 5.66e-04 +2022-05-04 12:45:36,728 INFO [train.py:715] (6/8) Epoch 3, batch 8000, loss[loss=0.1443, simple_loss=0.2168, pruned_loss=0.03596, over 4921.00 frames.], tot_loss[loss=0.1638, simple_loss=0.232, pruned_loss=0.0478, over 971896.41 frames.], batch size: 23, lr: 5.66e-04 +2022-05-04 12:46:14,904 INFO [train.py:715] (6/8) Epoch 3, batch 8050, loss[loss=0.1443, simple_loss=0.22, pruned_loss=0.03427, over 4871.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2325, pruned_loss=0.04829, over 971746.30 frames.], batch size: 30, lr: 5.65e-04 +2022-05-04 12:46:53,636 INFO [train.py:715] (6/8) Epoch 3, batch 8100, loss[loss=0.1253, simple_loss=0.1896, pruned_loss=0.03051, over 4756.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2316, pruned_loss=0.04788, over 972040.11 frames.], batch size: 12, lr: 5.65e-04 +2022-05-04 12:47:31,942 INFO [train.py:715] (6/8) Epoch 3, batch 8150, loss[loss=0.1603, simple_loss=0.2279, pruned_loss=0.04639, over 4745.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2321, pruned_loss=0.04824, over 972364.12 frames.], batch size: 16, lr: 5.65e-04 +2022-05-04 12:48:10,085 INFO [train.py:715] (6/8) Epoch 3, batch 8200, loss[loss=0.1888, simple_loss=0.2378, pruned_loss=0.06987, over 4835.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2328, pruned_loss=0.04826, over 972205.36 frames.], batch size: 30, lr: 5.65e-04 +2022-05-04 12:48:49,887 INFO [train.py:715] (6/8) Epoch 3, batch 8250, loss[loss=0.1621, simple_loss=0.2346, pruned_loss=0.04483, over 4842.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2333, pruned_loss=0.04853, over 972210.06 frames.], batch size: 15, lr: 5.65e-04 +2022-05-04 12:49:30,617 INFO [train.py:715] (6/8) Epoch 3, batch 8300, loss[loss=0.1422, simple_loss=0.2155, pruned_loss=0.0345, over 4760.00 frames.], tot_loss[loss=0.165, simple_loss=0.2329, pruned_loss=0.0485, over 972047.32 frames.], batch size: 19, lr: 5.65e-04 +2022-05-04 12:50:10,666 INFO [train.py:715] (6/8) Epoch 3, batch 8350, loss[loss=0.161, simple_loss=0.2302, pruned_loss=0.04586, over 4984.00 frames.], tot_loss[loss=0.164, simple_loss=0.2318, pruned_loss=0.04812, over 971913.56 frames.], batch size: 15, lr: 5.65e-04 +2022-05-04 12:50:50,662 INFO [train.py:715] (6/8) Epoch 3, batch 8400, loss[loss=0.1528, simple_loss=0.2191, pruned_loss=0.04329, over 4821.00 frames.], tot_loss[loss=0.1647, simple_loss=0.232, pruned_loss=0.04871, over 972663.81 frames.], batch size: 13, lr: 5.65e-04 +2022-05-04 12:51:30,649 INFO [train.py:715] (6/8) Epoch 3, batch 8450, loss[loss=0.1592, simple_loss=0.232, pruned_loss=0.0432, over 4831.00 frames.], tot_loss[loss=0.165, simple_loss=0.2325, pruned_loss=0.04872, over 972415.43 frames.], batch size: 26, lr: 5.64e-04 +2022-05-04 12:52:10,872 INFO [train.py:715] (6/8) Epoch 3, batch 8500, loss[loss=0.153, simple_loss=0.22, pruned_loss=0.04305, over 4964.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2311, pruned_loss=0.04801, over 971740.82 frames.], batch size: 35, lr: 5.64e-04 +2022-05-04 12:52:49,927 INFO [train.py:715] (6/8) Epoch 3, batch 8550, loss[loss=0.1392, simple_loss=0.2123, pruned_loss=0.03304, over 4850.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2312, pruned_loss=0.04803, over 972570.30 frames.], batch size: 30, lr: 5.64e-04 +2022-05-04 12:53:31,565 INFO [train.py:715] (6/8) Epoch 3, batch 8600, loss[loss=0.147, simple_loss=0.221, 
pruned_loss=0.03656, over 4822.00 frames.], tot_loss[loss=0.163, simple_loss=0.231, pruned_loss=0.04748, over 971874.92 frames.], batch size: 25, lr: 5.64e-04 +2022-05-04 12:54:13,122 INFO [train.py:715] (6/8) Epoch 3, batch 8650, loss[loss=0.1648, simple_loss=0.2251, pruned_loss=0.05228, over 4801.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2312, pruned_loss=0.04735, over 971795.10 frames.], batch size: 18, lr: 5.64e-04 +2022-05-04 12:54:53,241 INFO [train.py:715] (6/8) Epoch 3, batch 8700, loss[loss=0.1913, simple_loss=0.268, pruned_loss=0.05731, over 4963.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2323, pruned_loss=0.04824, over 972121.76 frames.], batch size: 24, lr: 5.64e-04 +2022-05-04 12:55:34,488 INFO [train.py:715] (6/8) Epoch 3, batch 8750, loss[loss=0.1568, simple_loss=0.2221, pruned_loss=0.04577, over 4968.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2323, pruned_loss=0.04838, over 972341.10 frames.], batch size: 24, lr: 5.64e-04 +2022-05-04 12:56:14,900 INFO [train.py:715] (6/8) Epoch 3, batch 8800, loss[loss=0.1347, simple_loss=0.2073, pruned_loss=0.03103, over 4789.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2321, pruned_loss=0.04834, over 972334.39 frames.], batch size: 12, lr: 5.64e-04 +2022-05-04 12:56:55,636 INFO [train.py:715] (6/8) Epoch 3, batch 8850, loss[loss=0.1637, simple_loss=0.2328, pruned_loss=0.04732, over 4848.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2331, pruned_loss=0.04871, over 971899.37 frames.], batch size: 20, lr: 5.63e-04 +2022-05-04 12:57:35,612 INFO [train.py:715] (6/8) Epoch 3, batch 8900, loss[loss=0.147, simple_loss=0.2077, pruned_loss=0.04319, over 4732.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2324, pruned_loss=0.04802, over 971210.69 frames.], batch size: 16, lr: 5.63e-04 +2022-05-04 12:58:17,384 INFO [train.py:715] (6/8) Epoch 3, batch 8950, loss[loss=0.1608, simple_loss=0.2235, pruned_loss=0.04907, over 4872.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2325, pruned_loss=0.04859, over 971558.95 frames.], batch size: 13, lr: 5.63e-04 +2022-05-04 12:58:59,318 INFO [train.py:715] (6/8) Epoch 3, batch 9000, loss[loss=0.1372, simple_loss=0.2194, pruned_loss=0.02751, over 4921.00 frames.], tot_loss[loss=0.163, simple_loss=0.2312, pruned_loss=0.0474, over 972700.35 frames.], batch size: 23, lr: 5.63e-04 +2022-05-04 12:58:59,319 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 12:59:08,109 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1147, simple_loss=0.2006, pruned_loss=0.01442, over 914524.00 frames. 
+2022-05-04 12:59:49,680 INFO [train.py:715] (6/8) Epoch 3, batch 9050, loss[loss=0.1622, simple_loss=0.2277, pruned_loss=0.04839, over 4897.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2315, pruned_loss=0.04789, over 972529.01 frames.], batch size: 19, lr: 5.63e-04 +2022-05-04 13:00:30,624 INFO [train.py:715] (6/8) Epoch 3, batch 9100, loss[loss=0.1949, simple_loss=0.2757, pruned_loss=0.05711, over 4902.00 frames.], tot_loss[loss=0.1627, simple_loss=0.231, pruned_loss=0.04725, over 972191.38 frames.], batch size: 17, lr: 5.63e-04 +2022-05-04 13:01:11,944 INFO [train.py:715] (6/8) Epoch 3, batch 9150, loss[loss=0.1461, simple_loss=0.2149, pruned_loss=0.0386, over 4752.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2309, pruned_loss=0.04714, over 972613.90 frames.], batch size: 12, lr: 5.63e-04 +2022-05-04 13:01:53,288 INFO [train.py:715] (6/8) Epoch 3, batch 9200, loss[loss=0.1458, simple_loss=0.215, pruned_loss=0.03836, over 4745.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2305, pruned_loss=0.04692, over 972523.83 frames.], batch size: 19, lr: 5.63e-04 +2022-05-04 13:02:34,665 INFO [train.py:715] (6/8) Epoch 3, batch 9250, loss[loss=0.1382, simple_loss=0.2109, pruned_loss=0.0328, over 4804.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2309, pruned_loss=0.04715, over 972820.68 frames.], batch size: 21, lr: 5.62e-04 +2022-05-04 13:03:15,391 INFO [train.py:715] (6/8) Epoch 3, batch 9300, loss[loss=0.1617, simple_loss=0.2178, pruned_loss=0.05282, over 4804.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2312, pruned_loss=0.04728, over 971843.82 frames.], batch size: 12, lr: 5.62e-04 +2022-05-04 13:03:56,647 INFO [train.py:715] (6/8) Epoch 3, batch 9350, loss[loss=0.1573, simple_loss=0.2259, pruned_loss=0.0444, over 4851.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2307, pruned_loss=0.04723, over 971963.70 frames.], batch size: 13, lr: 5.62e-04 +2022-05-04 13:04:38,912 INFO [train.py:715] (6/8) Epoch 3, batch 9400, loss[loss=0.1829, simple_loss=0.2469, pruned_loss=0.05944, over 4891.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2304, pruned_loss=0.04694, over 972640.37 frames.], batch size: 22, lr: 5.62e-04 +2022-05-04 13:05:19,295 INFO [train.py:715] (6/8) Epoch 3, batch 9450, loss[loss=0.1573, simple_loss=0.2265, pruned_loss=0.04406, over 4856.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2307, pruned_loss=0.04712, over 972556.43 frames.], batch size: 20, lr: 5.62e-04 +2022-05-04 13:06:00,824 INFO [train.py:715] (6/8) Epoch 3, batch 9500, loss[loss=0.2084, simple_loss=0.2574, pruned_loss=0.07969, over 4965.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2311, pruned_loss=0.04734, over 971968.76 frames.], batch size: 39, lr: 5.62e-04 +2022-05-04 13:06:42,708 INFO [train.py:715] (6/8) Epoch 3, batch 9550, loss[loss=0.1534, simple_loss=0.2306, pruned_loss=0.0381, over 4971.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2314, pruned_loss=0.04754, over 972440.61 frames.], batch size: 15, lr: 5.62e-04 +2022-05-04 13:07:24,286 INFO [train.py:715] (6/8) Epoch 3, batch 9600, loss[loss=0.1641, simple_loss=0.2362, pruned_loss=0.04599, over 4967.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2315, pruned_loss=0.048, over 972136.85 frames.], batch size: 24, lr: 5.62e-04 +2022-05-04 13:08:05,434 INFO [train.py:715] (6/8) Epoch 3, batch 9650, loss[loss=0.2011, simple_loss=0.2656, pruned_loss=0.06831, over 4974.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2321, pruned_loss=0.04841, over 973048.50 frames.], batch size: 24, lr: 5.61e-04 +2022-05-04 13:08:46,935 INFO 
[train.py:715] (6/8) Epoch 3, batch 9700, loss[loss=0.1571, simple_loss=0.2313, pruned_loss=0.04143, over 4874.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2323, pruned_loss=0.04839, over 972938.21 frames.], batch size: 22, lr: 5.61e-04 +2022-05-04 13:09:27,936 INFO [train.py:715] (6/8) Epoch 3, batch 9750, loss[loss=0.2152, simple_loss=0.25, pruned_loss=0.09023, over 4835.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2322, pruned_loss=0.04855, over 972350.24 frames.], batch size: 13, lr: 5.61e-04 +2022-05-04 13:10:08,827 INFO [train.py:715] (6/8) Epoch 3, batch 9800, loss[loss=0.1663, simple_loss=0.2393, pruned_loss=0.04666, over 4753.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2328, pruned_loss=0.04873, over 972487.79 frames.], batch size: 19, lr: 5.61e-04 +2022-05-04 13:10:50,543 INFO [train.py:715] (6/8) Epoch 3, batch 9850, loss[loss=0.1641, simple_loss=0.2364, pruned_loss=0.04591, over 4780.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2329, pruned_loss=0.04836, over 973240.28 frames.], batch size: 17, lr: 5.61e-04 +2022-05-04 13:11:32,512 INFO [train.py:715] (6/8) Epoch 3, batch 9900, loss[loss=0.166, simple_loss=0.234, pruned_loss=0.04903, over 4792.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2332, pruned_loss=0.04871, over 972809.78 frames.], batch size: 14, lr: 5.61e-04 +2022-05-04 13:12:13,004 INFO [train.py:715] (6/8) Epoch 3, batch 9950, loss[loss=0.1404, simple_loss=0.2262, pruned_loss=0.0273, over 4965.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2318, pruned_loss=0.04765, over 973343.93 frames.], batch size: 24, lr: 5.61e-04 +2022-05-04 13:12:54,729 INFO [train.py:715] (6/8) Epoch 3, batch 10000, loss[loss=0.1649, simple_loss=0.2333, pruned_loss=0.04826, over 4765.00 frames.], tot_loss[loss=0.164, simple_loss=0.2323, pruned_loss=0.04788, over 972535.55 frames.], batch size: 12, lr: 5.61e-04 +2022-05-04 13:13:36,178 INFO [train.py:715] (6/8) Epoch 3, batch 10050, loss[loss=0.1615, simple_loss=0.2373, pruned_loss=0.04281, over 4844.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2319, pruned_loss=0.04762, over 972856.86 frames.], batch size: 15, lr: 5.61e-04 +2022-05-04 13:14:17,623 INFO [train.py:715] (6/8) Epoch 3, batch 10100, loss[loss=0.1479, simple_loss=0.2236, pruned_loss=0.03609, over 4980.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2301, pruned_loss=0.04649, over 972458.26 frames.], batch size: 39, lr: 5.60e-04 +2022-05-04 13:14:58,619 INFO [train.py:715] (6/8) Epoch 3, batch 10150, loss[loss=0.1445, simple_loss=0.2168, pruned_loss=0.03615, over 4884.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2299, pruned_loss=0.04662, over 972241.36 frames.], batch size: 16, lr: 5.60e-04 +2022-05-04 13:15:40,199 INFO [train.py:715] (6/8) Epoch 3, batch 10200, loss[loss=0.153, simple_loss=0.2244, pruned_loss=0.04074, over 4811.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2305, pruned_loss=0.04684, over 973181.02 frames.], batch size: 26, lr: 5.60e-04 +2022-05-04 13:16:21,935 INFO [train.py:715] (6/8) Epoch 3, batch 10250, loss[loss=0.1662, simple_loss=0.2256, pruned_loss=0.05345, over 4987.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2305, pruned_loss=0.04705, over 972975.67 frames.], batch size: 14, lr: 5.60e-04 +2022-05-04 13:17:01,801 INFO [train.py:715] (6/8) Epoch 3, batch 10300, loss[loss=0.1566, simple_loss=0.2315, pruned_loss=0.0409, over 4903.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2311, pruned_loss=0.04724, over 972804.32 frames.], batch size: 19, lr: 5.60e-04 +2022-05-04 13:17:42,038 INFO [train.py:715] (6/8) Epoch 3, 
batch 10350, loss[loss=0.1903, simple_loss=0.2606, pruned_loss=0.06002, over 4986.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2317, pruned_loss=0.04772, over 973001.13 frames.], batch size: 28, lr: 5.60e-04 +2022-05-04 13:18:22,567 INFO [train.py:715] (6/8) Epoch 3, batch 10400, loss[loss=0.1889, simple_loss=0.2563, pruned_loss=0.06077, over 4791.00 frames.], tot_loss[loss=0.1637, simple_loss=0.232, pruned_loss=0.04773, over 971796.98 frames.], batch size: 17, lr: 5.60e-04 +2022-05-04 13:19:03,194 INFO [train.py:715] (6/8) Epoch 3, batch 10450, loss[loss=0.1625, simple_loss=0.2211, pruned_loss=0.05192, over 4786.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2323, pruned_loss=0.04861, over 972023.05 frames.], batch size: 14, lr: 5.60e-04 +2022-05-04 13:19:43,612 INFO [train.py:715] (6/8) Epoch 3, batch 10500, loss[loss=0.1654, simple_loss=0.2268, pruned_loss=0.05206, over 4914.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2319, pruned_loss=0.04818, over 972383.50 frames.], batch size: 18, lr: 5.59e-04 +2022-05-04 13:20:24,618 INFO [train.py:715] (6/8) Epoch 3, batch 10550, loss[loss=0.1655, simple_loss=0.2379, pruned_loss=0.0465, over 4799.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2317, pruned_loss=0.04794, over 972350.85 frames.], batch size: 21, lr: 5.59e-04 +2022-05-04 13:21:07,140 INFO [train.py:715] (6/8) Epoch 3, batch 10600, loss[loss=0.156, simple_loss=0.2184, pruned_loss=0.0468, over 4798.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2324, pruned_loss=0.04774, over 971435.50 frames.], batch size: 12, lr: 5.59e-04 +2022-05-04 13:21:48,626 INFO [train.py:715] (6/8) Epoch 3, batch 10650, loss[loss=0.1405, simple_loss=0.2177, pruned_loss=0.03163, over 4975.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2322, pruned_loss=0.04769, over 971826.11 frames.], batch size: 21, lr: 5.59e-04 +2022-05-04 13:22:30,749 INFO [train.py:715] (6/8) Epoch 3, batch 10700, loss[loss=0.1372, simple_loss=0.2037, pruned_loss=0.03535, over 4932.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2319, pruned_loss=0.04761, over 972901.79 frames.], batch size: 29, lr: 5.59e-04 +2022-05-04 13:23:13,523 INFO [train.py:715] (6/8) Epoch 3, batch 10750, loss[loss=0.1661, simple_loss=0.242, pruned_loss=0.0451, over 4824.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2311, pruned_loss=0.04712, over 972296.15 frames.], batch size: 26, lr: 5.59e-04 +2022-05-04 13:23:56,761 INFO [train.py:715] (6/8) Epoch 3, batch 10800, loss[loss=0.1771, simple_loss=0.2524, pruned_loss=0.05089, over 4745.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2308, pruned_loss=0.04697, over 972410.23 frames.], batch size: 16, lr: 5.59e-04 +2022-05-04 13:24:38,550 INFO [train.py:715] (6/8) Epoch 3, batch 10850, loss[loss=0.1559, simple_loss=0.2122, pruned_loss=0.04983, over 4859.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2309, pruned_loss=0.04727, over 972148.01 frames.], batch size: 22, lr: 5.59e-04 +2022-05-04 13:25:21,321 INFO [train.py:715] (6/8) Epoch 3, batch 10900, loss[loss=0.1295, simple_loss=0.1886, pruned_loss=0.03522, over 4965.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2318, pruned_loss=0.04762, over 971681.05 frames.], batch size: 14, lr: 5.58e-04 +2022-05-04 13:26:04,565 INFO [train.py:715] (6/8) Epoch 3, batch 10950, loss[loss=0.1848, simple_loss=0.2481, pruned_loss=0.06075, over 4962.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2315, pruned_loss=0.04742, over 971515.34 frames.], batch size: 15, lr: 5.58e-04 +2022-05-04 13:26:46,521 INFO [train.py:715] (6/8) Epoch 3, batch 11000, 
loss[loss=0.1291, simple_loss=0.2007, pruned_loss=0.02875, over 4807.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2315, pruned_loss=0.04733, over 971762.74 frames.], batch size: 26, lr: 5.58e-04 +2022-05-04 13:27:28,086 INFO [train.py:715] (6/8) Epoch 3, batch 11050, loss[loss=0.1644, simple_loss=0.2272, pruned_loss=0.05079, over 4916.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2318, pruned_loss=0.04788, over 971297.25 frames.], batch size: 17, lr: 5.58e-04 +2022-05-04 13:28:11,601 INFO [train.py:715] (6/8) Epoch 3, batch 11100, loss[loss=0.1643, simple_loss=0.2299, pruned_loss=0.04935, over 4745.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2318, pruned_loss=0.04764, over 970659.89 frames.], batch size: 16, lr: 5.58e-04 +2022-05-04 13:28:53,679 INFO [train.py:715] (6/8) Epoch 3, batch 11150, loss[loss=0.1544, simple_loss=0.2136, pruned_loss=0.04763, over 4858.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2318, pruned_loss=0.04776, over 971392.96 frames.], batch size: 30, lr: 5.58e-04 +2022-05-04 13:29:35,742 INFO [train.py:715] (6/8) Epoch 3, batch 11200, loss[loss=0.1805, simple_loss=0.2512, pruned_loss=0.0549, over 4927.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2318, pruned_loss=0.04776, over 970898.60 frames.], batch size: 23, lr: 5.58e-04 +2022-05-04 13:30:18,281 INFO [train.py:715] (6/8) Epoch 3, batch 11250, loss[loss=0.151, simple_loss=0.2104, pruned_loss=0.0458, over 4923.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2315, pruned_loss=0.04765, over 971179.67 frames.], batch size: 17, lr: 5.58e-04 +2022-05-04 13:31:01,503 INFO [train.py:715] (6/8) Epoch 3, batch 11300, loss[loss=0.1146, simple_loss=0.1885, pruned_loss=0.02029, over 4786.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2314, pruned_loss=0.04782, over 970713.73 frames.], batch size: 24, lr: 5.57e-04 +2022-05-04 13:31:42,784 INFO [train.py:715] (6/8) Epoch 3, batch 11350, loss[loss=0.1776, simple_loss=0.2405, pruned_loss=0.05736, over 4818.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2314, pruned_loss=0.04756, over 971268.02 frames.], batch size: 13, lr: 5.57e-04 +2022-05-04 13:32:25,116 INFO [train.py:715] (6/8) Epoch 3, batch 11400, loss[loss=0.1278, simple_loss=0.2008, pruned_loss=0.02744, over 4787.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2315, pruned_loss=0.04771, over 971126.34 frames.], batch size: 18, lr: 5.57e-04 +2022-05-04 13:33:08,056 INFO [train.py:715] (6/8) Epoch 3, batch 11450, loss[loss=0.1804, simple_loss=0.2352, pruned_loss=0.0628, over 4824.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2315, pruned_loss=0.04781, over 970583.24 frames.], batch size: 15, lr: 5.57e-04 +2022-05-04 13:33:50,183 INFO [train.py:715] (6/8) Epoch 3, batch 11500, loss[loss=0.1616, simple_loss=0.2374, pruned_loss=0.04286, over 4941.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2297, pruned_loss=0.04685, over 970603.81 frames.], batch size: 21, lr: 5.57e-04 +2022-05-04 13:34:32,235 INFO [train.py:715] (6/8) Epoch 3, batch 11550, loss[loss=0.1494, simple_loss=0.2146, pruned_loss=0.04207, over 4854.00 frames.], tot_loss[loss=0.1618, simple_loss=0.23, pruned_loss=0.04684, over 970215.69 frames.], batch size: 30, lr: 5.57e-04 +2022-05-04 13:35:14,415 INFO [train.py:715] (6/8) Epoch 3, batch 11600, loss[loss=0.1553, simple_loss=0.2361, pruned_loss=0.03729, over 4925.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2303, pruned_loss=0.04692, over 970802.24 frames.], batch size: 18, lr: 5.57e-04 +2022-05-04 13:35:57,177 INFO [train.py:715] (6/8) Epoch 3, batch 11650, loss[loss=0.1477, 
simple_loss=0.2191, pruned_loss=0.03812, over 4842.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2305, pruned_loss=0.04703, over 971456.69 frames.], batch size: 26, lr: 5.57e-04 +2022-05-04 13:36:39,271 INFO [train.py:715] (6/8) Epoch 3, batch 11700, loss[loss=0.1645, simple_loss=0.2289, pruned_loss=0.05007, over 4956.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2311, pruned_loss=0.0473, over 972341.99 frames.], batch size: 15, lr: 5.57e-04 +2022-05-04 13:37:21,485 INFO [train.py:715] (6/8) Epoch 3, batch 11750, loss[loss=0.1514, simple_loss=0.2251, pruned_loss=0.03882, over 4882.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2295, pruned_loss=0.0465, over 971435.15 frames.], batch size: 22, lr: 5.56e-04 +2022-05-04 13:38:05,299 INFO [train.py:715] (6/8) Epoch 3, batch 11800, loss[loss=0.1717, simple_loss=0.2407, pruned_loss=0.05139, over 4988.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2296, pruned_loss=0.04643, over 972295.69 frames.], batch size: 14, lr: 5.56e-04 +2022-05-04 13:38:47,459 INFO [train.py:715] (6/8) Epoch 3, batch 11850, loss[loss=0.1822, simple_loss=0.2415, pruned_loss=0.06142, over 4929.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2298, pruned_loss=0.0465, over 972537.42 frames.], batch size: 23, lr: 5.56e-04 +2022-05-04 13:39:29,617 INFO [train.py:715] (6/8) Epoch 3, batch 11900, loss[loss=0.1412, simple_loss=0.2075, pruned_loss=0.03743, over 4833.00 frames.], tot_loss[loss=0.162, simple_loss=0.2301, pruned_loss=0.04696, over 972201.34 frames.], batch size: 15, lr: 5.56e-04 +2022-05-04 13:40:11,708 INFO [train.py:715] (6/8) Epoch 3, batch 11950, loss[loss=0.1172, simple_loss=0.1893, pruned_loss=0.02255, over 4833.00 frames.], tot_loss[loss=0.162, simple_loss=0.2303, pruned_loss=0.04687, over 971321.11 frames.], batch size: 12, lr: 5.56e-04 +2022-05-04 13:40:54,208 INFO [train.py:715] (6/8) Epoch 3, batch 12000, loss[loss=0.1357, simple_loss=0.2036, pruned_loss=0.03391, over 4776.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2311, pruned_loss=0.04726, over 971526.40 frames.], batch size: 12, lr: 5.56e-04 +2022-05-04 13:40:54,209 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 13:41:02,572 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1142, simple_loss=0.2003, pruned_loss=0.01401, over 914524.00 frames. 
+2022-05-04 13:41:44,686 INFO [train.py:715] (6/8) Epoch 3, batch 12050, loss[loss=0.1592, simple_loss=0.2197, pruned_loss=0.04942, over 4836.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2316, pruned_loss=0.04769, over 971373.99 frames.], batch size: 30, lr: 5.56e-04 +2022-05-04 13:42:26,380 INFO [train.py:715] (6/8) Epoch 3, batch 12100, loss[loss=0.1608, simple_loss=0.2362, pruned_loss=0.04271, over 4910.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2323, pruned_loss=0.04817, over 971450.16 frames.], batch size: 18, lr: 5.56e-04 +2022-05-04 13:43:08,785 INFO [train.py:715] (6/8) Epoch 3, batch 12150, loss[loss=0.1623, simple_loss=0.2333, pruned_loss=0.04569, over 4933.00 frames.], tot_loss[loss=0.1644, simple_loss=0.232, pruned_loss=0.0484, over 971746.37 frames.], batch size: 23, lr: 5.55e-04 +2022-05-04 13:43:52,025 INFO [train.py:715] (6/8) Epoch 3, batch 12200, loss[loss=0.1726, simple_loss=0.2544, pruned_loss=0.04538, over 4989.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2317, pruned_loss=0.04783, over 972221.53 frames.], batch size: 20, lr: 5.55e-04 +2022-05-04 13:44:33,694 INFO [train.py:715] (6/8) Epoch 3, batch 12250, loss[loss=0.1913, simple_loss=0.2535, pruned_loss=0.06455, over 4840.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2322, pruned_loss=0.04811, over 972236.28 frames.], batch size: 20, lr: 5.55e-04 +2022-05-04 13:45:15,605 INFO [train.py:715] (6/8) Epoch 3, batch 12300, loss[loss=0.1545, simple_loss=0.2267, pruned_loss=0.04116, over 4761.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2321, pruned_loss=0.04806, over 972843.37 frames.], batch size: 19, lr: 5.55e-04 +2022-05-04 13:45:58,061 INFO [train.py:715] (6/8) Epoch 3, batch 12350, loss[loss=0.1904, simple_loss=0.2626, pruned_loss=0.0591, over 4941.00 frames.], tot_loss[loss=0.164, simple_loss=0.2322, pruned_loss=0.04788, over 973155.41 frames.], batch size: 21, lr: 5.55e-04 +2022-05-04 13:46:41,406 INFO [train.py:715] (6/8) Epoch 3, batch 12400, loss[loss=0.1284, simple_loss=0.2032, pruned_loss=0.02675, over 4780.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2328, pruned_loss=0.04816, over 972964.03 frames.], batch size: 14, lr: 5.55e-04 +2022-05-04 13:47:23,078 INFO [train.py:715] (6/8) Epoch 3, batch 12450, loss[loss=0.2268, simple_loss=0.2907, pruned_loss=0.08138, over 4954.00 frames.], tot_loss[loss=0.164, simple_loss=0.2323, pruned_loss=0.0479, over 973492.41 frames.], batch size: 29, lr: 5.55e-04 +2022-05-04 13:48:04,575 INFO [train.py:715] (6/8) Epoch 3, batch 12500, loss[loss=0.1501, simple_loss=0.226, pruned_loss=0.03717, over 4858.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2325, pruned_loss=0.04819, over 973788.22 frames.], batch size: 20, lr: 5.55e-04 +2022-05-04 13:48:47,318 INFO [train.py:715] (6/8) Epoch 3, batch 12550, loss[loss=0.1814, simple_loss=0.243, pruned_loss=0.05985, over 4774.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2327, pruned_loss=0.0483, over 972936.93 frames.], batch size: 18, lr: 5.54e-04 +2022-05-04 13:49:29,602 INFO [train.py:715] (6/8) Epoch 3, batch 12600, loss[loss=0.152, simple_loss=0.2291, pruned_loss=0.03743, over 4866.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2323, pruned_loss=0.04807, over 972708.50 frames.], batch size: 32, lr: 5.54e-04 +2022-05-04 13:50:11,372 INFO [train.py:715] (6/8) Epoch 3, batch 12650, loss[loss=0.1827, simple_loss=0.2483, pruned_loss=0.05849, over 4778.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2315, pruned_loss=0.04737, over 972015.42 frames.], batch size: 17, lr: 5.54e-04 +2022-05-04 13:50:53,071 
INFO [train.py:715] (6/8) Epoch 3, batch 12700, loss[loss=0.1593, simple_loss=0.2253, pruned_loss=0.04668, over 4877.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2312, pruned_loss=0.04733, over 971667.03 frames.], batch size: 32, lr: 5.54e-04 +2022-05-04 13:51:35,163 INFO [train.py:715] (6/8) Epoch 3, batch 12750, loss[loss=0.147, simple_loss=0.2193, pruned_loss=0.0373, over 4992.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2323, pruned_loss=0.04773, over 973340.70 frames.], batch size: 20, lr: 5.54e-04 +2022-05-04 13:52:17,442 INFO [train.py:715] (6/8) Epoch 3, batch 12800, loss[loss=0.1595, simple_loss=0.2223, pruned_loss=0.04833, over 4968.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2318, pruned_loss=0.04783, over 974000.79 frames.], batch size: 24, lr: 5.54e-04 +2022-05-04 13:52:58,264 INFO [train.py:715] (6/8) Epoch 3, batch 12850, loss[loss=0.1339, simple_loss=0.2014, pruned_loss=0.03316, over 4919.00 frames.], tot_loss[loss=0.1653, simple_loss=0.233, pruned_loss=0.04876, over 975228.73 frames.], batch size: 29, lr: 5.54e-04 +2022-05-04 13:53:40,963 INFO [train.py:715] (6/8) Epoch 3, batch 12900, loss[loss=0.1491, simple_loss=0.2176, pruned_loss=0.04033, over 4793.00 frames.], tot_loss[loss=0.165, simple_loss=0.2329, pruned_loss=0.04856, over 974412.99 frames.], batch size: 24, lr: 5.54e-04 +2022-05-04 13:54:23,564 INFO [train.py:715] (6/8) Epoch 3, batch 12950, loss[loss=0.1471, simple_loss=0.2163, pruned_loss=0.03896, over 4759.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2321, pruned_loss=0.04784, over 973851.13 frames.], batch size: 14, lr: 5.54e-04 +2022-05-04 13:55:04,933 INFO [train.py:715] (6/8) Epoch 3, batch 13000, loss[loss=0.1613, simple_loss=0.2307, pruned_loss=0.04599, over 4808.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2337, pruned_loss=0.04836, over 973047.79 frames.], batch size: 25, lr: 5.53e-04 +2022-05-04 13:55:46,799 INFO [train.py:715] (6/8) Epoch 3, batch 13050, loss[loss=0.2224, simple_loss=0.2666, pruned_loss=0.08913, over 4914.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2336, pruned_loss=0.0486, over 973227.09 frames.], batch size: 17, lr: 5.53e-04 +2022-05-04 13:56:28,794 INFO [train.py:715] (6/8) Epoch 3, batch 13100, loss[loss=0.1544, simple_loss=0.214, pruned_loss=0.04738, over 4890.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2335, pruned_loss=0.04846, over 973626.07 frames.], batch size: 16, lr: 5.53e-04 +2022-05-04 13:57:10,557 INFO [train.py:715] (6/8) Epoch 3, batch 13150, loss[loss=0.1577, simple_loss=0.2189, pruned_loss=0.04827, over 4982.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2342, pruned_loss=0.04874, over 973152.51 frames.], batch size: 14, lr: 5.53e-04 +2022-05-04 13:57:52,124 INFO [train.py:715] (6/8) Epoch 3, batch 13200, loss[loss=0.1522, simple_loss=0.2227, pruned_loss=0.04088, over 4866.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2335, pruned_loss=0.0484, over 972303.54 frames.], batch size: 22, lr: 5.53e-04 +2022-05-04 13:58:34,750 INFO [train.py:715] (6/8) Epoch 3, batch 13250, loss[loss=0.1301, simple_loss=0.2036, pruned_loss=0.02831, over 4983.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2325, pruned_loss=0.04812, over 972813.90 frames.], batch size: 28, lr: 5.53e-04 +2022-05-04 13:59:17,152 INFO [train.py:715] (6/8) Epoch 3, batch 13300, loss[loss=0.166, simple_loss=0.2255, pruned_loss=0.05325, over 4904.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2329, pruned_loss=0.04833, over 973045.88 frames.], batch size: 17, lr: 5.53e-04 +2022-05-04 13:59:58,639 INFO [train.py:715] (6/8) 
Epoch 3, batch 13350, loss[loss=0.1736, simple_loss=0.2377, pruned_loss=0.05473, over 4968.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2327, pruned_loss=0.04795, over 973649.87 frames.], batch size: 15, lr: 5.53e-04 +2022-05-04 14:00:40,471 INFO [train.py:715] (6/8) Epoch 3, batch 13400, loss[loss=0.1592, simple_loss=0.2305, pruned_loss=0.044, over 4924.00 frames.], tot_loss[loss=0.1637, simple_loss=0.232, pruned_loss=0.04771, over 973768.21 frames.], batch size: 23, lr: 5.52e-04 +2022-05-04 14:01:23,057 INFO [train.py:715] (6/8) Epoch 3, batch 13450, loss[loss=0.1446, simple_loss=0.2126, pruned_loss=0.03832, over 4799.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2327, pruned_loss=0.04843, over 973191.29 frames.], batch size: 12, lr: 5.52e-04 +2022-05-04 14:02:04,525 INFO [train.py:715] (6/8) Epoch 3, batch 13500, loss[loss=0.1758, simple_loss=0.2393, pruned_loss=0.05622, over 4950.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2323, pruned_loss=0.04817, over 974114.59 frames.], batch size: 21, lr: 5.52e-04 +2022-05-04 14:02:46,057 INFO [train.py:715] (6/8) Epoch 3, batch 13550, loss[loss=0.18, simple_loss=0.2304, pruned_loss=0.06479, over 4964.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2315, pruned_loss=0.04788, over 973324.05 frames.], batch size: 33, lr: 5.52e-04 +2022-05-04 14:03:28,383 INFO [train.py:715] (6/8) Epoch 3, batch 13600, loss[loss=0.1594, simple_loss=0.2309, pruned_loss=0.04398, over 4963.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2308, pruned_loss=0.04741, over 972508.85 frames.], batch size: 15, lr: 5.52e-04 +2022-05-04 14:04:10,292 INFO [train.py:715] (6/8) Epoch 3, batch 13650, loss[loss=0.1576, simple_loss=0.2281, pruned_loss=0.04359, over 4811.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2312, pruned_loss=0.04763, over 972962.21 frames.], batch size: 21, lr: 5.52e-04 +2022-05-04 14:04:51,703 INFO [train.py:715] (6/8) Epoch 3, batch 13700, loss[loss=0.1442, simple_loss=0.2254, pruned_loss=0.03152, over 4804.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2304, pruned_loss=0.04712, over 973518.64 frames.], batch size: 24, lr: 5.52e-04 +2022-05-04 14:05:34,468 INFO [train.py:715] (6/8) Epoch 3, batch 13750, loss[loss=0.1729, simple_loss=0.2477, pruned_loss=0.04899, over 4878.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2294, pruned_loss=0.04666, over 972544.49 frames.], batch size: 22, lr: 5.52e-04 +2022-05-04 14:06:16,552 INFO [train.py:715] (6/8) Epoch 3, batch 13800, loss[loss=0.1506, simple_loss=0.2333, pruned_loss=0.03398, over 4752.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2301, pruned_loss=0.04689, over 973038.55 frames.], batch size: 19, lr: 5.52e-04 +2022-05-04 14:06:58,037 INFO [train.py:715] (6/8) Epoch 3, batch 13850, loss[loss=0.2084, simple_loss=0.2753, pruned_loss=0.07075, over 4949.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2305, pruned_loss=0.04736, over 972603.88 frames.], batch size: 29, lr: 5.51e-04 +2022-05-04 14:07:39,273 INFO [train.py:715] (6/8) Epoch 3, batch 13900, loss[loss=0.1658, simple_loss=0.2247, pruned_loss=0.05344, over 4837.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2303, pruned_loss=0.04758, over 972814.44 frames.], batch size: 13, lr: 5.51e-04 +2022-05-04 14:08:21,711 INFO [train.py:715] (6/8) Epoch 3, batch 13950, loss[loss=0.1436, simple_loss=0.2193, pruned_loss=0.03388, over 4814.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2312, pruned_loss=0.04787, over 973349.07 frames.], batch size: 27, lr: 5.51e-04 +2022-05-04 14:09:04,166 INFO [train.py:715] (6/8) Epoch 3, batch 14000, 
loss[loss=0.1505, simple_loss=0.2217, pruned_loss=0.03968, over 4782.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2316, pruned_loss=0.04814, over 973245.14 frames.], batch size: 17, lr: 5.51e-04 +2022-05-04 14:09:45,590 INFO [train.py:715] (6/8) Epoch 3, batch 14050, loss[loss=0.1766, simple_loss=0.2499, pruned_loss=0.05163, over 4700.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2321, pruned_loss=0.04848, over 972718.63 frames.], batch size: 15, lr: 5.51e-04 +2022-05-04 14:10:28,394 INFO [train.py:715] (6/8) Epoch 3, batch 14100, loss[loss=0.1604, simple_loss=0.2338, pruned_loss=0.04348, over 4885.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2321, pruned_loss=0.04856, over 972724.54 frames.], batch size: 39, lr: 5.51e-04 +2022-05-04 14:11:10,225 INFO [train.py:715] (6/8) Epoch 3, batch 14150, loss[loss=0.1569, simple_loss=0.2304, pruned_loss=0.04172, over 4923.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2317, pruned_loss=0.04826, over 972502.67 frames.], batch size: 17, lr: 5.51e-04 +2022-05-04 14:11:51,369 INFO [train.py:715] (6/8) Epoch 3, batch 14200, loss[loss=0.177, simple_loss=0.2304, pruned_loss=0.06181, over 4768.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2316, pruned_loss=0.04806, over 972345.68 frames.], batch size: 14, lr: 5.51e-04 +2022-05-04 14:12:33,507 INFO [train.py:715] (6/8) Epoch 3, batch 14250, loss[loss=0.167, simple_loss=0.2342, pruned_loss=0.04993, over 4883.00 frames.], tot_loss[loss=0.1644, simple_loss=0.232, pruned_loss=0.04838, over 973235.64 frames.], batch size: 22, lr: 5.51e-04 +2022-05-04 14:13:15,876 INFO [train.py:715] (6/8) Epoch 3, batch 14300, loss[loss=0.1509, simple_loss=0.2269, pruned_loss=0.03741, over 4691.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2326, pruned_loss=0.04889, over 972688.18 frames.], batch size: 15, lr: 5.50e-04 +2022-05-04 14:13:58,179 INFO [train.py:715] (6/8) Epoch 3, batch 14350, loss[loss=0.1597, simple_loss=0.2291, pruned_loss=0.04515, over 4967.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2334, pruned_loss=0.04914, over 972209.39 frames.], batch size: 25, lr: 5.50e-04 +2022-05-04 14:14:38,954 INFO [train.py:715] (6/8) Epoch 3, batch 14400, loss[loss=0.1551, simple_loss=0.2302, pruned_loss=0.03996, over 4961.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2332, pruned_loss=0.04895, over 971989.66 frames.], batch size: 39, lr: 5.50e-04 +2022-05-04 14:15:21,408 INFO [train.py:715] (6/8) Epoch 3, batch 14450, loss[loss=0.1461, simple_loss=0.2185, pruned_loss=0.03688, over 4783.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2317, pruned_loss=0.04759, over 971493.14 frames.], batch size: 14, lr: 5.50e-04 +2022-05-04 14:16:03,347 INFO [train.py:715] (6/8) Epoch 3, batch 14500, loss[loss=0.1666, simple_loss=0.2351, pruned_loss=0.049, over 4796.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2316, pruned_loss=0.04765, over 971357.50 frames.], batch size: 17, lr: 5.50e-04 +2022-05-04 14:16:44,527 INFO [train.py:715] (6/8) Epoch 3, batch 14550, loss[loss=0.178, simple_loss=0.2453, pruned_loss=0.05533, over 4985.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2314, pruned_loss=0.04802, over 970493.05 frames.], batch size: 15, lr: 5.50e-04 +2022-05-04 14:17:26,989 INFO [train.py:715] (6/8) Epoch 3, batch 14600, loss[loss=0.1489, simple_loss=0.2229, pruned_loss=0.03745, over 4894.00 frames.], tot_loss[loss=0.1628, simple_loss=0.231, pruned_loss=0.04735, over 970516.33 frames.], batch size: 19, lr: 5.50e-04 +2022-05-04 14:18:08,865 INFO [train.py:715] (6/8) Epoch 3, batch 14650, loss[loss=0.1955, 
simple_loss=0.2712, pruned_loss=0.05994, over 4775.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2314, pruned_loss=0.04743, over 970882.75 frames.], batch size: 18, lr: 5.50e-04 +2022-05-04 14:18:50,924 INFO [train.py:715] (6/8) Epoch 3, batch 14700, loss[loss=0.157, simple_loss=0.2192, pruned_loss=0.04735, over 4875.00 frames.], tot_loss[loss=0.1632, simple_loss=0.231, pruned_loss=0.04765, over 971194.39 frames.], batch size: 32, lr: 5.49e-04 +2022-05-04 14:19:32,217 INFO [train.py:715] (6/8) Epoch 3, batch 14750, loss[loss=0.1991, simple_loss=0.271, pruned_loss=0.06358, over 4866.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2317, pruned_loss=0.04766, over 971149.75 frames.], batch size: 20, lr: 5.49e-04 +2022-05-04 14:20:14,636 INFO [train.py:715] (6/8) Epoch 3, batch 14800, loss[loss=0.1704, simple_loss=0.2337, pruned_loss=0.05357, over 4888.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2326, pruned_loss=0.04823, over 970918.86 frames.], batch size: 32, lr: 5.49e-04 +2022-05-04 14:20:56,944 INFO [train.py:715] (6/8) Epoch 3, batch 14850, loss[loss=0.1393, simple_loss=0.2138, pruned_loss=0.03239, over 4920.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2323, pruned_loss=0.04839, over 970793.73 frames.], batch size: 18, lr: 5.49e-04 +2022-05-04 14:21:37,858 INFO [train.py:715] (6/8) Epoch 3, batch 14900, loss[loss=0.1496, simple_loss=0.214, pruned_loss=0.04258, over 4985.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2322, pruned_loss=0.04817, over 971594.32 frames.], batch size: 14, lr: 5.49e-04 +2022-05-04 14:22:20,813 INFO [train.py:715] (6/8) Epoch 3, batch 14950, loss[loss=0.2061, simple_loss=0.2666, pruned_loss=0.07281, over 4940.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2325, pruned_loss=0.04804, over 972167.14 frames.], batch size: 39, lr: 5.49e-04 +2022-05-04 14:23:02,214 INFO [train.py:715] (6/8) Epoch 3, batch 15000, loss[loss=0.1653, simple_loss=0.2343, pruned_loss=0.04812, over 4816.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2322, pruned_loss=0.0477, over 972162.13 frames.], batch size: 27, lr: 5.49e-04 +2022-05-04 14:23:02,215 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 14:23:10,876 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1142, simple_loss=0.2003, pruned_loss=0.01402, over 914524.00 frames. 
+2022-05-04 14:23:52,709 INFO [train.py:715] (6/8) Epoch 3, batch 15050, loss[loss=0.1441, simple_loss=0.2, pruned_loss=0.04411, over 4979.00 frames.], tot_loss[loss=0.1647, simple_loss=0.233, pruned_loss=0.0482, over 971921.91 frames.], batch size: 14, lr: 5.49e-04 +2022-05-04 14:24:34,029 INFO [train.py:715] (6/8) Epoch 3, batch 15100, loss[loss=0.1678, simple_loss=0.2402, pruned_loss=0.04768, over 4781.00 frames.], tot_loss[loss=0.164, simple_loss=0.232, pruned_loss=0.04801, over 971232.70 frames.], batch size: 18, lr: 5.49e-04 +2022-05-04 14:25:16,185 INFO [train.py:715] (6/8) Epoch 3, batch 15150, loss[loss=0.2159, simple_loss=0.2775, pruned_loss=0.07711, over 4972.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2325, pruned_loss=0.04828, over 971882.34 frames.], batch size: 15, lr: 5.48e-04 +2022-05-04 14:25:57,817 INFO [train.py:715] (6/8) Epoch 3, batch 15200, loss[loss=0.1931, simple_loss=0.2688, pruned_loss=0.05872, over 4923.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2328, pruned_loss=0.04837, over 972121.98 frames.], batch size: 23, lr: 5.48e-04 +2022-05-04 14:26:39,370 INFO [train.py:715] (6/8) Epoch 3, batch 15250, loss[loss=0.1996, simple_loss=0.2422, pruned_loss=0.07853, over 4839.00 frames.], tot_loss[loss=0.165, simple_loss=0.233, pruned_loss=0.04854, over 972459.00 frames.], batch size: 13, lr: 5.48e-04 +2022-05-04 14:27:20,721 INFO [train.py:715] (6/8) Epoch 3, batch 15300, loss[loss=0.1683, simple_loss=0.2428, pruned_loss=0.04689, over 4786.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2322, pruned_loss=0.04772, over 972796.76 frames.], batch size: 17, lr: 5.48e-04 +2022-05-04 14:28:02,538 INFO [train.py:715] (6/8) Epoch 3, batch 15350, loss[loss=0.2241, simple_loss=0.2762, pruned_loss=0.08599, over 4792.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2326, pruned_loss=0.04835, over 972938.41 frames.], batch size: 17, lr: 5.48e-04 +2022-05-04 14:28:44,650 INFO [train.py:715] (6/8) Epoch 3, batch 15400, loss[loss=0.1809, simple_loss=0.2424, pruned_loss=0.05965, over 4807.00 frames.], tot_loss[loss=0.1628, simple_loss=0.231, pruned_loss=0.04726, over 973141.56 frames.], batch size: 14, lr: 5.48e-04 +2022-05-04 14:29:25,742 INFO [train.py:715] (6/8) Epoch 3, batch 15450, loss[loss=0.1661, simple_loss=0.2321, pruned_loss=0.05004, over 4837.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2313, pruned_loss=0.04774, over 973025.00 frames.], batch size: 26, lr: 5.48e-04 +2022-05-04 14:30:08,684 INFO [train.py:715] (6/8) Epoch 3, batch 15500, loss[loss=0.1519, simple_loss=0.2172, pruned_loss=0.04325, over 4952.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2303, pruned_loss=0.04707, over 972893.20 frames.], batch size: 23, lr: 5.48e-04 +2022-05-04 14:30:50,506 INFO [train.py:715] (6/8) Epoch 3, batch 15550, loss[loss=0.206, simple_loss=0.2578, pruned_loss=0.07706, over 4887.00 frames.], tot_loss[loss=0.1629, simple_loss=0.231, pruned_loss=0.04742, over 973049.16 frames.], batch size: 22, lr: 5.48e-04 +2022-05-04 14:31:35,094 INFO [train.py:715] (6/8) Epoch 3, batch 15600, loss[loss=0.1534, simple_loss=0.2331, pruned_loss=0.03689, over 4930.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2314, pruned_loss=0.04816, over 973308.26 frames.], batch size: 29, lr: 5.47e-04 +2022-05-04 14:32:16,093 INFO [train.py:715] (6/8) Epoch 3, batch 15650, loss[loss=0.1511, simple_loss=0.2282, pruned_loss=0.03704, over 4952.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2311, pruned_loss=0.04759, over 973840.79 frames.], batch size: 24, lr: 5.47e-04 +2022-05-04 14:32:57,692 INFO 
[train.py:715] (6/8) Epoch 3, batch 15700, loss[loss=0.1603, simple_loss=0.2312, pruned_loss=0.04471, over 4812.00 frames.], tot_loss[loss=0.1629, simple_loss=0.231, pruned_loss=0.04745, over 972945.89 frames.], batch size: 21, lr: 5.47e-04 +2022-05-04 14:33:40,525 INFO [train.py:715] (6/8) Epoch 3, batch 15750, loss[loss=0.1647, simple_loss=0.2387, pruned_loss=0.04532, over 4986.00 frames.], tot_loss[loss=0.163, simple_loss=0.2311, pruned_loss=0.04743, over 972882.56 frames.], batch size: 31, lr: 5.47e-04 +2022-05-04 14:34:22,333 INFO [train.py:715] (6/8) Epoch 3, batch 15800, loss[loss=0.1477, simple_loss=0.2164, pruned_loss=0.03951, over 4746.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2307, pruned_loss=0.04711, over 972704.40 frames.], batch size: 19, lr: 5.47e-04 +2022-05-04 14:35:03,585 INFO [train.py:715] (6/8) Epoch 3, batch 15850, loss[loss=0.1647, simple_loss=0.2287, pruned_loss=0.05032, over 4976.00 frames.], tot_loss[loss=0.164, simple_loss=0.2321, pruned_loss=0.048, over 973064.68 frames.], batch size: 14, lr: 5.47e-04 +2022-05-04 14:35:45,957 INFO [train.py:715] (6/8) Epoch 3, batch 15900, loss[loss=0.1884, simple_loss=0.2533, pruned_loss=0.0618, over 4876.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2323, pruned_loss=0.04791, over 972749.69 frames.], batch size: 16, lr: 5.47e-04 +2022-05-04 14:36:28,598 INFO [train.py:715] (6/8) Epoch 3, batch 15950, loss[loss=0.1908, simple_loss=0.259, pruned_loss=0.06131, over 4779.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2314, pruned_loss=0.04691, over 973099.68 frames.], batch size: 17, lr: 5.47e-04 +2022-05-04 14:37:09,198 INFO [train.py:715] (6/8) Epoch 3, batch 16000, loss[loss=0.1528, simple_loss=0.2114, pruned_loss=0.04713, over 4772.00 frames.], tot_loss[loss=0.1632, simple_loss=0.232, pruned_loss=0.04726, over 972636.41 frames.], batch size: 18, lr: 5.47e-04 +2022-05-04 14:37:50,851 INFO [train.py:715] (6/8) Epoch 3, batch 16050, loss[loss=0.1823, simple_loss=0.2417, pruned_loss=0.06144, over 4844.00 frames.], tot_loss[loss=0.162, simple_loss=0.2307, pruned_loss=0.04665, over 972093.62 frames.], batch size: 15, lr: 5.46e-04 +2022-05-04 14:38:33,476 INFO [train.py:715] (6/8) Epoch 3, batch 16100, loss[loss=0.2181, simple_loss=0.2909, pruned_loss=0.07263, over 4827.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2308, pruned_loss=0.04645, over 971949.99 frames.], batch size: 26, lr: 5.46e-04 +2022-05-04 14:39:15,451 INFO [train.py:715] (6/8) Epoch 3, batch 16150, loss[loss=0.1542, simple_loss=0.232, pruned_loss=0.03822, over 4695.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2302, pruned_loss=0.04658, over 971542.50 frames.], batch size: 15, lr: 5.46e-04 +2022-05-04 14:39:56,185 INFO [train.py:715] (6/8) Epoch 3, batch 16200, loss[loss=0.1845, simple_loss=0.2346, pruned_loss=0.06722, over 4980.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2305, pruned_loss=0.04698, over 971536.77 frames.], batch size: 14, lr: 5.46e-04 +2022-05-04 14:40:38,482 INFO [train.py:715] (6/8) Epoch 3, batch 16250, loss[loss=0.1704, simple_loss=0.2587, pruned_loss=0.04104, over 4938.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2302, pruned_loss=0.04703, over 972105.89 frames.], batch size: 21, lr: 5.46e-04 +2022-05-04 14:41:20,557 INFO [train.py:715] (6/8) Epoch 3, batch 16300, loss[loss=0.1577, simple_loss=0.2196, pruned_loss=0.0479, over 4838.00 frames.], tot_loss[loss=0.1628, simple_loss=0.231, pruned_loss=0.04734, over 971693.23 frames.], batch size: 13, lr: 5.46e-04 +2022-05-04 14:42:01,215 INFO [train.py:715] (6/8) Epoch 3, 
batch 16350, loss[loss=0.1656, simple_loss=0.243, pruned_loss=0.04409, over 4825.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2317, pruned_loss=0.04762, over 971334.67 frames.], batch size: 26, lr: 5.46e-04 +2022-05-04 14:42:43,181 INFO [train.py:715] (6/8) Epoch 3, batch 16400, loss[loss=0.2114, simple_loss=0.2707, pruned_loss=0.07608, over 4831.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2314, pruned_loss=0.04781, over 971398.42 frames.], batch size: 15, lr: 5.46e-04 +2022-05-04 14:43:25,731 INFO [train.py:715] (6/8) Epoch 3, batch 16450, loss[loss=0.1446, simple_loss=0.2144, pruned_loss=0.03738, over 4995.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2308, pruned_loss=0.04751, over 972072.52 frames.], batch size: 14, lr: 5.45e-04 +2022-05-04 14:44:08,334 INFO [train.py:715] (6/8) Epoch 3, batch 16500, loss[loss=0.1626, simple_loss=0.2242, pruned_loss=0.05051, over 4849.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2304, pruned_loss=0.04703, over 971480.89 frames.], batch size: 13, lr: 5.45e-04 +2022-05-04 14:44:49,048 INFO [train.py:715] (6/8) Epoch 3, batch 16550, loss[loss=0.1983, simple_loss=0.2616, pruned_loss=0.06751, over 4931.00 frames.], tot_loss[loss=0.1632, simple_loss=0.231, pruned_loss=0.04766, over 971514.65 frames.], batch size: 23, lr: 5.45e-04 +2022-05-04 14:45:31,915 INFO [train.py:715] (6/8) Epoch 3, batch 16600, loss[loss=0.1669, simple_loss=0.2391, pruned_loss=0.04737, over 4919.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2318, pruned_loss=0.04821, over 970553.06 frames.], batch size: 39, lr: 5.45e-04 +2022-05-04 14:46:14,685 INFO [train.py:715] (6/8) Epoch 3, batch 16650, loss[loss=0.1621, simple_loss=0.2342, pruned_loss=0.04505, over 4891.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2305, pruned_loss=0.04752, over 971824.69 frames.], batch size: 19, lr: 5.45e-04 +2022-05-04 14:46:55,379 INFO [train.py:715] (6/8) Epoch 3, batch 16700, loss[loss=0.1684, simple_loss=0.2234, pruned_loss=0.05673, over 4808.00 frames.], tot_loss[loss=0.163, simple_loss=0.2307, pruned_loss=0.04765, over 970993.31 frames.], batch size: 14, lr: 5.45e-04 +2022-05-04 14:47:37,415 INFO [train.py:715] (6/8) Epoch 3, batch 16750, loss[loss=0.1631, simple_loss=0.225, pruned_loss=0.05053, over 4908.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2303, pruned_loss=0.04709, over 972030.39 frames.], batch size: 17, lr: 5.45e-04 +2022-05-04 14:48:19,859 INFO [train.py:715] (6/8) Epoch 3, batch 16800, loss[loss=0.1243, simple_loss=0.1923, pruned_loss=0.02811, over 4748.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2305, pruned_loss=0.04731, over 971740.23 frames.], batch size: 16, lr: 5.45e-04 +2022-05-04 14:49:01,335 INFO [train.py:715] (6/8) Epoch 3, batch 16850, loss[loss=0.1532, simple_loss=0.2293, pruned_loss=0.03854, over 4981.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2302, pruned_loss=0.04713, over 971784.80 frames.], batch size: 28, lr: 5.45e-04 +2022-05-04 14:49:42,737 INFO [train.py:715] (6/8) Epoch 3, batch 16900, loss[loss=0.1855, simple_loss=0.2569, pruned_loss=0.05702, over 4778.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2308, pruned_loss=0.04714, over 972543.40 frames.], batch size: 18, lr: 5.44e-04 +2022-05-04 14:50:24,690 INFO [train.py:715] (6/8) Epoch 3, batch 16950, loss[loss=0.1362, simple_loss=0.2142, pruned_loss=0.02912, over 4800.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2313, pruned_loss=0.04749, over 973212.48 frames.], batch size: 21, lr: 5.44e-04 +2022-05-04 14:51:07,235 INFO [train.py:715] (6/8) Epoch 3, batch 17000, 
loss[loss=0.1885, simple_loss=0.2566, pruned_loss=0.06021, over 4757.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2311, pruned_loss=0.04722, over 972659.61 frames.], batch size: 19, lr: 5.44e-04 +2022-05-04 14:51:47,562 INFO [train.py:715] (6/8) Epoch 3, batch 17050, loss[loss=0.1604, simple_loss=0.2228, pruned_loss=0.04899, over 4917.00 frames.], tot_loss[loss=0.162, simple_loss=0.2305, pruned_loss=0.04679, over 972667.45 frames.], batch size: 39, lr: 5.44e-04 +2022-05-04 14:52:29,483 INFO [train.py:715] (6/8) Epoch 3, batch 17100, loss[loss=0.1785, simple_loss=0.2539, pruned_loss=0.05154, over 4757.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2301, pruned_loss=0.04663, over 973294.14 frames.], batch size: 19, lr: 5.44e-04 +2022-05-04 14:53:11,182 INFO [train.py:715] (6/8) Epoch 3, batch 17150, loss[loss=0.1912, simple_loss=0.2507, pruned_loss=0.06584, over 4909.00 frames.], tot_loss[loss=0.1619, simple_loss=0.23, pruned_loss=0.04688, over 973616.43 frames.], batch size: 17, lr: 5.44e-04 +2022-05-04 14:53:52,359 INFO [train.py:715] (6/8) Epoch 3, batch 17200, loss[loss=0.1466, simple_loss=0.2166, pruned_loss=0.03832, over 4976.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2299, pruned_loss=0.04652, over 973293.58 frames.], batch size: 14, lr: 5.44e-04 +2022-05-04 14:54:33,057 INFO [train.py:715] (6/8) Epoch 3, batch 17250, loss[loss=0.161, simple_loss=0.2277, pruned_loss=0.04716, over 4918.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2299, pruned_loss=0.04636, over 972602.24 frames.], batch size: 23, lr: 5.44e-04 +2022-05-04 14:55:14,511 INFO [train.py:715] (6/8) Epoch 3, batch 17300, loss[loss=0.1854, simple_loss=0.2458, pruned_loss=0.06247, over 4962.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2294, pruned_loss=0.04602, over 973478.40 frames.], batch size: 39, lr: 5.44e-04 +2022-05-04 14:55:56,125 INFO [train.py:715] (6/8) Epoch 3, batch 17350, loss[loss=0.1708, simple_loss=0.2334, pruned_loss=0.05409, over 4773.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2298, pruned_loss=0.04635, over 973061.49 frames.], batch size: 18, lr: 5.43e-04 +2022-05-04 14:56:36,192 INFO [train.py:715] (6/8) Epoch 3, batch 17400, loss[loss=0.1822, simple_loss=0.2513, pruned_loss=0.05653, over 4962.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2312, pruned_loss=0.04697, over 972524.59 frames.], batch size: 24, lr: 5.43e-04 +2022-05-04 14:57:18,254 INFO [train.py:715] (6/8) Epoch 3, batch 17450, loss[loss=0.2085, simple_loss=0.2773, pruned_loss=0.06982, over 4974.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2309, pruned_loss=0.04676, over 972444.22 frames.], batch size: 25, lr: 5.43e-04 +2022-05-04 14:58:00,484 INFO [train.py:715] (6/8) Epoch 3, batch 17500, loss[loss=0.1713, simple_loss=0.2433, pruned_loss=0.04963, over 4805.00 frames.], tot_loss[loss=0.1634, simple_loss=0.232, pruned_loss=0.04735, over 972541.46 frames.], batch size: 21, lr: 5.43e-04 +2022-05-04 14:58:41,513 INFO [train.py:715] (6/8) Epoch 3, batch 17550, loss[loss=0.1435, simple_loss=0.2151, pruned_loss=0.03592, over 4734.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2297, pruned_loss=0.0462, over 971698.82 frames.], batch size: 16, lr: 5.43e-04 +2022-05-04 14:59:22,858 INFO [train.py:715] (6/8) Epoch 3, batch 17600, loss[loss=0.1143, simple_loss=0.1841, pruned_loss=0.02222, over 4789.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2298, pruned_loss=0.0463, over 971389.00 frames.], batch size: 14, lr: 5.43e-04 +2022-05-04 15:00:04,537 INFO [train.py:715] (6/8) Epoch 3, batch 17650, loss[loss=0.1739, 
simple_loss=0.2489, pruned_loss=0.04946, over 4932.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2302, pruned_loss=0.04663, over 972351.95 frames.], batch size: 23, lr: 5.43e-04 +2022-05-04 15:00:46,084 INFO [train.py:715] (6/8) Epoch 3, batch 17700, loss[loss=0.1538, simple_loss=0.2122, pruned_loss=0.04765, over 4980.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2292, pruned_loss=0.0462, over 972913.27 frames.], batch size: 35, lr: 5.43e-04 +2022-05-04 15:01:26,904 INFO [train.py:715] (6/8) Epoch 3, batch 17750, loss[loss=0.1855, simple_loss=0.2539, pruned_loss=0.0585, over 4897.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2294, pruned_loss=0.0462, over 972596.85 frames.], batch size: 17, lr: 5.43e-04 +2022-05-04 15:02:08,924 INFO [train.py:715] (6/8) Epoch 3, batch 17800, loss[loss=0.1691, simple_loss=0.2403, pruned_loss=0.04889, over 4929.00 frames.], tot_loss[loss=0.162, simple_loss=0.2305, pruned_loss=0.04672, over 972828.35 frames.], batch size: 29, lr: 5.42e-04 +2022-05-04 15:02:50,349 INFO [train.py:715] (6/8) Epoch 3, batch 17850, loss[loss=0.1429, simple_loss=0.2086, pruned_loss=0.03855, over 4883.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2299, pruned_loss=0.04599, over 973286.68 frames.], batch size: 22, lr: 5.42e-04 +2022-05-04 15:03:30,313 INFO [train.py:715] (6/8) Epoch 3, batch 17900, loss[loss=0.1193, simple_loss=0.1837, pruned_loss=0.02747, over 4743.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2305, pruned_loss=0.0466, over 972011.79 frames.], batch size: 12, lr: 5.42e-04 +2022-05-04 15:04:12,148 INFO [train.py:715] (6/8) Epoch 3, batch 17950, loss[loss=0.1564, simple_loss=0.2263, pruned_loss=0.04326, over 4977.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2293, pruned_loss=0.04553, over 972251.75 frames.], batch size: 25, lr: 5.42e-04 +2022-05-04 15:04:53,406 INFO [train.py:715] (6/8) Epoch 3, batch 18000, loss[loss=0.1875, simple_loss=0.2586, pruned_loss=0.0582, over 4816.00 frames.], tot_loss[loss=0.161, simple_loss=0.2301, pruned_loss=0.04597, over 971883.36 frames.], batch size: 27, lr: 5.42e-04 +2022-05-04 15:04:53,407 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 15:05:02,070 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1143, simple_loss=0.2002, pruned_loss=0.01414, over 914524.00 frames. 
+2022-05-04 15:05:43,870 INFO [train.py:715] (6/8) Epoch 3, batch 18050, loss[loss=0.1138, simple_loss=0.1862, pruned_loss=0.02073, over 4910.00 frames.], tot_loss[loss=0.1612, simple_loss=0.23, pruned_loss=0.04617, over 972674.11 frames.], batch size: 17, lr: 5.42e-04 +2022-05-04 15:06:25,506 INFO [train.py:715] (6/8) Epoch 3, batch 18100, loss[loss=0.1798, simple_loss=0.238, pruned_loss=0.06074, over 4853.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2295, pruned_loss=0.04615, over 972340.46 frames.], batch size: 34, lr: 5.42e-04 +2022-05-04 15:07:06,179 INFO [train.py:715] (6/8) Epoch 3, batch 18150, loss[loss=0.1517, simple_loss=0.232, pruned_loss=0.0357, over 4818.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2302, pruned_loss=0.04676, over 972467.63 frames.], batch size: 27, lr: 5.42e-04 +2022-05-04 15:07:47,687 INFO [train.py:715] (6/8) Epoch 3, batch 18200, loss[loss=0.1569, simple_loss=0.2268, pruned_loss=0.04354, over 4818.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2305, pruned_loss=0.04761, over 972495.55 frames.], batch size: 25, lr: 5.42e-04 +2022-05-04 15:08:29,477 INFO [train.py:715] (6/8) Epoch 3, batch 18250, loss[loss=0.1676, simple_loss=0.2329, pruned_loss=0.05114, over 4983.00 frames.], tot_loss[loss=0.163, simple_loss=0.2308, pruned_loss=0.0476, over 972792.40 frames.], batch size: 14, lr: 5.41e-04 +2022-05-04 15:09:10,293 INFO [train.py:715] (6/8) Epoch 3, batch 18300, loss[loss=0.1374, simple_loss=0.2009, pruned_loss=0.03697, over 4939.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2299, pruned_loss=0.04719, over 972436.47 frames.], batch size: 29, lr: 5.41e-04 +2022-05-04 15:09:51,604 INFO [train.py:715] (6/8) Epoch 3, batch 18350, loss[loss=0.1478, simple_loss=0.2274, pruned_loss=0.03415, over 4672.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2308, pruned_loss=0.04731, over 972086.76 frames.], batch size: 13, lr: 5.41e-04 +2022-05-04 15:10:33,030 INFO [train.py:715] (6/8) Epoch 3, batch 18400, loss[loss=0.1566, simple_loss=0.229, pruned_loss=0.0421, over 4875.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2317, pruned_loss=0.04823, over 972456.00 frames.], batch size: 16, lr: 5.41e-04 +2022-05-04 15:11:13,987 INFO [train.py:715] (6/8) Epoch 3, batch 18450, loss[loss=0.1971, simple_loss=0.2564, pruned_loss=0.06892, over 4830.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2321, pruned_loss=0.048, over 972396.32 frames.], batch size: 15, lr: 5.41e-04 +2022-05-04 15:11:55,024 INFO [train.py:715] (6/8) Epoch 3, batch 18500, loss[loss=0.1655, simple_loss=0.2299, pruned_loss=0.05053, over 4888.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2327, pruned_loss=0.04812, over 972153.23 frames.], batch size: 19, lr: 5.41e-04 +2022-05-04 15:12:36,407 INFO [train.py:715] (6/8) Epoch 3, batch 18550, loss[loss=0.1534, simple_loss=0.2209, pruned_loss=0.04295, over 4814.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2325, pruned_loss=0.04795, over 972206.43 frames.], batch size: 13, lr: 5.41e-04 +2022-05-04 15:13:18,631 INFO [train.py:715] (6/8) Epoch 3, batch 18600, loss[loss=0.2075, simple_loss=0.2654, pruned_loss=0.07484, over 4882.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2323, pruned_loss=0.04818, over 971996.22 frames.], batch size: 22, lr: 5.41e-04 +2022-05-04 15:13:58,623 INFO [train.py:715] (6/8) Epoch 3, batch 18650, loss[loss=0.1527, simple_loss=0.2312, pruned_loss=0.0371, over 4889.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2307, pruned_loss=0.04703, over 972134.62 frames.], batch size: 22, lr: 5.41e-04 +2022-05-04 15:14:39,323 INFO 
[train.py:715] (6/8) Epoch 3, batch 18700, loss[loss=0.161, simple_loss=0.2302, pruned_loss=0.04593, over 4680.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2312, pruned_loss=0.04723, over 973101.60 frames.], batch size: 15, lr: 5.40e-04 +2022-05-04 15:15:20,422 INFO [train.py:715] (6/8) Epoch 3, batch 18750, loss[loss=0.1432, simple_loss=0.2095, pruned_loss=0.03845, over 4703.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2319, pruned_loss=0.04745, over 973214.40 frames.], batch size: 15, lr: 5.40e-04 +2022-05-04 15:16:00,287 INFO [train.py:715] (6/8) Epoch 3, batch 18800, loss[loss=0.1311, simple_loss=0.2002, pruned_loss=0.03101, over 4783.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2315, pruned_loss=0.04738, over 972236.82 frames.], batch size: 18, lr: 5.40e-04 +2022-05-04 15:16:41,102 INFO [train.py:715] (6/8) Epoch 3, batch 18850, loss[loss=0.1627, simple_loss=0.2348, pruned_loss=0.04528, over 4746.00 frames.], tot_loss[loss=0.1631, simple_loss=0.232, pruned_loss=0.04707, over 971925.38 frames.], batch size: 16, lr: 5.40e-04 +2022-05-04 15:17:21,062 INFO [train.py:715] (6/8) Epoch 3, batch 18900, loss[loss=0.1798, simple_loss=0.2377, pruned_loss=0.06099, over 4787.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2334, pruned_loss=0.04839, over 972928.72 frames.], batch size: 17, lr: 5.40e-04 +2022-05-04 15:18:01,540 INFO [train.py:715] (6/8) Epoch 3, batch 18950, loss[loss=0.1822, simple_loss=0.2526, pruned_loss=0.05592, over 4871.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2329, pruned_loss=0.04784, over 973404.82 frames.], batch size: 16, lr: 5.40e-04 +2022-05-04 15:18:40,944 INFO [train.py:715] (6/8) Epoch 3, batch 19000, loss[loss=0.2148, simple_loss=0.2729, pruned_loss=0.07841, over 4843.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2339, pruned_loss=0.04844, over 972849.95 frames.], batch size: 15, lr: 5.40e-04 +2022-05-04 15:19:20,768 INFO [train.py:715] (6/8) Epoch 3, batch 19050, loss[loss=0.1715, simple_loss=0.2451, pruned_loss=0.04895, over 4784.00 frames.], tot_loss[loss=0.1657, simple_loss=0.234, pruned_loss=0.04871, over 972031.33 frames.], batch size: 18, lr: 5.40e-04 +2022-05-04 15:20:01,078 INFO [train.py:715] (6/8) Epoch 3, batch 19100, loss[loss=0.1789, simple_loss=0.2541, pruned_loss=0.05186, over 4765.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2332, pruned_loss=0.04795, over 971887.43 frames.], batch size: 19, lr: 5.40e-04 +2022-05-04 15:20:40,499 INFO [train.py:715] (6/8) Epoch 3, batch 19150, loss[loss=0.1335, simple_loss=0.2177, pruned_loss=0.0247, over 4864.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2333, pruned_loss=0.04773, over 971813.80 frames.], batch size: 22, lr: 5.40e-04 +2022-05-04 15:21:20,182 INFO [train.py:715] (6/8) Epoch 3, batch 19200, loss[loss=0.1316, simple_loss=0.2019, pruned_loss=0.03068, over 4962.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2334, pruned_loss=0.04781, over 972139.78 frames.], batch size: 21, lr: 5.39e-04 +2022-05-04 15:21:59,823 INFO [train.py:715] (6/8) Epoch 3, batch 19250, loss[loss=0.1563, simple_loss=0.2235, pruned_loss=0.04449, over 4995.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2324, pruned_loss=0.04749, over 973353.14 frames.], batch size: 16, lr: 5.39e-04 +2022-05-04 15:22:40,130 INFO [train.py:715] (6/8) Epoch 3, batch 19300, loss[loss=0.1571, simple_loss=0.2238, pruned_loss=0.04513, over 4776.00 frames.], tot_loss[loss=0.1641, simple_loss=0.233, pruned_loss=0.04764, over 973256.44 frames.], batch size: 17, lr: 5.39e-04 +2022-05-04 15:23:19,473 INFO [train.py:715] (6/8) 
Epoch 3, batch 19350, loss[loss=0.2867, simple_loss=0.3323, pruned_loss=0.1205, over 4931.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2324, pruned_loss=0.04748, over 972101.89 frames.], batch size: 39, lr: 5.39e-04 +2022-05-04 15:23:59,204 INFO [train.py:715] (6/8) Epoch 3, batch 19400, loss[loss=0.17, simple_loss=0.2335, pruned_loss=0.05324, over 4792.00 frames.], tot_loss[loss=0.163, simple_loss=0.2315, pruned_loss=0.04726, over 971558.00 frames.], batch size: 14, lr: 5.39e-04 +2022-05-04 15:24:39,294 INFO [train.py:715] (6/8) Epoch 3, batch 19450, loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03183, over 4979.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2312, pruned_loss=0.04704, over 972493.41 frames.], batch size: 16, lr: 5.39e-04 +2022-05-04 15:25:18,371 INFO [train.py:715] (6/8) Epoch 3, batch 19500, loss[loss=0.2226, simple_loss=0.269, pruned_loss=0.08807, over 4794.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2304, pruned_loss=0.04704, over 973109.94 frames.], batch size: 24, lr: 5.39e-04 +2022-05-04 15:25:58,129 INFO [train.py:715] (6/8) Epoch 3, batch 19550, loss[loss=0.1644, simple_loss=0.2246, pruned_loss=0.0521, over 4850.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2299, pruned_loss=0.04716, over 972114.21 frames.], batch size: 13, lr: 5.39e-04 +2022-05-04 15:26:37,669 INFO [train.py:715] (6/8) Epoch 3, batch 19600, loss[loss=0.1828, simple_loss=0.2415, pruned_loss=0.06212, over 4771.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2301, pruned_loss=0.04725, over 971603.25 frames.], batch size: 14, lr: 5.39e-04 +2022-05-04 15:27:17,577 INFO [train.py:715] (6/8) Epoch 3, batch 19650, loss[loss=0.1694, simple_loss=0.236, pruned_loss=0.05141, over 4938.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2308, pruned_loss=0.04753, over 972747.40 frames.], batch size: 21, lr: 5.38e-04 +2022-05-04 15:27:56,472 INFO [train.py:715] (6/8) Epoch 3, batch 19700, loss[loss=0.1898, simple_loss=0.2642, pruned_loss=0.05768, over 4773.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2317, pruned_loss=0.04836, over 973112.29 frames.], batch size: 17, lr: 5.38e-04 +2022-05-04 15:28:36,070 INFO [train.py:715] (6/8) Epoch 3, batch 19750, loss[loss=0.1952, simple_loss=0.2624, pruned_loss=0.06401, over 4865.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2314, pruned_loss=0.0482, over 973311.38 frames.], batch size: 38, lr: 5.38e-04 +2022-05-04 15:29:15,542 INFO [train.py:715] (6/8) Epoch 3, batch 19800, loss[loss=0.1847, simple_loss=0.2428, pruned_loss=0.06331, over 4966.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2315, pruned_loss=0.04791, over 973047.68 frames.], batch size: 15, lr: 5.38e-04 +2022-05-04 15:29:55,121 INFO [train.py:715] (6/8) Epoch 3, batch 19850, loss[loss=0.1669, simple_loss=0.2337, pruned_loss=0.05005, over 4893.00 frames.], tot_loss[loss=0.164, simple_loss=0.232, pruned_loss=0.04799, over 973178.70 frames.], batch size: 19, lr: 5.38e-04 +2022-05-04 15:30:34,820 INFO [train.py:715] (6/8) Epoch 3, batch 19900, loss[loss=0.1727, simple_loss=0.2359, pruned_loss=0.05475, over 4792.00 frames.], tot_loss[loss=0.163, simple_loss=0.2314, pruned_loss=0.04726, over 972303.08 frames.], batch size: 18, lr: 5.38e-04 +2022-05-04 15:31:15,115 INFO [train.py:715] (6/8) Epoch 3, batch 19950, loss[loss=0.1492, simple_loss=0.2221, pruned_loss=0.0382, over 4811.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2316, pruned_loss=0.04739, over 972382.92 frames.], batch size: 21, lr: 5.38e-04 +2022-05-04 15:31:54,890 INFO [train.py:715] (6/8) Epoch 3, batch 20000, 
loss[loss=0.1503, simple_loss=0.2208, pruned_loss=0.03992, over 4957.00 frames.], tot_loss[loss=0.1628, simple_loss=0.231, pruned_loss=0.04728, over 972202.45 frames.], batch size: 15, lr: 5.38e-04 +2022-05-04 15:32:34,160 INFO [train.py:715] (6/8) Epoch 3, batch 20050, loss[loss=0.171, simple_loss=0.2429, pruned_loss=0.04953, over 4932.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2311, pruned_loss=0.04723, over 972167.81 frames.], batch size: 29, lr: 5.38e-04 +2022-05-04 15:33:14,401 INFO [train.py:715] (6/8) Epoch 3, batch 20100, loss[loss=0.1366, simple_loss=0.2078, pruned_loss=0.0327, over 4865.00 frames.], tot_loss[loss=0.1636, simple_loss=0.232, pruned_loss=0.04758, over 972410.48 frames.], batch size: 30, lr: 5.37e-04 +2022-05-04 15:33:54,298 INFO [train.py:715] (6/8) Epoch 3, batch 20150, loss[loss=0.1703, simple_loss=0.2231, pruned_loss=0.0587, over 4975.00 frames.], tot_loss[loss=0.1623, simple_loss=0.231, pruned_loss=0.04674, over 971774.09 frames.], batch size: 31, lr: 5.37e-04 +2022-05-04 15:34:33,630 INFO [train.py:715] (6/8) Epoch 3, batch 20200, loss[loss=0.1545, simple_loss=0.2172, pruned_loss=0.04591, over 4803.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2296, pruned_loss=0.04615, over 971346.06 frames.], batch size: 17, lr: 5.37e-04 +2022-05-04 15:35:13,294 INFO [train.py:715] (6/8) Epoch 3, batch 20250, loss[loss=0.1719, simple_loss=0.2454, pruned_loss=0.04918, over 4778.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2304, pruned_loss=0.04614, over 971520.44 frames.], batch size: 17, lr: 5.37e-04 +2022-05-04 15:35:53,122 INFO [train.py:715] (6/8) Epoch 3, batch 20300, loss[loss=0.1577, simple_loss=0.2213, pruned_loss=0.04704, over 4901.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2304, pruned_loss=0.04654, over 972061.48 frames.], batch size: 17, lr: 5.37e-04 +2022-05-04 15:36:33,509 INFO [train.py:715] (6/8) Epoch 3, batch 20350, loss[loss=0.141, simple_loss=0.2135, pruned_loss=0.03428, over 4915.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2309, pruned_loss=0.04722, over 971881.49 frames.], batch size: 17, lr: 5.37e-04 +2022-05-04 15:37:12,088 INFO [train.py:715] (6/8) Epoch 3, batch 20400, loss[loss=0.1765, simple_loss=0.2375, pruned_loss=0.0578, over 4908.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2307, pruned_loss=0.04734, over 971283.47 frames.], batch size: 17, lr: 5.37e-04 +2022-05-04 15:37:51,789 INFO [train.py:715] (6/8) Epoch 3, batch 20450, loss[loss=0.2232, simple_loss=0.2882, pruned_loss=0.07911, over 4881.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2314, pruned_loss=0.04754, over 971842.36 frames.], batch size: 22, lr: 5.37e-04 +2022-05-04 15:38:31,867 INFO [train.py:715] (6/8) Epoch 3, batch 20500, loss[loss=0.2079, simple_loss=0.2667, pruned_loss=0.07459, over 4838.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2307, pruned_loss=0.04737, over 972315.36 frames.], batch size: 32, lr: 5.37e-04 +2022-05-04 15:39:10,986 INFO [train.py:715] (6/8) Epoch 3, batch 20550, loss[loss=0.1736, simple_loss=0.237, pruned_loss=0.05515, over 4851.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2319, pruned_loss=0.04815, over 972618.73 frames.], batch size: 32, lr: 5.36e-04 +2022-05-04 15:39:50,437 INFO [train.py:715] (6/8) Epoch 3, batch 20600, loss[loss=0.2094, simple_loss=0.278, pruned_loss=0.07038, over 4942.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2315, pruned_loss=0.04772, over 973655.99 frames.], batch size: 35, lr: 5.36e-04 +2022-05-04 15:40:30,881 INFO [train.py:715] (6/8) Epoch 3, batch 20650, loss[loss=0.1883, 
simple_loss=0.2556, pruned_loss=0.06046, over 4873.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2319, pruned_loss=0.04753, over 973284.13 frames.], batch size: 16, lr: 5.36e-04 +2022-05-04 15:41:10,756 INFO [train.py:715] (6/8) Epoch 3, batch 20700, loss[loss=0.1322, simple_loss=0.201, pruned_loss=0.03168, over 4823.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2312, pruned_loss=0.04718, over 973435.49 frames.], batch size: 13, lr: 5.36e-04 +2022-05-04 15:41:50,196 INFO [train.py:715] (6/8) Epoch 3, batch 20750, loss[loss=0.1884, simple_loss=0.2584, pruned_loss=0.0592, over 4880.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2312, pruned_loss=0.04696, over 972835.15 frames.], batch size: 32, lr: 5.36e-04 +2022-05-04 15:42:30,291 INFO [train.py:715] (6/8) Epoch 3, batch 20800, loss[loss=0.1655, simple_loss=0.2253, pruned_loss=0.0529, over 4873.00 frames.], tot_loss[loss=0.161, simple_loss=0.2302, pruned_loss=0.04592, over 972868.09 frames.], batch size: 30, lr: 5.36e-04 +2022-05-04 15:43:11,027 INFO [train.py:715] (6/8) Epoch 3, batch 20850, loss[loss=0.1818, simple_loss=0.258, pruned_loss=0.05281, over 4983.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2315, pruned_loss=0.04667, over 972344.44 frames.], batch size: 25, lr: 5.36e-04 +2022-05-04 15:43:50,803 INFO [train.py:715] (6/8) Epoch 3, batch 20900, loss[loss=0.1506, simple_loss=0.2284, pruned_loss=0.03639, over 4976.00 frames.], tot_loss[loss=0.1634, simple_loss=0.232, pruned_loss=0.04737, over 972460.24 frames.], batch size: 35, lr: 5.36e-04 +2022-05-04 15:44:31,208 INFO [train.py:715] (6/8) Epoch 3, batch 20950, loss[loss=0.1807, simple_loss=0.2445, pruned_loss=0.05846, over 4801.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2314, pruned_loss=0.04712, over 972799.38 frames.], batch size: 14, lr: 5.36e-04 +2022-05-04 15:45:11,743 INFO [train.py:715] (6/8) Epoch 3, batch 21000, loss[loss=0.1291, simple_loss=0.207, pruned_loss=0.02558, over 4954.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2308, pruned_loss=0.04675, over 973191.35 frames.], batch size: 21, lr: 5.36e-04 +2022-05-04 15:45:11,744 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 15:45:24,193 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1137, simple_loss=0.1999, pruned_loss=0.01377, over 914524.00 frames. 
+2022-05-04 15:46:04,603 INFO [train.py:715] (6/8) Epoch 3, batch 21050, loss[loss=0.1398, simple_loss=0.2097, pruned_loss=0.03499, over 4841.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2308, pruned_loss=0.04669, over 972213.48 frames.], batch size: 15, lr: 5.35e-04 +2022-05-04 15:46:45,381 INFO [train.py:715] (6/8) Epoch 3, batch 21100, loss[loss=0.197, simple_loss=0.2543, pruned_loss=0.06987, over 4777.00 frames.], tot_loss[loss=0.162, simple_loss=0.231, pruned_loss=0.04649, over 971740.29 frames.], batch size: 17, lr: 5.35e-04 +2022-05-04 15:47:25,759 INFO [train.py:715] (6/8) Epoch 3, batch 21150, loss[loss=0.1942, simple_loss=0.2696, pruned_loss=0.05941, over 4794.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2311, pruned_loss=0.0465, over 971617.67 frames.], batch size: 24, lr: 5.35e-04 +2022-05-04 15:48:08,564 INFO [train.py:715] (6/8) Epoch 3, batch 21200, loss[loss=0.1504, simple_loss=0.2044, pruned_loss=0.04816, over 4817.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2316, pruned_loss=0.04684, over 972124.44 frames.], batch size: 26, lr: 5.35e-04 +2022-05-04 15:48:49,625 INFO [train.py:715] (6/8) Epoch 3, batch 21250, loss[loss=0.1554, simple_loss=0.2235, pruned_loss=0.04365, over 4697.00 frames.], tot_loss[loss=0.163, simple_loss=0.2317, pruned_loss=0.04708, over 972037.45 frames.], batch size: 15, lr: 5.35e-04 +2022-05-04 15:49:28,343 INFO [train.py:715] (6/8) Epoch 3, batch 21300, loss[loss=0.1653, simple_loss=0.2313, pruned_loss=0.04968, over 4749.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2316, pruned_loss=0.04679, over 972353.38 frames.], batch size: 16, lr: 5.35e-04 +2022-05-04 15:50:10,524 INFO [train.py:715] (6/8) Epoch 3, batch 21350, loss[loss=0.1605, simple_loss=0.2203, pruned_loss=0.05032, over 4971.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2308, pruned_loss=0.04635, over 972755.99 frames.], batch size: 14, lr: 5.35e-04 +2022-05-04 15:50:51,356 INFO [train.py:715] (6/8) Epoch 3, batch 21400, loss[loss=0.1826, simple_loss=0.2502, pruned_loss=0.05748, over 4868.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2309, pruned_loss=0.0469, over 971997.42 frames.], batch size: 20, lr: 5.35e-04 +2022-05-04 15:51:30,338 INFO [train.py:715] (6/8) Epoch 3, batch 21450, loss[loss=0.162, simple_loss=0.2199, pruned_loss=0.05204, over 4786.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2301, pruned_loss=0.04633, over 972210.05 frames.], batch size: 21, lr: 5.35e-04 +2022-05-04 15:52:08,620 INFO [train.py:715] (6/8) Epoch 3, batch 21500, loss[loss=0.2125, simple_loss=0.2657, pruned_loss=0.07962, over 4970.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2303, pruned_loss=0.04631, over 972514.88 frames.], batch size: 15, lr: 5.34e-04 +2022-05-04 15:52:47,661 INFO [train.py:715] (6/8) Epoch 3, batch 21550, loss[loss=0.138, simple_loss=0.2024, pruned_loss=0.03679, over 4869.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2289, pruned_loss=0.04595, over 972418.46 frames.], batch size: 20, lr: 5.34e-04 +2022-05-04 15:53:27,189 INFO [train.py:715] (6/8) Epoch 3, batch 21600, loss[loss=0.1449, simple_loss=0.2183, pruned_loss=0.03572, over 4809.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2281, pruned_loss=0.04549, over 972323.59 frames.], batch size: 15, lr: 5.34e-04 +2022-05-04 15:54:06,106 INFO [train.py:715] (6/8) Epoch 3, batch 21650, loss[loss=0.1553, simple_loss=0.2241, pruned_loss=0.04322, over 4833.00 frames.], tot_loss[loss=0.1595, simple_loss=0.228, pruned_loss=0.04546, over 972893.12 frames.], batch size: 27, lr: 5.34e-04 +2022-05-04 15:54:46,386 
INFO [train.py:715] (6/8) Epoch 3, batch 21700, loss[loss=0.1548, simple_loss=0.2197, pruned_loss=0.04493, over 4767.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2287, pruned_loss=0.04576, over 972412.65 frames.], batch size: 14, lr: 5.34e-04 +2022-05-04 15:55:26,897 INFO [train.py:715] (6/8) Epoch 3, batch 21750, loss[loss=0.1965, simple_loss=0.2778, pruned_loss=0.05759, over 4881.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2296, pruned_loss=0.04609, over 972154.49 frames.], batch size: 22, lr: 5.34e-04 +2022-05-04 15:56:06,025 INFO [train.py:715] (6/8) Epoch 3, batch 21800, loss[loss=0.1865, simple_loss=0.2599, pruned_loss=0.05655, over 4943.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2298, pruned_loss=0.04643, over 972154.53 frames.], batch size: 21, lr: 5.34e-04 +2022-05-04 15:56:44,182 INFO [train.py:715] (6/8) Epoch 3, batch 21850, loss[loss=0.1579, simple_loss=0.2194, pruned_loss=0.04814, over 4886.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2286, pruned_loss=0.0459, over 972384.82 frames.], batch size: 32, lr: 5.34e-04 +2022-05-04 15:57:22,931 INFO [train.py:715] (6/8) Epoch 3, batch 21900, loss[loss=0.1448, simple_loss=0.2084, pruned_loss=0.04059, over 4841.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2283, pruned_loss=0.04591, over 971920.54 frames.], batch size: 32, lr: 5.34e-04 +2022-05-04 15:58:03,639 INFO [train.py:715] (6/8) Epoch 3, batch 21950, loss[loss=0.1487, simple_loss=0.217, pruned_loss=0.04024, over 4834.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2297, pruned_loss=0.04673, over 971702.60 frames.], batch size: 25, lr: 5.34e-04 +2022-05-04 15:58:43,254 INFO [train.py:715] (6/8) Epoch 3, batch 22000, loss[loss=0.1598, simple_loss=0.221, pruned_loss=0.04931, over 4785.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2289, pruned_loss=0.04611, over 972135.98 frames.], batch size: 18, lr: 5.33e-04 +2022-05-04 15:59:23,575 INFO [train.py:715] (6/8) Epoch 3, batch 22050, loss[loss=0.175, simple_loss=0.2342, pruned_loss=0.05788, over 4799.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2307, pruned_loss=0.04716, over 971821.29 frames.], batch size: 14, lr: 5.33e-04 +2022-05-04 16:00:04,304 INFO [train.py:715] (6/8) Epoch 3, batch 22100, loss[loss=0.1691, simple_loss=0.2495, pruned_loss=0.04435, over 4980.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2306, pruned_loss=0.04712, over 972329.22 frames.], batch size: 15, lr: 5.33e-04 +2022-05-04 16:00:44,830 INFO [train.py:715] (6/8) Epoch 3, batch 22150, loss[loss=0.164, simple_loss=0.2346, pruned_loss=0.04665, over 4881.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2297, pruned_loss=0.04647, over 971927.89 frames.], batch size: 20, lr: 5.33e-04 +2022-05-04 16:01:24,045 INFO [train.py:715] (6/8) Epoch 3, batch 22200, loss[loss=0.1728, simple_loss=0.2574, pruned_loss=0.04411, over 4775.00 frames.], tot_loss[loss=0.161, simple_loss=0.2298, pruned_loss=0.04608, over 971937.60 frames.], batch size: 18, lr: 5.33e-04 +2022-05-04 16:02:04,301 INFO [train.py:715] (6/8) Epoch 3, batch 22250, loss[loss=0.1308, simple_loss=0.2026, pruned_loss=0.02953, over 4898.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2303, pruned_loss=0.04626, over 971926.14 frames.], batch size: 19, lr: 5.33e-04 +2022-05-04 16:02:45,557 INFO [train.py:715] (6/8) Epoch 3, batch 22300, loss[loss=0.1508, simple_loss=0.2232, pruned_loss=0.03918, over 4871.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2296, pruned_loss=0.04582, over 972130.05 frames.], batch size: 13, lr: 5.33e-04 +2022-05-04 16:03:24,557 INFO [train.py:715] 
(6/8) Epoch 3, batch 22350, loss[loss=0.1487, simple_loss=0.2086, pruned_loss=0.04438, over 4771.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2299, pruned_loss=0.04616, over 972135.28 frames.], batch size: 12, lr: 5.33e-04 +2022-05-04 16:04:04,624 INFO [train.py:715] (6/8) Epoch 3, batch 22400, loss[loss=0.1447, simple_loss=0.2237, pruned_loss=0.03292, over 4933.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2301, pruned_loss=0.04588, over 971691.02 frames.], batch size: 21, lr: 5.33e-04 +2022-05-04 16:04:45,525 INFO [train.py:715] (6/8) Epoch 3, batch 22450, loss[loss=0.1469, simple_loss=0.222, pruned_loss=0.03587, over 4873.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2303, pruned_loss=0.046, over 972445.03 frames.], batch size: 16, lr: 5.32e-04 +2022-05-04 16:05:25,982 INFO [train.py:715] (6/8) Epoch 3, batch 22500, loss[loss=0.1721, simple_loss=0.2501, pruned_loss=0.04701, over 4989.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2305, pruned_loss=0.04635, over 972577.87 frames.], batch size: 27, lr: 5.32e-04 +2022-05-04 16:06:05,390 INFO [train.py:715] (6/8) Epoch 3, batch 22550, loss[loss=0.1459, simple_loss=0.2, pruned_loss=0.04593, over 4891.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2296, pruned_loss=0.0458, over 972237.92 frames.], batch size: 32, lr: 5.32e-04 +2022-05-04 16:06:45,639 INFO [train.py:715] (6/8) Epoch 3, batch 22600, loss[loss=0.196, simple_loss=0.2552, pruned_loss=0.06837, over 4895.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2299, pruned_loss=0.04611, over 971762.92 frames.], batch size: 39, lr: 5.32e-04 +2022-05-04 16:07:26,488 INFO [train.py:715] (6/8) Epoch 3, batch 22650, loss[loss=0.1952, simple_loss=0.2585, pruned_loss=0.0659, over 4916.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2295, pruned_loss=0.04615, over 972081.63 frames.], batch size: 18, lr: 5.32e-04 +2022-05-04 16:08:06,314 INFO [train.py:715] (6/8) Epoch 3, batch 22700, loss[loss=0.1619, simple_loss=0.2322, pruned_loss=0.0458, over 4897.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2297, pruned_loss=0.04635, over 972326.14 frames.], batch size: 19, lr: 5.32e-04 +2022-05-04 16:08:46,709 INFO [train.py:715] (6/8) Epoch 3, batch 22750, loss[loss=0.1387, simple_loss=0.2146, pruned_loss=0.03139, over 4956.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2305, pruned_loss=0.04634, over 972412.29 frames.], batch size: 21, lr: 5.32e-04 +2022-05-04 16:09:27,111 INFO [train.py:715] (6/8) Epoch 3, batch 22800, loss[loss=0.188, simple_loss=0.261, pruned_loss=0.0575, over 4849.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2294, pruned_loss=0.04558, over 973450.78 frames.], batch size: 20, lr: 5.32e-04 +2022-05-04 16:10:07,181 INFO [train.py:715] (6/8) Epoch 3, batch 22850, loss[loss=0.1862, simple_loss=0.2334, pruned_loss=0.0695, over 4931.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2298, pruned_loss=0.04568, over 972975.02 frames.], batch size: 35, lr: 5.32e-04 +2022-05-04 16:10:46,922 INFO [train.py:715] (6/8) Epoch 3, batch 22900, loss[loss=0.178, simple_loss=0.2498, pruned_loss=0.0531, over 4861.00 frames.], tot_loss[loss=0.161, simple_loss=0.2299, pruned_loss=0.04603, over 972831.27 frames.], batch size: 30, lr: 5.32e-04 +2022-05-04 16:11:27,350 INFO [train.py:715] (6/8) Epoch 3, batch 22950, loss[loss=0.192, simple_loss=0.2501, pruned_loss=0.06694, over 4751.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2307, pruned_loss=0.04693, over 972195.36 frames.], batch size: 19, lr: 5.31e-04 +2022-05-04 16:12:08,457 INFO [train.py:715] (6/8) Epoch 3, batch 23000, 
loss[loss=0.191, simple_loss=0.2421, pruned_loss=0.06994, over 4982.00 frames.], tot_loss[loss=0.1624, simple_loss=0.231, pruned_loss=0.04693, over 971957.38 frames.], batch size: 31, lr: 5.31e-04 +2022-05-04 16:12:48,303 INFO [train.py:715] (6/8) Epoch 3, batch 23050, loss[loss=0.1539, simple_loss=0.2193, pruned_loss=0.04425, over 4748.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2314, pruned_loss=0.04737, over 971858.19 frames.], batch size: 16, lr: 5.31e-04 +2022-05-04 16:13:28,632 INFO [train.py:715] (6/8) Epoch 3, batch 23100, loss[loss=0.1526, simple_loss=0.2197, pruned_loss=0.0428, over 4759.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2318, pruned_loss=0.04748, over 971839.98 frames.], batch size: 19, lr: 5.31e-04 +2022-05-04 16:14:09,397 INFO [train.py:715] (6/8) Epoch 3, batch 23150, loss[loss=0.1541, simple_loss=0.2261, pruned_loss=0.04105, over 4869.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2306, pruned_loss=0.04658, over 972239.98 frames.], batch size: 20, lr: 5.31e-04 +2022-05-04 16:14:49,970 INFO [train.py:715] (6/8) Epoch 3, batch 23200, loss[loss=0.1722, simple_loss=0.2486, pruned_loss=0.04791, over 4888.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2304, pruned_loss=0.04604, over 972326.88 frames.], batch size: 19, lr: 5.31e-04 +2022-05-04 16:15:29,495 INFO [train.py:715] (6/8) Epoch 3, batch 23250, loss[loss=0.1452, simple_loss=0.2204, pruned_loss=0.035, over 4798.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2307, pruned_loss=0.04604, over 972404.06 frames.], batch size: 24, lr: 5.31e-04 +2022-05-04 16:16:10,286 INFO [train.py:715] (6/8) Epoch 3, batch 23300, loss[loss=0.1781, simple_loss=0.2525, pruned_loss=0.0519, over 4756.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2308, pruned_loss=0.04611, over 972359.14 frames.], batch size: 16, lr: 5.31e-04 +2022-05-04 16:16:49,872 INFO [train.py:715] (6/8) Epoch 3, batch 23350, loss[loss=0.1631, simple_loss=0.2284, pruned_loss=0.04888, over 4789.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2308, pruned_loss=0.04652, over 972504.60 frames.], batch size: 21, lr: 5.31e-04 +2022-05-04 16:17:27,678 INFO [train.py:715] (6/8) Epoch 3, batch 23400, loss[loss=0.1578, simple_loss=0.2203, pruned_loss=0.04765, over 4780.00 frames.], tot_loss[loss=0.1614, simple_loss=0.23, pruned_loss=0.04642, over 971967.00 frames.], batch size: 14, lr: 5.30e-04 +2022-05-04 16:18:06,227 INFO [train.py:715] (6/8) Epoch 3, batch 23450, loss[loss=0.1545, simple_loss=0.2369, pruned_loss=0.03611, over 4869.00 frames.], tot_loss[loss=0.1614, simple_loss=0.23, pruned_loss=0.04639, over 972055.72 frames.], batch size: 22, lr: 5.30e-04 +2022-05-04 16:18:44,912 INFO [train.py:715] (6/8) Epoch 3, batch 23500, loss[loss=0.1778, simple_loss=0.2475, pruned_loss=0.05402, over 4923.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2303, pruned_loss=0.04649, over 972513.81 frames.], batch size: 18, lr: 5.30e-04 +2022-05-04 16:19:24,109 INFO [train.py:715] (6/8) Epoch 3, batch 23550, loss[loss=0.1701, simple_loss=0.2329, pruned_loss=0.05364, over 4869.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2298, pruned_loss=0.04638, over 972379.42 frames.], batch size: 39, lr: 5.30e-04 +2022-05-04 16:20:05,335 INFO [train.py:715] (6/8) Epoch 3, batch 23600, loss[loss=0.1621, simple_loss=0.23, pruned_loss=0.04715, over 4745.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2311, pruned_loss=0.04711, over 972399.17 frames.], batch size: 16, lr: 5.30e-04 +2022-05-04 16:20:44,865 INFO [train.py:715] (6/8) Epoch 3, batch 23650, loss[loss=0.1547, 
simple_loss=0.2381, pruned_loss=0.03562, over 4965.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2305, pruned_loss=0.04652, over 972260.33 frames.], batch size: 24, lr: 5.30e-04 +2022-05-04 16:21:24,821 INFO [train.py:715] (6/8) Epoch 3, batch 23700, loss[loss=0.1459, simple_loss=0.2113, pruned_loss=0.04025, over 4826.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2312, pruned_loss=0.04676, over 972704.97 frames.], batch size: 15, lr: 5.30e-04 +2022-05-04 16:22:03,572 INFO [train.py:715] (6/8) Epoch 3, batch 23750, loss[loss=0.1459, simple_loss=0.2175, pruned_loss=0.03719, over 4949.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2309, pruned_loss=0.04673, over 973891.50 frames.], batch size: 29, lr: 5.30e-04 +2022-05-04 16:22:43,184 INFO [train.py:715] (6/8) Epoch 3, batch 23800, loss[loss=0.1372, simple_loss=0.2057, pruned_loss=0.03436, over 4767.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2315, pruned_loss=0.04734, over 972869.34 frames.], batch size: 14, lr: 5.30e-04 +2022-05-04 16:23:22,781 INFO [train.py:715] (6/8) Epoch 3, batch 23850, loss[loss=0.133, simple_loss=0.1992, pruned_loss=0.03336, over 4808.00 frames.], tot_loss[loss=0.1625, simple_loss=0.231, pruned_loss=0.04699, over 972723.22 frames.], batch size: 12, lr: 5.30e-04 +2022-05-04 16:24:02,500 INFO [train.py:715] (6/8) Epoch 3, batch 23900, loss[loss=0.1419, simple_loss=0.2167, pruned_loss=0.03356, over 4777.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2304, pruned_loss=0.04641, over 972269.79 frames.], batch size: 18, lr: 5.29e-04 +2022-05-04 16:24:41,554 INFO [train.py:715] (6/8) Epoch 3, batch 23950, loss[loss=0.1928, simple_loss=0.2577, pruned_loss=0.06398, over 4946.00 frames.], tot_loss[loss=0.1625, simple_loss=0.231, pruned_loss=0.04698, over 972606.17 frames.], batch size: 39, lr: 5.29e-04 +2022-05-04 16:25:20,402 INFO [train.py:715] (6/8) Epoch 3, batch 24000, loss[loss=0.1733, simple_loss=0.2386, pruned_loss=0.054, over 4931.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2303, pruned_loss=0.04652, over 973088.70 frames.], batch size: 39, lr: 5.29e-04 +2022-05-04 16:25:20,402 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 16:25:32,861 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1132, simple_loss=0.1992, pruned_loss=0.0136, over 914524.00 frames. 
+2022-05-04 16:26:12,216 INFO [train.py:715] (6/8) Epoch 3, batch 24050, loss[loss=0.1481, simple_loss=0.2228, pruned_loss=0.03676, over 4697.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2308, pruned_loss=0.04709, over 973311.01 frames.], batch size: 15, lr: 5.29e-04 +2022-05-04 16:26:52,064 INFO [train.py:715] (6/8) Epoch 3, batch 24100, loss[loss=0.1969, simple_loss=0.2523, pruned_loss=0.07082, over 4807.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2309, pruned_loss=0.04715, over 972290.63 frames.], batch size: 12, lr: 5.29e-04 +2022-05-04 16:27:30,865 INFO [train.py:715] (6/8) Epoch 3, batch 24150, loss[loss=0.1545, simple_loss=0.2257, pruned_loss=0.04167, over 4874.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2315, pruned_loss=0.04744, over 971747.77 frames.], batch size: 16, lr: 5.29e-04 +2022-05-04 16:28:10,126 INFO [train.py:715] (6/8) Epoch 3, batch 24200, loss[loss=0.137, simple_loss=0.2069, pruned_loss=0.03359, over 4775.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2307, pruned_loss=0.04695, over 971828.83 frames.], batch size: 14, lr: 5.29e-04 +2022-05-04 16:28:50,518 INFO [train.py:715] (6/8) Epoch 3, batch 24250, loss[loss=0.1618, simple_loss=0.2172, pruned_loss=0.05322, over 4672.00 frames.], tot_loss[loss=0.1623, simple_loss=0.231, pruned_loss=0.0468, over 971686.98 frames.], batch size: 13, lr: 5.29e-04 +2022-05-04 16:29:30,768 INFO [train.py:715] (6/8) Epoch 3, batch 24300, loss[loss=0.1661, simple_loss=0.235, pruned_loss=0.04863, over 4905.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2306, pruned_loss=0.04656, over 972403.35 frames.], batch size: 39, lr: 5.29e-04 +2022-05-04 16:30:10,089 INFO [train.py:715] (6/8) Epoch 3, batch 24350, loss[loss=0.1563, simple_loss=0.2236, pruned_loss=0.0445, over 4843.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2301, pruned_loss=0.04624, over 972368.21 frames.], batch size: 30, lr: 5.29e-04 +2022-05-04 16:30:49,733 INFO [train.py:715] (6/8) Epoch 3, batch 24400, loss[loss=0.1471, simple_loss=0.215, pruned_loss=0.03957, over 4743.00 frames.], tot_loss[loss=0.162, simple_loss=0.2306, pruned_loss=0.04665, over 972082.79 frames.], batch size: 16, lr: 5.28e-04 +2022-05-04 16:31:29,802 INFO [train.py:715] (6/8) Epoch 3, batch 24450, loss[loss=0.1698, simple_loss=0.2342, pruned_loss=0.05274, over 4831.00 frames.], tot_loss[loss=0.161, simple_loss=0.2294, pruned_loss=0.04634, over 972613.07 frames.], batch size: 26, lr: 5.28e-04 +2022-05-04 16:32:09,119 INFO [train.py:715] (6/8) Epoch 3, batch 24500, loss[loss=0.156, simple_loss=0.2252, pruned_loss=0.04347, over 4838.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2294, pruned_loss=0.04624, over 972282.83 frames.], batch size: 30, lr: 5.28e-04 +2022-05-04 16:32:48,535 INFO [train.py:715] (6/8) Epoch 3, batch 24550, loss[loss=0.1615, simple_loss=0.2276, pruned_loss=0.04769, over 4946.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2295, pruned_loss=0.0464, over 972207.09 frames.], batch size: 23, lr: 5.28e-04 +2022-05-04 16:33:28,753 INFO [train.py:715] (6/8) Epoch 3, batch 24600, loss[loss=0.1372, simple_loss=0.2164, pruned_loss=0.02901, over 4913.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2286, pruned_loss=0.04583, over 971539.06 frames.], batch size: 17, lr: 5.28e-04 +2022-05-04 16:34:08,291 INFO [train.py:715] (6/8) Epoch 3, batch 24650, loss[loss=0.1562, simple_loss=0.2321, pruned_loss=0.04017, over 4904.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2289, pruned_loss=0.04626, over 971720.22 frames.], batch size: 19, lr: 5.28e-04 +2022-05-04 16:34:47,794 
INFO [train.py:715] (6/8) Epoch 3, batch 24700, loss[loss=0.1796, simple_loss=0.2363, pruned_loss=0.0614, over 4947.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2297, pruned_loss=0.047, over 971991.02 frames.], batch size: 35, lr: 5.28e-04 +2022-05-04 16:35:26,413 INFO [train.py:715] (6/8) Epoch 3, batch 24750, loss[loss=0.1463, simple_loss=0.2111, pruned_loss=0.04081, over 4760.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2297, pruned_loss=0.04665, over 972405.01 frames.], batch size: 16, lr: 5.28e-04 +2022-05-04 16:36:07,062 INFO [train.py:715] (6/8) Epoch 3, batch 24800, loss[loss=0.1487, simple_loss=0.2264, pruned_loss=0.03556, over 4816.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2306, pruned_loss=0.04658, over 971760.02 frames.], batch size: 27, lr: 5.28e-04 +2022-05-04 16:36:46,788 INFO [train.py:715] (6/8) Epoch 3, batch 24850, loss[loss=0.1719, simple_loss=0.2267, pruned_loss=0.05858, over 4877.00 frames.], tot_loss[loss=0.1614, simple_loss=0.23, pruned_loss=0.0464, over 971578.09 frames.], batch size: 16, lr: 5.28e-04 +2022-05-04 16:37:25,564 INFO [train.py:715] (6/8) Epoch 3, batch 24900, loss[loss=0.1754, simple_loss=0.2397, pruned_loss=0.05553, over 4891.00 frames.], tot_loss[loss=0.162, simple_loss=0.2305, pruned_loss=0.04674, over 972341.61 frames.], batch size: 22, lr: 5.27e-04 +2022-05-04 16:38:05,481 INFO [train.py:715] (6/8) Epoch 3, batch 24950, loss[loss=0.1719, simple_loss=0.2409, pruned_loss=0.05146, over 4990.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2297, pruned_loss=0.04626, over 972863.86 frames.], batch size: 25, lr: 5.27e-04 +2022-05-04 16:38:45,657 INFO [train.py:715] (6/8) Epoch 3, batch 25000, loss[loss=0.1402, simple_loss=0.2193, pruned_loss=0.03056, over 4922.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2302, pruned_loss=0.04659, over 973387.27 frames.], batch size: 23, lr: 5.27e-04 +2022-05-04 16:39:25,198 INFO [train.py:715] (6/8) Epoch 3, batch 25050, loss[loss=0.177, simple_loss=0.2376, pruned_loss=0.05818, over 4959.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2303, pruned_loss=0.04665, over 973011.33 frames.], batch size: 14, lr: 5.27e-04 +2022-05-04 16:40:04,371 INFO [train.py:715] (6/8) Epoch 3, batch 25100, loss[loss=0.1488, simple_loss=0.221, pruned_loss=0.03826, over 4908.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2302, pruned_loss=0.04636, over 971504.18 frames.], batch size: 17, lr: 5.27e-04 +2022-05-04 16:40:44,398 INFO [train.py:715] (6/8) Epoch 3, batch 25150, loss[loss=0.1386, simple_loss=0.2145, pruned_loss=0.03138, over 4699.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2303, pruned_loss=0.04617, over 972253.64 frames.], batch size: 15, lr: 5.27e-04 +2022-05-04 16:41:23,894 INFO [train.py:715] (6/8) Epoch 3, batch 25200, loss[loss=0.1759, simple_loss=0.2552, pruned_loss=0.04829, over 4821.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2303, pruned_loss=0.04641, over 972724.11 frames.], batch size: 25, lr: 5.27e-04 +2022-05-04 16:42:03,030 INFO [train.py:715] (6/8) Epoch 3, batch 25250, loss[loss=0.1554, simple_loss=0.2303, pruned_loss=0.04029, over 4813.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2298, pruned_loss=0.04639, over 972132.81 frames.], batch size: 25, lr: 5.27e-04 +2022-05-04 16:42:43,126 INFO [train.py:715] (6/8) Epoch 3, batch 25300, loss[loss=0.1436, simple_loss=0.2087, pruned_loss=0.03919, over 4952.00 frames.], tot_loss[loss=0.162, simple_loss=0.2304, pruned_loss=0.04681, over 972459.37 frames.], batch size: 39, lr: 5.27e-04 +2022-05-04 16:43:22,957 INFO [train.py:715] (6/8) 
Epoch 3, batch 25350, loss[loss=0.2145, simple_loss=0.2869, pruned_loss=0.07105, over 4846.00 frames.], tot_loss[loss=0.163, simple_loss=0.2314, pruned_loss=0.04723, over 972765.01 frames.], batch size: 20, lr: 5.26e-04 +2022-05-04 16:44:02,966 INFO [train.py:715] (6/8) Epoch 3, batch 25400, loss[loss=0.1734, simple_loss=0.2428, pruned_loss=0.05201, over 4784.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2316, pruned_loss=0.04761, over 973064.83 frames.], batch size: 17, lr: 5.26e-04 +2022-05-04 16:44:42,165 INFO [train.py:715] (6/8) Epoch 3, batch 25450, loss[loss=0.1359, simple_loss=0.2068, pruned_loss=0.03248, over 4791.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2311, pruned_loss=0.0472, over 973545.59 frames.], batch size: 12, lr: 5.26e-04 +2022-05-04 16:45:22,338 INFO [train.py:715] (6/8) Epoch 3, batch 25500, loss[loss=0.1393, simple_loss=0.2034, pruned_loss=0.03763, over 4780.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2321, pruned_loss=0.0473, over 972999.00 frames.], batch size: 12, lr: 5.26e-04 +2022-05-04 16:46:02,171 INFO [train.py:715] (6/8) Epoch 3, batch 25550, loss[loss=0.1409, simple_loss=0.2081, pruned_loss=0.03679, over 4780.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2318, pruned_loss=0.04699, over 972589.80 frames.], batch size: 17, lr: 5.26e-04 +2022-05-04 16:46:41,626 INFO [train.py:715] (6/8) Epoch 3, batch 25600, loss[loss=0.1479, simple_loss=0.216, pruned_loss=0.03994, over 4798.00 frames.], tot_loss[loss=0.162, simple_loss=0.2306, pruned_loss=0.04665, over 971907.15 frames.], batch size: 24, lr: 5.26e-04 +2022-05-04 16:47:22,011 INFO [train.py:715] (6/8) Epoch 3, batch 25650, loss[loss=0.1396, simple_loss=0.207, pruned_loss=0.0361, over 4833.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2304, pruned_loss=0.04666, over 971805.33 frames.], batch size: 15, lr: 5.26e-04 +2022-05-04 16:48:02,207 INFO [train.py:715] (6/8) Epoch 3, batch 25700, loss[loss=0.1559, simple_loss=0.227, pruned_loss=0.04244, over 4913.00 frames.], tot_loss[loss=0.1614, simple_loss=0.23, pruned_loss=0.04637, over 972151.62 frames.], batch size: 17, lr: 5.26e-04 +2022-05-04 16:48:41,537 INFO [train.py:715] (6/8) Epoch 3, batch 25750, loss[loss=0.1755, simple_loss=0.2373, pruned_loss=0.05688, over 4918.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2296, pruned_loss=0.04605, over 971897.19 frames.], batch size: 39, lr: 5.26e-04 +2022-05-04 16:49:21,102 INFO [train.py:715] (6/8) Epoch 3, batch 25800, loss[loss=0.1607, simple_loss=0.2313, pruned_loss=0.04508, over 4821.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2303, pruned_loss=0.04643, over 971824.19 frames.], batch size: 27, lr: 5.26e-04 +2022-05-04 16:50:01,080 INFO [train.py:715] (6/8) Epoch 3, batch 25850, loss[loss=0.1534, simple_loss=0.2238, pruned_loss=0.04149, over 4888.00 frames.], tot_loss[loss=0.161, simple_loss=0.2295, pruned_loss=0.04626, over 971959.78 frames.], batch size: 22, lr: 5.25e-04 +2022-05-04 16:50:39,400 INFO [train.py:715] (6/8) Epoch 3, batch 25900, loss[loss=0.1709, simple_loss=0.243, pruned_loss=0.04936, over 4904.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2291, pruned_loss=0.046, over 971222.71 frames.], batch size: 19, lr: 5.25e-04 +2022-05-04 16:51:18,330 INFO [train.py:715] (6/8) Epoch 3, batch 25950, loss[loss=0.1614, simple_loss=0.2331, pruned_loss=0.04489, over 4769.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2285, pruned_loss=0.04593, over 971955.05 frames.], batch size: 18, lr: 5.25e-04 +2022-05-04 16:51:58,436 INFO [train.py:715] (6/8) Epoch 3, batch 26000, 
loss[loss=0.1923, simple_loss=0.2482, pruned_loss=0.06818, over 4820.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2299, pruned_loss=0.04676, over 972008.89 frames.], batch size: 27, lr: 5.25e-04 +2022-05-04 16:52:37,680 INFO [train.py:715] (6/8) Epoch 3, batch 26050, loss[loss=0.1587, simple_loss=0.225, pruned_loss=0.0462, over 4860.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2286, pruned_loss=0.04591, over 971487.04 frames.], batch size: 20, lr: 5.25e-04 +2022-05-04 16:53:16,016 INFO [train.py:715] (6/8) Epoch 3, batch 26100, loss[loss=0.1497, simple_loss=0.2131, pruned_loss=0.04314, over 4898.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2288, pruned_loss=0.04569, over 971686.39 frames.], batch size: 19, lr: 5.25e-04 +2022-05-04 16:53:55,505 INFO [train.py:715] (6/8) Epoch 3, batch 26150, loss[loss=0.1758, simple_loss=0.2317, pruned_loss=0.05995, over 4946.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2287, pruned_loss=0.04612, over 972364.22 frames.], batch size: 39, lr: 5.25e-04 +2022-05-04 16:54:35,541 INFO [train.py:715] (6/8) Epoch 3, batch 26200, loss[loss=0.1891, simple_loss=0.2735, pruned_loss=0.05239, over 4905.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2275, pruned_loss=0.04494, over 972185.78 frames.], batch size: 18, lr: 5.25e-04 +2022-05-04 16:55:13,650 INFO [train.py:715] (6/8) Epoch 3, batch 26250, loss[loss=0.1907, simple_loss=0.2566, pruned_loss=0.06245, over 4938.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2289, pruned_loss=0.04536, over 972580.11 frames.], batch size: 21, lr: 5.25e-04 +2022-05-04 16:55:52,859 INFO [train.py:715] (6/8) Epoch 3, batch 26300, loss[loss=0.1483, simple_loss=0.2168, pruned_loss=0.03989, over 4965.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2283, pruned_loss=0.04497, over 972148.56 frames.], batch size: 14, lr: 5.25e-04 +2022-05-04 16:56:32,822 INFO [train.py:715] (6/8) Epoch 3, batch 26350, loss[loss=0.1445, simple_loss=0.2104, pruned_loss=0.0393, over 4853.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2288, pruned_loss=0.04531, over 972240.65 frames.], batch size: 34, lr: 5.24e-04 +2022-05-04 16:57:12,189 INFO [train.py:715] (6/8) Epoch 3, batch 26400, loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03313, over 4778.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2291, pruned_loss=0.0459, over 972311.47 frames.], batch size: 18, lr: 5.24e-04 +2022-05-04 16:57:51,176 INFO [train.py:715] (6/8) Epoch 3, batch 26450, loss[loss=0.1634, simple_loss=0.2292, pruned_loss=0.04883, over 4865.00 frames.], tot_loss[loss=0.1592, simple_loss=0.228, pruned_loss=0.0452, over 972950.54 frames.], batch size: 20, lr: 5.24e-04 +2022-05-04 16:58:30,424 INFO [train.py:715] (6/8) Epoch 3, batch 26500, loss[loss=0.1386, simple_loss=0.2144, pruned_loss=0.03141, over 4991.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2276, pruned_loss=0.04497, over 973325.00 frames.], batch size: 16, lr: 5.24e-04 +2022-05-04 16:59:09,910 INFO [train.py:715] (6/8) Epoch 3, batch 26550, loss[loss=0.1825, simple_loss=0.2372, pruned_loss=0.06387, over 4927.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2274, pruned_loss=0.04511, over 972215.22 frames.], batch size: 17, lr: 5.24e-04 +2022-05-04 16:59:48,117 INFO [train.py:715] (6/8) Epoch 3, batch 26600, loss[loss=0.1622, simple_loss=0.2357, pruned_loss=0.0443, over 4811.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2283, pruned_loss=0.04518, over 971433.58 frames.], batch size: 26, lr: 5.24e-04 +2022-05-04 17:00:27,336 INFO [train.py:715] (6/8) Epoch 3, batch 26650, loss[loss=0.1817, 
simple_loss=0.2458, pruned_loss=0.0588, over 4892.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2291, pruned_loss=0.04592, over 972736.45 frames.], batch size: 22, lr: 5.24e-04 +2022-05-04 17:01:07,876 INFO [train.py:715] (6/8) Epoch 3, batch 26700, loss[loss=0.1919, simple_loss=0.2486, pruned_loss=0.06762, over 4859.00 frames.], tot_loss[loss=0.16, simple_loss=0.2285, pruned_loss=0.04572, over 972159.81 frames.], batch size: 34, lr: 5.24e-04 +2022-05-04 17:01:47,356 INFO [train.py:715] (6/8) Epoch 3, batch 26750, loss[loss=0.1385, simple_loss=0.2155, pruned_loss=0.03079, over 4926.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2291, pruned_loss=0.0459, over 972550.13 frames.], batch size: 29, lr: 5.24e-04 +2022-05-04 17:02:26,603 INFO [train.py:715] (6/8) Epoch 3, batch 26800, loss[loss=0.1843, simple_loss=0.2596, pruned_loss=0.05446, over 4773.00 frames.], tot_loss[loss=0.162, simple_loss=0.2304, pruned_loss=0.04681, over 973357.69 frames.], batch size: 17, lr: 5.24e-04 +2022-05-04 17:03:06,726 INFO [train.py:715] (6/8) Epoch 3, batch 26850, loss[loss=0.1724, simple_loss=0.2342, pruned_loss=0.0553, over 4906.00 frames.], tot_loss[loss=0.162, simple_loss=0.2304, pruned_loss=0.04676, over 972808.82 frames.], batch size: 18, lr: 5.23e-04 +2022-05-04 17:03:47,106 INFO [train.py:715] (6/8) Epoch 3, batch 26900, loss[loss=0.1486, simple_loss=0.2248, pruned_loss=0.03617, over 4855.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2297, pruned_loss=0.04655, over 974022.95 frames.], batch size: 20, lr: 5.23e-04 +2022-05-04 17:04:26,664 INFO [train.py:715] (6/8) Epoch 3, batch 26950, loss[loss=0.1765, simple_loss=0.2421, pruned_loss=0.05543, over 4966.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2305, pruned_loss=0.04714, over 972710.90 frames.], batch size: 28, lr: 5.23e-04 +2022-05-04 17:05:05,431 INFO [train.py:715] (6/8) Epoch 3, batch 27000, loss[loss=0.1541, simple_loss=0.2191, pruned_loss=0.04453, over 4784.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2312, pruned_loss=0.04762, over 972419.58 frames.], batch size: 18, lr: 5.23e-04 +2022-05-04 17:05:05,432 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 17:05:14,909 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1134, simple_loss=0.1995, pruned_loss=0.01366, over 914524.00 frames. 
+2022-05-04 17:05:54,569 INFO [train.py:715] (6/8) Epoch 3, batch 27050, loss[loss=0.1813, simple_loss=0.2437, pruned_loss=0.05943, over 4849.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2323, pruned_loss=0.04838, over 973345.86 frames.], batch size: 30, lr: 5.23e-04 +2022-05-04 17:06:34,893 INFO [train.py:715] (6/8) Epoch 3, batch 27100, loss[loss=0.1623, simple_loss=0.2259, pruned_loss=0.04929, over 4933.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2322, pruned_loss=0.0477, over 973137.46 frames.], batch size: 21, lr: 5.23e-04 +2022-05-04 17:07:14,172 INFO [train.py:715] (6/8) Epoch 3, batch 27150, loss[loss=0.1479, simple_loss=0.228, pruned_loss=0.03385, over 4908.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2309, pruned_loss=0.04691, over 973468.90 frames.], batch size: 17, lr: 5.23e-04 +2022-05-04 17:07:52,933 INFO [train.py:715] (6/8) Epoch 3, batch 27200, loss[loss=0.1443, simple_loss=0.2142, pruned_loss=0.03722, over 4917.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2302, pruned_loss=0.04613, over 974351.62 frames.], batch size: 29, lr: 5.23e-04 +2022-05-04 17:08:32,671 INFO [train.py:715] (6/8) Epoch 3, batch 27250, loss[loss=0.1507, simple_loss=0.2106, pruned_loss=0.04536, over 4821.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2303, pruned_loss=0.04633, over 972506.30 frames.], batch size: 13, lr: 5.23e-04 +2022-05-04 17:09:12,369 INFO [train.py:715] (6/8) Epoch 3, batch 27300, loss[loss=0.1975, simple_loss=0.2509, pruned_loss=0.07208, over 4979.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2301, pruned_loss=0.04579, over 973411.83 frames.], batch size: 15, lr: 5.23e-04 +2022-05-04 17:09:51,025 INFO [train.py:715] (6/8) Epoch 3, batch 27350, loss[loss=0.1629, simple_loss=0.2357, pruned_loss=0.04502, over 4931.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2297, pruned_loss=0.04563, over 973071.17 frames.], batch size: 23, lr: 5.22e-04 +2022-05-04 17:10:30,273 INFO [train.py:715] (6/8) Epoch 3, batch 27400, loss[loss=0.1564, simple_loss=0.2358, pruned_loss=0.03854, over 4800.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2301, pruned_loss=0.04637, over 972646.53 frames.], batch size: 21, lr: 5.22e-04 +2022-05-04 17:11:10,419 INFO [train.py:715] (6/8) Epoch 3, batch 27450, loss[loss=0.1656, simple_loss=0.2222, pruned_loss=0.05447, over 4852.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2294, pruned_loss=0.04572, over 971985.55 frames.], batch size: 32, lr: 5.22e-04 +2022-05-04 17:11:49,746 INFO [train.py:715] (6/8) Epoch 3, batch 27500, loss[loss=0.1877, simple_loss=0.2428, pruned_loss=0.06629, over 4969.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2294, pruned_loss=0.0456, over 973104.30 frames.], batch size: 15, lr: 5.22e-04 +2022-05-04 17:12:28,644 INFO [train.py:715] (6/8) Epoch 3, batch 27550, loss[loss=0.1496, simple_loss=0.2162, pruned_loss=0.04153, over 4835.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2294, pruned_loss=0.04567, over 972390.64 frames.], batch size: 26, lr: 5.22e-04 +2022-05-04 17:13:08,356 INFO [train.py:715] (6/8) Epoch 3, batch 27600, loss[loss=0.1418, simple_loss=0.2164, pruned_loss=0.03365, over 4768.00 frames.], tot_loss[loss=0.1597, simple_loss=0.229, pruned_loss=0.04523, over 971312.32 frames.], batch size: 14, lr: 5.22e-04 +2022-05-04 17:13:47,999 INFO [train.py:715] (6/8) Epoch 3, batch 27650, loss[loss=0.1885, simple_loss=0.2534, pruned_loss=0.06186, over 4918.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2283, pruned_loss=0.04473, over 972002.36 frames.], batch size: 18, lr: 5.22e-04 +2022-05-04 
17:14:26,623 INFO [train.py:715] (6/8) Epoch 3, batch 27700, loss[loss=0.1739, simple_loss=0.245, pruned_loss=0.05138, over 4773.00 frames.], tot_loss[loss=0.1598, simple_loss=0.229, pruned_loss=0.04531, over 971950.98 frames.], batch size: 14, lr: 5.22e-04 +2022-05-04 17:15:06,398 INFO [train.py:715] (6/8) Epoch 3, batch 27750, loss[loss=0.149, simple_loss=0.2297, pruned_loss=0.03417, over 4776.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2288, pruned_loss=0.04509, over 971763.61 frames.], batch size: 18, lr: 5.22e-04 +2022-05-04 17:15:46,353 INFO [train.py:715] (6/8) Epoch 3, batch 27800, loss[loss=0.1744, simple_loss=0.2415, pruned_loss=0.05369, over 4782.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2277, pruned_loss=0.04481, over 972563.13 frames.], batch size: 14, lr: 5.22e-04 +2022-05-04 17:16:25,747 INFO [train.py:715] (6/8) Epoch 3, batch 27850, loss[loss=0.189, simple_loss=0.262, pruned_loss=0.05801, over 4727.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2281, pruned_loss=0.04551, over 973175.84 frames.], batch size: 16, lr: 5.21e-04 +2022-05-04 17:17:04,213 INFO [train.py:715] (6/8) Epoch 3, batch 27900, loss[loss=0.1508, simple_loss=0.2236, pruned_loss=0.03894, over 4776.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2279, pruned_loss=0.0453, over 972635.21 frames.], batch size: 17, lr: 5.21e-04 +2022-05-04 17:17:43,815 INFO [train.py:715] (6/8) Epoch 3, batch 27950, loss[loss=0.1303, simple_loss=0.2039, pruned_loss=0.02833, over 4939.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2279, pruned_loss=0.04515, over 972581.55 frames.], batch size: 23, lr: 5.21e-04 +2022-05-04 17:18:23,718 INFO [train.py:715] (6/8) Epoch 3, batch 28000, loss[loss=0.1465, simple_loss=0.2269, pruned_loss=0.0331, over 4773.00 frames.], tot_loss[loss=0.159, simple_loss=0.2281, pruned_loss=0.04496, over 972775.80 frames.], batch size: 19, lr: 5.21e-04 +2022-05-04 17:19:02,280 INFO [train.py:715] (6/8) Epoch 3, batch 28050, loss[loss=0.1636, simple_loss=0.2305, pruned_loss=0.04839, over 4845.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2285, pruned_loss=0.04555, over 972504.44 frames.], batch size: 32, lr: 5.21e-04 +2022-05-04 17:19:41,713 INFO [train.py:715] (6/8) Epoch 3, batch 28100, loss[loss=0.204, simple_loss=0.2705, pruned_loss=0.06878, over 4837.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2285, pruned_loss=0.04568, over 972729.65 frames.], batch size: 30, lr: 5.21e-04 +2022-05-04 17:20:21,592 INFO [train.py:715] (6/8) Epoch 3, batch 28150, loss[loss=0.1586, simple_loss=0.2285, pruned_loss=0.04437, over 4795.00 frames.], tot_loss[loss=0.16, simple_loss=0.2287, pruned_loss=0.04563, over 972851.16 frames.], batch size: 18, lr: 5.21e-04 +2022-05-04 17:21:00,810 INFO [train.py:715] (6/8) Epoch 3, batch 28200, loss[loss=0.1421, simple_loss=0.2156, pruned_loss=0.03427, over 4958.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2292, pruned_loss=0.04601, over 973427.30 frames.], batch size: 35, lr: 5.21e-04 +2022-05-04 17:21:39,661 INFO [train.py:715] (6/8) Epoch 3, batch 28250, loss[loss=0.1426, simple_loss=0.2246, pruned_loss=0.03026, over 4957.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2292, pruned_loss=0.04555, over 973424.71 frames.], batch size: 21, lr: 5.21e-04 +2022-05-04 17:22:19,001 INFO [train.py:715] (6/8) Epoch 3, batch 28300, loss[loss=0.1293, simple_loss=0.2044, pruned_loss=0.02708, over 4694.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2296, pruned_loss=0.04576, over 972133.51 frames.], batch size: 15, lr: 5.21e-04 +2022-05-04 17:22:58,006 INFO 
[train.py:715] (6/8) Epoch 3, batch 28350, loss[loss=0.1313, simple_loss=0.2125, pruned_loss=0.02503, over 4970.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2287, pruned_loss=0.04541, over 972070.35 frames.], batch size: 24, lr: 5.21e-04 +2022-05-04 17:23:37,196 INFO [train.py:715] (6/8) Epoch 3, batch 28400, loss[loss=0.1427, simple_loss=0.2031, pruned_loss=0.04119, over 4961.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2292, pruned_loss=0.04563, over 971960.93 frames.], batch size: 15, lr: 5.20e-04 +2022-05-04 17:24:15,831 INFO [train.py:715] (6/8) Epoch 3, batch 28450, loss[loss=0.1519, simple_loss=0.23, pruned_loss=0.03694, over 4775.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2295, pruned_loss=0.04589, over 971638.05 frames.], batch size: 14, lr: 5.20e-04 +2022-05-04 17:24:55,568 INFO [train.py:715] (6/8) Epoch 3, batch 28500, loss[loss=0.1383, simple_loss=0.2144, pruned_loss=0.03107, over 4967.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2294, pruned_loss=0.0457, over 971663.95 frames.], batch size: 25, lr: 5.20e-04 +2022-05-04 17:25:34,507 INFO [train.py:715] (6/8) Epoch 3, batch 28550, loss[loss=0.1707, simple_loss=0.243, pruned_loss=0.04918, over 4940.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2293, pruned_loss=0.04523, over 972192.66 frames.], batch size: 21, lr: 5.20e-04 +2022-05-04 17:26:13,421 INFO [train.py:715] (6/8) Epoch 3, batch 28600, loss[loss=0.15, simple_loss=0.2266, pruned_loss=0.0367, over 4851.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2298, pruned_loss=0.0458, over 972646.40 frames.], batch size: 13, lr: 5.20e-04 +2022-05-04 17:26:53,137 INFO [train.py:715] (6/8) Epoch 3, batch 28650, loss[loss=0.1626, simple_loss=0.2334, pruned_loss=0.04584, over 4823.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2291, pruned_loss=0.04566, over 971888.82 frames.], batch size: 27, lr: 5.20e-04 +2022-05-04 17:27:33,010 INFO [train.py:715] (6/8) Epoch 3, batch 28700, loss[loss=0.1779, simple_loss=0.2446, pruned_loss=0.05555, over 4975.00 frames.], tot_loss[loss=0.159, simple_loss=0.2284, pruned_loss=0.04484, over 972055.92 frames.], batch size: 15, lr: 5.20e-04 +2022-05-04 17:28:12,159 INFO [train.py:715] (6/8) Epoch 3, batch 28750, loss[loss=0.1307, simple_loss=0.1994, pruned_loss=0.03097, over 4819.00 frames.], tot_loss[loss=0.1597, simple_loss=0.229, pruned_loss=0.04519, over 973058.93 frames.], batch size: 27, lr: 5.20e-04 +2022-05-04 17:28:52,003 INFO [train.py:715] (6/8) Epoch 3, batch 28800, loss[loss=0.1509, simple_loss=0.2097, pruned_loss=0.04611, over 4801.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2291, pruned_loss=0.04564, over 972233.96 frames.], batch size: 13, lr: 5.20e-04 +2022-05-04 17:29:32,019 INFO [train.py:715] (6/8) Epoch 3, batch 28850, loss[loss=0.1481, simple_loss=0.2247, pruned_loss=0.03574, over 4934.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2305, pruned_loss=0.04598, over 972354.51 frames.], batch size: 29, lr: 5.20e-04 +2022-05-04 17:30:11,202 INFO [train.py:715] (6/8) Epoch 3, batch 28900, loss[loss=0.1628, simple_loss=0.2225, pruned_loss=0.05151, over 4804.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2303, pruned_loss=0.04613, over 972841.44 frames.], batch size: 25, lr: 5.19e-04 +2022-05-04 17:30:50,083 INFO [train.py:715] (6/8) Epoch 3, batch 28950, loss[loss=0.1822, simple_loss=0.2502, pruned_loss=0.05706, over 4924.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2301, pruned_loss=0.04624, over 971906.20 frames.], batch size: 18, lr: 5.19e-04 +2022-05-04 17:31:29,815 INFO [train.py:715] (6/8) Epoch 
3, batch 29000, loss[loss=0.1761, simple_loss=0.2319, pruned_loss=0.06013, over 4741.00 frames.], tot_loss[loss=0.1623, simple_loss=0.231, pruned_loss=0.04677, over 971603.23 frames.], batch size: 16, lr: 5.19e-04 +2022-05-04 17:32:10,060 INFO [train.py:715] (6/8) Epoch 3, batch 29050, loss[loss=0.1544, simple_loss=0.2292, pruned_loss=0.03986, over 4870.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2304, pruned_loss=0.04658, over 971073.13 frames.], batch size: 22, lr: 5.19e-04 +2022-05-04 17:32:48,618 INFO [train.py:715] (6/8) Epoch 3, batch 29100, loss[loss=0.1496, simple_loss=0.2153, pruned_loss=0.0419, over 4782.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2302, pruned_loss=0.04644, over 970536.77 frames.], batch size: 18, lr: 5.19e-04 +2022-05-04 17:33:28,198 INFO [train.py:715] (6/8) Epoch 3, batch 29150, loss[loss=0.155, simple_loss=0.2161, pruned_loss=0.04693, over 4840.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2298, pruned_loss=0.0459, over 970169.26 frames.], batch size: 32, lr: 5.19e-04 +2022-05-04 17:34:08,094 INFO [train.py:715] (6/8) Epoch 3, batch 29200, loss[loss=0.1697, simple_loss=0.2294, pruned_loss=0.05499, over 4920.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2303, pruned_loss=0.04601, over 971109.23 frames.], batch size: 29, lr: 5.19e-04 +2022-05-04 17:34:47,191 INFO [train.py:715] (6/8) Epoch 3, batch 29250, loss[loss=0.1488, simple_loss=0.215, pruned_loss=0.04125, over 4872.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2305, pruned_loss=0.04606, over 971383.14 frames.], batch size: 22, lr: 5.19e-04 +2022-05-04 17:35:26,073 INFO [train.py:715] (6/8) Epoch 3, batch 29300, loss[loss=0.1587, simple_loss=0.2389, pruned_loss=0.03921, over 4953.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2301, pruned_loss=0.04633, over 971735.06 frames.], batch size: 21, lr: 5.19e-04 +2022-05-04 17:36:06,264 INFO [train.py:715] (6/8) Epoch 3, batch 29350, loss[loss=0.1518, simple_loss=0.2159, pruned_loss=0.04389, over 4740.00 frames.], tot_loss[loss=0.1612, simple_loss=0.23, pruned_loss=0.04624, over 972097.04 frames.], batch size: 16, lr: 5.19e-04 +2022-05-04 17:36:45,939 INFO [train.py:715] (6/8) Epoch 3, batch 29400, loss[loss=0.14, simple_loss=0.2122, pruned_loss=0.03392, over 4880.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2306, pruned_loss=0.04629, over 971633.77 frames.], batch size: 22, lr: 5.18e-04 +2022-05-04 17:37:24,687 INFO [train.py:715] (6/8) Epoch 3, batch 29450, loss[loss=0.1421, simple_loss=0.2152, pruned_loss=0.03448, over 4824.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2315, pruned_loss=0.04675, over 972062.55 frames.], batch size: 26, lr: 5.18e-04 +2022-05-04 17:38:03,874 INFO [train.py:715] (6/8) Epoch 3, batch 29500, loss[loss=0.155, simple_loss=0.2288, pruned_loss=0.04063, over 4948.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2309, pruned_loss=0.04684, over 972665.76 frames.], batch size: 23, lr: 5.18e-04 +2022-05-04 17:38:43,455 INFO [train.py:715] (6/8) Epoch 3, batch 29550, loss[loss=0.1513, simple_loss=0.223, pruned_loss=0.03978, over 4697.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2309, pruned_loss=0.04694, over 971899.30 frames.], batch size: 15, lr: 5.18e-04 +2022-05-04 17:39:22,771 INFO [train.py:715] (6/8) Epoch 3, batch 29600, loss[loss=0.1894, simple_loss=0.2474, pruned_loss=0.06569, over 4812.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2308, pruned_loss=0.04674, over 971248.69 frames.], batch size: 13, lr: 5.18e-04 +2022-05-04 17:40:01,841 INFO [train.py:715] (6/8) Epoch 3, batch 29650, 
loss[loss=0.1547, simple_loss=0.2196, pruned_loss=0.04492, over 4828.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2305, pruned_loss=0.0466, over 971267.00 frames.], batch size: 12, lr: 5.18e-04 +2022-05-04 17:40:41,988 INFO [train.py:715] (6/8) Epoch 3, batch 29700, loss[loss=0.1574, simple_loss=0.2276, pruned_loss=0.04358, over 4649.00 frames.], tot_loss[loss=0.162, simple_loss=0.2305, pruned_loss=0.04677, over 970982.14 frames.], batch size: 13, lr: 5.18e-04 +2022-05-04 17:41:22,019 INFO [train.py:715] (6/8) Epoch 3, batch 29750, loss[loss=0.151, simple_loss=0.2087, pruned_loss=0.04669, over 4813.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2291, pruned_loss=0.04605, over 970739.82 frames.], batch size: 21, lr: 5.18e-04 +2022-05-04 17:42:00,528 INFO [train.py:715] (6/8) Epoch 3, batch 29800, loss[loss=0.2356, simple_loss=0.2655, pruned_loss=0.1028, over 4703.00 frames.], tot_loss[loss=0.16, simple_loss=0.2287, pruned_loss=0.0457, over 970990.15 frames.], batch size: 15, lr: 5.18e-04 +2022-05-04 17:42:40,509 INFO [train.py:715] (6/8) Epoch 3, batch 29850, loss[loss=0.1562, simple_loss=0.2204, pruned_loss=0.04595, over 4964.00 frames.], tot_loss[loss=0.1609, simple_loss=0.229, pruned_loss=0.04637, over 971570.69 frames.], batch size: 35, lr: 5.18e-04 +2022-05-04 17:43:20,049 INFO [train.py:715] (6/8) Epoch 3, batch 29900, loss[loss=0.1702, simple_loss=0.2401, pruned_loss=0.05018, over 4815.00 frames.], tot_loss[loss=0.1612, simple_loss=0.229, pruned_loss=0.04664, over 972487.32 frames.], batch size: 21, lr: 5.18e-04 +2022-05-04 17:43:58,723 INFO [train.py:715] (6/8) Epoch 3, batch 29950, loss[loss=0.1462, simple_loss=0.2223, pruned_loss=0.03507, over 4899.00 frames.], tot_loss[loss=0.1608, simple_loss=0.229, pruned_loss=0.04635, over 972505.64 frames.], batch size: 22, lr: 5.17e-04 +2022-05-04 17:44:37,454 INFO [train.py:715] (6/8) Epoch 3, batch 30000, loss[loss=0.1405, simple_loss=0.2163, pruned_loss=0.03233, over 4807.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2285, pruned_loss=0.046, over 973395.28 frames.], batch size: 24, lr: 5.17e-04 +2022-05-04 17:44:37,455 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 17:44:47,857 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1135, simple_loss=0.1993, pruned_loss=0.01381, over 914524.00 frames. 
+2022-05-04 17:45:26,666 INFO [train.py:715] (6/8) Epoch 3, batch 30050, loss[loss=0.1702, simple_loss=0.2426, pruned_loss=0.04891, over 4738.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2285, pruned_loss=0.04558, over 972392.00 frames.], batch size: 16, lr: 5.17e-04 +2022-05-04 17:46:06,308 INFO [train.py:715] (6/8) Epoch 3, batch 30100, loss[loss=0.1611, simple_loss=0.2356, pruned_loss=0.04325, over 4897.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2281, pruned_loss=0.04507, over 973066.77 frames.], batch size: 22, lr: 5.17e-04 +2022-05-04 17:46:46,378 INFO [train.py:715] (6/8) Epoch 3, batch 30150, loss[loss=0.1417, simple_loss=0.2063, pruned_loss=0.03859, over 4646.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2276, pruned_loss=0.04482, over 972009.77 frames.], batch size: 13, lr: 5.17e-04 +2022-05-04 17:47:24,506 INFO [train.py:715] (6/8) Epoch 3, batch 30200, loss[loss=0.1457, simple_loss=0.2096, pruned_loss=0.04088, over 4927.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2274, pruned_loss=0.04459, over 972179.55 frames.], batch size: 21, lr: 5.17e-04 +2022-05-04 17:48:04,129 INFO [train.py:715] (6/8) Epoch 3, batch 30250, loss[loss=0.1646, simple_loss=0.2353, pruned_loss=0.04694, over 4952.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2282, pruned_loss=0.04506, over 971910.01 frames.], batch size: 24, lr: 5.17e-04 +2022-05-04 17:48:44,313 INFO [train.py:715] (6/8) Epoch 3, batch 30300, loss[loss=0.1749, simple_loss=0.2324, pruned_loss=0.05868, over 4795.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2286, pruned_loss=0.04558, over 971750.05 frames.], batch size: 21, lr: 5.17e-04 +2022-05-04 17:49:23,083 INFO [train.py:715] (6/8) Epoch 3, batch 30350, loss[loss=0.1258, simple_loss=0.1929, pruned_loss=0.02931, over 4784.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2286, pruned_loss=0.04541, over 971622.65 frames.], batch size: 12, lr: 5.17e-04 +2022-05-04 17:50:02,739 INFO [train.py:715] (6/8) Epoch 3, batch 30400, loss[loss=0.1306, simple_loss=0.2037, pruned_loss=0.02876, over 4843.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2291, pruned_loss=0.04575, over 971735.53 frames.], batch size: 20, lr: 5.17e-04 +2022-05-04 17:50:42,524 INFO [train.py:715] (6/8) Epoch 3, batch 30450, loss[loss=0.1256, simple_loss=0.1902, pruned_loss=0.03049, over 4874.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2298, pruned_loss=0.04638, over 971838.38 frames.], batch size: 16, lr: 5.16e-04 +2022-05-04 17:51:22,936 INFO [train.py:715] (6/8) Epoch 3, batch 30500, loss[loss=0.1938, simple_loss=0.2518, pruned_loss=0.06788, over 4937.00 frames.], tot_loss[loss=0.1613, simple_loss=0.23, pruned_loss=0.04625, over 972580.63 frames.], batch size: 35, lr: 5.16e-04 +2022-05-04 17:52:02,158 INFO [train.py:715] (6/8) Epoch 3, batch 30550, loss[loss=0.1471, simple_loss=0.2239, pruned_loss=0.03512, over 4952.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2288, pruned_loss=0.04566, over 971564.85 frames.], batch size: 21, lr: 5.16e-04 +2022-05-04 17:52:41,691 INFO [train.py:715] (6/8) Epoch 3, batch 30600, loss[loss=0.1732, simple_loss=0.2386, pruned_loss=0.05391, over 4981.00 frames.], tot_loss[loss=0.161, simple_loss=0.2297, pruned_loss=0.04611, over 972042.07 frames.], batch size: 31, lr: 5.16e-04 +2022-05-04 17:53:21,646 INFO [train.py:715] (6/8) Epoch 3, batch 30650, loss[loss=0.1534, simple_loss=0.2248, pruned_loss=0.041, over 4952.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2297, pruned_loss=0.04568, over 972624.16 frames.], batch size: 29, lr: 5.16e-04 +2022-05-04 
17:54:00,308 INFO [train.py:715] (6/8) Epoch 3, batch 30700, loss[loss=0.1464, simple_loss=0.2228, pruned_loss=0.03498, over 4782.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2303, pruned_loss=0.04615, over 972290.24 frames.], batch size: 14, lr: 5.16e-04 +2022-05-04 17:54:39,868 INFO [train.py:715] (6/8) Epoch 3, batch 30750, loss[loss=0.1423, simple_loss=0.2208, pruned_loss=0.03184, over 4817.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2299, pruned_loss=0.04592, over 971660.74 frames.], batch size: 27, lr: 5.16e-04 +2022-05-04 17:55:19,277 INFO [train.py:715] (6/8) Epoch 3, batch 30800, loss[loss=0.1607, simple_loss=0.2334, pruned_loss=0.04399, over 4958.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2307, pruned_loss=0.04641, over 970849.44 frames.], batch size: 14, lr: 5.16e-04 +2022-05-04 17:55:59,089 INFO [train.py:715] (6/8) Epoch 3, batch 30850, loss[loss=0.1433, simple_loss=0.2167, pruned_loss=0.03489, over 4987.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2296, pruned_loss=0.04589, over 970635.75 frames.], batch size: 15, lr: 5.16e-04 +2022-05-04 17:56:37,369 INFO [train.py:715] (6/8) Epoch 3, batch 30900, loss[loss=0.1596, simple_loss=0.2243, pruned_loss=0.04749, over 4802.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2297, pruned_loss=0.04582, over 970956.23 frames.], batch size: 21, lr: 5.16e-04 +2022-05-04 17:57:16,438 INFO [train.py:715] (6/8) Epoch 3, batch 30950, loss[loss=0.1485, simple_loss=0.2161, pruned_loss=0.04044, over 4768.00 frames.], tot_loss[loss=0.161, simple_loss=0.2299, pruned_loss=0.04601, over 972043.48 frames.], batch size: 18, lr: 5.15e-04 +2022-05-04 17:57:55,759 INFO [train.py:715] (6/8) Epoch 3, batch 31000, loss[loss=0.143, simple_loss=0.2276, pruned_loss=0.02919, over 4859.00 frames.], tot_loss[loss=0.162, simple_loss=0.2312, pruned_loss=0.04638, over 972746.12 frames.], batch size: 22, lr: 5.15e-04 +2022-05-04 17:58:35,036 INFO [train.py:715] (6/8) Epoch 3, batch 31050, loss[loss=0.1603, simple_loss=0.227, pruned_loss=0.04683, over 4982.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2305, pruned_loss=0.04592, over 973052.78 frames.], batch size: 15, lr: 5.15e-04 +2022-05-04 17:59:13,607 INFO [train.py:715] (6/8) Epoch 3, batch 31100, loss[loss=0.1508, simple_loss=0.2334, pruned_loss=0.03415, over 4876.00 frames.], tot_loss[loss=0.1615, simple_loss=0.231, pruned_loss=0.04605, over 972367.67 frames.], batch size: 16, lr: 5.15e-04 +2022-05-04 17:59:53,190 INFO [train.py:715] (6/8) Epoch 3, batch 31150, loss[loss=0.1237, simple_loss=0.193, pruned_loss=0.02719, over 4728.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2311, pruned_loss=0.04613, over 972364.55 frames.], batch size: 12, lr: 5.15e-04 +2022-05-04 18:00:32,422 INFO [train.py:715] (6/8) Epoch 3, batch 31200, loss[loss=0.1502, simple_loss=0.2175, pruned_loss=0.04147, over 4879.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2299, pruned_loss=0.04571, over 972115.02 frames.], batch size: 16, lr: 5.15e-04 +2022-05-04 18:01:11,062 INFO [train.py:715] (6/8) Epoch 3, batch 31250, loss[loss=0.178, simple_loss=0.2467, pruned_loss=0.05464, over 4819.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2298, pruned_loss=0.04603, over 971947.14 frames.], batch size: 27, lr: 5.15e-04 +2022-05-04 18:01:50,137 INFO [train.py:715] (6/8) Epoch 3, batch 31300, loss[loss=0.1371, simple_loss=0.2083, pruned_loss=0.03292, over 4946.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2302, pruned_loss=0.04651, over 972283.25 frames.], batch size: 24, lr: 5.15e-04 +2022-05-04 18:02:29,486 INFO 
[train.py:715] (6/8) Epoch 3, batch 31350, loss[loss=0.1558, simple_loss=0.2212, pruned_loss=0.0452, over 4954.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2299, pruned_loss=0.04637, over 971237.95 frames.], batch size: 24, lr: 5.15e-04 +2022-05-04 18:03:08,649 INFO [train.py:715] (6/8) Epoch 3, batch 31400, loss[loss=0.1629, simple_loss=0.2314, pruned_loss=0.04725, over 4771.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2301, pruned_loss=0.04678, over 971718.11 frames.], batch size: 18, lr: 5.15e-04 +2022-05-04 18:03:47,233 INFO [train.py:715] (6/8) Epoch 3, batch 31450, loss[loss=0.1868, simple_loss=0.2408, pruned_loss=0.06639, over 4827.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2306, pruned_loss=0.04692, over 972770.04 frames.], batch size: 25, lr: 5.15e-04 +2022-05-04 18:04:26,979 INFO [train.py:715] (6/8) Epoch 3, batch 31500, loss[loss=0.1902, simple_loss=0.2534, pruned_loss=0.06348, over 4701.00 frames.], tot_loss[loss=0.1626, simple_loss=0.231, pruned_loss=0.04709, over 972230.24 frames.], batch size: 15, lr: 5.14e-04 +2022-05-04 18:05:06,851 INFO [train.py:715] (6/8) Epoch 3, batch 31550, loss[loss=0.1581, simple_loss=0.2362, pruned_loss=0.03998, over 4941.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2304, pruned_loss=0.04661, over 972636.55 frames.], batch size: 29, lr: 5.14e-04 +2022-05-04 18:05:47,993 INFO [train.py:715] (6/8) Epoch 3, batch 31600, loss[loss=0.1262, simple_loss=0.2105, pruned_loss=0.02094, over 4757.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2309, pruned_loss=0.04735, over 972677.48 frames.], batch size: 19, lr: 5.14e-04 +2022-05-04 18:06:26,994 INFO [train.py:715] (6/8) Epoch 3, batch 31650, loss[loss=0.1265, simple_loss=0.2067, pruned_loss=0.0232, over 4819.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2309, pruned_loss=0.04701, over 972565.04 frames.], batch size: 25, lr: 5.14e-04 +2022-05-04 18:07:07,175 INFO [train.py:715] (6/8) Epoch 3, batch 31700, loss[loss=0.2089, simple_loss=0.2741, pruned_loss=0.07189, over 4954.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2305, pruned_loss=0.04659, over 972557.48 frames.], batch size: 39, lr: 5.14e-04 +2022-05-04 18:07:46,361 INFO [train.py:715] (6/8) Epoch 3, batch 31750, loss[loss=0.1441, simple_loss=0.2104, pruned_loss=0.03885, over 4919.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2294, pruned_loss=0.04569, over 973257.30 frames.], batch size: 23, lr: 5.14e-04 +2022-05-04 18:08:24,495 INFO [train.py:715] (6/8) Epoch 3, batch 31800, loss[loss=0.1316, simple_loss=0.2013, pruned_loss=0.03096, over 4919.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2296, pruned_loss=0.04548, over 973249.14 frames.], batch size: 29, lr: 5.14e-04 +2022-05-04 18:09:04,275 INFO [train.py:715] (6/8) Epoch 3, batch 31850, loss[loss=0.1395, simple_loss=0.2246, pruned_loss=0.02718, over 4821.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2303, pruned_loss=0.04557, over 973399.40 frames.], batch size: 15, lr: 5.14e-04 +2022-05-04 18:09:43,772 INFO [train.py:715] (6/8) Epoch 3, batch 31900, loss[loss=0.1601, simple_loss=0.219, pruned_loss=0.05056, over 4881.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2297, pruned_loss=0.04589, over 972851.12 frames.], batch size: 16, lr: 5.14e-04 +2022-05-04 18:10:22,478 INFO [train.py:715] (6/8) Epoch 3, batch 31950, loss[loss=0.1393, simple_loss=0.2003, pruned_loss=0.03915, over 4773.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2301, pruned_loss=0.04615, over 973013.07 frames.], batch size: 14, lr: 5.14e-04 +2022-05-04 18:11:01,414 INFO [train.py:715] (6/8) 
Epoch 3, batch 32000, loss[loss=0.1689, simple_loss=0.2168, pruned_loss=0.06044, over 4855.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2293, pruned_loss=0.04561, over 972746.33 frames.], batch size: 32, lr: 5.14e-04 +2022-05-04 18:11:41,007 INFO [train.py:715] (6/8) Epoch 3, batch 32050, loss[loss=0.1583, simple_loss=0.229, pruned_loss=0.04374, over 4876.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2287, pruned_loss=0.04542, over 972940.13 frames.], batch size: 22, lr: 5.13e-04 +2022-05-04 18:12:19,207 INFO [train.py:715] (6/8) Epoch 3, batch 32100, loss[loss=0.1767, simple_loss=0.2549, pruned_loss=0.04929, over 4865.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2292, pruned_loss=0.04528, over 972663.09 frames.], batch size: 20, lr: 5.13e-04 +2022-05-04 18:12:58,310 INFO [train.py:715] (6/8) Epoch 3, batch 32150, loss[loss=0.2004, simple_loss=0.2545, pruned_loss=0.07316, over 4876.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2287, pruned_loss=0.04514, over 973646.71 frames.], batch size: 30, lr: 5.13e-04 +2022-05-04 18:13:37,855 INFO [train.py:715] (6/8) Epoch 3, batch 32200, loss[loss=0.1657, simple_loss=0.2235, pruned_loss=0.05398, over 4690.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2283, pruned_loss=0.04503, over 973016.11 frames.], batch size: 15, lr: 5.13e-04 +2022-05-04 18:14:16,676 INFO [train.py:715] (6/8) Epoch 3, batch 32250, loss[loss=0.1648, simple_loss=0.2326, pruned_loss=0.04849, over 4769.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2285, pruned_loss=0.04539, over 972491.84 frames.], batch size: 19, lr: 5.13e-04 +2022-05-04 18:14:55,235 INFO [train.py:715] (6/8) Epoch 3, batch 32300, loss[loss=0.1374, simple_loss=0.2076, pruned_loss=0.03357, over 4866.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2283, pruned_loss=0.04523, over 972402.92 frames.], batch size: 16, lr: 5.13e-04 +2022-05-04 18:15:34,902 INFO [train.py:715] (6/8) Epoch 3, batch 32350, loss[loss=0.142, simple_loss=0.2144, pruned_loss=0.0348, over 4925.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2279, pruned_loss=0.0447, over 973589.50 frames.], batch size: 23, lr: 5.13e-04 +2022-05-04 18:16:14,621 INFO [train.py:715] (6/8) Epoch 3, batch 32400, loss[loss=0.1855, simple_loss=0.2474, pruned_loss=0.06178, over 4911.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2289, pruned_loss=0.04498, over 973244.98 frames.], batch size: 18, lr: 5.13e-04 +2022-05-04 18:16:52,602 INFO [train.py:715] (6/8) Epoch 3, batch 32450, loss[loss=0.1602, simple_loss=0.2378, pruned_loss=0.04129, over 4952.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2294, pruned_loss=0.04564, over 972788.93 frames.], batch size: 21, lr: 5.13e-04 +2022-05-04 18:17:32,081 INFO [train.py:715] (6/8) Epoch 3, batch 32500, loss[loss=0.1633, simple_loss=0.2278, pruned_loss=0.04941, over 4861.00 frames.], tot_loss[loss=0.1601, simple_loss=0.229, pruned_loss=0.04564, over 973136.67 frames.], batch size: 20, lr: 5.13e-04 +2022-05-04 18:18:11,714 INFO [train.py:715] (6/8) Epoch 3, batch 32550, loss[loss=0.1606, simple_loss=0.231, pruned_loss=0.04508, over 4815.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2293, pruned_loss=0.04588, over 972575.62 frames.], batch size: 15, lr: 5.12e-04 +2022-05-04 18:18:50,229 INFO [train.py:715] (6/8) Epoch 3, batch 32600, loss[loss=0.1904, simple_loss=0.2546, pruned_loss=0.06309, over 4751.00 frames.], tot_loss[loss=0.1598, simple_loss=0.229, pruned_loss=0.04529, over 972809.66 frames.], batch size: 16, lr: 5.12e-04 +2022-05-04 18:19:29,061 INFO [train.py:715] (6/8) Epoch 3, batch 32650, 
loss[loss=0.1609, simple_loss=0.23, pruned_loss=0.04589, over 4983.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2289, pruned_loss=0.04543, over 973149.81 frames.], batch size: 25, lr: 5.12e-04 +2022-05-04 18:20:08,686 INFO [train.py:715] (6/8) Epoch 3, batch 32700, loss[loss=0.1446, simple_loss=0.2253, pruned_loss=0.03197, over 4938.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2295, pruned_loss=0.04563, over 973167.73 frames.], batch size: 29, lr: 5.12e-04 +2022-05-04 18:20:47,710 INFO [train.py:715] (6/8) Epoch 3, batch 32750, loss[loss=0.1522, simple_loss=0.2189, pruned_loss=0.04278, over 4794.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2305, pruned_loss=0.04635, over 972058.58 frames.], batch size: 21, lr: 5.12e-04 +2022-05-04 18:21:26,287 INFO [train.py:715] (6/8) Epoch 3, batch 32800, loss[loss=0.162, simple_loss=0.2351, pruned_loss=0.04439, over 4810.00 frames.], tot_loss[loss=0.161, simple_loss=0.2299, pruned_loss=0.04605, over 971458.59 frames.], batch size: 26, lr: 5.12e-04 +2022-05-04 18:22:05,409 INFO [train.py:715] (6/8) Epoch 3, batch 32850, loss[loss=0.133, simple_loss=0.2105, pruned_loss=0.02769, over 4894.00 frames.], tot_loss[loss=0.16, simple_loss=0.2291, pruned_loss=0.0455, over 972110.23 frames.], batch size: 22, lr: 5.12e-04 +2022-05-04 18:22:44,593 INFO [train.py:715] (6/8) Epoch 3, batch 32900, loss[loss=0.1616, simple_loss=0.224, pruned_loss=0.04962, over 4808.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2291, pruned_loss=0.04588, over 971536.60 frames.], batch size: 21, lr: 5.12e-04 +2022-05-04 18:23:23,658 INFO [train.py:715] (6/8) Epoch 3, batch 32950, loss[loss=0.1523, simple_loss=0.2151, pruned_loss=0.04478, over 4750.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2288, pruned_loss=0.04602, over 971826.44 frames.], batch size: 14, lr: 5.12e-04 +2022-05-04 18:24:02,390 INFO [train.py:715] (6/8) Epoch 3, batch 33000, loss[loss=0.1487, simple_loss=0.2104, pruned_loss=0.04352, over 4790.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2294, pruned_loss=0.04649, over 971320.32 frames.], batch size: 18, lr: 5.12e-04 +2022-05-04 18:24:02,390 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 18:24:11,705 INFO [train.py:742] (6/8) Epoch 3, validation: loss=0.1131, simple_loss=0.199, pruned_loss=0.01363, over 914524.00 frames. 
+2022-05-04 18:24:50,804 INFO [train.py:715] (6/8) Epoch 3, batch 33050, loss[loss=0.1476, simple_loss=0.2315, pruned_loss=0.03186, over 4976.00 frames.], tot_loss[loss=0.161, simple_loss=0.2295, pruned_loss=0.04621, over 971153.23 frames.], batch size: 28, lr: 5.12e-04 +2022-05-04 18:25:30,714 INFO [train.py:715] (6/8) Epoch 3, batch 33100, loss[loss=0.1634, simple_loss=0.2347, pruned_loss=0.04604, over 4790.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2305, pruned_loss=0.04665, over 971506.53 frames.], batch size: 17, lr: 5.11e-04 +2022-05-04 18:26:09,586 INFO [train.py:715] (6/8) Epoch 3, batch 33150, loss[loss=0.1451, simple_loss=0.2191, pruned_loss=0.03555, over 4753.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2298, pruned_loss=0.04618, over 971958.94 frames.], batch size: 19, lr: 5.11e-04 +2022-05-04 18:26:48,267 INFO [train.py:715] (6/8) Epoch 3, batch 33200, loss[loss=0.1438, simple_loss=0.2213, pruned_loss=0.03315, over 4930.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2295, pruned_loss=0.04588, over 971787.86 frames.], batch size: 35, lr: 5.11e-04 +2022-05-04 18:27:28,165 INFO [train.py:715] (6/8) Epoch 3, batch 33250, loss[loss=0.1752, simple_loss=0.2364, pruned_loss=0.05699, over 4916.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2296, pruned_loss=0.04578, over 972124.13 frames.], batch size: 17, lr: 5.11e-04 +2022-05-04 18:28:07,724 INFO [train.py:715] (6/8) Epoch 3, batch 33300, loss[loss=0.1885, simple_loss=0.2572, pruned_loss=0.05991, over 4840.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2294, pruned_loss=0.04572, over 972574.50 frames.], batch size: 26, lr: 5.11e-04 +2022-05-04 18:28:46,234 INFO [train.py:715] (6/8) Epoch 3, batch 33350, loss[loss=0.1275, simple_loss=0.2003, pruned_loss=0.02739, over 4952.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2303, pruned_loss=0.04652, over 972521.75 frames.], batch size: 24, lr: 5.11e-04 +2022-05-04 18:29:25,534 INFO [train.py:715] (6/8) Epoch 3, batch 33400, loss[loss=0.1546, simple_loss=0.2249, pruned_loss=0.04218, over 4951.00 frames.], tot_loss[loss=0.1621, simple_loss=0.231, pruned_loss=0.04662, over 972655.66 frames.], batch size: 21, lr: 5.11e-04 +2022-05-04 18:30:05,187 INFO [train.py:715] (6/8) Epoch 3, batch 33450, loss[loss=0.1663, simple_loss=0.2454, pruned_loss=0.04358, over 4762.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2299, pruned_loss=0.04609, over 971482.59 frames.], batch size: 14, lr: 5.11e-04 +2022-05-04 18:30:44,206 INFO [train.py:715] (6/8) Epoch 3, batch 33500, loss[loss=0.1588, simple_loss=0.2274, pruned_loss=0.04513, over 4858.00 frames.], tot_loss[loss=0.1597, simple_loss=0.229, pruned_loss=0.04522, over 972501.18 frames.], batch size: 32, lr: 5.11e-04 +2022-05-04 18:31:23,295 INFO [train.py:715] (6/8) Epoch 3, batch 33550, loss[loss=0.1645, simple_loss=0.2373, pruned_loss=0.0458, over 4785.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2278, pruned_loss=0.04468, over 972802.05 frames.], batch size: 17, lr: 5.11e-04 +2022-05-04 18:32:03,657 INFO [train.py:715] (6/8) Epoch 3, batch 33600, loss[loss=0.184, simple_loss=0.2576, pruned_loss=0.05522, over 4781.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2276, pruned_loss=0.0445, over 972170.66 frames.], batch size: 17, lr: 5.11e-04 +2022-05-04 18:32:43,014 INFO [train.py:715] (6/8) Epoch 3, batch 33650, loss[loss=0.1542, simple_loss=0.214, pruned_loss=0.04717, over 4815.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2276, pruned_loss=0.04457, over 972268.72 frames.], batch size: 12, lr: 5.10e-04 +2022-05-04 18:33:21,655 
INFO [train.py:715] (6/8) Epoch 3, batch 33700, loss[loss=0.1565, simple_loss=0.231, pruned_loss=0.04101, over 4875.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2279, pruned_loss=0.04482, over 972282.31 frames.], batch size: 22, lr: 5.10e-04 +2022-05-04 18:34:01,456 INFO [train.py:715] (6/8) Epoch 3, batch 33750, loss[loss=0.1466, simple_loss=0.2193, pruned_loss=0.03697, over 4845.00 frames.], tot_loss[loss=0.159, simple_loss=0.2283, pruned_loss=0.04482, over 972330.62 frames.], batch size: 13, lr: 5.10e-04 +2022-05-04 18:34:40,937 INFO [train.py:715] (6/8) Epoch 3, batch 33800, loss[loss=0.1767, simple_loss=0.2512, pruned_loss=0.05115, over 4848.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2297, pruned_loss=0.04529, over 972755.28 frames.], batch size: 32, lr: 5.10e-04 +2022-05-04 18:35:19,313 INFO [train.py:715] (6/8) Epoch 3, batch 33850, loss[loss=0.1567, simple_loss=0.2383, pruned_loss=0.03758, over 4919.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2294, pruned_loss=0.04558, over 972646.02 frames.], batch size: 17, lr: 5.10e-04 +2022-05-04 18:35:58,145 INFO [train.py:715] (6/8) Epoch 3, batch 33900, loss[loss=0.1501, simple_loss=0.2287, pruned_loss=0.03577, over 4950.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2288, pruned_loss=0.04486, over 973107.35 frames.], batch size: 21, lr: 5.10e-04 +2022-05-04 18:36:38,303 INFO [train.py:715] (6/8) Epoch 3, batch 33950, loss[loss=0.1409, simple_loss=0.222, pruned_loss=0.02985, over 4944.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2278, pruned_loss=0.04465, over 972851.64 frames.], batch size: 21, lr: 5.10e-04 +2022-05-04 18:37:17,242 INFO [train.py:715] (6/8) Epoch 3, batch 34000, loss[loss=0.1838, simple_loss=0.2461, pruned_loss=0.06076, over 4783.00 frames.], tot_loss[loss=0.1591, simple_loss=0.228, pruned_loss=0.04512, over 972529.41 frames.], batch size: 14, lr: 5.10e-04 +2022-05-04 18:37:55,984 INFO [train.py:715] (6/8) Epoch 3, batch 34050, loss[loss=0.172, simple_loss=0.2548, pruned_loss=0.04465, over 4693.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2283, pruned_loss=0.04511, over 971365.90 frames.], batch size: 15, lr: 5.10e-04 +2022-05-04 18:38:35,314 INFO [train.py:715] (6/8) Epoch 3, batch 34100, loss[loss=0.1441, simple_loss=0.2171, pruned_loss=0.03558, over 4792.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2283, pruned_loss=0.0454, over 971588.56 frames.], batch size: 17, lr: 5.10e-04 +2022-05-04 18:39:15,283 INFO [train.py:715] (6/8) Epoch 3, batch 34150, loss[loss=0.1817, simple_loss=0.2407, pruned_loss=0.06138, over 4960.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2279, pruned_loss=0.0453, over 971699.54 frames.], batch size: 35, lr: 5.10e-04 +2022-05-04 18:39:53,560 INFO [train.py:715] (6/8) Epoch 3, batch 34200, loss[loss=0.1582, simple_loss=0.2211, pruned_loss=0.04765, over 4869.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2274, pruned_loss=0.04476, over 971975.90 frames.], batch size: 16, lr: 5.09e-04 +2022-05-04 18:40:33,006 INFO [train.py:715] (6/8) Epoch 3, batch 34250, loss[loss=0.1661, simple_loss=0.2418, pruned_loss=0.04515, over 4922.00 frames.], tot_loss[loss=0.158, simple_loss=0.2274, pruned_loss=0.04429, over 972860.47 frames.], batch size: 39, lr: 5.09e-04 +2022-05-04 18:41:13,066 INFO [train.py:715] (6/8) Epoch 3, batch 34300, loss[loss=0.1371, simple_loss=0.217, pruned_loss=0.02857, over 4977.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2267, pruned_loss=0.04418, over 972625.10 frames.], batch size: 25, lr: 5.09e-04 +2022-05-04 18:41:52,486 INFO [train.py:715] (6/8) 
Epoch 3, batch 34350, loss[loss=0.1643, simple_loss=0.2293, pruned_loss=0.0497, over 4793.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2276, pruned_loss=0.04463, over 972363.90 frames.], batch size: 14, lr: 5.09e-04 +2022-05-04 18:42:31,606 INFO [train.py:715] (6/8) Epoch 3, batch 34400, loss[loss=0.1801, simple_loss=0.2518, pruned_loss=0.05419, over 4902.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2277, pruned_loss=0.04458, over 972898.17 frames.], batch size: 39, lr: 5.09e-04 +2022-05-04 18:43:11,187 INFO [train.py:715] (6/8) Epoch 3, batch 34450, loss[loss=0.172, simple_loss=0.2395, pruned_loss=0.05232, over 4938.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2279, pruned_loss=0.04492, over 972715.16 frames.], batch size: 39, lr: 5.09e-04 +2022-05-04 18:43:51,341 INFO [train.py:715] (6/8) Epoch 3, batch 34500, loss[loss=0.1633, simple_loss=0.2293, pruned_loss=0.04867, over 4865.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2277, pruned_loss=0.04489, over 973063.97 frames.], batch size: 16, lr: 5.09e-04 +2022-05-04 18:44:29,768 INFO [train.py:715] (6/8) Epoch 3, batch 34550, loss[loss=0.1383, simple_loss=0.2146, pruned_loss=0.03097, over 4923.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2281, pruned_loss=0.04523, over 972634.92 frames.], batch size: 18, lr: 5.09e-04 +2022-05-04 18:45:08,807 INFO [train.py:715] (6/8) Epoch 3, batch 34600, loss[loss=0.133, simple_loss=0.202, pruned_loss=0.03199, over 4798.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2283, pruned_loss=0.04525, over 972726.95 frames.], batch size: 21, lr: 5.09e-04 +2022-05-04 18:45:49,191 INFO [train.py:715] (6/8) Epoch 3, batch 34650, loss[loss=0.1451, simple_loss=0.2233, pruned_loss=0.03347, over 4891.00 frames.], tot_loss[loss=0.1597, simple_loss=0.228, pruned_loss=0.04564, over 973506.82 frames.], batch size: 22, lr: 5.09e-04 +2022-05-04 18:46:28,782 INFO [train.py:715] (6/8) Epoch 3, batch 34700, loss[loss=0.1492, simple_loss=0.2213, pruned_loss=0.03856, over 4890.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2293, pruned_loss=0.04645, over 973480.57 frames.], batch size: 17, lr: 5.09e-04 +2022-05-04 18:47:07,061 INFO [train.py:715] (6/8) Epoch 3, batch 34750, loss[loss=0.1604, simple_loss=0.2311, pruned_loss=0.04486, over 4827.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2295, pruned_loss=0.04612, over 973906.73 frames.], batch size: 13, lr: 5.08e-04 +2022-05-04 18:47:44,763 INFO [train.py:715] (6/8) Epoch 3, batch 34800, loss[loss=0.1147, simple_loss=0.1923, pruned_loss=0.0186, over 4776.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2276, pruned_loss=0.04529, over 972765.81 frames.], batch size: 12, lr: 5.08e-04 +2022-05-04 18:48:35,149 INFO [train.py:715] (6/8) Epoch 4, batch 0, loss[loss=0.2018, simple_loss=0.2639, pruned_loss=0.06984, over 4926.00 frames.], tot_loss[loss=0.2018, simple_loss=0.2639, pruned_loss=0.06984, over 4926.00 frames.], batch size: 29, lr: 4.78e-04 +2022-05-04 18:49:16,509 INFO [train.py:715] (6/8) Epoch 4, batch 50, loss[loss=0.1396, simple_loss=0.2201, pruned_loss=0.02956, over 4815.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2264, pruned_loss=0.04524, over 220080.89 frames.], batch size: 13, lr: 4.78e-04 +2022-05-04 18:49:57,171 INFO [train.py:715] (6/8) Epoch 4, batch 100, loss[loss=0.1719, simple_loss=0.2343, pruned_loss=0.0547, over 4877.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2259, pruned_loss=0.04516, over 386438.80 frames.], batch size: 22, lr: 4.78e-04 +2022-05-04 18:50:37,985 INFO [train.py:715] (6/8) Epoch 4, batch 150, 
loss[loss=0.1593, simple_loss=0.2304, pruned_loss=0.04408, over 4888.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2284, pruned_loss=0.046, over 516762.36 frames.], batch size: 22, lr: 4.78e-04 +2022-05-04 18:51:19,058 INFO [train.py:715] (6/8) Epoch 4, batch 200, loss[loss=0.1893, simple_loss=0.2502, pruned_loss=0.06415, over 4830.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2292, pruned_loss=0.04576, over 618531.02 frames.], batch size: 15, lr: 4.78e-04 +2022-05-04 18:52:00,243 INFO [train.py:715] (6/8) Epoch 4, batch 250, loss[loss=0.1665, simple_loss=0.2408, pruned_loss=0.04609, over 4806.00 frames.], tot_loss[loss=0.1613, simple_loss=0.23, pruned_loss=0.04634, over 696612.89 frames.], batch size: 21, lr: 4.77e-04 +2022-05-04 18:52:41,179 INFO [train.py:715] (6/8) Epoch 4, batch 300, loss[loss=0.1377, simple_loss=0.2044, pruned_loss=0.0355, over 4775.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2292, pruned_loss=0.04603, over 757193.36 frames.], batch size: 17, lr: 4.77e-04 +2022-05-04 18:53:22,431 INFO [train.py:715] (6/8) Epoch 4, batch 350, loss[loss=0.1936, simple_loss=0.262, pruned_loss=0.06259, over 4950.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2303, pruned_loss=0.04614, over 805346.79 frames.], batch size: 35, lr: 4.77e-04 +2022-05-04 18:54:04,557 INFO [train.py:715] (6/8) Epoch 4, batch 400, loss[loss=0.1406, simple_loss=0.2206, pruned_loss=0.03035, over 4874.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2304, pruned_loss=0.04586, over 842885.76 frames.], batch size: 22, lr: 4.77e-04 +2022-05-04 18:54:45,180 INFO [train.py:715] (6/8) Epoch 4, batch 450, loss[loss=0.1429, simple_loss=0.2113, pruned_loss=0.03728, over 4809.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2292, pruned_loss=0.04516, over 871308.66 frames.], batch size: 25, lr: 4.77e-04 +2022-05-04 18:55:26,266 INFO [train.py:715] (6/8) Epoch 4, batch 500, loss[loss=0.185, simple_loss=0.244, pruned_loss=0.06299, over 4960.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2293, pruned_loss=0.04568, over 893469.11 frames.], batch size: 39, lr: 4.77e-04 +2022-05-04 18:56:07,510 INFO [train.py:715] (6/8) Epoch 4, batch 550, loss[loss=0.156, simple_loss=0.218, pruned_loss=0.04704, over 4905.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2281, pruned_loss=0.04522, over 910025.24 frames.], batch size: 17, lr: 4.77e-04 +2022-05-04 18:56:48,418 INFO [train.py:715] (6/8) Epoch 4, batch 600, loss[loss=0.139, simple_loss=0.2112, pruned_loss=0.0334, over 4922.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2288, pruned_loss=0.04542, over 924126.89 frames.], batch size: 23, lr: 4.77e-04 +2022-05-04 18:57:28,923 INFO [train.py:715] (6/8) Epoch 4, batch 650, loss[loss=0.1612, simple_loss=0.2284, pruned_loss=0.04701, over 4875.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2294, pruned_loss=0.04605, over 934743.86 frames.], batch size: 16, lr: 4.77e-04 +2022-05-04 18:58:10,010 INFO [train.py:715] (6/8) Epoch 4, batch 700, loss[loss=0.1627, simple_loss=0.2319, pruned_loss=0.04668, over 4782.00 frames.], tot_loss[loss=0.161, simple_loss=0.2296, pruned_loss=0.04615, over 943322.35 frames.], batch size: 17, lr: 4.77e-04 +2022-05-04 18:58:51,944 INFO [train.py:715] (6/8) Epoch 4, batch 750, loss[loss=0.1536, simple_loss=0.2191, pruned_loss=0.04403, over 4987.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2296, pruned_loss=0.04595, over 950265.54 frames.], batch size: 14, lr: 4.77e-04 +2022-05-04 18:59:33,009 INFO [train.py:715] (6/8) Epoch 4, batch 800, loss[loss=0.1518, simple_loss=0.2311, 
pruned_loss=0.03631, over 4812.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2295, pruned_loss=0.04586, over 955065.92 frames.], batch size: 13, lr: 4.77e-04 +2022-05-04 19:00:13,438 INFO [train.py:715] (6/8) Epoch 4, batch 850, loss[loss=0.1543, simple_loss=0.2294, pruned_loss=0.0396, over 4783.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2287, pruned_loss=0.04514, over 959047.60 frames.], batch size: 14, lr: 4.76e-04 +2022-05-04 19:00:54,499 INFO [train.py:715] (6/8) Epoch 4, batch 900, loss[loss=0.1643, simple_loss=0.2211, pruned_loss=0.05376, over 4851.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2285, pruned_loss=0.0452, over 961395.20 frames.], batch size: 32, lr: 4.76e-04 +2022-05-04 19:01:35,347 INFO [train.py:715] (6/8) Epoch 4, batch 950, loss[loss=0.1907, simple_loss=0.2486, pruned_loss=0.06634, over 4785.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2277, pruned_loss=0.04509, over 964094.78 frames.], batch size: 18, lr: 4.76e-04 +2022-05-04 19:02:16,230 INFO [train.py:715] (6/8) Epoch 4, batch 1000, loss[loss=0.1553, simple_loss=0.2208, pruned_loss=0.04491, over 4809.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2276, pruned_loss=0.04529, over 965854.86 frames.], batch size: 27, lr: 4.76e-04 +2022-05-04 19:02:56,963 INFO [train.py:715] (6/8) Epoch 4, batch 1050, loss[loss=0.1598, simple_loss=0.2251, pruned_loss=0.04728, over 4777.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2276, pruned_loss=0.0456, over 967091.91 frames.], batch size: 17, lr: 4.76e-04 +2022-05-04 19:03:38,131 INFO [train.py:715] (6/8) Epoch 4, batch 1100, loss[loss=0.1556, simple_loss=0.2262, pruned_loss=0.04252, over 4774.00 frames.], tot_loss[loss=0.1585, simple_loss=0.227, pruned_loss=0.04499, over 968270.62 frames.], batch size: 18, lr: 4.76e-04 +2022-05-04 19:04:18,530 INFO [train.py:715] (6/8) Epoch 4, batch 1150, loss[loss=0.1531, simple_loss=0.2246, pruned_loss=0.04078, over 4939.00 frames.], tot_loss[loss=0.158, simple_loss=0.2268, pruned_loss=0.04457, over 968962.52 frames.], batch size: 29, lr: 4.76e-04 +2022-05-04 19:04:58,030 INFO [train.py:715] (6/8) Epoch 4, batch 1200, loss[loss=0.1349, simple_loss=0.2113, pruned_loss=0.02932, over 4983.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2269, pruned_loss=0.04429, over 969556.71 frames.], batch size: 25, lr: 4.76e-04 +2022-05-04 19:05:38,589 INFO [train.py:715] (6/8) Epoch 4, batch 1250, loss[loss=0.1557, simple_loss=0.2183, pruned_loss=0.04654, over 4903.00 frames.], tot_loss[loss=0.1593, simple_loss=0.228, pruned_loss=0.04524, over 969730.26 frames.], batch size: 17, lr: 4.76e-04 +2022-05-04 19:06:19,663 INFO [train.py:715] (6/8) Epoch 4, batch 1300, loss[loss=0.1748, simple_loss=0.2367, pruned_loss=0.05639, over 4841.00 frames.], tot_loss[loss=0.16, simple_loss=0.2285, pruned_loss=0.04578, over 969973.20 frames.], batch size: 32, lr: 4.76e-04 +2022-05-04 19:06:59,667 INFO [train.py:715] (6/8) Epoch 4, batch 1350, loss[loss=0.1739, simple_loss=0.2499, pruned_loss=0.04898, over 4985.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2279, pruned_loss=0.04527, over 970289.24 frames.], batch size: 28, lr: 4.76e-04 +2022-05-04 19:07:40,380 INFO [train.py:715] (6/8) Epoch 4, batch 1400, loss[loss=0.147, simple_loss=0.2177, pruned_loss=0.03809, over 4844.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2289, pruned_loss=0.04583, over 969840.89 frames.], batch size: 20, lr: 4.76e-04 +2022-05-04 19:08:21,352 INFO [train.py:715] (6/8) Epoch 4, batch 1450, loss[loss=0.1718, simple_loss=0.2347, pruned_loss=0.05441, over 4987.00 frames.], 
tot_loss[loss=0.1591, simple_loss=0.2275, pruned_loss=0.04541, over 969782.38 frames.], batch size: 31, lr: 4.75e-04 +2022-05-04 19:09:02,426 INFO [train.py:715] (6/8) Epoch 4, batch 1500, loss[loss=0.1516, simple_loss=0.2248, pruned_loss=0.03921, over 4958.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2276, pruned_loss=0.0451, over 970490.68 frames.], batch size: 14, lr: 4.75e-04 +2022-05-04 19:09:42,048 INFO [train.py:715] (6/8) Epoch 4, batch 1550, loss[loss=0.1365, simple_loss=0.2077, pruned_loss=0.03262, over 4989.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2277, pruned_loss=0.0446, over 970548.94 frames.], batch size: 25, lr: 4.75e-04 +2022-05-04 19:10:23,010 INFO [train.py:715] (6/8) Epoch 4, batch 1600, loss[loss=0.1448, simple_loss=0.2173, pruned_loss=0.03615, over 4925.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2281, pruned_loss=0.04463, over 971475.68 frames.], batch size: 21, lr: 4.75e-04 +2022-05-04 19:11:04,751 INFO [train.py:715] (6/8) Epoch 4, batch 1650, loss[loss=0.154, simple_loss=0.2271, pruned_loss=0.04047, over 4952.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2276, pruned_loss=0.04413, over 971583.30 frames.], batch size: 23, lr: 4.75e-04 +2022-05-04 19:11:45,106 INFO [train.py:715] (6/8) Epoch 4, batch 1700, loss[loss=0.1574, simple_loss=0.232, pruned_loss=0.04139, over 4922.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2276, pruned_loss=0.04457, over 972007.55 frames.], batch size: 29, lr: 4.75e-04 +2022-05-04 19:12:25,116 INFO [train.py:715] (6/8) Epoch 4, batch 1750, loss[loss=0.1668, simple_loss=0.2266, pruned_loss=0.05354, over 4916.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2283, pruned_loss=0.04507, over 972116.23 frames.], batch size: 19, lr: 4.75e-04 +2022-05-04 19:13:06,319 INFO [train.py:715] (6/8) Epoch 4, batch 1800, loss[loss=0.1516, simple_loss=0.2392, pruned_loss=0.03203, over 4785.00 frames.], tot_loss[loss=0.1588, simple_loss=0.228, pruned_loss=0.04477, over 971993.45 frames.], batch size: 18, lr: 4.75e-04 +2022-05-04 19:13:47,667 INFO [train.py:715] (6/8) Epoch 4, batch 1850, loss[loss=0.15, simple_loss=0.2296, pruned_loss=0.03515, over 4949.00 frames.], tot_loss[loss=0.159, simple_loss=0.2285, pruned_loss=0.04474, over 972332.12 frames.], batch size: 24, lr: 4.75e-04 +2022-05-04 19:14:27,702 INFO [train.py:715] (6/8) Epoch 4, batch 1900, loss[loss=0.1552, simple_loss=0.2304, pruned_loss=0.03999, over 4842.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2276, pruned_loss=0.04451, over 972539.84 frames.], batch size: 25, lr: 4.75e-04 +2022-05-04 19:15:08,452 INFO [train.py:715] (6/8) Epoch 4, batch 1950, loss[loss=0.1807, simple_loss=0.2338, pruned_loss=0.06385, over 4974.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2268, pruned_loss=0.04379, over 972463.42 frames.], batch size: 35, lr: 4.75e-04 +2022-05-04 19:15:48,969 INFO [train.py:715] (6/8) Epoch 4, batch 2000, loss[loss=0.1603, simple_loss=0.233, pruned_loss=0.04382, over 4978.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2276, pruned_loss=0.04426, over 972946.11 frames.], batch size: 15, lr: 4.74e-04 +2022-05-04 19:16:28,972 INFO [train.py:715] (6/8) Epoch 4, batch 2050, loss[loss=0.1581, simple_loss=0.2308, pruned_loss=0.04266, over 4805.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2282, pruned_loss=0.04447, over 972879.81 frames.], batch size: 21, lr: 4.74e-04 +2022-05-04 19:17:08,519 INFO [train.py:715] (6/8) Epoch 4, batch 2100, loss[loss=0.1601, simple_loss=0.2237, pruned_loss=0.04825, over 4986.00 frames.], tot_loss[loss=0.1592, 
simple_loss=0.2286, pruned_loss=0.04485, over 973426.01 frames.], batch size: 14, lr: 4.74e-04 +2022-05-04 19:17:48,261 INFO [train.py:715] (6/8) Epoch 4, batch 2150, loss[loss=0.1564, simple_loss=0.2255, pruned_loss=0.04364, over 4804.00 frames.], tot_loss[loss=0.159, simple_loss=0.2284, pruned_loss=0.0448, over 972762.37 frames.], batch size: 17, lr: 4.74e-04 +2022-05-04 19:18:29,065 INFO [train.py:715] (6/8) Epoch 4, batch 2200, loss[loss=0.1614, simple_loss=0.2449, pruned_loss=0.03896, over 4841.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2281, pruned_loss=0.04469, over 972843.21 frames.], batch size: 20, lr: 4.74e-04 +2022-05-04 19:19:09,443 INFO [train.py:715] (6/8) Epoch 4, batch 2250, loss[loss=0.1623, simple_loss=0.2363, pruned_loss=0.04414, over 4899.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2281, pruned_loss=0.04481, over 971651.57 frames.], batch size: 19, lr: 4.74e-04 +2022-05-04 19:19:48,813 INFO [train.py:715] (6/8) Epoch 4, batch 2300, loss[loss=0.1535, simple_loss=0.2215, pruned_loss=0.04279, over 4873.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2271, pruned_loss=0.04429, over 972336.63 frames.], batch size: 16, lr: 4.74e-04 +2022-05-04 19:20:28,748 INFO [train.py:715] (6/8) Epoch 4, batch 2350, loss[loss=0.1475, simple_loss=0.216, pruned_loss=0.03955, over 4781.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2263, pruned_loss=0.04421, over 972940.24 frames.], batch size: 17, lr: 4.74e-04 +2022-05-04 19:21:08,837 INFO [train.py:715] (6/8) Epoch 4, batch 2400, loss[loss=0.1449, simple_loss=0.2147, pruned_loss=0.03754, over 4938.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2266, pruned_loss=0.04458, over 973443.73 frames.], batch size: 35, lr: 4.74e-04 +2022-05-04 19:21:48,319 INFO [train.py:715] (6/8) Epoch 4, batch 2450, loss[loss=0.1302, simple_loss=0.2067, pruned_loss=0.02688, over 4815.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2261, pruned_loss=0.04403, over 972580.97 frames.], batch size: 27, lr: 4.74e-04 +2022-05-04 19:22:28,664 INFO [train.py:715] (6/8) Epoch 4, batch 2500, loss[loss=0.1599, simple_loss=0.2242, pruned_loss=0.04776, over 4773.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2267, pruned_loss=0.04423, over 972417.02 frames.], batch size: 14, lr: 4.74e-04 +2022-05-04 19:23:09,577 INFO [train.py:715] (6/8) Epoch 4, batch 2550, loss[loss=0.1928, simple_loss=0.2653, pruned_loss=0.06011, over 4891.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2266, pruned_loss=0.04385, over 972816.19 frames.], batch size: 32, lr: 4.74e-04 +2022-05-04 19:23:49,886 INFO [train.py:715] (6/8) Epoch 4, batch 2600, loss[loss=0.1843, simple_loss=0.2436, pruned_loss=0.06252, over 4889.00 frames.], tot_loss[loss=0.1567, simple_loss=0.226, pruned_loss=0.04373, over 972473.34 frames.], batch size: 19, lr: 4.73e-04 +2022-05-04 19:24:29,138 INFO [train.py:715] (6/8) Epoch 4, batch 2650, loss[loss=0.1406, simple_loss=0.2118, pruned_loss=0.03475, over 4831.00 frames.], tot_loss[loss=0.1585, simple_loss=0.228, pruned_loss=0.04455, over 972720.54 frames.], batch size: 13, lr: 4.73e-04 +2022-05-04 19:25:09,504 INFO [train.py:715] (6/8) Epoch 4, batch 2700, loss[loss=0.1303, simple_loss=0.2068, pruned_loss=0.02685, over 4824.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2277, pruned_loss=0.04454, over 972883.20 frames.], batch size: 25, lr: 4.73e-04 +2022-05-04 19:25:49,767 INFO [train.py:715] (6/8) Epoch 4, batch 2750, loss[loss=0.1517, simple_loss=0.233, pruned_loss=0.03519, over 4966.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2278, 
pruned_loss=0.04446, over 972135.73 frames.], batch size: 24, lr: 4.73e-04 +2022-05-04 19:26:29,543 INFO [train.py:715] (6/8) Epoch 4, batch 2800, loss[loss=0.1622, simple_loss=0.2364, pruned_loss=0.04401, over 4773.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2277, pruned_loss=0.04442, over 971412.99 frames.], batch size: 18, lr: 4.73e-04 +2022-05-04 19:27:08,937 INFO [train.py:715] (6/8) Epoch 4, batch 2850, loss[loss=0.1859, simple_loss=0.2561, pruned_loss=0.05782, over 4821.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2276, pruned_loss=0.0447, over 972302.58 frames.], batch size: 27, lr: 4.73e-04 +2022-05-04 19:27:49,244 INFO [train.py:715] (6/8) Epoch 4, batch 2900, loss[loss=0.1319, simple_loss=0.213, pruned_loss=0.02544, over 4824.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2279, pruned_loss=0.04458, over 972581.94 frames.], batch size: 25, lr: 4.73e-04 +2022-05-04 19:28:29,135 INFO [train.py:715] (6/8) Epoch 4, batch 2950, loss[loss=0.1605, simple_loss=0.221, pruned_loss=0.05005, over 4868.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2283, pruned_loss=0.04491, over 973531.16 frames.], batch size: 32, lr: 4.73e-04 +2022-05-04 19:29:08,452 INFO [train.py:715] (6/8) Epoch 4, batch 3000, loss[loss=0.1553, simple_loss=0.2175, pruned_loss=0.04651, over 4906.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2286, pruned_loss=0.04527, over 972872.46 frames.], batch size: 17, lr: 4.73e-04 +2022-05-04 19:29:08,452 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 19:29:17,943 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1127, simple_loss=0.1984, pruned_loss=0.01346, over 914524.00 frames. +2022-05-04 19:29:57,097 INFO [train.py:715] (6/8) Epoch 4, batch 3050, loss[loss=0.1393, simple_loss=0.2138, pruned_loss=0.03244, over 4892.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2286, pruned_loss=0.04525, over 972817.87 frames.], batch size: 19, lr: 4.73e-04 +2022-05-04 19:30:37,140 INFO [train.py:715] (6/8) Epoch 4, batch 3100, loss[loss=0.149, simple_loss=0.2243, pruned_loss=0.03688, over 4797.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2275, pruned_loss=0.04471, over 972744.83 frames.], batch size: 17, lr: 4.73e-04 +2022-05-04 19:31:17,415 INFO [train.py:715] (6/8) Epoch 4, batch 3150, loss[loss=0.1854, simple_loss=0.2653, pruned_loss=0.0528, over 4742.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2273, pruned_loss=0.04476, over 972366.99 frames.], batch size: 16, lr: 4.73e-04 +2022-05-04 19:31:57,026 INFO [train.py:715] (6/8) Epoch 4, batch 3200, loss[loss=0.124, simple_loss=0.1916, pruned_loss=0.02824, over 4829.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2276, pruned_loss=0.0447, over 972978.54 frames.], batch size: 13, lr: 4.72e-04 +2022-05-04 19:32:36,974 INFO [train.py:715] (6/8) Epoch 4, batch 3250, loss[loss=0.1617, simple_loss=0.2361, pruned_loss=0.04361, over 4982.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2279, pruned_loss=0.04494, over 973808.50 frames.], batch size: 24, lr: 4.72e-04 +2022-05-04 19:33:16,914 INFO [train.py:715] (6/8) Epoch 4, batch 3300, loss[loss=0.1497, simple_loss=0.2279, pruned_loss=0.03578, over 4963.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2281, pruned_loss=0.045, over 973971.58 frames.], batch size: 24, lr: 4.72e-04 +2022-05-04 19:33:56,288 INFO [train.py:715] (6/8) Epoch 4, batch 3350, loss[loss=0.1229, simple_loss=0.191, pruned_loss=0.02742, over 4807.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2268, pruned_loss=0.04445, over 973890.30 frames.], batch size: 26, lr: 4.72e-04 +2022-05-04 
19:34:35,332 INFO [train.py:715] (6/8) Epoch 4, batch 3400, loss[loss=0.1674, simple_loss=0.2326, pruned_loss=0.05112, over 4695.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2278, pruned_loss=0.04476, over 973081.88 frames.], batch size: 15, lr: 4.72e-04 +2022-05-04 19:35:15,775 INFO [train.py:715] (6/8) Epoch 4, batch 3450, loss[loss=0.13, simple_loss=0.2052, pruned_loss=0.02739, over 4820.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2284, pruned_loss=0.04473, over 971997.57 frames.], batch size: 21, lr: 4.72e-04 +2022-05-04 19:35:55,193 INFO [train.py:715] (6/8) Epoch 4, batch 3500, loss[loss=0.1403, simple_loss=0.2107, pruned_loss=0.03489, over 4765.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2285, pruned_loss=0.045, over 971858.33 frames.], batch size: 17, lr: 4.72e-04 +2022-05-04 19:36:34,862 INFO [train.py:715] (6/8) Epoch 4, batch 3550, loss[loss=0.1633, simple_loss=0.2423, pruned_loss=0.04217, over 4940.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2285, pruned_loss=0.04469, over 971831.95 frames.], batch size: 23, lr: 4.72e-04 +2022-05-04 19:37:14,698 INFO [train.py:715] (6/8) Epoch 4, batch 3600, loss[loss=0.1396, simple_loss=0.2144, pruned_loss=0.03238, over 4835.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2282, pruned_loss=0.04444, over 971892.90 frames.], batch size: 26, lr: 4.72e-04 +2022-05-04 19:37:54,700 INFO [train.py:715] (6/8) Epoch 4, batch 3650, loss[loss=0.1161, simple_loss=0.1806, pruned_loss=0.02582, over 4792.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2277, pruned_loss=0.04463, over 972272.47 frames.], batch size: 12, lr: 4.72e-04 +2022-05-04 19:38:34,071 INFO [train.py:715] (6/8) Epoch 4, batch 3700, loss[loss=0.1525, simple_loss=0.22, pruned_loss=0.04255, over 4947.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2272, pruned_loss=0.04406, over 972351.28 frames.], batch size: 35, lr: 4.72e-04 +2022-05-04 19:39:13,352 INFO [train.py:715] (6/8) Epoch 4, batch 3750, loss[loss=0.1658, simple_loss=0.2433, pruned_loss=0.04413, over 4983.00 frames.], tot_loss[loss=0.1571, simple_loss=0.227, pruned_loss=0.0436, over 971754.29 frames.], batch size: 15, lr: 4.72e-04 +2022-05-04 19:39:53,220 INFO [train.py:715] (6/8) Epoch 4, batch 3800, loss[loss=0.1469, simple_loss=0.2288, pruned_loss=0.03248, over 4819.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2263, pruned_loss=0.04321, over 970913.53 frames.], batch size: 26, lr: 4.72e-04 +2022-05-04 19:40:32,937 INFO [train.py:715] (6/8) Epoch 4, batch 3850, loss[loss=0.1362, simple_loss=0.2109, pruned_loss=0.03069, over 4985.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2262, pruned_loss=0.04302, over 971235.46 frames.], batch size: 14, lr: 4.71e-04 +2022-05-04 19:41:13,115 INFO [train.py:715] (6/8) Epoch 4, batch 3900, loss[loss=0.1295, simple_loss=0.1979, pruned_loss=0.03058, over 4838.00 frames.], tot_loss[loss=0.156, simple_loss=0.2261, pruned_loss=0.04299, over 972445.42 frames.], batch size: 12, lr: 4.71e-04 +2022-05-04 19:41:53,261 INFO [train.py:715] (6/8) Epoch 4, batch 3950, loss[loss=0.1643, simple_loss=0.2273, pruned_loss=0.05062, over 4759.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2272, pruned_loss=0.04381, over 972301.67 frames.], batch size: 14, lr: 4.71e-04 +2022-05-04 19:42:33,635 INFO [train.py:715] (6/8) Epoch 4, batch 4000, loss[loss=0.157, simple_loss=0.2287, pruned_loss=0.04269, over 4805.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2263, pruned_loss=0.0436, over 972151.40 frames.], batch size: 24, lr: 4.71e-04 +2022-05-04 19:43:13,667 INFO [train.py:715] (6/8) 
Epoch 4, batch 4050, loss[loss=0.1657, simple_loss=0.2339, pruned_loss=0.04876, over 4820.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2259, pruned_loss=0.0437, over 972651.50 frames.], batch size: 25, lr: 4.71e-04 +2022-05-04 19:43:53,250 INFO [train.py:715] (6/8) Epoch 4, batch 4100, loss[loss=0.2105, simple_loss=0.2779, pruned_loss=0.07155, over 4883.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2269, pruned_loss=0.04428, over 972418.28 frames.], batch size: 39, lr: 4.71e-04 +2022-05-04 19:44:33,951 INFO [train.py:715] (6/8) Epoch 4, batch 4150, loss[loss=0.1456, simple_loss=0.209, pruned_loss=0.04113, over 4827.00 frames.], tot_loss[loss=0.1588, simple_loss=0.228, pruned_loss=0.04487, over 972551.11 frames.], batch size: 13, lr: 4.71e-04 +2022-05-04 19:45:13,437 INFO [train.py:715] (6/8) Epoch 4, batch 4200, loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03091, over 4986.00 frames.], tot_loss[loss=0.159, simple_loss=0.2283, pruned_loss=0.04487, over 972511.28 frames.], batch size: 28, lr: 4.71e-04 +2022-05-04 19:45:52,914 INFO [train.py:715] (6/8) Epoch 4, batch 4250, loss[loss=0.1685, simple_loss=0.24, pruned_loss=0.04851, over 4883.00 frames.], tot_loss[loss=0.1585, simple_loss=0.228, pruned_loss=0.04453, over 972317.31 frames.], batch size: 16, lr: 4.71e-04 +2022-05-04 19:46:33,016 INFO [train.py:715] (6/8) Epoch 4, batch 4300, loss[loss=0.1571, simple_loss=0.2281, pruned_loss=0.043, over 4902.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2267, pruned_loss=0.04442, over 971706.35 frames.], batch size: 17, lr: 4.71e-04 +2022-05-04 19:47:13,037 INFO [train.py:715] (6/8) Epoch 4, batch 4350, loss[loss=0.1336, simple_loss=0.2117, pruned_loss=0.02776, over 4955.00 frames.], tot_loss[loss=0.1574, simple_loss=0.227, pruned_loss=0.0439, over 972386.63 frames.], batch size: 24, lr: 4.71e-04 +2022-05-04 19:47:52,120 INFO [train.py:715] (6/8) Epoch 4, batch 4400, loss[loss=0.1415, simple_loss=0.214, pruned_loss=0.0345, over 4876.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2274, pruned_loss=0.04392, over 972129.93 frames.], batch size: 22, lr: 4.71e-04 +2022-05-04 19:48:31,830 INFO [train.py:715] (6/8) Epoch 4, batch 4450, loss[loss=0.174, simple_loss=0.2372, pruned_loss=0.05539, over 4742.00 frames.], tot_loss[loss=0.158, simple_loss=0.2275, pruned_loss=0.04423, over 972184.72 frames.], batch size: 16, lr: 4.70e-04 +2022-05-04 19:49:12,004 INFO [train.py:715] (6/8) Epoch 4, batch 4500, loss[loss=0.1501, simple_loss=0.2302, pruned_loss=0.03506, over 4927.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2278, pruned_loss=0.0447, over 972719.97 frames.], batch size: 18, lr: 4.70e-04 +2022-05-04 19:49:51,274 INFO [train.py:715] (6/8) Epoch 4, batch 4550, loss[loss=0.1619, simple_loss=0.2328, pruned_loss=0.04551, over 4736.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2279, pruned_loss=0.0447, over 973037.95 frames.], batch size: 16, lr: 4.70e-04 +2022-05-04 19:50:30,679 INFO [train.py:715] (6/8) Epoch 4, batch 4600, loss[loss=0.1546, simple_loss=0.2318, pruned_loss=0.03869, over 4903.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2278, pruned_loss=0.04481, over 973270.68 frames.], batch size: 23, lr: 4.70e-04 +2022-05-04 19:51:10,987 INFO [train.py:715] (6/8) Epoch 4, batch 4650, loss[loss=0.1554, simple_loss=0.2245, pruned_loss=0.04316, over 4818.00 frames.], tot_loss[loss=0.158, simple_loss=0.2273, pruned_loss=0.04437, over 972596.81 frames.], batch size: 25, lr: 4.70e-04 +2022-05-04 19:51:51,344 INFO [train.py:715] (6/8) Epoch 4, batch 4700, loss[loss=0.1443, 
simple_loss=0.2138, pruned_loss=0.03744, over 4963.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2271, pruned_loss=0.04432, over 972202.43 frames.], batch size: 25, lr: 4.70e-04 +2022-05-04 19:52:31,247 INFO [train.py:715] (6/8) Epoch 4, batch 4750, loss[loss=0.1234, simple_loss=0.1962, pruned_loss=0.02531, over 4764.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2267, pruned_loss=0.04395, over 972204.87 frames.], batch size: 18, lr: 4.70e-04 +2022-05-04 19:53:13,037 INFO [train.py:715] (6/8) Epoch 4, batch 4800, loss[loss=0.1379, simple_loss=0.1988, pruned_loss=0.03848, over 4757.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2257, pruned_loss=0.04372, over 973020.58 frames.], batch size: 16, lr: 4.70e-04 +2022-05-04 19:53:53,560 INFO [train.py:715] (6/8) Epoch 4, batch 4850, loss[loss=0.1618, simple_loss=0.2291, pruned_loss=0.04724, over 4984.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2253, pruned_loss=0.04316, over 972766.06 frames.], batch size: 28, lr: 4.70e-04 +2022-05-04 19:54:32,958 INFO [train.py:715] (6/8) Epoch 4, batch 4900, loss[loss=0.1686, simple_loss=0.2344, pruned_loss=0.0514, over 4883.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2254, pruned_loss=0.04315, over 973483.72 frames.], batch size: 16, lr: 4.70e-04 +2022-05-04 19:55:12,350 INFO [train.py:715] (6/8) Epoch 4, batch 4950, loss[loss=0.1529, simple_loss=0.2385, pruned_loss=0.03362, over 4952.00 frames.], tot_loss[loss=0.157, simple_loss=0.2268, pruned_loss=0.04363, over 973342.90 frames.], batch size: 21, lr: 4.70e-04 +2022-05-04 19:55:52,411 INFO [train.py:715] (6/8) Epoch 4, batch 5000, loss[loss=0.1356, simple_loss=0.1994, pruned_loss=0.03586, over 4816.00 frames.], tot_loss[loss=0.158, simple_loss=0.2275, pruned_loss=0.04422, over 972681.93 frames.], batch size: 12, lr: 4.70e-04 +2022-05-04 19:56:32,444 INFO [train.py:715] (6/8) Epoch 4, batch 5050, loss[loss=0.1593, simple_loss=0.2459, pruned_loss=0.03632, over 4790.00 frames.], tot_loss[loss=0.159, simple_loss=0.2277, pruned_loss=0.04514, over 972114.47 frames.], batch size: 17, lr: 4.69e-04 +2022-05-04 19:57:12,351 INFO [train.py:715] (6/8) Epoch 4, batch 5100, loss[loss=0.1337, simple_loss=0.2133, pruned_loss=0.02708, over 4848.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2275, pruned_loss=0.04484, over 972272.16 frames.], batch size: 20, lr: 4.69e-04 +2022-05-04 19:57:51,522 INFO [train.py:715] (6/8) Epoch 4, batch 5150, loss[loss=0.1415, simple_loss=0.216, pruned_loss=0.03346, over 4815.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2277, pruned_loss=0.0445, over 972331.49 frames.], batch size: 26, lr: 4.69e-04 +2022-05-04 19:58:31,725 INFO [train.py:715] (6/8) Epoch 4, batch 5200, loss[loss=0.1724, simple_loss=0.2418, pruned_loss=0.05148, over 4911.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2275, pruned_loss=0.04462, over 971603.39 frames.], batch size: 17, lr: 4.69e-04 +2022-05-04 19:59:11,083 INFO [train.py:715] (6/8) Epoch 4, batch 5250, loss[loss=0.1566, simple_loss=0.2282, pruned_loss=0.04255, over 4692.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2285, pruned_loss=0.04503, over 971432.65 frames.], batch size: 15, lr: 4.69e-04 +2022-05-04 19:59:50,716 INFO [train.py:715] (6/8) Epoch 4, batch 5300, loss[loss=0.1414, simple_loss=0.2079, pruned_loss=0.0374, over 4906.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2279, pruned_loss=0.04494, over 971520.37 frames.], batch size: 18, lr: 4.69e-04 +2022-05-04 20:00:30,981 INFO [train.py:715] (6/8) Epoch 4, batch 5350, loss[loss=0.1596, simple_loss=0.2366, 
pruned_loss=0.04127, over 4983.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2274, pruned_loss=0.04492, over 971928.91 frames.], batch size: 25, lr: 4.69e-04 +2022-05-04 20:01:11,128 INFO [train.py:715] (6/8) Epoch 4, batch 5400, loss[loss=0.1334, simple_loss=0.2145, pruned_loss=0.02621, over 4874.00 frames.], tot_loss[loss=0.159, simple_loss=0.2276, pruned_loss=0.04516, over 972041.70 frames.], batch size: 16, lr: 4.69e-04 +2022-05-04 20:01:51,431 INFO [train.py:715] (6/8) Epoch 4, batch 5450, loss[loss=0.1661, simple_loss=0.233, pruned_loss=0.04963, over 4779.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2277, pruned_loss=0.04502, over 971883.70 frames.], batch size: 14, lr: 4.69e-04 +2022-05-04 20:02:30,843 INFO [train.py:715] (6/8) Epoch 4, batch 5500, loss[loss=0.1457, simple_loss=0.2075, pruned_loss=0.04197, over 4753.00 frames.], tot_loss[loss=0.1593, simple_loss=0.228, pruned_loss=0.04535, over 971932.25 frames.], batch size: 19, lr: 4.69e-04 +2022-05-04 20:03:11,388 INFO [train.py:715] (6/8) Epoch 4, batch 5550, loss[loss=0.1438, simple_loss=0.2109, pruned_loss=0.03836, over 4938.00 frames.], tot_loss[loss=0.159, simple_loss=0.2279, pruned_loss=0.04511, over 972361.18 frames.], batch size: 14, lr: 4.69e-04 +2022-05-04 20:03:51,126 INFO [train.py:715] (6/8) Epoch 4, batch 5600, loss[loss=0.1502, simple_loss=0.2182, pruned_loss=0.04104, over 4789.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2275, pruned_loss=0.04511, over 971718.48 frames.], batch size: 14, lr: 4.69e-04 +2022-05-04 20:04:31,032 INFO [train.py:715] (6/8) Epoch 4, batch 5650, loss[loss=0.1442, simple_loss=0.218, pruned_loss=0.0352, over 4959.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2266, pruned_loss=0.0443, over 972158.00 frames.], batch size: 35, lr: 4.68e-04 +2022-05-04 20:05:10,993 INFO [train.py:715] (6/8) Epoch 4, batch 5700, loss[loss=0.1922, simple_loss=0.2499, pruned_loss=0.06729, over 4753.00 frames.], tot_loss[loss=0.158, simple_loss=0.2272, pruned_loss=0.0444, over 972029.69 frames.], batch size: 19, lr: 4.68e-04 +2022-05-04 20:05:51,209 INFO [train.py:715] (6/8) Epoch 4, batch 5750, loss[loss=0.1379, simple_loss=0.2116, pruned_loss=0.03213, over 4925.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2268, pruned_loss=0.04428, over 971965.90 frames.], batch size: 23, lr: 4.68e-04 +2022-05-04 20:06:31,309 INFO [train.py:715] (6/8) Epoch 4, batch 5800, loss[loss=0.141, simple_loss=0.2007, pruned_loss=0.04064, over 4855.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2275, pruned_loss=0.04488, over 972936.91 frames.], batch size: 13, lr: 4.68e-04 +2022-05-04 20:07:10,965 INFO [train.py:715] (6/8) Epoch 4, batch 5850, loss[loss=0.2143, simple_loss=0.2765, pruned_loss=0.07603, over 4738.00 frames.], tot_loss[loss=0.1583, simple_loss=0.227, pruned_loss=0.04476, over 973231.26 frames.], batch size: 16, lr: 4.68e-04 +2022-05-04 20:07:51,261 INFO [train.py:715] (6/8) Epoch 4, batch 5900, loss[loss=0.1468, simple_loss=0.2163, pruned_loss=0.03866, over 4785.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2275, pruned_loss=0.04491, over 972769.70 frames.], batch size: 18, lr: 4.68e-04 +2022-05-04 20:08:30,942 INFO [train.py:715] (6/8) Epoch 4, batch 5950, loss[loss=0.1599, simple_loss=0.2292, pruned_loss=0.04536, over 4872.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2278, pruned_loss=0.04502, over 972801.36 frames.], batch size: 16, lr: 4.68e-04 +2022-05-04 20:09:10,575 INFO [train.py:715] (6/8) Epoch 4, batch 6000, loss[loss=0.1745, simple_loss=0.2435, pruned_loss=0.05276, over 4874.00 
frames.], tot_loss[loss=0.158, simple_loss=0.2274, pruned_loss=0.04431, over 972680.41 frames.], batch size: 22, lr: 4.68e-04 +2022-05-04 20:09:10,576 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 20:09:20,451 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1124, simple_loss=0.1981, pruned_loss=0.01337, over 914524.00 frames. +2022-05-04 20:10:00,565 INFO [train.py:715] (6/8) Epoch 4, batch 6050, loss[loss=0.1902, simple_loss=0.262, pruned_loss=0.05914, over 4874.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2272, pruned_loss=0.0442, over 973290.14 frames.], batch size: 16, lr: 4.68e-04 +2022-05-04 20:10:40,787 INFO [train.py:715] (6/8) Epoch 4, batch 6100, loss[loss=0.1752, simple_loss=0.2448, pruned_loss=0.05283, over 4948.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2277, pruned_loss=0.04457, over 973498.67 frames.], batch size: 21, lr: 4.68e-04 +2022-05-04 20:11:21,166 INFO [train.py:715] (6/8) Epoch 4, batch 6150, loss[loss=0.1466, simple_loss=0.2188, pruned_loss=0.03715, over 4851.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2275, pruned_loss=0.04453, over 973980.78 frames.], batch size: 32, lr: 4.68e-04 +2022-05-04 20:12:01,192 INFO [train.py:715] (6/8) Epoch 4, batch 6200, loss[loss=0.176, simple_loss=0.241, pruned_loss=0.05548, over 4925.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2278, pruned_loss=0.04465, over 973242.09 frames.], batch size: 18, lr: 4.68e-04 +2022-05-04 20:12:40,827 INFO [train.py:715] (6/8) Epoch 4, batch 6250, loss[loss=0.1595, simple_loss=0.2299, pruned_loss=0.04452, over 4901.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2268, pruned_loss=0.04449, over 972387.74 frames.], batch size: 19, lr: 4.68e-04 +2022-05-04 20:13:21,469 INFO [train.py:715] (6/8) Epoch 4, batch 6300, loss[loss=0.1964, simple_loss=0.2492, pruned_loss=0.07176, over 4845.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2267, pruned_loss=0.04418, over 972070.35 frames.], batch size: 20, lr: 4.67e-04 +2022-05-04 20:14:00,896 INFO [train.py:715] (6/8) Epoch 4, batch 6350, loss[loss=0.1656, simple_loss=0.2307, pruned_loss=0.05029, over 4811.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2268, pruned_loss=0.04428, over 972478.03 frames.], batch size: 15, lr: 4.67e-04 +2022-05-04 20:14:41,824 INFO [train.py:715] (6/8) Epoch 4, batch 6400, loss[loss=0.1757, simple_loss=0.2352, pruned_loss=0.05807, over 4688.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2274, pruned_loss=0.04459, over 972550.03 frames.], batch size: 15, lr: 4.67e-04 +2022-05-04 20:15:21,564 INFO [train.py:715] (6/8) Epoch 4, batch 6450, loss[loss=0.1484, simple_loss=0.2158, pruned_loss=0.04052, over 4925.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2264, pruned_loss=0.04408, over 972836.56 frames.], batch size: 23, lr: 4.67e-04 +2022-05-04 20:16:01,666 INFO [train.py:715] (6/8) Epoch 4, batch 6500, loss[loss=0.1613, simple_loss=0.2289, pruned_loss=0.04684, over 4838.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2263, pruned_loss=0.04369, over 972511.50 frames.], batch size: 30, lr: 4.67e-04 +2022-05-04 20:16:41,332 INFO [train.py:715] (6/8) Epoch 4, batch 6550, loss[loss=0.185, simple_loss=0.2502, pruned_loss=0.05987, over 4925.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2268, pruned_loss=0.04415, over 971764.52 frames.], batch size: 39, lr: 4.67e-04 +2022-05-04 20:17:20,666 INFO [train.py:715] (6/8) Epoch 4, batch 6600, loss[loss=0.1564, simple_loss=0.2328, pruned_loss=0.04004, over 4841.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2278, pruned_loss=0.04462, over 
972304.33 frames.], batch size: 26, lr: 4.67e-04 +2022-05-04 20:18:01,341 INFO [train.py:715] (6/8) Epoch 4, batch 6650, loss[loss=0.1525, simple_loss=0.2362, pruned_loss=0.03435, over 4917.00 frames.], tot_loss[loss=0.158, simple_loss=0.2276, pruned_loss=0.04426, over 973582.90 frames.], batch size: 23, lr: 4.67e-04 +2022-05-04 20:18:40,889 INFO [train.py:715] (6/8) Epoch 4, batch 6700, loss[loss=0.1514, simple_loss=0.2239, pruned_loss=0.03944, over 4985.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2278, pruned_loss=0.04477, over 973149.73 frames.], batch size: 28, lr: 4.67e-04 +2022-05-04 20:19:21,001 INFO [train.py:715] (6/8) Epoch 4, batch 6750, loss[loss=0.186, simple_loss=0.2578, pruned_loss=0.05708, over 4829.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2281, pruned_loss=0.04458, over 972553.99 frames.], batch size: 30, lr: 4.67e-04 +2022-05-04 20:20:00,777 INFO [train.py:715] (6/8) Epoch 4, batch 6800, loss[loss=0.1729, simple_loss=0.2452, pruned_loss=0.05031, over 4899.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2276, pruned_loss=0.04406, over 972514.78 frames.], batch size: 22, lr: 4.67e-04 +2022-05-04 20:20:40,794 INFO [train.py:715] (6/8) Epoch 4, batch 6850, loss[loss=0.1473, simple_loss=0.2257, pruned_loss=0.03439, over 4915.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2272, pruned_loss=0.04368, over 973372.09 frames.], batch size: 29, lr: 4.67e-04 +2022-05-04 20:21:20,108 INFO [train.py:715] (6/8) Epoch 4, batch 6900, loss[loss=0.1417, simple_loss=0.2069, pruned_loss=0.03826, over 4900.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2272, pruned_loss=0.04368, over 972905.30 frames.], batch size: 16, lr: 4.66e-04 +2022-05-04 20:21:59,582 INFO [train.py:715] (6/8) Epoch 4, batch 6950, loss[loss=0.1566, simple_loss=0.2301, pruned_loss=0.04151, over 4865.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2274, pruned_loss=0.04388, over 972236.42 frames.], batch size: 20, lr: 4.66e-04 +2022-05-04 20:22:39,327 INFO [train.py:715] (6/8) Epoch 4, batch 7000, loss[loss=0.1705, simple_loss=0.2378, pruned_loss=0.05159, over 4779.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2274, pruned_loss=0.04399, over 972767.64 frames.], batch size: 17, lr: 4.66e-04 +2022-05-04 20:23:19,199 INFO [train.py:715] (6/8) Epoch 4, batch 7050, loss[loss=0.1696, simple_loss=0.249, pruned_loss=0.04506, over 4807.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2281, pruned_loss=0.04429, over 972738.62 frames.], batch size: 24, lr: 4.66e-04 +2022-05-04 20:23:58,928 INFO [train.py:715] (6/8) Epoch 4, batch 7100, loss[loss=0.1358, simple_loss=0.2169, pruned_loss=0.02738, over 4934.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2264, pruned_loss=0.04341, over 971312.55 frames.], batch size: 21, lr: 4.66e-04 +2022-05-04 20:24:39,020 INFO [train.py:715] (6/8) Epoch 4, batch 7150, loss[loss=0.1448, simple_loss=0.2149, pruned_loss=0.03734, over 4769.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2262, pruned_loss=0.04333, over 971728.48 frames.], batch size: 18, lr: 4.66e-04 +2022-05-04 20:25:18,944 INFO [train.py:715] (6/8) Epoch 4, batch 7200, loss[loss=0.2069, simple_loss=0.2627, pruned_loss=0.07551, over 4975.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2261, pruned_loss=0.04358, over 971776.22 frames.], batch size: 14, lr: 4.66e-04 +2022-05-04 20:25:59,101 INFO [train.py:715] (6/8) Epoch 4, batch 7250, loss[loss=0.1952, simple_loss=0.256, pruned_loss=0.06721, over 4725.00 frames.], tot_loss[loss=0.1575, simple_loss=0.227, pruned_loss=0.04399, over 971500.90 frames.], batch size: 16, 
lr: 4.66e-04 +2022-05-04 20:26:38,423 INFO [train.py:715] (6/8) Epoch 4, batch 7300, loss[loss=0.1486, simple_loss=0.2175, pruned_loss=0.0399, over 4978.00 frames.], tot_loss[loss=0.1574, simple_loss=0.227, pruned_loss=0.04384, over 972193.16 frames.], batch size: 14, lr: 4.66e-04 +2022-05-04 20:27:18,104 INFO [train.py:715] (6/8) Epoch 4, batch 7350, loss[loss=0.1386, simple_loss=0.2171, pruned_loss=0.02999, over 4933.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2275, pruned_loss=0.04404, over 971999.00 frames.], batch size: 29, lr: 4.66e-04 +2022-05-04 20:27:58,077 INFO [train.py:715] (6/8) Epoch 4, batch 7400, loss[loss=0.1628, simple_loss=0.2436, pruned_loss=0.041, over 4972.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2268, pruned_loss=0.04374, over 971973.69 frames.], batch size: 28, lr: 4.66e-04 +2022-05-04 20:28:38,817 INFO [train.py:715] (6/8) Epoch 4, batch 7450, loss[loss=0.1426, simple_loss=0.2172, pruned_loss=0.03394, over 4791.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2271, pruned_loss=0.04361, over 971675.76 frames.], batch size: 17, lr: 4.66e-04 +2022-05-04 20:29:18,224 INFO [train.py:715] (6/8) Epoch 4, batch 7500, loss[loss=0.1676, simple_loss=0.2326, pruned_loss=0.05134, over 4872.00 frames.], tot_loss[loss=0.157, simple_loss=0.227, pruned_loss=0.04351, over 972253.87 frames.], batch size: 16, lr: 4.66e-04 +2022-05-04 20:29:58,245 INFO [train.py:715] (6/8) Epoch 4, batch 7550, loss[loss=0.1311, simple_loss=0.2074, pruned_loss=0.02737, over 4936.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2272, pruned_loss=0.04396, over 971522.97 frames.], batch size: 29, lr: 4.65e-04 +2022-05-04 20:30:38,898 INFO [train.py:715] (6/8) Epoch 4, batch 7600, loss[loss=0.1443, simple_loss=0.2184, pruned_loss=0.03506, over 4853.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2268, pruned_loss=0.04367, over 971027.78 frames.], batch size: 30, lr: 4.65e-04 +2022-05-04 20:31:18,410 INFO [train.py:715] (6/8) Epoch 4, batch 7650, loss[loss=0.1744, simple_loss=0.2366, pruned_loss=0.05612, over 4939.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2259, pruned_loss=0.04323, over 971656.96 frames.], batch size: 21, lr: 4.65e-04 +2022-05-04 20:31:58,071 INFO [train.py:715] (6/8) Epoch 4, batch 7700, loss[loss=0.1532, simple_loss=0.2185, pruned_loss=0.04397, over 4989.00 frames.], tot_loss[loss=0.157, simple_loss=0.2266, pruned_loss=0.04372, over 971764.82 frames.], batch size: 14, lr: 4.65e-04 +2022-05-04 20:32:38,173 INFO [train.py:715] (6/8) Epoch 4, batch 7750, loss[loss=0.1528, simple_loss=0.2128, pruned_loss=0.04634, over 4843.00 frames.], tot_loss[loss=0.158, simple_loss=0.2271, pruned_loss=0.04447, over 971639.27 frames.], batch size: 30, lr: 4.65e-04 +2022-05-04 20:33:18,308 INFO [train.py:715] (6/8) Epoch 4, batch 7800, loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03327, over 4876.00 frames.], tot_loss[loss=0.158, simple_loss=0.2272, pruned_loss=0.04442, over 972172.01 frames.], batch size: 22, lr: 4.65e-04 +2022-05-04 20:33:57,329 INFO [train.py:715] (6/8) Epoch 4, batch 7850, loss[loss=0.1656, simple_loss=0.2412, pruned_loss=0.04497, over 4926.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2269, pruned_loss=0.04372, over 972552.61 frames.], batch size: 23, lr: 4.65e-04 +2022-05-04 20:34:36,905 INFO [train.py:715] (6/8) Epoch 4, batch 7900, loss[loss=0.1668, simple_loss=0.2396, pruned_loss=0.04701, over 4843.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2268, pruned_loss=0.04389, over 972126.72 frames.], batch size: 30, lr: 4.65e-04 +2022-05-04 20:35:16,766 
INFO [train.py:715] (6/8) Epoch 4, batch 7950, loss[loss=0.1849, simple_loss=0.2551, pruned_loss=0.05735, over 4695.00 frames.], tot_loss[loss=0.158, simple_loss=0.2275, pruned_loss=0.04424, over 971127.16 frames.], batch size: 15, lr: 4.65e-04 +2022-05-04 20:35:56,349 INFO [train.py:715] (6/8) Epoch 4, batch 8000, loss[loss=0.1371, simple_loss=0.2153, pruned_loss=0.02946, over 4921.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2276, pruned_loss=0.04428, over 971108.42 frames.], batch size: 17, lr: 4.65e-04 +2022-05-04 20:36:36,312 INFO [train.py:715] (6/8) Epoch 4, batch 8050, loss[loss=0.152, simple_loss=0.2262, pruned_loss=0.03889, over 4943.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2287, pruned_loss=0.04505, over 971135.63 frames.], batch size: 29, lr: 4.65e-04 +2022-05-04 20:37:16,272 INFO [train.py:715] (6/8) Epoch 4, batch 8100, loss[loss=0.1749, simple_loss=0.2421, pruned_loss=0.05382, over 4769.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2285, pruned_loss=0.04529, over 970929.54 frames.], batch size: 19, lr: 4.65e-04 +2022-05-04 20:37:56,510 INFO [train.py:715] (6/8) Epoch 4, batch 8150, loss[loss=0.1437, simple_loss=0.2161, pruned_loss=0.03562, over 4936.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2274, pruned_loss=0.04475, over 971382.68 frames.], batch size: 23, lr: 4.65e-04 +2022-05-04 20:38:35,992 INFO [train.py:715] (6/8) Epoch 4, batch 8200, loss[loss=0.162, simple_loss=0.2327, pruned_loss=0.04563, over 4883.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2281, pruned_loss=0.04457, over 972061.29 frames.], batch size: 16, lr: 4.64e-04 +2022-05-04 20:39:15,731 INFO [train.py:715] (6/8) Epoch 4, batch 8250, loss[loss=0.1745, simple_loss=0.2399, pruned_loss=0.05459, over 4746.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2281, pruned_loss=0.04455, over 971169.82 frames.], batch size: 16, lr: 4.64e-04 +2022-05-04 20:39:55,880 INFO [train.py:715] (6/8) Epoch 4, batch 8300, loss[loss=0.1525, simple_loss=0.2251, pruned_loss=0.03992, over 4972.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2289, pruned_loss=0.04498, over 972020.19 frames.], batch size: 24, lr: 4.64e-04 +2022-05-04 20:40:35,315 INFO [train.py:715] (6/8) Epoch 4, batch 8350, loss[loss=0.1436, simple_loss=0.2146, pruned_loss=0.03628, over 4838.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2283, pruned_loss=0.04436, over 973082.49 frames.], batch size: 25, lr: 4.64e-04 +2022-05-04 20:41:15,405 INFO [train.py:715] (6/8) Epoch 4, batch 8400, loss[loss=0.1598, simple_loss=0.2301, pruned_loss=0.04472, over 4946.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2278, pruned_loss=0.04428, over 973509.89 frames.], batch size: 24, lr: 4.64e-04 +2022-05-04 20:41:55,747 INFO [train.py:715] (6/8) Epoch 4, batch 8450, loss[loss=0.1592, simple_loss=0.2258, pruned_loss=0.04629, over 4867.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2271, pruned_loss=0.04408, over 973816.64 frames.], batch size: 32, lr: 4.64e-04 +2022-05-04 20:42:35,855 INFO [train.py:715] (6/8) Epoch 4, batch 8500, loss[loss=0.1619, simple_loss=0.2302, pruned_loss=0.04677, over 4862.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2279, pruned_loss=0.04484, over 974607.03 frames.], batch size: 20, lr: 4.64e-04 +2022-05-04 20:43:15,265 INFO [train.py:715] (6/8) Epoch 4, batch 8550, loss[loss=0.1423, simple_loss=0.2247, pruned_loss=0.03001, over 4779.00 frames.], tot_loss[loss=0.158, simple_loss=0.2275, pruned_loss=0.0442, over 973284.29 frames.], batch size: 18, lr: 4.64e-04 +2022-05-04 20:43:55,081 INFO [train.py:715] (6/8) Epoch 4, 
batch 8600, loss[loss=0.1376, simple_loss=0.2193, pruned_loss=0.02795, over 4844.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2279, pruned_loss=0.04445, over 972157.60 frames.], batch size: 20, lr: 4.64e-04 +2022-05-04 20:44:35,243 INFO [train.py:715] (6/8) Epoch 4, batch 8650, loss[loss=0.1797, simple_loss=0.2396, pruned_loss=0.05992, over 4960.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2279, pruned_loss=0.04429, over 972638.81 frames.], batch size: 21, lr: 4.64e-04 +2022-05-04 20:45:14,873 INFO [train.py:715] (6/8) Epoch 4, batch 8700, loss[loss=0.192, simple_loss=0.2595, pruned_loss=0.06222, over 4964.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2292, pruned_loss=0.04514, over 972099.71 frames.], batch size: 21, lr: 4.64e-04 +2022-05-04 20:45:55,168 INFO [train.py:715] (6/8) Epoch 4, batch 8750, loss[loss=0.1413, simple_loss=0.2126, pruned_loss=0.03495, over 4824.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2279, pruned_loss=0.04481, over 973082.66 frames.], batch size: 26, lr: 4.64e-04 +2022-05-04 20:46:35,399 INFO [train.py:715] (6/8) Epoch 4, batch 8800, loss[loss=0.163, simple_loss=0.2324, pruned_loss=0.04676, over 4868.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2277, pruned_loss=0.04463, over 972421.29 frames.], batch size: 20, lr: 4.63e-04 +2022-05-04 20:47:15,433 INFO [train.py:715] (6/8) Epoch 4, batch 8850, loss[loss=0.1846, simple_loss=0.2514, pruned_loss=0.05888, over 4756.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2275, pruned_loss=0.04452, over 972361.07 frames.], batch size: 16, lr: 4.63e-04 +2022-05-04 20:47:55,135 INFO [train.py:715] (6/8) Epoch 4, batch 8900, loss[loss=0.1455, simple_loss=0.2182, pruned_loss=0.03644, over 4740.00 frames.], tot_loss[loss=0.1575, simple_loss=0.227, pruned_loss=0.04403, over 972020.57 frames.], batch size: 16, lr: 4.63e-04 +2022-05-04 20:48:34,762 INFO [train.py:715] (6/8) Epoch 4, batch 8950, loss[loss=0.1671, simple_loss=0.2304, pruned_loss=0.05191, over 4968.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2268, pruned_loss=0.04402, over 972315.32 frames.], batch size: 39, lr: 4.63e-04 +2022-05-04 20:49:15,026 INFO [train.py:715] (6/8) Epoch 4, batch 9000, loss[loss=0.1495, simple_loss=0.2199, pruned_loss=0.03956, over 4832.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2277, pruned_loss=0.04455, over 972326.95 frames.], batch size: 15, lr: 4.63e-04 +2022-05-04 20:49:15,026 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 20:49:24,977 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1123, simple_loss=0.1979, pruned_loss=0.01336, over 914524.00 frames. 
+2022-05-04 20:50:05,306 INFO [train.py:715] (6/8) Epoch 4, batch 9050, loss[loss=0.149, simple_loss=0.2262, pruned_loss=0.03591, over 4981.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2279, pruned_loss=0.04464, over 971791.54 frames.], batch size: 15, lr: 4.63e-04 +2022-05-04 20:50:45,315 INFO [train.py:715] (6/8) Epoch 4, batch 9100, loss[loss=0.1489, simple_loss=0.2183, pruned_loss=0.03972, over 4972.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2269, pruned_loss=0.04426, over 971960.56 frames.], batch size: 25, lr: 4.63e-04 +2022-05-04 20:51:24,713 INFO [train.py:715] (6/8) Epoch 4, batch 9150, loss[loss=0.1559, simple_loss=0.2351, pruned_loss=0.03834, over 4978.00 frames.], tot_loss[loss=0.158, simple_loss=0.2274, pruned_loss=0.04426, over 972354.30 frames.], batch size: 15, lr: 4.63e-04 +2022-05-04 20:52:04,887 INFO [train.py:715] (6/8) Epoch 4, batch 9200, loss[loss=0.1312, simple_loss=0.2151, pruned_loss=0.0236, over 4769.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2277, pruned_loss=0.0446, over 972081.49 frames.], batch size: 12, lr: 4.63e-04 +2022-05-04 20:52:45,290 INFO [train.py:715] (6/8) Epoch 4, batch 9250, loss[loss=0.117, simple_loss=0.1909, pruned_loss=0.02153, over 4978.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2286, pruned_loss=0.04514, over 972711.08 frames.], batch size: 25, lr: 4.63e-04 +2022-05-04 20:53:24,538 INFO [train.py:715] (6/8) Epoch 4, batch 9300, loss[loss=0.1378, simple_loss=0.2188, pruned_loss=0.02845, over 4710.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2279, pruned_loss=0.04481, over 972313.02 frames.], batch size: 15, lr: 4.63e-04 +2022-05-04 20:54:04,530 INFO [train.py:715] (6/8) Epoch 4, batch 9350, loss[loss=0.1618, simple_loss=0.2265, pruned_loss=0.04858, over 4808.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2274, pruned_loss=0.0446, over 973197.56 frames.], batch size: 14, lr: 4.63e-04 +2022-05-04 20:54:44,474 INFO [train.py:715] (6/8) Epoch 4, batch 9400, loss[loss=0.1649, simple_loss=0.2278, pruned_loss=0.05098, over 4828.00 frames.], tot_loss[loss=0.159, simple_loss=0.228, pruned_loss=0.045, over 972874.25 frames.], batch size: 15, lr: 4.63e-04 +2022-05-04 20:55:24,006 INFO [train.py:715] (6/8) Epoch 4, batch 9450, loss[loss=0.1785, simple_loss=0.2525, pruned_loss=0.05222, over 4793.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2274, pruned_loss=0.0447, over 972893.81 frames.], batch size: 18, lr: 4.62e-04 +2022-05-04 20:56:04,100 INFO [train.py:715] (6/8) Epoch 4, batch 9500, loss[loss=0.1923, simple_loss=0.2475, pruned_loss=0.06854, over 4762.00 frames.], tot_loss[loss=0.158, simple_loss=0.227, pruned_loss=0.04448, over 972347.80 frames.], batch size: 19, lr: 4.62e-04 +2022-05-04 20:56:44,153 INFO [train.py:715] (6/8) Epoch 4, batch 9550, loss[loss=0.1516, simple_loss=0.2249, pruned_loss=0.03918, over 4807.00 frames.], tot_loss[loss=0.1577, simple_loss=0.227, pruned_loss=0.04416, over 971847.94 frames.], batch size: 21, lr: 4.62e-04 +2022-05-04 20:57:24,672 INFO [train.py:715] (6/8) Epoch 4, batch 9600, loss[loss=0.1514, simple_loss=0.2157, pruned_loss=0.04351, over 4971.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2268, pruned_loss=0.04431, over 971872.31 frames.], batch size: 24, lr: 4.62e-04 +2022-05-04 20:58:04,098 INFO [train.py:715] (6/8) Epoch 4, batch 9650, loss[loss=0.1413, simple_loss=0.2106, pruned_loss=0.03599, over 4893.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2267, pruned_loss=0.04419, over 972224.86 frames.], batch size: 19, lr: 4.62e-04 +2022-05-04 20:58:44,664 INFO [train.py:715] 
(6/8) Epoch 4, batch 9700, loss[loss=0.1657, simple_loss=0.2381, pruned_loss=0.04665, over 4991.00 frames.], tot_loss[loss=0.1568, simple_loss=0.226, pruned_loss=0.04384, over 971978.99 frames.], batch size: 26, lr: 4.62e-04 +2022-05-04 20:59:25,200 INFO [train.py:715] (6/8) Epoch 4, batch 9750, loss[loss=0.1293, simple_loss=0.2021, pruned_loss=0.02819, over 4906.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2256, pruned_loss=0.04345, over 972532.42 frames.], batch size: 19, lr: 4.62e-04 +2022-05-04 21:00:04,728 INFO [train.py:715] (6/8) Epoch 4, batch 9800, loss[loss=0.1488, simple_loss=0.2306, pruned_loss=0.03355, over 4870.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2256, pruned_loss=0.04346, over 972278.51 frames.], batch size: 20, lr: 4.62e-04 +2022-05-04 21:00:43,865 INFO [train.py:715] (6/8) Epoch 4, batch 9850, loss[loss=0.1651, simple_loss=0.233, pruned_loss=0.04861, over 4914.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2267, pruned_loss=0.04356, over 972500.62 frames.], batch size: 39, lr: 4.62e-04 +2022-05-04 21:01:23,904 INFO [train.py:715] (6/8) Epoch 4, batch 9900, loss[loss=0.1681, simple_loss=0.233, pruned_loss=0.05162, over 4906.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2267, pruned_loss=0.04335, over 971749.94 frames.], batch size: 19, lr: 4.62e-04 +2022-05-04 21:02:03,381 INFO [train.py:715] (6/8) Epoch 4, batch 9950, loss[loss=0.1563, simple_loss=0.2349, pruned_loss=0.0389, over 4848.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2259, pruned_loss=0.04281, over 972914.16 frames.], batch size: 13, lr: 4.62e-04 +2022-05-04 21:02:42,755 INFO [train.py:715] (6/8) Epoch 4, batch 10000, loss[loss=0.1431, simple_loss=0.219, pruned_loss=0.03362, over 4933.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2255, pruned_loss=0.04232, over 972625.58 frames.], batch size: 21, lr: 4.62e-04 +2022-05-04 21:03:22,517 INFO [train.py:715] (6/8) Epoch 4, batch 10050, loss[loss=0.1668, simple_loss=0.2444, pruned_loss=0.04457, over 4962.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2266, pruned_loss=0.04347, over 973064.37 frames.], batch size: 24, lr: 4.62e-04 +2022-05-04 21:04:02,315 INFO [train.py:715] (6/8) Epoch 4, batch 10100, loss[loss=0.1433, simple_loss=0.2242, pruned_loss=0.03117, over 4824.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2261, pruned_loss=0.04275, over 972521.39 frames.], batch size: 15, lr: 4.61e-04 +2022-05-04 21:04:41,555 INFO [train.py:715] (6/8) Epoch 4, batch 10150, loss[loss=0.1594, simple_loss=0.2381, pruned_loss=0.04038, over 4925.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2267, pruned_loss=0.04358, over 972738.23 frames.], batch size: 23, lr: 4.61e-04 +2022-05-04 21:05:21,482 INFO [train.py:715] (6/8) Epoch 4, batch 10200, loss[loss=0.1527, simple_loss=0.2275, pruned_loss=0.03892, over 4869.00 frames.], tot_loss[loss=0.1563, simple_loss=0.226, pruned_loss=0.04333, over 972395.09 frames.], batch size: 38, lr: 4.61e-04 +2022-05-04 21:06:02,062 INFO [train.py:715] (6/8) Epoch 4, batch 10250, loss[loss=0.1689, simple_loss=0.2297, pruned_loss=0.05408, over 4848.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2269, pruned_loss=0.04384, over 972210.65 frames.], batch size: 30, lr: 4.61e-04 +2022-05-04 21:06:41,847 INFO [train.py:715] (6/8) Epoch 4, batch 10300, loss[loss=0.181, simple_loss=0.2463, pruned_loss=0.05788, over 4791.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2266, pruned_loss=0.04359, over 971954.64 frames.], batch size: 17, lr: 4.61e-04 +2022-05-04 21:07:21,505 INFO [train.py:715] (6/8) Epoch 4, batch 10350, 
loss[loss=0.1158, simple_loss=0.1895, pruned_loss=0.02104, over 4815.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2261, pruned_loss=0.0435, over 972605.50 frames.], batch size: 26, lr: 4.61e-04 +2022-05-04 21:08:01,707 INFO [train.py:715] (6/8) Epoch 4, batch 10400, loss[loss=0.1516, simple_loss=0.212, pruned_loss=0.04564, over 4762.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2264, pruned_loss=0.04336, over 972397.71 frames.], batch size: 14, lr: 4.61e-04 +2022-05-04 21:08:42,284 INFO [train.py:715] (6/8) Epoch 4, batch 10450, loss[loss=0.1977, simple_loss=0.2585, pruned_loss=0.06845, over 4956.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2262, pruned_loss=0.04342, over 972183.02 frames.], batch size: 35, lr: 4.61e-04 +2022-05-04 21:09:21,890 INFO [train.py:715] (6/8) Epoch 4, batch 10500, loss[loss=0.1648, simple_loss=0.237, pruned_loss=0.04626, over 4960.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2265, pruned_loss=0.04351, over 972932.86 frames.], batch size: 35, lr: 4.61e-04 +2022-05-04 21:10:02,143 INFO [train.py:715] (6/8) Epoch 4, batch 10550, loss[loss=0.1448, simple_loss=0.2073, pruned_loss=0.04118, over 4796.00 frames.], tot_loss[loss=0.156, simple_loss=0.2256, pruned_loss=0.04322, over 972400.90 frames.], batch size: 13, lr: 4.61e-04 +2022-05-04 21:10:42,497 INFO [train.py:715] (6/8) Epoch 4, batch 10600, loss[loss=0.1418, simple_loss=0.2162, pruned_loss=0.03372, over 4919.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2267, pruned_loss=0.04414, over 972708.33 frames.], batch size: 23, lr: 4.61e-04 +2022-05-04 21:11:22,294 INFO [train.py:715] (6/8) Epoch 4, batch 10650, loss[loss=0.1521, simple_loss=0.2107, pruned_loss=0.04677, over 4800.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2276, pruned_loss=0.0448, over 973498.32 frames.], batch size: 12, lr: 4.61e-04 +2022-05-04 21:12:02,343 INFO [train.py:715] (6/8) Epoch 4, batch 10700, loss[loss=0.1604, simple_loss=0.2351, pruned_loss=0.04282, over 4788.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2274, pruned_loss=0.04435, over 973598.34 frames.], batch size: 17, lr: 4.61e-04 +2022-05-04 21:12:42,038 INFO [train.py:715] (6/8) Epoch 4, batch 10750, loss[loss=0.1679, simple_loss=0.2459, pruned_loss=0.04494, over 4952.00 frames.], tot_loss[loss=0.158, simple_loss=0.2275, pruned_loss=0.04428, over 973290.32 frames.], batch size: 29, lr: 4.60e-04 +2022-05-04 21:13:22,458 INFO [train.py:715] (6/8) Epoch 4, batch 10800, loss[loss=0.1286, simple_loss=0.1999, pruned_loss=0.02867, over 4868.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2273, pruned_loss=0.04443, over 973157.47 frames.], batch size: 20, lr: 4.60e-04 +2022-05-04 21:14:01,773 INFO [train.py:715] (6/8) Epoch 4, batch 10850, loss[loss=0.1279, simple_loss=0.2003, pruned_loss=0.02772, over 4990.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2273, pruned_loss=0.04478, over 973130.14 frames.], batch size: 14, lr: 4.60e-04 +2022-05-04 21:14:41,704 INFO [train.py:715] (6/8) Epoch 4, batch 10900, loss[loss=0.1558, simple_loss=0.217, pruned_loss=0.04729, over 4861.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2273, pruned_loss=0.04451, over 972590.31 frames.], batch size: 16, lr: 4.60e-04 +2022-05-04 21:15:22,022 INFO [train.py:715] (6/8) Epoch 4, batch 10950, loss[loss=0.1402, simple_loss=0.2019, pruned_loss=0.03922, over 4848.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2276, pruned_loss=0.04468, over 971987.35 frames.], batch size: 13, lr: 4.60e-04 +2022-05-04 21:16:01,655 INFO [train.py:715] (6/8) Epoch 4, batch 11000, loss[loss=0.16, 
simple_loss=0.2295, pruned_loss=0.0453, over 4928.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2272, pruned_loss=0.04444, over 972092.70 frames.], batch size: 29, lr: 4.60e-04 +2022-05-04 21:16:44,071 INFO [train.py:715] (6/8) Epoch 4, batch 11050, loss[loss=0.1339, simple_loss=0.2163, pruned_loss=0.02578, over 4908.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2266, pruned_loss=0.044, over 972370.87 frames.], batch size: 17, lr: 4.60e-04 +2022-05-04 21:17:24,550 INFO [train.py:715] (6/8) Epoch 4, batch 11100, loss[loss=0.1413, simple_loss=0.2151, pruned_loss=0.03378, over 4877.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2256, pruned_loss=0.04357, over 972228.25 frames.], batch size: 20, lr: 4.60e-04 +2022-05-04 21:18:07,347 INFO [train.py:715] (6/8) Epoch 4, batch 11150, loss[loss=0.2066, simple_loss=0.2728, pruned_loss=0.07018, over 4839.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2264, pruned_loss=0.04407, over 972223.07 frames.], batch size: 15, lr: 4.60e-04 +2022-05-04 21:18:49,568 INFO [train.py:715] (6/8) Epoch 4, batch 11200, loss[loss=0.1728, simple_loss=0.2439, pruned_loss=0.05089, over 4890.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2265, pruned_loss=0.04383, over 972555.83 frames.], batch size: 17, lr: 4.60e-04 +2022-05-04 21:19:29,994 INFO [train.py:715] (6/8) Epoch 4, batch 11250, loss[loss=0.1584, simple_loss=0.2175, pruned_loss=0.04962, over 4982.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2264, pruned_loss=0.04413, over 972041.82 frames.], batch size: 28, lr: 4.60e-04 +2022-05-04 21:20:12,900 INFO [train.py:715] (6/8) Epoch 4, batch 11300, loss[loss=0.162, simple_loss=0.2324, pruned_loss=0.04575, over 4748.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2259, pruned_loss=0.04383, over 972356.00 frames.], batch size: 16, lr: 4.60e-04 +2022-05-04 21:20:52,358 INFO [train.py:715] (6/8) Epoch 4, batch 11350, loss[loss=0.1607, simple_loss=0.2271, pruned_loss=0.04718, over 4868.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2258, pruned_loss=0.04381, over 972280.25 frames.], batch size: 20, lr: 4.60e-04 +2022-05-04 21:21:31,874 INFO [train.py:715] (6/8) Epoch 4, batch 11400, loss[loss=0.1357, simple_loss=0.2163, pruned_loss=0.02754, over 4904.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2252, pruned_loss=0.04307, over 972329.40 frames.], batch size: 29, lr: 4.59e-04 +2022-05-04 21:22:11,699 INFO [train.py:715] (6/8) Epoch 4, batch 11450, loss[loss=0.1415, simple_loss=0.215, pruned_loss=0.03399, over 4930.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2246, pruned_loss=0.04286, over 972981.20 frames.], batch size: 21, lr: 4.59e-04 +2022-05-04 21:22:51,362 INFO [train.py:715] (6/8) Epoch 4, batch 11500, loss[loss=0.1573, simple_loss=0.2277, pruned_loss=0.04342, over 4958.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2249, pruned_loss=0.04295, over 973037.40 frames.], batch size: 35, lr: 4.59e-04 +2022-05-04 21:23:30,603 INFO [train.py:715] (6/8) Epoch 4, batch 11550, loss[loss=0.1596, simple_loss=0.2284, pruned_loss=0.04543, over 4745.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2256, pruned_loss=0.0434, over 972046.59 frames.], batch size: 19, lr: 4.59e-04 +2022-05-04 21:24:09,868 INFO [train.py:715] (6/8) Epoch 4, batch 11600, loss[loss=0.1481, simple_loss=0.2134, pruned_loss=0.04142, over 4917.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2254, pruned_loss=0.04339, over 972544.55 frames.], batch size: 18, lr: 4.59e-04 +2022-05-04 21:24:50,381 INFO [train.py:715] (6/8) Epoch 4, batch 11650, loss[loss=0.2029, simple_loss=0.2646, 
pruned_loss=0.07056, over 4866.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2253, pruned_loss=0.04367, over 973334.16 frames.], batch size: 20, lr: 4.59e-04 +2022-05-04 21:25:30,282 INFO [train.py:715] (6/8) Epoch 4, batch 11700, loss[loss=0.1639, simple_loss=0.2341, pruned_loss=0.04686, over 4978.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2254, pruned_loss=0.04376, over 972342.67 frames.], batch size: 14, lr: 4.59e-04 +2022-05-04 21:26:10,252 INFO [train.py:715] (6/8) Epoch 4, batch 11750, loss[loss=0.1399, simple_loss=0.2236, pruned_loss=0.02805, over 4952.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2253, pruned_loss=0.04373, over 972061.97 frames.], batch size: 29, lr: 4.59e-04 +2022-05-04 21:26:50,005 INFO [train.py:715] (6/8) Epoch 4, batch 11800, loss[loss=0.1549, simple_loss=0.2202, pruned_loss=0.04476, over 4747.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2251, pruned_loss=0.04369, over 971703.03 frames.], batch size: 19, lr: 4.59e-04 +2022-05-04 21:27:30,269 INFO [train.py:715] (6/8) Epoch 4, batch 11850, loss[loss=0.1934, simple_loss=0.2527, pruned_loss=0.06712, over 4829.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2258, pruned_loss=0.04371, over 972607.89 frames.], batch size: 30, lr: 4.59e-04 +2022-05-04 21:28:09,524 INFO [train.py:715] (6/8) Epoch 4, batch 11900, loss[loss=0.1439, simple_loss=0.2145, pruned_loss=0.03669, over 4771.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2257, pruned_loss=0.04344, over 971419.59 frames.], batch size: 18, lr: 4.59e-04 +2022-05-04 21:28:49,304 INFO [train.py:715] (6/8) Epoch 4, batch 11950, loss[loss=0.1631, simple_loss=0.2352, pruned_loss=0.04547, over 4803.00 frames.], tot_loss[loss=0.156, simple_loss=0.2254, pruned_loss=0.04333, over 971085.88 frames.], batch size: 21, lr: 4.59e-04 +2022-05-04 21:29:29,760 INFO [train.py:715] (6/8) Epoch 4, batch 12000, loss[loss=0.134, simple_loss=0.2032, pruned_loss=0.03243, over 4925.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2261, pruned_loss=0.04347, over 971820.70 frames.], batch size: 23, lr: 4.59e-04 +2022-05-04 21:29:29,761 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 21:29:49,526 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1122, simple_loss=0.198, pruned_loss=0.01324, over 914524.00 frames. 
+2022-05-04 21:30:30,060 INFO [train.py:715] (6/8) Epoch 4, batch 12050, loss[loss=0.1471, simple_loss=0.2185, pruned_loss=0.03787, over 4877.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2262, pruned_loss=0.0433, over 972460.20 frames.], batch size: 16, lr: 4.58e-04 +2022-05-04 21:31:09,877 INFO [train.py:715] (6/8) Epoch 4, batch 12100, loss[loss=0.1464, simple_loss=0.2171, pruned_loss=0.03786, over 4908.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2276, pruned_loss=0.04471, over 973356.26 frames.], batch size: 19, lr: 4.58e-04 +2022-05-04 21:31:50,049 INFO [train.py:715] (6/8) Epoch 4, batch 12150, loss[loss=0.1399, simple_loss=0.2175, pruned_loss=0.03114, over 4986.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2267, pruned_loss=0.04395, over 972227.31 frames.], batch size: 28, lr: 4.58e-04 +2022-05-04 21:32:30,093 INFO [train.py:715] (6/8) Epoch 4, batch 12200, loss[loss=0.1332, simple_loss=0.2128, pruned_loss=0.02679, over 4988.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2256, pruned_loss=0.04355, over 971906.16 frames.], batch size: 24, lr: 4.58e-04 +2022-05-04 21:33:10,431 INFO [train.py:715] (6/8) Epoch 4, batch 12250, loss[loss=0.1492, simple_loss=0.2194, pruned_loss=0.03954, over 4816.00 frames.], tot_loss[loss=0.1563, simple_loss=0.226, pruned_loss=0.04335, over 972111.84 frames.], batch size: 21, lr: 4.58e-04 +2022-05-04 21:33:49,413 INFO [train.py:715] (6/8) Epoch 4, batch 12300, loss[loss=0.1475, simple_loss=0.2198, pruned_loss=0.03756, over 4814.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2272, pruned_loss=0.04377, over 971862.64 frames.], batch size: 25, lr: 4.58e-04 +2022-05-04 21:34:29,450 INFO [train.py:715] (6/8) Epoch 4, batch 12350, loss[loss=0.1624, simple_loss=0.2481, pruned_loss=0.03836, over 4934.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2272, pruned_loss=0.0433, over 972133.19 frames.], batch size: 21, lr: 4.58e-04 +2022-05-04 21:35:10,020 INFO [train.py:715] (6/8) Epoch 4, batch 12400, loss[loss=0.1543, simple_loss=0.2155, pruned_loss=0.0466, over 4964.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2267, pruned_loss=0.04327, over 972031.04 frames.], batch size: 28, lr: 4.58e-04 +2022-05-04 21:35:49,229 INFO [train.py:715] (6/8) Epoch 4, batch 12450, loss[loss=0.1586, simple_loss=0.2272, pruned_loss=0.04498, over 4776.00 frames.], tot_loss[loss=0.1562, simple_loss=0.226, pruned_loss=0.04321, over 971587.68 frames.], batch size: 18, lr: 4.58e-04 +2022-05-04 21:36:29,195 INFO [train.py:715] (6/8) Epoch 4, batch 12500, loss[loss=0.1463, simple_loss=0.2138, pruned_loss=0.03938, over 4814.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2256, pruned_loss=0.04329, over 971931.46 frames.], batch size: 27, lr: 4.58e-04 +2022-05-04 21:37:08,759 INFO [train.py:715] (6/8) Epoch 4, batch 12550, loss[loss=0.1803, simple_loss=0.243, pruned_loss=0.05879, over 4847.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2254, pruned_loss=0.04337, over 972341.04 frames.], batch size: 13, lr: 4.58e-04 +2022-05-04 21:37:48,536 INFO [train.py:715] (6/8) Epoch 4, batch 12600, loss[loss=0.1655, simple_loss=0.237, pruned_loss=0.04706, over 4960.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2255, pruned_loss=0.04346, over 971343.03 frames.], batch size: 15, lr: 4.58e-04 +2022-05-04 21:38:27,425 INFO [train.py:715] (6/8) Epoch 4, batch 12650, loss[loss=0.1817, simple_loss=0.2538, pruned_loss=0.05479, over 4882.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2255, pruned_loss=0.04383, over 971071.37 frames.], batch size: 39, lr: 4.58e-04 +2022-05-04 21:39:07,271 
INFO [train.py:715] (6/8) Epoch 4, batch 12700, loss[loss=0.187, simple_loss=0.2595, pruned_loss=0.05727, over 4935.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2266, pruned_loss=0.04418, over 972210.43 frames.], batch size: 18, lr: 4.58e-04 +2022-05-04 21:39:47,345 INFO [train.py:715] (6/8) Epoch 4, batch 12750, loss[loss=0.1548, simple_loss=0.2271, pruned_loss=0.04128, over 4953.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2269, pruned_loss=0.04422, over 972535.23 frames.], batch size: 31, lr: 4.57e-04 +2022-05-04 21:40:29,593 INFO [train.py:715] (6/8) Epoch 4, batch 12800, loss[loss=0.1691, simple_loss=0.2337, pruned_loss=0.05221, over 4862.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2278, pruned_loss=0.04443, over 972128.33 frames.], batch size: 32, lr: 4.57e-04 +2022-05-04 21:41:08,989 INFO [train.py:715] (6/8) Epoch 4, batch 12850, loss[loss=0.1885, simple_loss=0.2606, pruned_loss=0.05824, over 4875.00 frames.], tot_loss[loss=0.1575, simple_loss=0.227, pruned_loss=0.04404, over 972337.51 frames.], batch size: 22, lr: 4.57e-04 +2022-05-04 21:41:49,118 INFO [train.py:715] (6/8) Epoch 4, batch 12900, loss[loss=0.1361, simple_loss=0.2111, pruned_loss=0.03058, over 4982.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2268, pruned_loss=0.04407, over 972201.20 frames.], batch size: 28, lr: 4.57e-04 +2022-05-04 21:42:29,043 INFO [train.py:715] (6/8) Epoch 4, batch 12950, loss[loss=0.1667, simple_loss=0.2353, pruned_loss=0.04903, over 4782.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2276, pruned_loss=0.04394, over 971977.58 frames.], batch size: 17, lr: 4.57e-04 +2022-05-04 21:43:07,909 INFO [train.py:715] (6/8) Epoch 4, batch 13000, loss[loss=0.1644, simple_loss=0.2349, pruned_loss=0.04697, over 4862.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2276, pruned_loss=0.04444, over 970740.43 frames.], batch size: 32, lr: 4.57e-04 +2022-05-04 21:43:47,495 INFO [train.py:715] (6/8) Epoch 4, batch 13050, loss[loss=0.1776, simple_loss=0.2428, pruned_loss=0.05617, over 4927.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2284, pruned_loss=0.04497, over 971322.29 frames.], batch size: 18, lr: 4.57e-04 +2022-05-04 21:44:27,454 INFO [train.py:715] (6/8) Epoch 4, batch 13100, loss[loss=0.1399, simple_loss=0.2014, pruned_loss=0.03923, over 4796.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2278, pruned_loss=0.04496, over 971852.38 frames.], batch size: 14, lr: 4.57e-04 +2022-05-04 21:45:06,499 INFO [train.py:715] (6/8) Epoch 4, batch 13150, loss[loss=0.2013, simple_loss=0.2686, pruned_loss=0.06703, over 4965.00 frames.], tot_loss[loss=0.158, simple_loss=0.2271, pruned_loss=0.0445, over 972013.13 frames.], batch size: 15, lr: 4.57e-04 +2022-05-04 21:45:46,237 INFO [train.py:715] (6/8) Epoch 4, batch 13200, loss[loss=0.1585, simple_loss=0.235, pruned_loss=0.04098, over 4783.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2276, pruned_loss=0.04429, over 972115.66 frames.], batch size: 18, lr: 4.57e-04 +2022-05-04 21:46:26,559 INFO [train.py:715] (6/8) Epoch 4, batch 13250, loss[loss=0.1664, simple_loss=0.2268, pruned_loss=0.05303, over 4695.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2272, pruned_loss=0.04421, over 972118.42 frames.], batch size: 15, lr: 4.57e-04 +2022-05-04 21:47:06,164 INFO [train.py:715] (6/8) Epoch 4, batch 13300, loss[loss=0.1529, simple_loss=0.2187, pruned_loss=0.04352, over 4705.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2266, pruned_loss=0.04362, over 972154.16 frames.], batch size: 15, lr: 4.57e-04 +2022-05-04 21:47:45,772 INFO [train.py:715] 
(6/8) Epoch 4, batch 13350, loss[loss=0.1533, simple_loss=0.2324, pruned_loss=0.03714, over 4779.00 frames.], tot_loss[loss=0.1571, simple_loss=0.226, pruned_loss=0.0441, over 971855.10 frames.], batch size: 12, lr: 4.57e-04 +2022-05-04 21:48:25,393 INFO [train.py:715] (6/8) Epoch 4, batch 13400, loss[loss=0.1551, simple_loss=0.2194, pruned_loss=0.04535, over 4841.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2256, pruned_loss=0.04368, over 971711.54 frames.], batch size: 13, lr: 4.56e-04 +2022-05-04 21:49:05,422 INFO [train.py:715] (6/8) Epoch 4, batch 13450, loss[loss=0.1395, simple_loss=0.2172, pruned_loss=0.03087, over 4898.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2253, pruned_loss=0.04311, over 972802.67 frames.], batch size: 39, lr: 4.56e-04 +2022-05-04 21:49:45,241 INFO [train.py:715] (6/8) Epoch 4, batch 13500, loss[loss=0.1266, simple_loss=0.2058, pruned_loss=0.02368, over 4944.00 frames.], tot_loss[loss=0.1565, simple_loss=0.226, pruned_loss=0.0435, over 972707.11 frames.], batch size: 29, lr: 4.56e-04 +2022-05-04 21:50:27,086 INFO [train.py:715] (6/8) Epoch 4, batch 13550, loss[loss=0.1475, simple_loss=0.2102, pruned_loss=0.04239, over 4802.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2256, pruned_loss=0.04346, over 972257.00 frames.], batch size: 14, lr: 4.56e-04 +2022-05-04 21:51:07,660 INFO [train.py:715] (6/8) Epoch 4, batch 13600, loss[loss=0.1261, simple_loss=0.2041, pruned_loss=0.02402, over 4846.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2258, pruned_loss=0.04318, over 972817.36 frames.], batch size: 20, lr: 4.56e-04 +2022-05-04 21:51:47,208 INFO [train.py:715] (6/8) Epoch 4, batch 13650, loss[loss=0.1626, simple_loss=0.2358, pruned_loss=0.04469, over 4969.00 frames.], tot_loss[loss=0.1573, simple_loss=0.227, pruned_loss=0.04379, over 972169.36 frames.], batch size: 15, lr: 4.56e-04 +2022-05-04 21:52:26,522 INFO [train.py:715] (6/8) Epoch 4, batch 13700, loss[loss=0.1626, simple_loss=0.2406, pruned_loss=0.04224, over 4756.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2275, pruned_loss=0.04409, over 972424.35 frames.], batch size: 19, lr: 4.56e-04 +2022-05-04 21:53:06,450 INFO [train.py:715] (6/8) Epoch 4, batch 13750, loss[loss=0.144, simple_loss=0.2122, pruned_loss=0.03794, over 4685.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2274, pruned_loss=0.04418, over 972031.89 frames.], batch size: 15, lr: 4.56e-04 +2022-05-04 21:53:48,110 INFO [train.py:715] (6/8) Epoch 4, batch 13800, loss[loss=0.1791, simple_loss=0.2612, pruned_loss=0.0485, over 4745.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2279, pruned_loss=0.04479, over 972587.48 frames.], batch size: 16, lr: 4.56e-04 +2022-05-04 21:54:29,030 INFO [train.py:715] (6/8) Epoch 4, batch 13850, loss[loss=0.1253, simple_loss=0.1951, pruned_loss=0.02777, over 4904.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2268, pruned_loss=0.04453, over 971981.17 frames.], batch size: 17, lr: 4.56e-04 +2022-05-04 21:55:10,917 INFO [train.py:715] (6/8) Epoch 4, batch 13900, loss[loss=0.2158, simple_loss=0.2662, pruned_loss=0.08268, over 4845.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2267, pruned_loss=0.04423, over 972085.96 frames.], batch size: 15, lr: 4.56e-04 +2022-05-04 21:55:52,328 INFO [train.py:715] (6/8) Epoch 4, batch 13950, loss[loss=0.1734, simple_loss=0.2519, pruned_loss=0.04746, over 4905.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2256, pruned_loss=0.04342, over 972280.06 frames.], batch size: 17, lr: 4.56e-04 +2022-05-04 21:56:31,848 INFO [train.py:715] (6/8) Epoch 4, batch 
14000, loss[loss=0.1389, simple_loss=0.2221, pruned_loss=0.02783, over 4930.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2254, pruned_loss=0.04304, over 972886.07 frames.], batch size: 39, lr: 4.56e-04 +2022-05-04 21:57:12,896 INFO [train.py:715] (6/8) Epoch 4, batch 14050, loss[loss=0.1376, simple_loss=0.2134, pruned_loss=0.03092, over 4913.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2244, pruned_loss=0.04274, over 971756.11 frames.], batch size: 18, lr: 4.55e-04 +2022-05-04 21:57:52,559 INFO [train.py:715] (6/8) Epoch 4, batch 14100, loss[loss=0.1368, simple_loss=0.2109, pruned_loss=0.03142, over 4821.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2258, pruned_loss=0.0433, over 971263.76 frames.], batch size: 13, lr: 4.55e-04 +2022-05-04 21:58:32,925 INFO [train.py:715] (6/8) Epoch 4, batch 14150, loss[loss=0.1559, simple_loss=0.2275, pruned_loss=0.04211, over 4914.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2267, pruned_loss=0.04417, over 971843.38 frames.], batch size: 18, lr: 4.55e-04 +2022-05-04 21:59:12,280 INFO [train.py:715] (6/8) Epoch 4, batch 14200, loss[loss=0.139, simple_loss=0.2095, pruned_loss=0.03427, over 4860.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2248, pruned_loss=0.04345, over 971230.98 frames.], batch size: 20, lr: 4.55e-04 +2022-05-04 21:59:51,970 INFO [train.py:715] (6/8) Epoch 4, batch 14250, loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03078, over 4975.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2246, pruned_loss=0.04347, over 970750.37 frames.], batch size: 25, lr: 4.55e-04 +2022-05-04 22:00:32,125 INFO [train.py:715] (6/8) Epoch 4, batch 14300, loss[loss=0.1661, simple_loss=0.2412, pruned_loss=0.04553, over 4770.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2257, pruned_loss=0.04381, over 969738.90 frames.], batch size: 18, lr: 4.55e-04 +2022-05-04 22:01:10,594 INFO [train.py:715] (6/8) Epoch 4, batch 14350, loss[loss=0.1643, simple_loss=0.2419, pruned_loss=0.04339, over 4803.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2272, pruned_loss=0.04427, over 969916.44 frames.], batch size: 25, lr: 4.55e-04 +2022-05-04 22:01:50,875 INFO [train.py:715] (6/8) Epoch 4, batch 14400, loss[loss=0.1115, simple_loss=0.1822, pruned_loss=0.02043, over 4791.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2261, pruned_loss=0.04368, over 970725.81 frames.], batch size: 14, lr: 4.55e-04 +2022-05-04 22:02:30,292 INFO [train.py:715] (6/8) Epoch 4, batch 14450, loss[loss=0.1749, simple_loss=0.2338, pruned_loss=0.05805, over 4941.00 frames.], tot_loss[loss=0.1567, simple_loss=0.226, pruned_loss=0.04372, over 970732.24 frames.], batch size: 35, lr: 4.55e-04 +2022-05-04 22:03:09,284 INFO [train.py:715] (6/8) Epoch 4, batch 14500, loss[loss=0.1984, simple_loss=0.2591, pruned_loss=0.06886, over 4862.00 frames.], tot_loss[loss=0.158, simple_loss=0.2267, pruned_loss=0.04467, over 970200.67 frames.], batch size: 32, lr: 4.55e-04 +2022-05-04 22:03:48,137 INFO [train.py:715] (6/8) Epoch 4, batch 14550, loss[loss=0.1539, simple_loss=0.2307, pruned_loss=0.0385, over 4922.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2273, pruned_loss=0.0452, over 969504.66 frames.], batch size: 19, lr: 4.55e-04 +2022-05-04 22:04:27,639 INFO [train.py:715] (6/8) Epoch 4, batch 14600, loss[loss=0.1649, simple_loss=0.231, pruned_loss=0.04942, over 4914.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2266, pruned_loss=0.04457, over 969960.19 frames.], batch size: 23, lr: 4.55e-04 +2022-05-04 22:05:07,571 INFO [train.py:715] (6/8) Epoch 4, batch 14650, loss[loss=0.1414, 
simple_loss=0.2167, pruned_loss=0.03306, over 4737.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2266, pruned_loss=0.04399, over 970217.05 frames.], batch size: 16, lr: 4.55e-04 +2022-05-04 22:05:46,286 INFO [train.py:715] (6/8) Epoch 4, batch 14700, loss[loss=0.136, simple_loss=0.2122, pruned_loss=0.02989, over 4775.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2258, pruned_loss=0.04358, over 969971.75 frames.], batch size: 18, lr: 4.55e-04 +2022-05-04 22:06:26,139 INFO [train.py:715] (6/8) Epoch 4, batch 14750, loss[loss=0.1465, simple_loss=0.2201, pruned_loss=0.03639, over 4885.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2254, pruned_loss=0.04348, over 970100.14 frames.], batch size: 16, lr: 4.54e-04 +2022-05-04 22:07:06,149 INFO [train.py:715] (6/8) Epoch 4, batch 14800, loss[loss=0.1459, simple_loss=0.2204, pruned_loss=0.03569, over 4920.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2252, pruned_loss=0.04326, over 971739.98 frames.], batch size: 39, lr: 4.54e-04 +2022-05-04 22:07:51,021 INFO [train.py:715] (6/8) Epoch 4, batch 14850, loss[loss=0.1895, simple_loss=0.2426, pruned_loss=0.06821, over 4832.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2253, pruned_loss=0.04316, over 972165.43 frames.], batch size: 15, lr: 4.54e-04 +2022-05-04 22:08:31,238 INFO [train.py:715] (6/8) Epoch 4, batch 14900, loss[loss=0.149, simple_loss=0.221, pruned_loss=0.03849, over 4770.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2257, pruned_loss=0.0428, over 971426.89 frames.], batch size: 17, lr: 4.54e-04 +2022-05-04 22:09:11,324 INFO [train.py:715] (6/8) Epoch 4, batch 14950, loss[loss=0.1708, simple_loss=0.2331, pruned_loss=0.05424, over 4963.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2256, pruned_loss=0.04283, over 971407.60 frames.], batch size: 35, lr: 4.54e-04 +2022-05-04 22:09:51,651 INFO [train.py:715] (6/8) Epoch 4, batch 15000, loss[loss=0.1304, simple_loss=0.2028, pruned_loss=0.02899, over 4806.00 frames.], tot_loss[loss=0.155, simple_loss=0.2245, pruned_loss=0.04269, over 971142.11 frames.], batch size: 21, lr: 4.54e-04 +2022-05-04 22:09:51,652 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 22:10:32,003 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1122, simple_loss=0.1978, pruned_loss=0.01336, over 914524.00 frames. 
+2022-05-04 22:11:12,733 INFO [train.py:715] (6/8) Epoch 4, batch 15050, loss[loss=0.1202, simple_loss=0.1943, pruned_loss=0.02307, over 4824.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2254, pruned_loss=0.0428, over 971379.20 frames.], batch size: 15, lr: 4.54e-04 +2022-05-04 22:11:52,176 INFO [train.py:715] (6/8) Epoch 4, batch 15100, loss[loss=0.1503, simple_loss=0.2267, pruned_loss=0.03689, over 4920.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2252, pruned_loss=0.04276, over 972107.52 frames.], batch size: 29, lr: 4.54e-04 +2022-05-04 22:12:32,068 INFO [train.py:715] (6/8) Epoch 4, batch 15150, loss[loss=0.1652, simple_loss=0.2423, pruned_loss=0.04405, over 4856.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2259, pruned_loss=0.04315, over 972033.60 frames.], batch size: 20, lr: 4.54e-04 +2022-05-04 22:13:12,025 INFO [train.py:715] (6/8) Epoch 4, batch 15200, loss[loss=0.1659, simple_loss=0.2483, pruned_loss=0.04171, over 4786.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2263, pruned_loss=0.04339, over 972548.43 frames.], batch size: 18, lr: 4.54e-04 +2022-05-04 22:13:51,744 INFO [train.py:715] (6/8) Epoch 4, batch 15250, loss[loss=0.1418, simple_loss=0.2099, pruned_loss=0.03685, over 4979.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2261, pruned_loss=0.04374, over 971633.32 frames.], batch size: 14, lr: 4.54e-04 +2022-05-04 22:14:31,961 INFO [train.py:715] (6/8) Epoch 4, batch 15300, loss[loss=0.1542, simple_loss=0.224, pruned_loss=0.04218, over 4873.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2268, pruned_loss=0.0437, over 971144.81 frames.], batch size: 16, lr: 4.54e-04 +2022-05-04 22:15:12,437 INFO [train.py:715] (6/8) Epoch 4, batch 15350, loss[loss=0.1324, simple_loss=0.2111, pruned_loss=0.02681, over 4979.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2258, pruned_loss=0.04286, over 971641.72 frames.], batch size: 15, lr: 4.54e-04 +2022-05-04 22:15:52,261 INFO [train.py:715] (6/8) Epoch 4, batch 15400, loss[loss=0.1677, simple_loss=0.2394, pruned_loss=0.04797, over 4913.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2264, pruned_loss=0.04349, over 971874.93 frames.], batch size: 39, lr: 4.53e-04 +2022-05-04 22:16:32,477 INFO [train.py:715] (6/8) Epoch 4, batch 15450, loss[loss=0.143, simple_loss=0.2095, pruned_loss=0.03826, over 4966.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2267, pruned_loss=0.04382, over 971351.60 frames.], batch size: 24, lr: 4.53e-04 +2022-05-04 22:17:12,935 INFO [train.py:715] (6/8) Epoch 4, batch 15500, loss[loss=0.1709, simple_loss=0.2488, pruned_loss=0.04651, over 4938.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2263, pruned_loss=0.04375, over 972416.07 frames.], batch size: 35, lr: 4.53e-04 +2022-05-04 22:17:53,291 INFO [train.py:715] (6/8) Epoch 4, batch 15550, loss[loss=0.1317, simple_loss=0.1958, pruned_loss=0.03379, over 4989.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2264, pruned_loss=0.04422, over 971737.07 frames.], batch size: 15, lr: 4.53e-04 +2022-05-04 22:18:32,668 INFO [train.py:715] (6/8) Epoch 4, batch 15600, loss[loss=0.1982, simple_loss=0.2596, pruned_loss=0.0684, over 4981.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2268, pruned_loss=0.04406, over 973108.05 frames.], batch size: 14, lr: 4.53e-04 +2022-05-04 22:19:13,499 INFO [train.py:715] (6/8) Epoch 4, batch 15650, loss[loss=0.1523, simple_loss=0.2262, pruned_loss=0.03922, over 4835.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2266, pruned_loss=0.04394, over 972411.92 frames.], batch size: 15, lr: 4.53e-04 +2022-05-04 
22:19:53,093 INFO [train.py:715] (6/8) Epoch 4, batch 15700, loss[loss=0.1533, simple_loss=0.2168, pruned_loss=0.04483, over 4809.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2262, pruned_loss=0.04348, over 971344.54 frames.], batch size: 13, lr: 4.53e-04 +2022-05-04 22:20:33,281 INFO [train.py:715] (6/8) Epoch 4, batch 15750, loss[loss=0.1849, simple_loss=0.2465, pruned_loss=0.06161, over 4738.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2259, pruned_loss=0.04343, over 972002.36 frames.], batch size: 16, lr: 4.53e-04 +2022-05-04 22:21:12,815 INFO [train.py:715] (6/8) Epoch 4, batch 15800, loss[loss=0.1211, simple_loss=0.189, pruned_loss=0.02658, over 4802.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2263, pruned_loss=0.04353, over 972391.60 frames.], batch size: 12, lr: 4.53e-04 +2022-05-04 22:21:53,795 INFO [train.py:715] (6/8) Epoch 4, batch 15850, loss[loss=0.1446, simple_loss=0.2214, pruned_loss=0.03393, over 4764.00 frames.], tot_loss[loss=0.156, simple_loss=0.2257, pruned_loss=0.04317, over 972504.29 frames.], batch size: 19, lr: 4.53e-04 +2022-05-04 22:22:34,974 INFO [train.py:715] (6/8) Epoch 4, batch 15900, loss[loss=0.1643, simple_loss=0.2261, pruned_loss=0.05121, over 4850.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2255, pruned_loss=0.04277, over 972758.47 frames.], batch size: 32, lr: 4.53e-04 +2022-05-04 22:23:14,300 INFO [train.py:715] (6/8) Epoch 4, batch 15950, loss[loss=0.1401, simple_loss=0.2056, pruned_loss=0.03731, over 4735.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2258, pruned_loss=0.04304, over 972463.44 frames.], batch size: 12, lr: 4.53e-04 +2022-05-04 22:23:54,445 INFO [train.py:715] (6/8) Epoch 4, batch 16000, loss[loss=0.1341, simple_loss=0.2016, pruned_loss=0.0333, over 4696.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2253, pruned_loss=0.04251, over 971447.39 frames.], batch size: 15, lr: 4.53e-04 +2022-05-04 22:24:34,930 INFO [train.py:715] (6/8) Epoch 4, batch 16050, loss[loss=0.2058, simple_loss=0.2722, pruned_loss=0.06969, over 4970.00 frames.], tot_loss[loss=0.1567, simple_loss=0.227, pruned_loss=0.04317, over 971771.66 frames.], batch size: 15, lr: 4.53e-04 +2022-05-04 22:25:14,768 INFO [train.py:715] (6/8) Epoch 4, batch 16100, loss[loss=0.1413, simple_loss=0.21, pruned_loss=0.03633, over 4797.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2277, pruned_loss=0.04391, over 971875.65 frames.], batch size: 14, lr: 4.52e-04 +2022-05-04 22:25:54,146 INFO [train.py:715] (6/8) Epoch 4, batch 16150, loss[loss=0.1678, simple_loss=0.2363, pruned_loss=0.04967, over 4746.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2285, pruned_loss=0.04466, over 971935.93 frames.], batch size: 16, lr: 4.52e-04 +2022-05-04 22:26:34,756 INFO [train.py:715] (6/8) Epoch 4, batch 16200, loss[loss=0.1612, simple_loss=0.2365, pruned_loss=0.04291, over 4799.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2283, pruned_loss=0.04464, over 971768.98 frames.], batch size: 21, lr: 4.52e-04 +2022-05-04 22:27:15,080 INFO [train.py:715] (6/8) Epoch 4, batch 16250, loss[loss=0.162, simple_loss=0.2347, pruned_loss=0.04469, over 4909.00 frames.], tot_loss[loss=0.1575, simple_loss=0.227, pruned_loss=0.04399, over 971236.39 frames.], batch size: 17, lr: 4.52e-04 +2022-05-04 22:27:54,411 INFO [train.py:715] (6/8) Epoch 4, batch 16300, loss[loss=0.1583, simple_loss=0.2245, pruned_loss=0.04605, over 4986.00 frames.], tot_loss[loss=0.158, simple_loss=0.2276, pruned_loss=0.04419, over 971804.49 frames.], batch size: 31, lr: 4.52e-04 +2022-05-04 22:28:34,990 INFO 
[train.py:715] (6/8) Epoch 4, batch 16350, loss[loss=0.1355, simple_loss=0.2142, pruned_loss=0.02844, over 4894.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2285, pruned_loss=0.04453, over 971046.20 frames.], batch size: 39, lr: 4.52e-04 +2022-05-04 22:29:15,663 INFO [train.py:715] (6/8) Epoch 4, batch 16400, loss[loss=0.1247, simple_loss=0.1995, pruned_loss=0.02494, over 4696.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2283, pruned_loss=0.0443, over 971520.60 frames.], batch size: 15, lr: 4.52e-04 +2022-05-04 22:29:56,024 INFO [train.py:715] (6/8) Epoch 4, batch 16450, loss[loss=0.1406, simple_loss=0.2049, pruned_loss=0.03815, over 4825.00 frames.], tot_loss[loss=0.157, simple_loss=0.2267, pruned_loss=0.0436, over 972497.72 frames.], batch size: 25, lr: 4.52e-04 +2022-05-04 22:30:35,458 INFO [train.py:715] (6/8) Epoch 4, batch 16500, loss[loss=0.1571, simple_loss=0.2177, pruned_loss=0.0483, over 4869.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2265, pruned_loss=0.04385, over 972476.76 frames.], batch size: 32, lr: 4.52e-04 +2022-05-04 22:31:15,349 INFO [train.py:715] (6/8) Epoch 4, batch 16550, loss[loss=0.1428, simple_loss=0.2116, pruned_loss=0.037, over 4745.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2267, pruned_loss=0.04401, over 971315.65 frames.], batch size: 19, lr: 4.52e-04 +2022-05-04 22:31:55,170 INFO [train.py:715] (6/8) Epoch 4, batch 16600, loss[loss=0.1643, simple_loss=0.2407, pruned_loss=0.04391, over 4793.00 frames.], tot_loss[loss=0.1575, simple_loss=0.227, pruned_loss=0.04401, over 971686.25 frames.], batch size: 21, lr: 4.52e-04 +2022-05-04 22:32:33,985 INFO [train.py:715] (6/8) Epoch 4, batch 16650, loss[loss=0.178, simple_loss=0.2491, pruned_loss=0.0535, over 4768.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2267, pruned_loss=0.04356, over 972083.87 frames.], batch size: 19, lr: 4.52e-04 +2022-05-04 22:33:12,846 INFO [train.py:715] (6/8) Epoch 4, batch 16700, loss[loss=0.1659, simple_loss=0.2401, pruned_loss=0.04583, over 4693.00 frames.], tot_loss[loss=0.1574, simple_loss=0.227, pruned_loss=0.04388, over 972593.15 frames.], batch size: 15, lr: 4.52e-04 +2022-05-04 22:33:52,191 INFO [train.py:715] (6/8) Epoch 4, batch 16750, loss[loss=0.1529, simple_loss=0.2315, pruned_loss=0.0371, over 4938.00 frames.], tot_loss[loss=0.1575, simple_loss=0.227, pruned_loss=0.04402, over 972369.16 frames.], batch size: 21, lr: 4.52e-04 +2022-05-04 22:34:31,640 INFO [train.py:715] (6/8) Epoch 4, batch 16800, loss[loss=0.142, simple_loss=0.225, pruned_loss=0.02945, over 4890.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2267, pruned_loss=0.04355, over 972843.30 frames.], batch size: 22, lr: 4.51e-04 +2022-05-04 22:35:10,392 INFO [train.py:715] (6/8) Epoch 4, batch 16850, loss[loss=0.1048, simple_loss=0.1827, pruned_loss=0.01346, over 4784.00 frames.], tot_loss[loss=0.1564, simple_loss=0.226, pruned_loss=0.0434, over 971795.52 frames.], batch size: 12, lr: 4.51e-04 +2022-05-04 22:35:50,730 INFO [train.py:715] (6/8) Epoch 4, batch 16900, loss[loss=0.142, simple_loss=0.2145, pruned_loss=0.03472, over 4862.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2267, pruned_loss=0.04382, over 972404.53 frames.], batch size: 20, lr: 4.51e-04 +2022-05-04 22:36:31,077 INFO [train.py:715] (6/8) Epoch 4, batch 16950, loss[loss=0.1598, simple_loss=0.2297, pruned_loss=0.04492, over 4970.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2264, pruned_loss=0.04362, over 973038.80 frames.], batch size: 35, lr: 4.51e-04 +2022-05-04 22:37:10,621 INFO [train.py:715] (6/8) Epoch 4, batch 
17000, loss[loss=0.1509, simple_loss=0.2166, pruned_loss=0.04259, over 4792.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2258, pruned_loss=0.04327, over 972654.34 frames.], batch size: 14, lr: 4.51e-04 +2022-05-04 22:37:50,447 INFO [train.py:715] (6/8) Epoch 4, batch 17050, loss[loss=0.1866, simple_loss=0.2484, pruned_loss=0.06238, over 4940.00 frames.], tot_loss[loss=0.155, simple_loss=0.2247, pruned_loss=0.04271, over 971666.62 frames.], batch size: 35, lr: 4.51e-04 +2022-05-04 22:38:30,852 INFO [train.py:715] (6/8) Epoch 4, batch 17100, loss[loss=0.1837, simple_loss=0.2526, pruned_loss=0.05734, over 4882.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2262, pruned_loss=0.04339, over 972753.52 frames.], batch size: 16, lr: 4.51e-04 +2022-05-04 22:39:10,955 INFO [train.py:715] (6/8) Epoch 4, batch 17150, loss[loss=0.1457, simple_loss=0.2265, pruned_loss=0.03246, over 4751.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2263, pruned_loss=0.04359, over 972298.02 frames.], batch size: 19, lr: 4.51e-04 +2022-05-04 22:39:50,096 INFO [train.py:715] (6/8) Epoch 4, batch 17200, loss[loss=0.1296, simple_loss=0.2051, pruned_loss=0.02706, over 4958.00 frames.], tot_loss[loss=0.157, simple_loss=0.2266, pruned_loss=0.04373, over 972163.96 frames.], batch size: 28, lr: 4.51e-04 +2022-05-04 22:40:30,244 INFO [train.py:715] (6/8) Epoch 4, batch 17250, loss[loss=0.1414, simple_loss=0.2055, pruned_loss=0.03867, over 4977.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2257, pruned_loss=0.04336, over 971775.46 frames.], batch size: 14, lr: 4.51e-04 +2022-05-04 22:41:10,188 INFO [train.py:715] (6/8) Epoch 4, batch 17300, loss[loss=0.1763, simple_loss=0.2363, pruned_loss=0.05818, over 4985.00 frames.], tot_loss[loss=0.1563, simple_loss=0.226, pruned_loss=0.04327, over 972118.26 frames.], batch size: 28, lr: 4.51e-04 +2022-05-04 22:41:49,921 INFO [train.py:715] (6/8) Epoch 4, batch 17350, loss[loss=0.1527, simple_loss=0.2296, pruned_loss=0.03793, over 4804.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2272, pruned_loss=0.04374, over 973235.08 frames.], batch size: 21, lr: 4.51e-04 +2022-05-04 22:42:29,440 INFO [train.py:715] (6/8) Epoch 4, batch 17400, loss[loss=0.1419, simple_loss=0.2204, pruned_loss=0.03172, over 4931.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2278, pruned_loss=0.04401, over 973297.79 frames.], batch size: 18, lr: 4.51e-04 +2022-05-04 22:43:09,750 INFO [train.py:715] (6/8) Epoch 4, batch 17450, loss[loss=0.1569, simple_loss=0.232, pruned_loss=0.04091, over 4824.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2283, pruned_loss=0.04391, over 973644.89 frames.], batch size: 25, lr: 4.51e-04 +2022-05-04 22:43:50,023 INFO [train.py:715] (6/8) Epoch 4, batch 17500, loss[loss=0.151, simple_loss=0.2282, pruned_loss=0.03685, over 4638.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2271, pruned_loss=0.04384, over 973393.54 frames.], batch size: 13, lr: 4.50e-04 +2022-05-04 22:44:29,241 INFO [train.py:715] (6/8) Epoch 4, batch 17550, loss[loss=0.1502, simple_loss=0.2125, pruned_loss=0.044, over 4878.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2281, pruned_loss=0.04431, over 973370.71 frames.], batch size: 20, lr: 4.50e-04 +2022-05-04 22:45:09,112 INFO [train.py:715] (6/8) Epoch 4, batch 17600, loss[loss=0.1416, simple_loss=0.2132, pruned_loss=0.035, over 4969.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2282, pruned_loss=0.0443, over 972393.17 frames.], batch size: 24, lr: 4.50e-04 +2022-05-04 22:45:49,508 INFO [train.py:715] (6/8) Epoch 4, batch 17650, loss[loss=0.1313, 
simple_loss=0.2086, pruned_loss=0.02695, over 4842.00 frames.], tot_loss[loss=0.157, simple_loss=0.227, pruned_loss=0.04353, over 972591.46 frames.], batch size: 30, lr: 4.50e-04 +2022-05-04 22:46:29,567 INFO [train.py:715] (6/8) Epoch 4, batch 17700, loss[loss=0.1515, simple_loss=0.2124, pruned_loss=0.04536, over 4973.00 frames.], tot_loss[loss=0.156, simple_loss=0.2258, pruned_loss=0.04306, over 972669.42 frames.], batch size: 24, lr: 4.50e-04 +2022-05-04 22:47:09,154 INFO [train.py:715] (6/8) Epoch 4, batch 17750, loss[loss=0.1545, simple_loss=0.2218, pruned_loss=0.04357, over 4909.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2266, pruned_loss=0.04342, over 972098.19 frames.], batch size: 19, lr: 4.50e-04 +2022-05-04 22:47:49,267 INFO [train.py:715] (6/8) Epoch 4, batch 17800, loss[loss=0.178, simple_loss=0.2263, pruned_loss=0.06486, over 4931.00 frames.], tot_loss[loss=0.158, simple_loss=0.2276, pruned_loss=0.04416, over 973142.67 frames.], batch size: 21, lr: 4.50e-04 +2022-05-04 22:48:29,922 INFO [train.py:715] (6/8) Epoch 4, batch 17850, loss[loss=0.1828, simple_loss=0.2387, pruned_loss=0.06345, over 4807.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2277, pruned_loss=0.04463, over 973294.28 frames.], batch size: 21, lr: 4.50e-04 +2022-05-04 22:49:09,039 INFO [train.py:715] (6/8) Epoch 4, batch 17900, loss[loss=0.1668, simple_loss=0.2374, pruned_loss=0.04808, over 4905.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2289, pruned_loss=0.04497, over 973437.26 frames.], batch size: 23, lr: 4.50e-04 +2022-05-04 22:49:49,019 INFO [train.py:715] (6/8) Epoch 4, batch 17950, loss[loss=0.1526, simple_loss=0.2281, pruned_loss=0.03855, over 4785.00 frames.], tot_loss[loss=0.159, simple_loss=0.2288, pruned_loss=0.04465, over 973664.12 frames.], batch size: 17, lr: 4.50e-04 +2022-05-04 22:50:29,177 INFO [train.py:715] (6/8) Epoch 4, batch 18000, loss[loss=0.1651, simple_loss=0.2262, pruned_loss=0.05197, over 4805.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2286, pruned_loss=0.04442, over 973221.66 frames.], batch size: 12, lr: 4.50e-04 +2022-05-04 22:50:29,178 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 22:50:38,824 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1119, simple_loss=0.1976, pruned_loss=0.01313, over 914524.00 frames. 
+2022-05-04 22:51:19,290 INFO [train.py:715] (6/8) Epoch 4, batch 18050, loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03092, over 4802.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2285, pruned_loss=0.04447, over 973024.88 frames.], batch size: 21, lr: 4.50e-04 +2022-05-04 22:51:59,526 INFO [train.py:715] (6/8) Epoch 4, batch 18100, loss[loss=0.1539, simple_loss=0.2248, pruned_loss=0.04148, over 4876.00 frames.], tot_loss[loss=0.158, simple_loss=0.2274, pruned_loss=0.04436, over 972611.91 frames.], batch size: 32, lr: 4.50e-04 +2022-05-04 22:52:39,090 INFO [train.py:715] (6/8) Epoch 4, batch 18150, loss[loss=0.1595, simple_loss=0.2327, pruned_loss=0.04315, over 4764.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2271, pruned_loss=0.04414, over 972335.35 frames.], batch size: 19, lr: 4.50e-04 +2022-05-04 22:53:19,399 INFO [train.py:715] (6/8) Epoch 4, batch 18200, loss[loss=0.1387, simple_loss=0.2105, pruned_loss=0.03347, over 4915.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2278, pruned_loss=0.0446, over 973293.35 frames.], batch size: 17, lr: 4.49e-04 +2022-05-04 22:53:59,864 INFO [train.py:715] (6/8) Epoch 4, batch 18250, loss[loss=0.1872, simple_loss=0.2637, pruned_loss=0.0553, over 4792.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2281, pruned_loss=0.04448, over 972924.17 frames.], batch size: 17, lr: 4.49e-04 +2022-05-04 22:54:39,562 INFO [train.py:715] (6/8) Epoch 4, batch 18300, loss[loss=0.1784, simple_loss=0.2607, pruned_loss=0.04806, over 4891.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2277, pruned_loss=0.04444, over 972257.25 frames.], batch size: 19, lr: 4.49e-04 +2022-05-04 22:55:19,275 INFO [train.py:715] (6/8) Epoch 4, batch 18350, loss[loss=0.1607, simple_loss=0.2303, pruned_loss=0.04552, over 4937.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2271, pruned_loss=0.04365, over 971666.99 frames.], batch size: 21, lr: 4.49e-04 +2022-05-04 22:56:00,408 INFO [train.py:715] (6/8) Epoch 4, batch 18400, loss[loss=0.1683, simple_loss=0.2326, pruned_loss=0.05197, over 4900.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2265, pruned_loss=0.0437, over 972470.95 frames.], batch size: 19, lr: 4.49e-04 +2022-05-04 22:56:40,822 INFO [train.py:715] (6/8) Epoch 4, batch 18450, loss[loss=0.1615, simple_loss=0.2235, pruned_loss=0.04977, over 4835.00 frames.], tot_loss[loss=0.157, simple_loss=0.227, pruned_loss=0.04351, over 971952.46 frames.], batch size: 15, lr: 4.49e-04 +2022-05-04 22:57:20,892 INFO [train.py:715] (6/8) Epoch 4, batch 18500, loss[loss=0.1381, simple_loss=0.2157, pruned_loss=0.03028, over 4958.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2269, pruned_loss=0.04377, over 971952.05 frames.], batch size: 24, lr: 4.49e-04 +2022-05-04 22:58:01,182 INFO [train.py:715] (6/8) Epoch 4, batch 18550, loss[loss=0.1598, simple_loss=0.2322, pruned_loss=0.04372, over 4903.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2272, pruned_loss=0.044, over 971495.28 frames.], batch size: 17, lr: 4.49e-04 +2022-05-04 22:58:41,892 INFO [train.py:715] (6/8) Epoch 4, batch 18600, loss[loss=0.1743, simple_loss=0.2405, pruned_loss=0.05398, over 4908.00 frames.], tot_loss[loss=0.1576, simple_loss=0.227, pruned_loss=0.04409, over 971589.46 frames.], batch size: 18, lr: 4.49e-04 +2022-05-04 22:59:21,449 INFO [train.py:715] (6/8) Epoch 4, batch 18650, loss[loss=0.1419, simple_loss=0.2102, pruned_loss=0.03678, over 4980.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2259, pruned_loss=0.04382, over 971008.53 frames.], batch size: 27, lr: 4.49e-04 +2022-05-04 23:00:01,607 
INFO [train.py:715] (6/8) Epoch 4, batch 18700, loss[loss=0.1644, simple_loss=0.2312, pruned_loss=0.04877, over 4838.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2267, pruned_loss=0.04357, over 971684.34 frames.], batch size: 15, lr: 4.49e-04 +2022-05-04 23:00:42,462 INFO [train.py:715] (6/8) Epoch 4, batch 18750, loss[loss=0.1312, simple_loss=0.2008, pruned_loss=0.03078, over 4924.00 frames.], tot_loss[loss=0.156, simple_loss=0.2257, pruned_loss=0.04313, over 971169.52 frames.], batch size: 29, lr: 4.49e-04 +2022-05-04 23:01:21,936 INFO [train.py:715] (6/8) Epoch 4, batch 18800, loss[loss=0.176, simple_loss=0.2414, pruned_loss=0.0553, over 4940.00 frames.], tot_loss[loss=0.156, simple_loss=0.2259, pruned_loss=0.04303, over 971953.39 frames.], batch size: 23, lr: 4.49e-04 +2022-05-04 23:02:02,020 INFO [train.py:715] (6/8) Epoch 4, batch 18850, loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03068, over 4941.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2259, pruned_loss=0.04333, over 972581.77 frames.], batch size: 35, lr: 4.49e-04 +2022-05-04 23:02:42,419 INFO [train.py:715] (6/8) Epoch 4, batch 18900, loss[loss=0.175, simple_loss=0.2465, pruned_loss=0.05174, over 4748.00 frames.], tot_loss[loss=0.157, simple_loss=0.2262, pruned_loss=0.04387, over 971676.66 frames.], batch size: 19, lr: 4.48e-04 +2022-05-04 23:03:22,739 INFO [train.py:715] (6/8) Epoch 4, batch 18950, loss[loss=0.2078, simple_loss=0.2677, pruned_loss=0.07396, over 4802.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2267, pruned_loss=0.04405, over 971901.14 frames.], batch size: 21, lr: 4.48e-04 +2022-05-04 23:04:01,990 INFO [train.py:715] (6/8) Epoch 4, batch 19000, loss[loss=0.1445, simple_loss=0.2262, pruned_loss=0.03137, over 4787.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2257, pruned_loss=0.04331, over 972147.75 frames.], batch size: 17, lr: 4.48e-04 +2022-05-04 23:04:42,494 INFO [train.py:715] (6/8) Epoch 4, batch 19050, loss[loss=0.134, simple_loss=0.2116, pruned_loss=0.02815, over 4650.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2258, pruned_loss=0.04327, over 972403.68 frames.], batch size: 13, lr: 4.48e-04 +2022-05-04 23:05:23,216 INFO [train.py:715] (6/8) Epoch 4, batch 19100, loss[loss=0.1342, simple_loss=0.2046, pruned_loss=0.03192, over 4816.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2257, pruned_loss=0.04289, over 971799.02 frames.], batch size: 13, lr: 4.48e-04 +2022-05-04 23:06:03,170 INFO [train.py:715] (6/8) Epoch 4, batch 19150, loss[loss=0.1748, simple_loss=0.2357, pruned_loss=0.05694, over 4912.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2264, pruned_loss=0.04336, over 971726.46 frames.], batch size: 18, lr: 4.48e-04 +2022-05-04 23:06:43,533 INFO [train.py:715] (6/8) Epoch 4, batch 19200, loss[loss=0.1356, simple_loss=0.2055, pruned_loss=0.03279, over 4962.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2245, pruned_loss=0.04237, over 971027.87 frames.], batch size: 24, lr: 4.48e-04 +2022-05-04 23:07:24,317 INFO [train.py:715] (6/8) Epoch 4, batch 19250, loss[loss=0.1538, simple_loss=0.2288, pruned_loss=0.03942, over 4905.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2243, pruned_loss=0.04204, over 970625.35 frames.], batch size: 38, lr: 4.48e-04 +2022-05-04 23:08:04,918 INFO [train.py:715] (6/8) Epoch 4, batch 19300, loss[loss=0.1233, simple_loss=0.199, pruned_loss=0.02377, over 4780.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2253, pruned_loss=0.0426, over 971002.14 frames.], batch size: 18, lr: 4.48e-04 +2022-05-04 23:08:44,091 INFO [train.py:715] (6/8) 
Epoch 4, batch 19350, loss[loss=0.1795, simple_loss=0.2428, pruned_loss=0.05807, over 4907.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2257, pruned_loss=0.04308, over 971163.56 frames.], batch size: 39, lr: 4.48e-04 +2022-05-04 23:09:24,784 INFO [train.py:715] (6/8) Epoch 4, batch 19400, loss[loss=0.1526, simple_loss=0.227, pruned_loss=0.03911, over 4758.00 frames.], tot_loss[loss=0.156, simple_loss=0.226, pruned_loss=0.04299, over 970371.27 frames.], batch size: 19, lr: 4.48e-04 +2022-05-04 23:10:06,274 INFO [train.py:715] (6/8) Epoch 4, batch 19450, loss[loss=0.1803, simple_loss=0.2497, pruned_loss=0.05545, over 4819.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2258, pruned_loss=0.04276, over 971658.76 frames.], batch size: 25, lr: 4.48e-04 +2022-05-04 23:10:47,426 INFO [train.py:715] (6/8) Epoch 4, batch 19500, loss[loss=0.1363, simple_loss=0.2132, pruned_loss=0.02972, over 4866.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2259, pruned_loss=0.04289, over 972209.26 frames.], batch size: 20, lr: 4.48e-04 +2022-05-04 23:11:27,083 INFO [train.py:715] (6/8) Epoch 4, batch 19550, loss[loss=0.1528, simple_loss=0.2191, pruned_loss=0.04323, over 4771.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2266, pruned_loss=0.04331, over 971430.98 frames.], batch size: 14, lr: 4.48e-04 +2022-05-04 23:12:07,478 INFO [train.py:715] (6/8) Epoch 4, batch 19600, loss[loss=0.1442, simple_loss=0.2124, pruned_loss=0.03801, over 4802.00 frames.], tot_loss[loss=0.1561, simple_loss=0.226, pruned_loss=0.04304, over 971557.01 frames.], batch size: 24, lr: 4.47e-04 +2022-05-04 23:12:47,696 INFO [train.py:715] (6/8) Epoch 4, batch 19650, loss[loss=0.1368, simple_loss=0.2064, pruned_loss=0.0336, over 4918.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2257, pruned_loss=0.04305, over 972200.77 frames.], batch size: 18, lr: 4.47e-04 +2022-05-04 23:13:26,462 INFO [train.py:715] (6/8) Epoch 4, batch 19700, loss[loss=0.159, simple_loss=0.2343, pruned_loss=0.04191, over 4970.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2263, pruned_loss=0.0433, over 971449.03 frames.], batch size: 28, lr: 4.47e-04 +2022-05-04 23:14:07,137 INFO [train.py:715] (6/8) Epoch 4, batch 19750, loss[loss=0.1729, simple_loss=0.2398, pruned_loss=0.05301, over 4929.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2274, pruned_loss=0.04391, over 971518.01 frames.], batch size: 21, lr: 4.47e-04 +2022-05-04 23:14:47,959 INFO [train.py:715] (6/8) Epoch 4, batch 19800, loss[loss=0.1527, simple_loss=0.2226, pruned_loss=0.04142, over 4916.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2271, pruned_loss=0.04359, over 971603.16 frames.], batch size: 18, lr: 4.47e-04 +2022-05-04 23:15:27,701 INFO [train.py:715] (6/8) Epoch 4, batch 19850, loss[loss=0.1695, simple_loss=0.249, pruned_loss=0.04496, over 4794.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2268, pruned_loss=0.0434, over 971277.66 frames.], batch size: 24, lr: 4.47e-04 +2022-05-04 23:16:07,779 INFO [train.py:715] (6/8) Epoch 4, batch 19900, loss[loss=0.141, simple_loss=0.2208, pruned_loss=0.03063, over 4764.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2275, pruned_loss=0.04364, over 971195.55 frames.], batch size: 19, lr: 4.47e-04 +2022-05-04 23:16:47,892 INFO [train.py:715] (6/8) Epoch 4, batch 19950, loss[loss=0.1768, simple_loss=0.2587, pruned_loss=0.0475, over 4942.00 frames.], tot_loss[loss=0.157, simple_loss=0.2269, pruned_loss=0.04359, over 971954.54 frames.], batch size: 21, lr: 4.47e-04 +2022-05-04 23:17:28,061 INFO [train.py:715] (6/8) Epoch 4, batch 20000, 
loss[loss=0.1383, simple_loss=0.217, pruned_loss=0.0298, over 4800.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2257, pruned_loss=0.04306, over 972286.01 frames.], batch size: 24, lr: 4.47e-04 +2022-05-04 23:18:06,763 INFO [train.py:715] (6/8) Epoch 4, batch 20050, loss[loss=0.154, simple_loss=0.2215, pruned_loss=0.0432, over 4982.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2258, pruned_loss=0.04353, over 972196.75 frames.], batch size: 28, lr: 4.47e-04 +2022-05-04 23:18:46,555 INFO [train.py:715] (6/8) Epoch 4, batch 20100, loss[loss=0.2043, simple_loss=0.282, pruned_loss=0.06333, over 4942.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2262, pruned_loss=0.04382, over 972218.18 frames.], batch size: 23, lr: 4.47e-04 +2022-05-04 23:19:26,622 INFO [train.py:715] (6/8) Epoch 4, batch 20150, loss[loss=0.135, simple_loss=0.2162, pruned_loss=0.02686, over 4973.00 frames.], tot_loss[loss=0.157, simple_loss=0.2263, pruned_loss=0.04388, over 972522.41 frames.], batch size: 25, lr: 4.47e-04 +2022-05-04 23:20:06,047 INFO [train.py:715] (6/8) Epoch 4, batch 20200, loss[loss=0.1517, simple_loss=0.2191, pruned_loss=0.04214, over 4811.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2262, pruned_loss=0.04347, over 972138.06 frames.], batch size: 13, lr: 4.47e-04 +2022-05-04 23:20:45,790 INFO [train.py:715] (6/8) Epoch 4, batch 20250, loss[loss=0.148, simple_loss=0.2108, pruned_loss=0.04254, over 4803.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2259, pruned_loss=0.04325, over 972454.25 frames.], batch size: 24, lr: 4.47e-04 +2022-05-04 23:21:26,108 INFO [train.py:715] (6/8) Epoch 4, batch 20300, loss[loss=0.1409, simple_loss=0.2193, pruned_loss=0.03129, over 4889.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2249, pruned_loss=0.04289, over 972280.25 frames.], batch size: 19, lr: 4.46e-04 +2022-05-04 23:22:06,209 INFO [train.py:715] (6/8) Epoch 4, batch 20350, loss[loss=0.167, simple_loss=0.2336, pruned_loss=0.05023, over 4866.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2261, pruned_loss=0.04363, over 973240.86 frames.], batch size: 32, lr: 4.46e-04 +2022-05-04 23:22:45,044 INFO [train.py:715] (6/8) Epoch 4, batch 20400, loss[loss=0.179, simple_loss=0.2413, pruned_loss=0.0583, over 4855.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2262, pruned_loss=0.04369, over 973320.26 frames.], batch size: 32, lr: 4.46e-04 +2022-05-04 23:23:25,031 INFO [train.py:715] (6/8) Epoch 4, batch 20450, loss[loss=0.1839, simple_loss=0.2545, pruned_loss=0.05665, over 4832.00 frames.], tot_loss[loss=0.1564, simple_loss=0.226, pruned_loss=0.04338, over 973170.80 frames.], batch size: 15, lr: 4.46e-04 +2022-05-04 23:24:04,954 INFO [train.py:715] (6/8) Epoch 4, batch 20500, loss[loss=0.1565, simple_loss=0.2385, pruned_loss=0.03722, over 4955.00 frames.], tot_loss[loss=0.156, simple_loss=0.2257, pruned_loss=0.04318, over 972651.93 frames.], batch size: 24, lr: 4.46e-04 +2022-05-04 23:24:44,746 INFO [train.py:715] (6/8) Epoch 4, batch 20550, loss[loss=0.1695, simple_loss=0.2361, pruned_loss=0.05144, over 4956.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2255, pruned_loss=0.04271, over 972113.41 frames.], batch size: 24, lr: 4.46e-04 +2022-05-04 23:25:23,714 INFO [train.py:715] (6/8) Epoch 4, batch 20600, loss[loss=0.1491, simple_loss=0.2192, pruned_loss=0.03954, over 4938.00 frames.], tot_loss[loss=0.1547, simple_loss=0.225, pruned_loss=0.04215, over 973644.83 frames.], batch size: 23, lr: 4.46e-04 +2022-05-04 23:26:03,652 INFO [train.py:715] (6/8) Epoch 4, batch 20650, loss[loss=0.1987, 
simple_loss=0.2606, pruned_loss=0.06835, over 4826.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2255, pruned_loss=0.04287, over 973122.69 frames.], batch size: 13, lr: 4.46e-04 +2022-05-04 23:26:44,151 INFO [train.py:715] (6/8) Epoch 4, batch 20700, loss[loss=0.1854, simple_loss=0.2544, pruned_loss=0.05815, over 4848.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2261, pruned_loss=0.04354, over 973120.92 frames.], batch size: 15, lr: 4.46e-04 +2022-05-04 23:27:22,806 INFO [train.py:715] (6/8) Epoch 4, batch 20750, loss[loss=0.1557, simple_loss=0.238, pruned_loss=0.03673, over 4932.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2266, pruned_loss=0.04351, over 972899.10 frames.], batch size: 23, lr: 4.46e-04 +2022-05-04 23:28:04,809 INFO [train.py:715] (6/8) Epoch 4, batch 20800, loss[loss=0.1449, simple_loss=0.215, pruned_loss=0.03741, over 4699.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2262, pruned_loss=0.04322, over 972059.35 frames.], batch size: 15, lr: 4.46e-04 +2022-05-04 23:28:44,591 INFO [train.py:715] (6/8) Epoch 4, batch 20850, loss[loss=0.148, simple_loss=0.2063, pruned_loss=0.04486, over 4975.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2251, pruned_loss=0.04289, over 972783.77 frames.], batch size: 24, lr: 4.46e-04 +2022-05-04 23:29:24,434 INFO [train.py:715] (6/8) Epoch 4, batch 20900, loss[loss=0.1535, simple_loss=0.2223, pruned_loss=0.04239, over 4836.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2252, pruned_loss=0.04286, over 972139.10 frames.], batch size: 30, lr: 4.46e-04 +2022-05-04 23:30:03,465 INFO [train.py:715] (6/8) Epoch 4, batch 20950, loss[loss=0.1197, simple_loss=0.1876, pruned_loss=0.02588, over 4813.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2254, pruned_loss=0.04267, over 972422.01 frames.], batch size: 26, lr: 4.46e-04 +2022-05-04 23:30:43,440 INFO [train.py:715] (6/8) Epoch 4, batch 21000, loss[loss=0.1348, simple_loss=0.2199, pruned_loss=0.02487, over 4992.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2249, pruned_loss=0.04238, over 972512.91 frames.], batch size: 15, lr: 4.46e-04 +2022-05-04 23:30:43,441 INFO [train.py:733] (6/8) Computing validation loss +2022-05-04 23:30:52,894 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1116, simple_loss=0.1973, pruned_loss=0.01293, over 914524.00 frames. 
+2022-05-04 23:31:33,183 INFO [train.py:715] (6/8) Epoch 4, batch 21050, loss[loss=0.1449, simple_loss=0.2171, pruned_loss=0.03637, over 4990.00 frames.], tot_loss[loss=0.156, simple_loss=0.2262, pruned_loss=0.04287, over 972823.04 frames.], batch size: 15, lr: 4.45e-04 +2022-05-04 23:32:12,977 INFO [train.py:715] (6/8) Epoch 4, batch 21100, loss[loss=0.1318, simple_loss=0.2086, pruned_loss=0.02748, over 4787.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2271, pruned_loss=0.04311, over 972410.58 frames.], batch size: 12, lr: 4.45e-04 +2022-05-04 23:32:52,567 INFO [train.py:715] (6/8) Epoch 4, batch 21150, loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03008, over 4800.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2261, pruned_loss=0.04279, over 971918.19 frames.], batch size: 25, lr: 4.45e-04 +2022-05-04 23:33:32,142 INFO [train.py:715] (6/8) Epoch 4, batch 21200, loss[loss=0.1674, simple_loss=0.2337, pruned_loss=0.05056, over 4769.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2272, pruned_loss=0.04323, over 972043.15 frames.], batch size: 14, lr: 4.45e-04 +2022-05-04 23:34:12,357 INFO [train.py:715] (6/8) Epoch 4, batch 21250, loss[loss=0.1487, simple_loss=0.2105, pruned_loss=0.04346, over 4778.00 frames.], tot_loss[loss=0.1568, simple_loss=0.227, pruned_loss=0.04325, over 972744.93 frames.], batch size: 14, lr: 4.45e-04 +2022-05-04 23:34:51,182 INFO [train.py:715] (6/8) Epoch 4, batch 21300, loss[loss=0.1501, simple_loss=0.2117, pruned_loss=0.04427, over 4824.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2269, pruned_loss=0.04399, over 972861.50 frames.], batch size: 13, lr: 4.45e-04 +2022-05-04 23:35:30,238 INFO [train.py:715] (6/8) Epoch 4, batch 21350, loss[loss=0.1492, simple_loss=0.2107, pruned_loss=0.04388, over 4978.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2271, pruned_loss=0.04382, over 973207.22 frames.], batch size: 28, lr: 4.45e-04 +2022-05-04 23:36:09,888 INFO [train.py:715] (6/8) Epoch 4, batch 21400, loss[loss=0.145, simple_loss=0.2144, pruned_loss=0.03779, over 4987.00 frames.], tot_loss[loss=0.157, simple_loss=0.2266, pruned_loss=0.04367, over 973490.82 frames.], batch size: 14, lr: 4.45e-04 +2022-05-04 23:36:49,452 INFO [train.py:715] (6/8) Epoch 4, batch 21450, loss[loss=0.1589, simple_loss=0.2339, pruned_loss=0.04196, over 4918.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2264, pruned_loss=0.04355, over 973278.10 frames.], batch size: 19, lr: 4.45e-04 +2022-05-04 23:37:28,639 INFO [train.py:715] (6/8) Epoch 4, batch 21500, loss[loss=0.1599, simple_loss=0.2418, pruned_loss=0.03896, over 4776.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2261, pruned_loss=0.04343, over 972994.88 frames.], batch size: 18, lr: 4.45e-04 +2022-05-04 23:38:08,472 INFO [train.py:715] (6/8) Epoch 4, batch 21550, loss[loss=0.1837, simple_loss=0.2486, pruned_loss=0.05944, over 4979.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2271, pruned_loss=0.04404, over 972057.34 frames.], batch size: 15, lr: 4.45e-04 +2022-05-04 23:38:48,843 INFO [train.py:715] (6/8) Epoch 4, batch 21600, loss[loss=0.1581, simple_loss=0.2291, pruned_loss=0.04355, over 4757.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2285, pruned_loss=0.04427, over 971705.19 frames.], batch size: 16, lr: 4.45e-04 +2022-05-04 23:39:28,093 INFO [train.py:715] (6/8) Epoch 4, batch 21650, loss[loss=0.1796, simple_loss=0.2628, pruned_loss=0.04819, over 4829.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2289, pruned_loss=0.04471, over 971930.90 frames.], batch size: 25, lr: 4.45e-04 +2022-05-04 
23:40:08,348 INFO [train.py:715] (6/8) Epoch 4, batch 21700, loss[loss=0.1644, simple_loss=0.2313, pruned_loss=0.04877, over 4934.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2276, pruned_loss=0.0437, over 972748.33 frames.], batch size: 23, lr: 4.45e-04 +2022-05-04 23:40:49,372 INFO [train.py:715] (6/8) Epoch 4, batch 21750, loss[loss=0.1615, simple_loss=0.2276, pruned_loss=0.0477, over 4927.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2273, pruned_loss=0.04354, over 972854.23 frames.], batch size: 21, lr: 4.44e-04 +2022-05-04 23:41:29,007 INFO [train.py:715] (6/8) Epoch 4, batch 21800, loss[loss=0.1511, simple_loss=0.2252, pruned_loss=0.03845, over 4918.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2269, pruned_loss=0.04329, over 972925.85 frames.], batch size: 23, lr: 4.44e-04 +2022-05-04 23:42:08,601 INFO [train.py:715] (6/8) Epoch 4, batch 21850, loss[loss=0.1652, simple_loss=0.2326, pruned_loss=0.04891, over 4925.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2258, pruned_loss=0.04276, over 973002.45 frames.], batch size: 23, lr: 4.44e-04 +2022-05-04 23:42:48,643 INFO [train.py:715] (6/8) Epoch 4, batch 21900, loss[loss=0.1489, simple_loss=0.2185, pruned_loss=0.03962, over 4818.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2256, pruned_loss=0.04338, over 972457.15 frames.], batch size: 13, lr: 4.44e-04 +2022-05-04 23:43:29,089 INFO [train.py:715] (6/8) Epoch 4, batch 21950, loss[loss=0.1709, simple_loss=0.2374, pruned_loss=0.05225, over 4933.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2258, pruned_loss=0.04326, over 972114.82 frames.], batch size: 39, lr: 4.44e-04 +2022-05-04 23:44:08,288 INFO [train.py:715] (6/8) Epoch 4, batch 22000, loss[loss=0.176, simple_loss=0.2381, pruned_loss=0.05697, over 4950.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2262, pruned_loss=0.04342, over 972048.29 frames.], batch size: 35, lr: 4.44e-04 +2022-05-04 23:44:48,072 INFO [train.py:715] (6/8) Epoch 4, batch 22050, loss[loss=0.1477, simple_loss=0.2232, pruned_loss=0.03613, over 4768.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2264, pruned_loss=0.04367, over 972055.33 frames.], batch size: 19, lr: 4.44e-04 +2022-05-04 23:45:28,540 INFO [train.py:715] (6/8) Epoch 4, batch 22100, loss[loss=0.1627, simple_loss=0.2266, pruned_loss=0.04937, over 4817.00 frames.], tot_loss[loss=0.157, simple_loss=0.2262, pruned_loss=0.04385, over 971606.73 frames.], batch size: 15, lr: 4.44e-04 +2022-05-04 23:46:08,383 INFO [train.py:715] (6/8) Epoch 4, batch 22150, loss[loss=0.1816, simple_loss=0.2477, pruned_loss=0.05778, over 4832.00 frames.], tot_loss[loss=0.156, simple_loss=0.225, pruned_loss=0.04349, over 971405.03 frames.], batch size: 15, lr: 4.44e-04 +2022-05-04 23:46:47,293 INFO [train.py:715] (6/8) Epoch 4, batch 22200, loss[loss=0.144, simple_loss=0.2148, pruned_loss=0.03658, over 4964.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2251, pruned_loss=0.04323, over 971504.06 frames.], batch size: 24, lr: 4.44e-04 +2022-05-04 23:47:27,358 INFO [train.py:715] (6/8) Epoch 4, batch 22250, loss[loss=0.1508, simple_loss=0.2259, pruned_loss=0.03784, over 4933.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2251, pruned_loss=0.04311, over 972122.59 frames.], batch size: 29, lr: 4.44e-04 +2022-05-04 23:48:07,757 INFO [train.py:715] (6/8) Epoch 4, batch 22300, loss[loss=0.1873, simple_loss=0.2549, pruned_loss=0.05985, over 4913.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2246, pruned_loss=0.04257, over 971535.24 frames.], batch size: 17, lr: 4.44e-04 +2022-05-04 23:48:46,510 INFO 
[train.py:715] (6/8) Epoch 4, batch 22350, loss[loss=0.1525, simple_loss=0.2223, pruned_loss=0.04133, over 4912.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2257, pruned_loss=0.04354, over 971130.47 frames.], batch size: 17, lr: 4.44e-04 +2022-05-04 23:49:25,541 INFO [train.py:715] (6/8) Epoch 4, batch 22400, loss[loss=0.1668, simple_loss=0.2399, pruned_loss=0.04682, over 4827.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2249, pruned_loss=0.04282, over 971333.51 frames.], batch size: 25, lr: 4.44e-04 +2022-05-04 23:50:06,127 INFO [train.py:715] (6/8) Epoch 4, batch 22450, loss[loss=0.1319, simple_loss=0.2047, pruned_loss=0.02959, over 4833.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2253, pruned_loss=0.04285, over 972433.47 frames.], batch size: 13, lr: 4.44e-04 +2022-05-04 23:50:45,306 INFO [train.py:715] (6/8) Epoch 4, batch 22500, loss[loss=0.1526, simple_loss=0.2222, pruned_loss=0.04151, over 4980.00 frames.], tot_loss[loss=0.156, simple_loss=0.2259, pruned_loss=0.04305, over 973060.61 frames.], batch size: 28, lr: 4.43e-04 +2022-05-04 23:51:24,250 INFO [train.py:715] (6/8) Epoch 4, batch 22550, loss[loss=0.1409, simple_loss=0.2114, pruned_loss=0.03516, over 4810.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2263, pruned_loss=0.04355, over 972927.53 frames.], batch size: 25, lr: 4.43e-04 +2022-05-04 23:52:04,194 INFO [train.py:715] (6/8) Epoch 4, batch 22600, loss[loss=0.1395, simple_loss=0.2245, pruned_loss=0.02729, over 4848.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2264, pruned_loss=0.04374, over 972458.65 frames.], batch size: 20, lr: 4.43e-04 +2022-05-04 23:52:44,017 INFO [train.py:715] (6/8) Epoch 4, batch 22650, loss[loss=0.1448, simple_loss=0.225, pruned_loss=0.03231, over 4782.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2269, pruned_loss=0.04366, over 971320.28 frames.], batch size: 17, lr: 4.43e-04 +2022-05-04 23:53:22,939 INFO [train.py:715] (6/8) Epoch 4, batch 22700, loss[loss=0.1463, simple_loss=0.2222, pruned_loss=0.03519, over 4957.00 frames.], tot_loss[loss=0.157, simple_loss=0.2272, pruned_loss=0.04342, over 971521.86 frames.], batch size: 15, lr: 4.43e-04 +2022-05-04 23:54:02,341 INFO [train.py:715] (6/8) Epoch 4, batch 22750, loss[loss=0.1415, simple_loss=0.212, pruned_loss=0.0355, over 4781.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2267, pruned_loss=0.04341, over 971925.08 frames.], batch size: 18, lr: 4.43e-04 +2022-05-04 23:54:42,045 INFO [train.py:715] (6/8) Epoch 4, batch 22800, loss[loss=0.1889, simple_loss=0.2713, pruned_loss=0.05321, over 4797.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2265, pruned_loss=0.04314, over 971747.82 frames.], batch size: 21, lr: 4.43e-04 +2022-05-04 23:55:21,165 INFO [train.py:715] (6/8) Epoch 4, batch 22850, loss[loss=0.1402, simple_loss=0.1961, pruned_loss=0.04214, over 4983.00 frames.], tot_loss[loss=0.156, simple_loss=0.2263, pruned_loss=0.04288, over 971716.93 frames.], batch size: 14, lr: 4.43e-04 +2022-05-04 23:55:59,896 INFO [train.py:715] (6/8) Epoch 4, batch 22900, loss[loss=0.1526, simple_loss=0.2291, pruned_loss=0.03809, over 4745.00 frames.], tot_loss[loss=0.1567, simple_loss=0.227, pruned_loss=0.04322, over 971811.71 frames.], batch size: 16, lr: 4.43e-04 +2022-05-04 23:56:39,563 INFO [train.py:715] (6/8) Epoch 4, batch 22950, loss[loss=0.1492, simple_loss=0.2213, pruned_loss=0.03858, over 4818.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2264, pruned_loss=0.04314, over 971933.89 frames.], batch size: 26, lr: 4.43e-04 +2022-05-04 23:57:19,675 INFO [train.py:715] (6/8) 
Epoch 4, batch 23000, loss[loss=0.1731, simple_loss=0.2395, pruned_loss=0.05336, over 4960.00 frames.], tot_loss[loss=0.1557, simple_loss=0.226, pruned_loss=0.04263, over 971832.27 frames.], batch size: 39, lr: 4.43e-04 +2022-05-04 23:57:58,011 INFO [train.py:715] (6/8) Epoch 4, batch 23050, loss[loss=0.1715, simple_loss=0.2367, pruned_loss=0.05316, over 4800.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2269, pruned_loss=0.04348, over 973099.12 frames.], batch size: 21, lr: 4.43e-04 +2022-05-04 23:58:37,637 INFO [train.py:715] (6/8) Epoch 4, batch 23100, loss[loss=0.1565, simple_loss=0.2205, pruned_loss=0.04623, over 4751.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2268, pruned_loss=0.04352, over 973397.98 frames.], batch size: 16, lr: 4.43e-04 +2022-05-04 23:59:18,002 INFO [train.py:715] (6/8) Epoch 4, batch 23150, loss[loss=0.147, simple_loss=0.2044, pruned_loss=0.04477, over 4785.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2264, pruned_loss=0.0433, over 973322.45 frames.], batch size: 12, lr: 4.43e-04 +2022-05-04 23:59:57,819 INFO [train.py:715] (6/8) Epoch 4, batch 23200, loss[loss=0.1424, simple_loss=0.2147, pruned_loss=0.03502, over 4898.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2268, pruned_loss=0.04347, over 973420.92 frames.], batch size: 17, lr: 4.42e-04 +2022-05-05 00:00:36,520 INFO [train.py:715] (6/8) Epoch 4, batch 23250, loss[loss=0.1401, simple_loss=0.2155, pruned_loss=0.03239, over 4873.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2262, pruned_loss=0.04345, over 973760.19 frames.], batch size: 20, lr: 4.42e-04 +2022-05-05 00:01:16,397 INFO [train.py:715] (6/8) Epoch 4, batch 23300, loss[loss=0.1454, simple_loss=0.2193, pruned_loss=0.0357, over 4974.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2253, pruned_loss=0.04272, over 973155.63 frames.], batch size: 28, lr: 4.42e-04 +2022-05-05 00:01:56,690 INFO [train.py:715] (6/8) Epoch 4, batch 23350, loss[loss=0.1415, simple_loss=0.2148, pruned_loss=0.03409, over 4778.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2253, pruned_loss=0.04272, over 972720.23 frames.], batch size: 14, lr: 4.42e-04 +2022-05-05 00:02:35,076 INFO [train.py:715] (6/8) Epoch 4, batch 23400, loss[loss=0.1223, simple_loss=0.1933, pruned_loss=0.02569, over 4986.00 frames.], tot_loss[loss=0.155, simple_loss=0.2252, pruned_loss=0.04239, over 972957.32 frames.], batch size: 28, lr: 4.42e-04 +2022-05-05 00:03:14,410 INFO [train.py:715] (6/8) Epoch 4, batch 23450, loss[loss=0.1383, simple_loss=0.2121, pruned_loss=0.03228, over 4892.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2255, pruned_loss=0.0429, over 972239.47 frames.], batch size: 39, lr: 4.42e-04 +2022-05-05 00:03:55,001 INFO [train.py:715] (6/8) Epoch 4, batch 23500, loss[loss=0.1509, simple_loss=0.2318, pruned_loss=0.03504, over 4810.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2258, pruned_loss=0.043, over 971571.62 frames.], batch size: 12, lr: 4.42e-04 +2022-05-05 00:04:33,368 INFO [train.py:715] (6/8) Epoch 4, batch 23550, loss[loss=0.1474, simple_loss=0.2259, pruned_loss=0.03445, over 4978.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2252, pruned_loss=0.04252, over 971137.66 frames.], batch size: 28, lr: 4.42e-04 +2022-05-05 00:05:12,654 INFO [train.py:715] (6/8) Epoch 4, batch 23600, loss[loss=0.1647, simple_loss=0.2387, pruned_loss=0.04537, over 4636.00 frames.], tot_loss[loss=0.1549, simple_loss=0.225, pruned_loss=0.04241, over 970556.40 frames.], batch size: 13, lr: 4.42e-04 +2022-05-05 00:05:53,466 INFO [train.py:715] (6/8) Epoch 4, batch 23650, 
loss[loss=0.1532, simple_loss=0.218, pruned_loss=0.04417, over 4772.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2263, pruned_loss=0.04328, over 970276.52 frames.], batch size: 14, lr: 4.42e-04 +2022-05-05 00:06:34,857 INFO [train.py:715] (6/8) Epoch 4, batch 23700, loss[loss=0.1917, simple_loss=0.2621, pruned_loss=0.06065, over 4923.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2265, pruned_loss=0.04329, over 970618.42 frames.], batch size: 23, lr: 4.42e-04 +2022-05-05 00:07:14,367 INFO [train.py:715] (6/8) Epoch 4, batch 23750, loss[loss=0.1398, simple_loss=0.2142, pruned_loss=0.03275, over 4916.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2273, pruned_loss=0.04356, over 970518.05 frames.], batch size: 18, lr: 4.42e-04 +2022-05-05 00:07:53,776 INFO [train.py:715] (6/8) Epoch 4, batch 23800, loss[loss=0.1448, simple_loss=0.2121, pruned_loss=0.0387, over 4749.00 frames.], tot_loss[loss=0.157, simple_loss=0.2273, pruned_loss=0.04341, over 970317.49 frames.], batch size: 19, lr: 4.42e-04 +2022-05-05 00:08:34,377 INFO [train.py:715] (6/8) Epoch 4, batch 23850, loss[loss=0.1252, simple_loss=0.1996, pruned_loss=0.02537, over 4791.00 frames.], tot_loss[loss=0.158, simple_loss=0.2281, pruned_loss=0.04398, over 969875.65 frames.], batch size: 24, lr: 4.42e-04 +2022-05-05 00:09:13,909 INFO [train.py:715] (6/8) Epoch 4, batch 23900, loss[loss=0.1508, simple_loss=0.2233, pruned_loss=0.03916, over 4914.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2273, pruned_loss=0.0438, over 969132.53 frames.], batch size: 23, lr: 4.42e-04 +2022-05-05 00:09:53,743 INFO [train.py:715] (6/8) Epoch 4, batch 23950, loss[loss=0.1709, simple_loss=0.2393, pruned_loss=0.05127, over 4957.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2274, pruned_loss=0.04403, over 969409.95 frames.], batch size: 35, lr: 4.41e-04 +2022-05-05 00:10:34,501 INFO [train.py:715] (6/8) Epoch 4, batch 24000, loss[loss=0.1568, simple_loss=0.2276, pruned_loss=0.04301, over 4788.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2271, pruned_loss=0.04364, over 969168.79 frames.], batch size: 17, lr: 4.41e-04 +2022-05-05 00:10:34,501 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 00:10:44,332 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1115, simple_loss=0.1974, pruned_loss=0.01276, over 914524.00 frames. 
+2022-05-05 00:11:25,476 INFO [train.py:715] (6/8) Epoch 4, batch 24050, loss[loss=0.1555, simple_loss=0.2296, pruned_loss=0.04067, over 4700.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2283, pruned_loss=0.04473, over 969433.40 frames.], batch size: 15, lr: 4.41e-04 +2022-05-05 00:12:06,072 INFO [train.py:715] (6/8) Epoch 4, batch 24100, loss[loss=0.1598, simple_loss=0.2147, pruned_loss=0.05243, over 4983.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2271, pruned_loss=0.04388, over 970112.46 frames.], batch size: 15, lr: 4.41e-04 +2022-05-05 00:12:45,929 INFO [train.py:715] (6/8) Epoch 4, batch 24150, loss[loss=0.1256, simple_loss=0.2033, pruned_loss=0.02395, over 4943.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2264, pruned_loss=0.04368, over 970079.85 frames.], batch size: 18, lr: 4.41e-04 +2022-05-05 00:13:25,915 INFO [train.py:715] (6/8) Epoch 4, batch 24200, loss[loss=0.1563, simple_loss=0.2257, pruned_loss=0.04343, over 4900.00 frames.], tot_loss[loss=0.157, simple_loss=0.2264, pruned_loss=0.0438, over 971051.35 frames.], batch size: 17, lr: 4.41e-04 +2022-05-05 00:14:07,335 INFO [train.py:715] (6/8) Epoch 4, batch 24250, loss[loss=0.1544, simple_loss=0.2245, pruned_loss=0.04211, over 4852.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2264, pruned_loss=0.04408, over 970691.13 frames.], batch size: 13, lr: 4.41e-04 +2022-05-05 00:14:46,254 INFO [train.py:715] (6/8) Epoch 4, batch 24300, loss[loss=0.1504, simple_loss=0.2176, pruned_loss=0.0416, over 4984.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2267, pruned_loss=0.0441, over 971750.74 frames.], batch size: 35, lr: 4.41e-04 +2022-05-05 00:15:26,712 INFO [train.py:715] (6/8) Epoch 4, batch 24350, loss[loss=0.1395, simple_loss=0.2176, pruned_loss=0.03072, over 4958.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2254, pruned_loss=0.04287, over 971954.94 frames.], batch size: 39, lr: 4.41e-04 +2022-05-05 00:16:07,678 INFO [train.py:715] (6/8) Epoch 4, batch 24400, loss[loss=0.1502, simple_loss=0.2174, pruned_loss=0.04149, over 4824.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2252, pruned_loss=0.04269, over 971969.11 frames.], batch size: 26, lr: 4.41e-04 +2022-05-05 00:16:47,242 INFO [train.py:715] (6/8) Epoch 4, batch 24450, loss[loss=0.1368, simple_loss=0.209, pruned_loss=0.03223, over 4882.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2246, pruned_loss=0.04252, over 971883.92 frames.], batch size: 13, lr: 4.41e-04 +2022-05-05 00:17:27,013 INFO [train.py:715] (6/8) Epoch 4, batch 24500, loss[loss=0.1534, simple_loss=0.2257, pruned_loss=0.04052, over 4650.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2256, pruned_loss=0.04343, over 971420.94 frames.], batch size: 13, lr: 4.41e-04 +2022-05-05 00:18:06,875 INFO [train.py:715] (6/8) Epoch 4, batch 24550, loss[loss=0.1412, simple_loss=0.1961, pruned_loss=0.04317, over 4769.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2264, pruned_loss=0.04406, over 970919.21 frames.], batch size: 14, lr: 4.41e-04 +2022-05-05 00:18:48,138 INFO [train.py:715] (6/8) Epoch 4, batch 24600, loss[loss=0.1533, simple_loss=0.2132, pruned_loss=0.0467, over 4911.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2254, pruned_loss=0.04318, over 971477.98 frames.], batch size: 18, lr: 4.41e-04 +2022-05-05 00:19:27,476 INFO [train.py:715] (6/8) Epoch 4, batch 24650, loss[loss=0.1608, simple_loss=0.2182, pruned_loss=0.05176, over 4863.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2261, pruned_loss=0.04325, over 970562.18 frames.], batch size: 20, lr: 4.41e-04 +2022-05-05 00:20:08,194 
INFO [train.py:715] (6/8) Epoch 4, batch 24700, loss[loss=0.15, simple_loss=0.2124, pruned_loss=0.04384, over 4743.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2264, pruned_loss=0.04352, over 970614.90 frames.], batch size: 16, lr: 4.40e-04 +2022-05-05 00:20:49,272 INFO [train.py:715] (6/8) Epoch 4, batch 24750, loss[loss=0.1581, simple_loss=0.2292, pruned_loss=0.04346, over 4826.00 frames.], tot_loss[loss=0.1565, simple_loss=0.226, pruned_loss=0.0435, over 971124.13 frames.], batch size: 15, lr: 4.40e-04 +2022-05-05 00:21:28,792 INFO [train.py:715] (6/8) Epoch 4, batch 24800, loss[loss=0.1503, simple_loss=0.2256, pruned_loss=0.03752, over 4738.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2251, pruned_loss=0.0427, over 969872.09 frames.], batch size: 16, lr: 4.40e-04 +2022-05-05 00:22:08,801 INFO [train.py:715] (6/8) Epoch 4, batch 24850, loss[loss=0.1575, simple_loss=0.2181, pruned_loss=0.0485, over 4810.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2254, pruned_loss=0.04261, over 970393.59 frames.], batch size: 12, lr: 4.40e-04 +2022-05-05 00:22:49,032 INFO [train.py:715] (6/8) Epoch 4, batch 24900, loss[loss=0.1577, simple_loss=0.2184, pruned_loss=0.04853, over 4803.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2253, pruned_loss=0.04249, over 970041.23 frames.], batch size: 17, lr: 4.40e-04 +2022-05-05 00:23:30,184 INFO [train.py:715] (6/8) Epoch 4, batch 24950, loss[loss=0.1668, simple_loss=0.2408, pruned_loss=0.0464, over 4750.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2265, pruned_loss=0.04336, over 970057.99 frames.], batch size: 16, lr: 4.40e-04 +2022-05-05 00:24:09,089 INFO [train.py:715] (6/8) Epoch 4, batch 25000, loss[loss=0.1635, simple_loss=0.2273, pruned_loss=0.04987, over 4939.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2264, pruned_loss=0.04366, over 970394.58 frames.], batch size: 29, lr: 4.40e-04 +2022-05-05 00:24:49,328 INFO [train.py:715] (6/8) Epoch 4, batch 25050, loss[loss=0.1519, simple_loss=0.2233, pruned_loss=0.0403, over 4766.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2263, pruned_loss=0.04358, over 970747.33 frames.], batch size: 19, lr: 4.40e-04 +2022-05-05 00:25:30,443 INFO [train.py:715] (6/8) Epoch 4, batch 25100, loss[loss=0.1782, simple_loss=0.2427, pruned_loss=0.05685, over 4973.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2256, pruned_loss=0.0431, over 971598.38 frames.], batch size: 35, lr: 4.40e-04 +2022-05-05 00:26:10,368 INFO [train.py:715] (6/8) Epoch 4, batch 25150, loss[loss=0.1507, simple_loss=0.2273, pruned_loss=0.03701, over 4866.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2265, pruned_loss=0.04318, over 972566.98 frames.], batch size: 16, lr: 4.40e-04 +2022-05-05 00:26:49,787 INFO [train.py:715] (6/8) Epoch 4, batch 25200, loss[loss=0.1624, simple_loss=0.2453, pruned_loss=0.03979, over 4821.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2264, pruned_loss=0.04304, over 971926.66 frames.], batch size: 25, lr: 4.40e-04 +2022-05-05 00:27:30,055 INFO [train.py:715] (6/8) Epoch 4, batch 25250, loss[loss=0.1472, simple_loss=0.2284, pruned_loss=0.03299, over 4746.00 frames.], tot_loss[loss=0.156, simple_loss=0.2263, pruned_loss=0.04288, over 972489.33 frames.], batch size: 16, lr: 4.40e-04 +2022-05-05 00:28:10,081 INFO [train.py:715] (6/8) Epoch 4, batch 25300, loss[loss=0.1363, simple_loss=0.2065, pruned_loss=0.03306, over 4952.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2252, pruned_loss=0.0428, over 972993.38 frames.], batch size: 39, lr: 4.40e-04 +2022-05-05 00:28:47,882 INFO [train.py:715] (6/8) 
Epoch 4, batch 25350, loss[loss=0.1424, simple_loss=0.2027, pruned_loss=0.04106, over 4852.00 frames.], tot_loss[loss=0.155, simple_loss=0.2247, pruned_loss=0.04266, over 972973.75 frames.], batch size: 30, lr: 4.40e-04 +2022-05-05 00:29:26,741 INFO [train.py:715] (6/8) Epoch 4, batch 25400, loss[loss=0.1331, simple_loss=0.1911, pruned_loss=0.03752, over 4823.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2247, pruned_loss=0.04258, over 971246.10 frames.], batch size: 12, lr: 4.40e-04 +2022-05-05 00:30:06,412 INFO [train.py:715] (6/8) Epoch 4, batch 25450, loss[loss=0.1806, simple_loss=0.2422, pruned_loss=0.05948, over 4987.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2251, pruned_loss=0.04224, over 971649.12 frames.], batch size: 16, lr: 4.39e-04 +2022-05-05 00:30:45,459 INFO [train.py:715] (6/8) Epoch 4, batch 25500, loss[loss=0.1379, simple_loss=0.2101, pruned_loss=0.03284, over 4960.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2247, pruned_loss=0.04201, over 970938.39 frames.], batch size: 21, lr: 4.39e-04 +2022-05-05 00:31:25,317 INFO [train.py:715] (6/8) Epoch 4, batch 25550, loss[loss=0.1599, simple_loss=0.2244, pruned_loss=0.04771, over 4745.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2244, pruned_loss=0.04194, over 971675.69 frames.], batch size: 16, lr: 4.39e-04 +2022-05-05 00:32:05,295 INFO [train.py:715] (6/8) Epoch 4, batch 25600, loss[loss=0.1498, simple_loss=0.2267, pruned_loss=0.03642, over 4980.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2249, pruned_loss=0.0421, over 971835.26 frames.], batch size: 24, lr: 4.39e-04 +2022-05-05 00:32:45,565 INFO [train.py:715] (6/8) Epoch 4, batch 25650, loss[loss=0.1618, simple_loss=0.2238, pruned_loss=0.04989, over 4976.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2253, pruned_loss=0.04222, over 972520.62 frames.], batch size: 25, lr: 4.39e-04 +2022-05-05 00:33:24,677 INFO [train.py:715] (6/8) Epoch 4, batch 25700, loss[loss=0.148, simple_loss=0.2209, pruned_loss=0.03753, over 4912.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2255, pruned_loss=0.04271, over 972328.14 frames.], batch size: 19, lr: 4.39e-04 +2022-05-05 00:34:04,660 INFO [train.py:715] (6/8) Epoch 4, batch 25750, loss[loss=0.1337, simple_loss=0.2086, pruned_loss=0.02946, over 4863.00 frames.], tot_loss[loss=0.156, simple_loss=0.2261, pruned_loss=0.04295, over 972069.18 frames.], batch size: 20, lr: 4.39e-04 +2022-05-05 00:34:45,104 INFO [train.py:715] (6/8) Epoch 4, batch 25800, loss[loss=0.1148, simple_loss=0.1934, pruned_loss=0.01811, over 4689.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2261, pruned_loss=0.04256, over 971660.28 frames.], batch size: 15, lr: 4.39e-04 +2022-05-05 00:35:24,457 INFO [train.py:715] (6/8) Epoch 4, batch 25850, loss[loss=0.1788, simple_loss=0.2481, pruned_loss=0.05476, over 4693.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2254, pruned_loss=0.04257, over 971795.92 frames.], batch size: 15, lr: 4.39e-04 +2022-05-05 00:36:03,600 INFO [train.py:715] (6/8) Epoch 4, batch 25900, loss[loss=0.1519, simple_loss=0.2259, pruned_loss=0.03895, over 4921.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2249, pruned_loss=0.04239, over 971418.84 frames.], batch size: 23, lr: 4.39e-04 +2022-05-05 00:36:43,847 INFO [train.py:715] (6/8) Epoch 4, batch 25950, loss[loss=0.1465, simple_loss=0.2078, pruned_loss=0.04263, over 4824.00 frames.], tot_loss[loss=0.155, simple_loss=0.225, pruned_loss=0.0425, over 970902.56 frames.], batch size: 13, lr: 4.39e-04 +2022-05-05 00:37:24,111 INFO [train.py:715] (6/8) Epoch 4, batch 26000, 
loss[loss=0.165, simple_loss=0.2377, pruned_loss=0.04611, over 4892.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2258, pruned_loss=0.04292, over 971723.32 frames.], batch size: 19, lr: 4.39e-04 +2022-05-05 00:38:02,822 INFO [train.py:715] (6/8) Epoch 4, batch 26050, loss[loss=0.1772, simple_loss=0.2439, pruned_loss=0.05528, over 4975.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2261, pruned_loss=0.0433, over 971665.87 frames.], batch size: 25, lr: 4.39e-04 +2022-05-05 00:38:42,228 INFO [train.py:715] (6/8) Epoch 4, batch 26100, loss[loss=0.1393, simple_loss=0.2174, pruned_loss=0.0306, over 4978.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2263, pruned_loss=0.04326, over 971783.44 frames.], batch size: 14, lr: 4.39e-04 +2022-05-05 00:39:22,679 INFO [train.py:715] (6/8) Epoch 4, batch 26150, loss[loss=0.1713, simple_loss=0.2441, pruned_loss=0.0493, over 4907.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2264, pruned_loss=0.04333, over 972292.80 frames.], batch size: 18, lr: 4.39e-04 +2022-05-05 00:40:01,760 INFO [train.py:715] (6/8) Epoch 4, batch 26200, loss[loss=0.1647, simple_loss=0.227, pruned_loss=0.05119, over 4823.00 frames.], tot_loss[loss=0.156, simple_loss=0.2257, pruned_loss=0.04319, over 972247.70 frames.], batch size: 13, lr: 4.38e-04 +2022-05-05 00:40:41,521 INFO [train.py:715] (6/8) Epoch 4, batch 26250, loss[loss=0.1145, simple_loss=0.1875, pruned_loss=0.02078, over 4743.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2257, pruned_loss=0.04326, over 971250.92 frames.], batch size: 16, lr: 4.38e-04 +2022-05-05 00:41:21,387 INFO [train.py:715] (6/8) Epoch 4, batch 26300, loss[loss=0.1644, simple_loss=0.232, pruned_loss=0.04834, over 4673.00 frames.], tot_loss[loss=0.1562, simple_loss=0.226, pruned_loss=0.0432, over 971032.60 frames.], batch size: 13, lr: 4.38e-04 +2022-05-05 00:42:01,538 INFO [train.py:715] (6/8) Epoch 4, batch 26350, loss[loss=0.1206, simple_loss=0.1928, pruned_loss=0.02422, over 4749.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2256, pruned_loss=0.04273, over 971549.73 frames.], batch size: 16, lr: 4.38e-04 +2022-05-05 00:42:40,874 INFO [train.py:715] (6/8) Epoch 4, batch 26400, loss[loss=0.1399, simple_loss=0.2156, pruned_loss=0.03213, over 4855.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2259, pruned_loss=0.04296, over 972134.10 frames.], batch size: 20, lr: 4.38e-04 +2022-05-05 00:43:20,962 INFO [train.py:715] (6/8) Epoch 4, batch 26450, loss[loss=0.1688, simple_loss=0.2422, pruned_loss=0.04768, over 4855.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2263, pruned_loss=0.04306, over 971973.13 frames.], batch size: 39, lr: 4.38e-04 +2022-05-05 00:44:01,488 INFO [train.py:715] (6/8) Epoch 4, batch 26500, loss[loss=0.1558, simple_loss=0.231, pruned_loss=0.04029, over 4785.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2261, pruned_loss=0.04304, over 971578.97 frames.], batch size: 17, lr: 4.38e-04 +2022-05-05 00:44:40,386 INFO [train.py:715] (6/8) Epoch 4, batch 26550, loss[loss=0.1765, simple_loss=0.2486, pruned_loss=0.05221, over 4797.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2268, pruned_loss=0.04311, over 972507.26 frames.], batch size: 21, lr: 4.38e-04 +2022-05-05 00:45:20,029 INFO [train.py:715] (6/8) Epoch 4, batch 26600, loss[loss=0.161, simple_loss=0.2325, pruned_loss=0.04473, over 4792.00 frames.], tot_loss[loss=0.156, simple_loss=0.2264, pruned_loss=0.04276, over 972209.32 frames.], batch size: 24, lr: 4.38e-04 +2022-05-05 00:46:00,420 INFO [train.py:715] (6/8) Epoch 4, batch 26650, loss[loss=0.1532, 
simple_loss=0.2227, pruned_loss=0.0419, over 4846.00 frames.], tot_loss[loss=0.156, simple_loss=0.2263, pruned_loss=0.04286, over 971571.26 frames.], batch size: 30, lr: 4.38e-04 +2022-05-05 00:46:41,234 INFO [train.py:715] (6/8) Epoch 4, batch 26700, loss[loss=0.1766, simple_loss=0.2456, pruned_loss=0.05376, over 4859.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2269, pruned_loss=0.04347, over 972167.90 frames.], batch size: 20, lr: 4.38e-04 +2022-05-05 00:47:20,024 INFO [train.py:715] (6/8) Epoch 4, batch 26750, loss[loss=0.2022, simple_loss=0.2673, pruned_loss=0.06859, over 4992.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2274, pruned_loss=0.04407, over 972110.41 frames.], batch size: 15, lr: 4.38e-04 +2022-05-05 00:47:59,598 INFO [train.py:715] (6/8) Epoch 4, batch 26800, loss[loss=0.1536, simple_loss=0.2218, pruned_loss=0.04275, over 4917.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2274, pruned_loss=0.04388, over 972186.60 frames.], batch size: 18, lr: 4.38e-04 +2022-05-05 00:48:39,807 INFO [train.py:715] (6/8) Epoch 4, batch 26850, loss[loss=0.1601, simple_loss=0.2357, pruned_loss=0.04226, over 4929.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2265, pruned_loss=0.04367, over 972598.83 frames.], batch size: 18, lr: 4.38e-04 +2022-05-05 00:49:18,738 INFO [train.py:715] (6/8) Epoch 4, batch 26900, loss[loss=0.1653, simple_loss=0.2271, pruned_loss=0.05169, over 4957.00 frames.], tot_loss[loss=0.156, simple_loss=0.2257, pruned_loss=0.04317, over 973213.28 frames.], batch size: 15, lr: 4.38e-04 +2022-05-05 00:49:58,562 INFO [train.py:715] (6/8) Epoch 4, batch 26950, loss[loss=0.1654, simple_loss=0.2373, pruned_loss=0.04673, over 4793.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2247, pruned_loss=0.04254, over 972738.23 frames.], batch size: 24, lr: 4.37e-04 +2022-05-05 00:50:38,531 INFO [train.py:715] (6/8) Epoch 4, batch 27000, loss[loss=0.1671, simple_loss=0.2305, pruned_loss=0.05187, over 4886.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2254, pruned_loss=0.04289, over 973506.23 frames.], batch size: 39, lr: 4.37e-04 +2022-05-05 00:50:38,532 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 00:50:48,691 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1114, simple_loss=0.197, pruned_loss=0.01284, over 914524.00 frames. 
+2022-05-05 00:51:28,852 INFO [train.py:715] (6/8) Epoch 4, batch 27050, loss[loss=0.1546, simple_loss=0.2239, pruned_loss=0.04265, over 4818.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2259, pruned_loss=0.04321, over 972667.72 frames.], batch size: 25, lr: 4.37e-04 +2022-05-05 00:52:08,423 INFO [train.py:715] (6/8) Epoch 4, batch 27100, loss[loss=0.1966, simple_loss=0.251, pruned_loss=0.07109, over 4841.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2258, pruned_loss=0.04325, over 972903.69 frames.], batch size: 32, lr: 4.37e-04 +2022-05-05 00:52:47,729 INFO [train.py:715] (6/8) Epoch 4, batch 27150, loss[loss=0.1954, simple_loss=0.2588, pruned_loss=0.066, over 4817.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2267, pruned_loss=0.04374, over 971203.79 frames.], batch size: 26, lr: 4.37e-04 +2022-05-05 00:53:27,411 INFO [train.py:715] (6/8) Epoch 4, batch 27200, loss[loss=0.1813, simple_loss=0.2449, pruned_loss=0.05888, over 4831.00 frames.], tot_loss[loss=0.1574, simple_loss=0.227, pruned_loss=0.04394, over 971190.35 frames.], batch size: 26, lr: 4.37e-04 +2022-05-05 00:54:07,874 INFO [train.py:715] (6/8) Epoch 4, batch 27250, loss[loss=0.1603, simple_loss=0.2269, pruned_loss=0.04686, over 4977.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2269, pruned_loss=0.04368, over 971303.94 frames.], batch size: 14, lr: 4.37e-04 +2022-05-05 00:54:46,636 INFO [train.py:715] (6/8) Epoch 4, batch 27300, loss[loss=0.163, simple_loss=0.2445, pruned_loss=0.04076, over 4750.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2263, pruned_loss=0.04333, over 970954.21 frames.], batch size: 16, lr: 4.37e-04 +2022-05-05 00:55:26,631 INFO [train.py:715] (6/8) Epoch 4, batch 27350, loss[loss=0.169, simple_loss=0.2376, pruned_loss=0.05025, over 4853.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2267, pruned_loss=0.04349, over 972260.34 frames.], batch size: 32, lr: 4.37e-04 +2022-05-05 00:56:06,584 INFO [train.py:715] (6/8) Epoch 4, batch 27400, loss[loss=0.1428, simple_loss=0.209, pruned_loss=0.03827, over 4764.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2265, pruned_loss=0.04303, over 972780.07 frames.], batch size: 14, lr: 4.37e-04 +2022-05-05 00:56:45,010 INFO [train.py:715] (6/8) Epoch 4, batch 27450, loss[loss=0.136, simple_loss=0.1983, pruned_loss=0.03682, over 4937.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2257, pruned_loss=0.04238, over 973041.40 frames.], batch size: 23, lr: 4.37e-04 +2022-05-05 00:57:24,958 INFO [train.py:715] (6/8) Epoch 4, batch 27500, loss[loss=0.1445, simple_loss=0.2328, pruned_loss=0.02811, over 4869.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2261, pruned_loss=0.04283, over 973241.50 frames.], batch size: 16, lr: 4.37e-04 +2022-05-05 00:58:03,980 INFO [train.py:715] (6/8) Epoch 4, batch 27550, loss[loss=0.1411, simple_loss=0.2168, pruned_loss=0.0327, over 4970.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2275, pruned_loss=0.04366, over 974859.16 frames.], batch size: 28, lr: 4.37e-04 +2022-05-05 00:58:43,893 INFO [train.py:715] (6/8) Epoch 4, batch 27600, loss[loss=0.1634, simple_loss=0.2294, pruned_loss=0.04871, over 4837.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2254, pruned_loss=0.04284, over 974349.19 frames.], batch size: 32, lr: 4.37e-04 +2022-05-05 00:59:22,452 INFO [train.py:715] (6/8) Epoch 4, batch 27650, loss[loss=0.1583, simple_loss=0.2288, pruned_loss=0.04395, over 4791.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2264, pruned_loss=0.04326, over 973630.83 frames.], batch size: 12, lr: 4.37e-04 +2022-05-05 01:00:01,784 
INFO [train.py:715] (6/8) Epoch 4, batch 27700, loss[loss=0.1749, simple_loss=0.2462, pruned_loss=0.0518, over 4985.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2262, pruned_loss=0.04331, over 973431.29 frames.], batch size: 39, lr: 4.36e-04 +2022-05-05 01:00:41,407 INFO [train.py:715] (6/8) Epoch 4, batch 27750, loss[loss=0.1377, simple_loss=0.2012, pruned_loss=0.03705, over 4934.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2254, pruned_loss=0.04298, over 974285.25 frames.], batch size: 21, lr: 4.36e-04 +2022-05-05 01:01:20,723 INFO [train.py:715] (6/8) Epoch 4, batch 27800, loss[loss=0.1524, simple_loss=0.2225, pruned_loss=0.04112, over 4885.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2264, pruned_loss=0.04344, over 974110.83 frames.], batch size: 19, lr: 4.36e-04 +2022-05-05 01:01:59,770 INFO [train.py:715] (6/8) Epoch 4, batch 27850, loss[loss=0.1691, simple_loss=0.2407, pruned_loss=0.04873, over 4777.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2264, pruned_loss=0.04359, over 973925.09 frames.], batch size: 18, lr: 4.36e-04 +2022-05-05 01:02:38,866 INFO [train.py:715] (6/8) Epoch 4, batch 27900, loss[loss=0.1411, simple_loss=0.2142, pruned_loss=0.03404, over 4922.00 frames.], tot_loss[loss=0.1573, simple_loss=0.227, pruned_loss=0.04381, over 974457.12 frames.], batch size: 17, lr: 4.36e-04 +2022-05-05 01:03:18,330 INFO [train.py:715] (6/8) Epoch 4, batch 27950, loss[loss=0.1576, simple_loss=0.2316, pruned_loss=0.04174, over 4763.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2261, pruned_loss=0.04341, over 973255.31 frames.], batch size: 16, lr: 4.36e-04 +2022-05-05 01:03:57,897 INFO [train.py:715] (6/8) Epoch 4, batch 28000, loss[loss=0.1638, simple_loss=0.2426, pruned_loss=0.04248, over 4967.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2242, pruned_loss=0.04217, over 972859.27 frames.], batch size: 14, lr: 4.36e-04 +2022-05-05 01:04:37,853 INFO [train.py:715] (6/8) Epoch 4, batch 28050, loss[loss=0.1535, simple_loss=0.2183, pruned_loss=0.04439, over 4805.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2246, pruned_loss=0.04229, over 972152.40 frames.], batch size: 12, lr: 4.36e-04 +2022-05-05 01:05:17,737 INFO [train.py:715] (6/8) Epoch 4, batch 28100, loss[loss=0.1288, simple_loss=0.2083, pruned_loss=0.0247, over 4865.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2248, pruned_loss=0.04266, over 972704.00 frames.], batch size: 20, lr: 4.36e-04 +2022-05-05 01:05:57,332 INFO [train.py:715] (6/8) Epoch 4, batch 28150, loss[loss=0.1678, simple_loss=0.2536, pruned_loss=0.04098, over 4812.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2247, pruned_loss=0.04245, over 972828.76 frames.], batch size: 26, lr: 4.36e-04 +2022-05-05 01:06:36,807 INFO [train.py:715] (6/8) Epoch 4, batch 28200, loss[loss=0.1381, simple_loss=0.2077, pruned_loss=0.03425, over 4851.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2249, pruned_loss=0.04295, over 972419.55 frames.], batch size: 20, lr: 4.36e-04 +2022-05-05 01:07:15,869 INFO [train.py:715] (6/8) Epoch 4, batch 28250, loss[loss=0.1332, simple_loss=0.2112, pruned_loss=0.02763, over 4794.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2254, pruned_loss=0.04301, over 972059.94 frames.], batch size: 24, lr: 4.36e-04 +2022-05-05 01:07:55,430 INFO [train.py:715] (6/8) Epoch 4, batch 28300, loss[loss=0.135, simple_loss=0.2131, pruned_loss=0.02845, over 4804.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2259, pruned_loss=0.04276, over 972289.31 frames.], batch size: 25, lr: 4.36e-04 +2022-05-05 01:08:34,758 INFO [train.py:715] 
(6/8) Epoch 4, batch 28350, loss[loss=0.1411, simple_loss=0.2005, pruned_loss=0.04089, over 4988.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2257, pruned_loss=0.04323, over 972031.81 frames.], batch size: 14, lr: 4.36e-04 +2022-05-05 01:09:14,659 INFO [train.py:715] (6/8) Epoch 4, batch 28400, loss[loss=0.177, simple_loss=0.2508, pruned_loss=0.05161, over 4736.00 frames.], tot_loss[loss=0.156, simple_loss=0.2257, pruned_loss=0.04318, over 971262.24 frames.], batch size: 16, lr: 4.36e-04 +2022-05-05 01:09:53,874 INFO [train.py:715] (6/8) Epoch 4, batch 28450, loss[loss=0.1556, simple_loss=0.2323, pruned_loss=0.0394, over 4863.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2255, pruned_loss=0.04281, over 971279.80 frames.], batch size: 20, lr: 4.36e-04 +2022-05-05 01:10:32,532 INFO [train.py:715] (6/8) Epoch 4, batch 28500, loss[loss=0.148, simple_loss=0.2221, pruned_loss=0.03693, over 4886.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2256, pruned_loss=0.04314, over 971577.03 frames.], batch size: 17, lr: 4.35e-04 +2022-05-05 01:11:12,043 INFO [train.py:715] (6/8) Epoch 4, batch 28550, loss[loss=0.1372, simple_loss=0.2131, pruned_loss=0.03059, over 4922.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2255, pruned_loss=0.04294, over 972175.71 frames.], batch size: 18, lr: 4.35e-04 +2022-05-05 01:11:51,206 INFO [train.py:715] (6/8) Epoch 4, batch 28600, loss[loss=0.1536, simple_loss=0.2197, pruned_loss=0.04371, over 4744.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2263, pruned_loss=0.04378, over 972001.40 frames.], batch size: 19, lr: 4.35e-04 +2022-05-05 01:12:30,865 INFO [train.py:715] (6/8) Epoch 4, batch 28650, loss[loss=0.1149, simple_loss=0.188, pruned_loss=0.02088, over 4828.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2252, pruned_loss=0.0432, over 971772.16 frames.], batch size: 25, lr: 4.35e-04 +2022-05-05 01:13:10,042 INFO [train.py:715] (6/8) Epoch 4, batch 28700, loss[loss=0.1848, simple_loss=0.259, pruned_loss=0.05528, over 4903.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2242, pruned_loss=0.04272, over 971785.24 frames.], batch size: 18, lr: 4.35e-04 +2022-05-05 01:13:49,558 INFO [train.py:715] (6/8) Epoch 4, batch 28750, loss[loss=0.163, simple_loss=0.2204, pruned_loss=0.05284, over 4895.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2236, pruned_loss=0.04256, over 971794.96 frames.], batch size: 17, lr: 4.35e-04 +2022-05-05 01:14:31,717 INFO [train.py:715] (6/8) Epoch 4, batch 28800, loss[loss=0.1657, simple_loss=0.2382, pruned_loss=0.0466, over 4856.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2247, pruned_loss=0.04278, over 971680.04 frames.], batch size: 32, lr: 4.35e-04 +2022-05-05 01:15:10,515 INFO [train.py:715] (6/8) Epoch 4, batch 28850, loss[loss=0.1819, simple_loss=0.2504, pruned_loss=0.05669, over 4987.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2261, pruned_loss=0.04334, over 972130.41 frames.], batch size: 15, lr: 4.35e-04 +2022-05-05 01:15:50,214 INFO [train.py:715] (6/8) Epoch 4, batch 28900, loss[loss=0.1866, simple_loss=0.2639, pruned_loss=0.0547, over 4848.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2271, pruned_loss=0.04374, over 971527.25 frames.], batch size: 20, lr: 4.35e-04 +2022-05-05 01:16:29,311 INFO [train.py:715] (6/8) Epoch 4, batch 28950, loss[loss=0.1657, simple_loss=0.2309, pruned_loss=0.05031, over 4750.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2268, pruned_loss=0.04352, over 970977.95 frames.], batch size: 16, lr: 4.35e-04 +2022-05-05 01:17:08,545 INFO [train.py:715] (6/8) Epoch 4, batch 29000, 
loss[loss=0.1275, simple_loss=0.196, pruned_loss=0.02947, over 4828.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2269, pruned_loss=0.04346, over 971699.33 frames.], batch size: 12, lr: 4.35e-04 +2022-05-05 01:17:48,180 INFO [train.py:715] (6/8) Epoch 4, batch 29050, loss[loss=0.1553, simple_loss=0.2317, pruned_loss=0.03941, over 4883.00 frames.], tot_loss[loss=0.157, simple_loss=0.2271, pruned_loss=0.04347, over 972064.03 frames.], batch size: 22, lr: 4.35e-04 +2022-05-05 01:18:28,194 INFO [train.py:715] (6/8) Epoch 4, batch 29100, loss[loss=0.1316, simple_loss=0.1945, pruned_loss=0.03431, over 4738.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2259, pruned_loss=0.04276, over 971798.65 frames.], batch size: 16, lr: 4.35e-04 +2022-05-05 01:19:07,859 INFO [train.py:715] (6/8) Epoch 4, batch 29150, loss[loss=0.1917, simple_loss=0.2445, pruned_loss=0.06941, over 4775.00 frames.], tot_loss[loss=0.155, simple_loss=0.2253, pruned_loss=0.04234, over 971691.50 frames.], batch size: 18, lr: 4.35e-04 +2022-05-05 01:19:46,744 INFO [train.py:715] (6/8) Epoch 4, batch 29200, loss[loss=0.1616, simple_loss=0.2379, pruned_loss=0.04268, over 4941.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2248, pruned_loss=0.04203, over 972707.58 frames.], batch size: 21, lr: 4.35e-04 +2022-05-05 01:20:26,110 INFO [train.py:715] (6/8) Epoch 4, batch 29250, loss[loss=0.1844, simple_loss=0.2461, pruned_loss=0.06135, over 4814.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2249, pruned_loss=0.04238, over 972612.39 frames.], batch size: 15, lr: 4.34e-04 +2022-05-05 01:21:04,999 INFO [train.py:715] (6/8) Epoch 4, batch 29300, loss[loss=0.1516, simple_loss=0.2183, pruned_loss=0.04245, over 4985.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2247, pruned_loss=0.04245, over 972829.14 frames.], batch size: 31, lr: 4.34e-04 +2022-05-05 01:21:43,988 INFO [train.py:715] (6/8) Epoch 4, batch 29350, loss[loss=0.1681, simple_loss=0.2363, pruned_loss=0.04996, over 4816.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2252, pruned_loss=0.04304, over 972757.55 frames.], batch size: 15, lr: 4.34e-04 +2022-05-05 01:22:22,968 INFO [train.py:715] (6/8) Epoch 4, batch 29400, loss[loss=0.2103, simple_loss=0.2755, pruned_loss=0.07258, over 4871.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2259, pruned_loss=0.04329, over 973083.88 frames.], batch size: 16, lr: 4.34e-04 +2022-05-05 01:23:02,048 INFO [train.py:715] (6/8) Epoch 4, batch 29450, loss[loss=0.1457, simple_loss=0.2159, pruned_loss=0.03774, over 4911.00 frames.], tot_loss[loss=0.1565, simple_loss=0.226, pruned_loss=0.04346, over 973155.60 frames.], batch size: 17, lr: 4.34e-04 +2022-05-05 01:23:41,626 INFO [train.py:715] (6/8) Epoch 4, batch 29500, loss[loss=0.1507, simple_loss=0.2206, pruned_loss=0.04041, over 4904.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2262, pruned_loss=0.04335, over 973840.60 frames.], batch size: 39, lr: 4.34e-04 +2022-05-05 01:24:20,883 INFO [train.py:715] (6/8) Epoch 4, batch 29550, loss[loss=0.1663, simple_loss=0.2261, pruned_loss=0.0533, over 4791.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2255, pruned_loss=0.04261, over 973551.58 frames.], batch size: 14, lr: 4.34e-04 +2022-05-05 01:25:00,166 INFO [train.py:715] (6/8) Epoch 4, batch 29600, loss[loss=0.1462, simple_loss=0.2178, pruned_loss=0.0373, over 4819.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2256, pruned_loss=0.04264, over 973593.32 frames.], batch size: 25, lr: 4.34e-04 +2022-05-05 01:25:39,288 INFO [train.py:715] (6/8) Epoch 4, batch 29650, loss[loss=0.1417, 
simple_loss=0.2212, pruned_loss=0.03108, over 4976.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2257, pruned_loss=0.04251, over 972602.67 frames.], batch size: 24, lr: 4.34e-04 +2022-05-05 01:26:18,057 INFO [train.py:715] (6/8) Epoch 4, batch 29700, loss[loss=0.1557, simple_loss=0.2263, pruned_loss=0.0425, over 4734.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2259, pruned_loss=0.0425, over 971855.92 frames.], batch size: 16, lr: 4.34e-04 +2022-05-05 01:26:57,622 INFO [train.py:715] (6/8) Epoch 4, batch 29750, loss[loss=0.136, simple_loss=0.2069, pruned_loss=0.03258, over 4919.00 frames.], tot_loss[loss=0.1544, simple_loss=0.225, pruned_loss=0.0419, over 972993.65 frames.], batch size: 29, lr: 4.34e-04 +2022-05-05 01:27:36,801 INFO [train.py:715] (6/8) Epoch 4, batch 29800, loss[loss=0.1211, simple_loss=0.186, pruned_loss=0.02814, over 4896.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2254, pruned_loss=0.04213, over 973549.35 frames.], batch size: 17, lr: 4.34e-04 +2022-05-05 01:28:16,330 INFO [train.py:715] (6/8) Epoch 4, batch 29850, loss[loss=0.1814, simple_loss=0.2458, pruned_loss=0.05851, over 4822.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2252, pruned_loss=0.04172, over 973499.63 frames.], batch size: 15, lr: 4.34e-04 +2022-05-05 01:28:55,202 INFO [train.py:715] (6/8) Epoch 4, batch 29900, loss[loss=0.1533, simple_loss=0.2273, pruned_loss=0.03966, over 4901.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2242, pruned_loss=0.04183, over 973316.80 frames.], batch size: 19, lr: 4.34e-04 +2022-05-05 01:29:34,844 INFO [train.py:715] (6/8) Epoch 4, batch 29950, loss[loss=0.1719, simple_loss=0.2321, pruned_loss=0.05587, over 4853.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2245, pruned_loss=0.042, over 973371.27 frames.], batch size: 13, lr: 4.34e-04 +2022-05-05 01:30:13,998 INFO [train.py:715] (6/8) Epoch 4, batch 30000, loss[loss=0.1634, simple_loss=0.2332, pruned_loss=0.04684, over 4861.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2244, pruned_loss=0.04169, over 972758.07 frames.], batch size: 30, lr: 4.34e-04 +2022-05-05 01:30:13,999 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 01:30:23,828 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1113, simple_loss=0.1968, pruned_loss=0.01286, over 914524.00 frames. 
+2022-05-05 01:31:03,991 INFO [train.py:715] (6/8) Epoch 4, batch 30050, loss[loss=0.1555, simple_loss=0.2271, pruned_loss=0.04199, over 4963.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2251, pruned_loss=0.04229, over 973037.24 frames.], batch size: 35, lr: 4.33e-04 +2022-05-05 01:31:43,427 INFO [train.py:715] (6/8) Epoch 4, batch 30100, loss[loss=0.1364, simple_loss=0.2055, pruned_loss=0.03363, over 4759.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2254, pruned_loss=0.04268, over 973332.99 frames.], batch size: 19, lr: 4.33e-04 +2022-05-05 01:32:23,322 INFO [train.py:715] (6/8) Epoch 4, batch 30150, loss[loss=0.1337, simple_loss=0.205, pruned_loss=0.03116, over 4781.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2254, pruned_loss=0.04282, over 971989.00 frames.], batch size: 14, lr: 4.33e-04 +2022-05-05 01:33:02,793 INFO [train.py:715] (6/8) Epoch 4, batch 30200, loss[loss=0.1427, simple_loss=0.217, pruned_loss=0.03416, over 4851.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2249, pruned_loss=0.04227, over 971838.77 frames.], batch size: 15, lr: 4.33e-04 +2022-05-05 01:33:42,429 INFO [train.py:715] (6/8) Epoch 4, batch 30250, loss[loss=0.1891, simple_loss=0.2578, pruned_loss=0.06017, over 4954.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2256, pruned_loss=0.04293, over 972867.85 frames.], batch size: 21, lr: 4.33e-04 +2022-05-05 01:34:21,598 INFO [train.py:715] (6/8) Epoch 4, batch 30300, loss[loss=0.132, simple_loss=0.2012, pruned_loss=0.03145, over 4854.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2253, pruned_loss=0.04258, over 972666.13 frames.], batch size: 32, lr: 4.33e-04 +2022-05-05 01:35:01,079 INFO [train.py:715] (6/8) Epoch 4, batch 30350, loss[loss=0.1483, simple_loss=0.2211, pruned_loss=0.03775, over 4812.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2252, pruned_loss=0.04298, over 972035.83 frames.], batch size: 25, lr: 4.33e-04 +2022-05-05 01:35:41,057 INFO [train.py:715] (6/8) Epoch 4, batch 30400, loss[loss=0.2058, simple_loss=0.263, pruned_loss=0.07432, over 4870.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2252, pruned_loss=0.04308, over 973092.64 frames.], batch size: 38, lr: 4.33e-04 +2022-05-05 01:36:20,218 INFO [train.py:715] (6/8) Epoch 4, batch 30450, loss[loss=0.1441, simple_loss=0.2201, pruned_loss=0.03403, over 4804.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2247, pruned_loss=0.0423, over 972987.56 frames.], batch size: 21, lr: 4.33e-04 +2022-05-05 01:36:59,982 INFO [train.py:715] (6/8) Epoch 4, batch 30500, loss[loss=0.1585, simple_loss=0.2367, pruned_loss=0.04011, over 4817.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2252, pruned_loss=0.04255, over 972901.63 frames.], batch size: 25, lr: 4.33e-04 +2022-05-05 01:37:40,028 INFO [train.py:715] (6/8) Epoch 4, batch 30550, loss[loss=0.1521, simple_loss=0.2315, pruned_loss=0.03638, over 4804.00 frames.], tot_loss[loss=0.1542, simple_loss=0.224, pruned_loss=0.04218, over 971832.26 frames.], batch size: 14, lr: 4.33e-04 +2022-05-05 01:38:19,336 INFO [train.py:715] (6/8) Epoch 4, batch 30600, loss[loss=0.1726, simple_loss=0.2399, pruned_loss=0.05263, over 4862.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2245, pruned_loss=0.04289, over 972214.01 frames.], batch size: 20, lr: 4.33e-04 +2022-05-05 01:38:58,942 INFO [train.py:715] (6/8) Epoch 4, batch 30650, loss[loss=0.1322, simple_loss=0.2151, pruned_loss=0.0247, over 4891.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2238, pruned_loss=0.04259, over 973307.90 frames.], batch size: 22, lr: 4.33e-04 +2022-05-05 01:39:38,413 
INFO [train.py:715] (6/8) Epoch 4, batch 30700, loss[loss=0.1359, simple_loss=0.2084, pruned_loss=0.03169, over 4925.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2245, pruned_loss=0.04257, over 973465.79 frames.], batch size: 18, lr: 4.33e-04 +2022-05-05 01:40:18,147 INFO [train.py:715] (6/8) Epoch 4, batch 30750, loss[loss=0.1718, simple_loss=0.2492, pruned_loss=0.04724, over 4904.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2245, pruned_loss=0.04249, over 972764.45 frames.], batch size: 17, lr: 4.33e-04 +2022-05-05 01:40:57,688 INFO [train.py:715] (6/8) Epoch 4, batch 30800, loss[loss=0.1617, simple_loss=0.2322, pruned_loss=0.04561, over 4988.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2251, pruned_loss=0.04286, over 973379.48 frames.], batch size: 28, lr: 4.32e-04 +2022-05-05 01:41:37,515 INFO [train.py:715] (6/8) Epoch 4, batch 30850, loss[loss=0.1563, simple_loss=0.2117, pruned_loss=0.05043, over 4755.00 frames.], tot_loss[loss=0.1553, simple_loss=0.225, pruned_loss=0.04285, over 974138.51 frames.], batch size: 12, lr: 4.32e-04 +2022-05-05 01:42:17,790 INFO [train.py:715] (6/8) Epoch 4, batch 30900, loss[loss=0.1918, simple_loss=0.2612, pruned_loss=0.06117, over 4743.00 frames.], tot_loss[loss=0.1556, simple_loss=0.225, pruned_loss=0.04311, over 973557.87 frames.], batch size: 19, lr: 4.32e-04 +2022-05-05 01:42:57,262 INFO [train.py:715] (6/8) Epoch 4, batch 30950, loss[loss=0.2292, simple_loss=0.2955, pruned_loss=0.08148, over 4744.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2248, pruned_loss=0.04301, over 974316.12 frames.], batch size: 16, lr: 4.32e-04 +2022-05-05 01:43:36,637 INFO [train.py:715] (6/8) Epoch 4, batch 31000, loss[loss=0.138, simple_loss=0.1995, pruned_loss=0.03828, over 4953.00 frames.], tot_loss[loss=0.1553, simple_loss=0.225, pruned_loss=0.04279, over 974362.68 frames.], batch size: 21, lr: 4.32e-04 +2022-05-05 01:44:16,110 INFO [train.py:715] (6/8) Epoch 4, batch 31050, loss[loss=0.1882, simple_loss=0.2547, pruned_loss=0.06088, over 4786.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2262, pruned_loss=0.04322, over 973893.04 frames.], batch size: 18, lr: 4.32e-04 +2022-05-05 01:44:55,521 INFO [train.py:715] (6/8) Epoch 4, batch 31100, loss[loss=0.1647, simple_loss=0.2328, pruned_loss=0.04836, over 4899.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2257, pruned_loss=0.0434, over 972972.11 frames.], batch size: 17, lr: 4.32e-04 +2022-05-05 01:45:35,035 INFO [train.py:715] (6/8) Epoch 4, batch 31150, loss[loss=0.1914, simple_loss=0.2508, pruned_loss=0.06597, over 4948.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2261, pruned_loss=0.04367, over 972231.04 frames.], batch size: 21, lr: 4.32e-04 +2022-05-05 01:46:13,905 INFO [train.py:715] (6/8) Epoch 4, batch 31200, loss[loss=0.169, simple_loss=0.2434, pruned_loss=0.04728, over 4961.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2252, pruned_loss=0.04324, over 973241.66 frames.], batch size: 24, lr: 4.32e-04 +2022-05-05 01:46:53,979 INFO [train.py:715] (6/8) Epoch 4, batch 31250, loss[loss=0.1545, simple_loss=0.2199, pruned_loss=0.04451, over 4807.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2261, pruned_loss=0.0435, over 972995.64 frames.], batch size: 14, lr: 4.32e-04 +2022-05-05 01:47:33,178 INFO [train.py:715] (6/8) Epoch 4, batch 31300, loss[loss=0.1781, simple_loss=0.2448, pruned_loss=0.05572, over 4839.00 frames.], tot_loss[loss=0.1563, simple_loss=0.226, pruned_loss=0.04334, over 971778.92 frames.], batch size: 30, lr: 4.32e-04 +2022-05-05 01:48:12,188 INFO [train.py:715] (6/8) 
Epoch 4, batch 31350, loss[loss=0.1661, simple_loss=0.2329, pruned_loss=0.04969, over 4845.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2257, pruned_loss=0.04293, over 972163.30 frames.], batch size: 30, lr: 4.32e-04 +2022-05-05 01:48:52,073 INFO [train.py:715] (6/8) Epoch 4, batch 31400, loss[loss=0.1279, simple_loss=0.2, pruned_loss=0.02786, over 4993.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2248, pruned_loss=0.04229, over 972447.27 frames.], batch size: 14, lr: 4.32e-04 +2022-05-05 01:49:31,803 INFO [train.py:715] (6/8) Epoch 4, batch 31450, loss[loss=0.1726, simple_loss=0.2384, pruned_loss=0.05338, over 4991.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2245, pruned_loss=0.04241, over 973086.82 frames.], batch size: 26, lr: 4.32e-04 +2022-05-05 01:50:11,371 INFO [train.py:715] (6/8) Epoch 4, batch 31500, loss[loss=0.2152, simple_loss=0.2899, pruned_loss=0.07028, over 4770.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2253, pruned_loss=0.04288, over 972680.94 frames.], batch size: 14, lr: 4.32e-04 +2022-05-05 01:50:51,739 INFO [train.py:715] (6/8) Epoch 4, batch 31550, loss[loss=0.1378, simple_loss=0.2167, pruned_loss=0.02945, over 4816.00 frames.], tot_loss[loss=0.156, simple_loss=0.2262, pruned_loss=0.04289, over 973348.80 frames.], batch size: 15, lr: 4.32e-04 +2022-05-05 01:51:32,269 INFO [train.py:715] (6/8) Epoch 4, batch 31600, loss[loss=0.1345, simple_loss=0.2114, pruned_loss=0.02879, over 4953.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2254, pruned_loss=0.04259, over 972908.73 frames.], batch size: 21, lr: 4.31e-04 +2022-05-05 01:52:11,915 INFO [train.py:715] (6/8) Epoch 4, batch 31650, loss[loss=0.2085, simple_loss=0.2571, pruned_loss=0.07992, over 4870.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2247, pruned_loss=0.04212, over 973488.54 frames.], batch size: 16, lr: 4.31e-04 +2022-05-05 01:52:51,504 INFO [train.py:715] (6/8) Epoch 4, batch 31700, loss[loss=0.1702, simple_loss=0.2502, pruned_loss=0.04512, over 4824.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2242, pruned_loss=0.04183, over 971983.45 frames.], batch size: 27, lr: 4.31e-04 +2022-05-05 01:53:31,555 INFO [train.py:715] (6/8) Epoch 4, batch 31750, loss[loss=0.1804, simple_loss=0.2395, pruned_loss=0.06066, over 4771.00 frames.], tot_loss[loss=0.1547, simple_loss=0.225, pruned_loss=0.04225, over 971804.43 frames.], batch size: 14, lr: 4.31e-04 +2022-05-05 01:54:11,606 INFO [train.py:715] (6/8) Epoch 4, batch 31800, loss[loss=0.172, simple_loss=0.236, pruned_loss=0.05397, over 4781.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2234, pruned_loss=0.04152, over 971076.50 frames.], batch size: 18, lr: 4.31e-04 +2022-05-05 01:54:51,201 INFO [train.py:715] (6/8) Epoch 4, batch 31850, loss[loss=0.1196, simple_loss=0.1882, pruned_loss=0.02547, over 4810.00 frames.], tot_loss[loss=0.153, simple_loss=0.2237, pruned_loss=0.04114, over 971618.42 frames.], batch size: 13, lr: 4.31e-04 +2022-05-05 01:55:30,810 INFO [train.py:715] (6/8) Epoch 4, batch 31900, loss[loss=0.1501, simple_loss=0.2303, pruned_loss=0.03491, over 4982.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2241, pruned_loss=0.04144, over 971693.71 frames.], batch size: 24, lr: 4.31e-04 +2022-05-05 01:56:11,031 INFO [train.py:715] (6/8) Epoch 4, batch 31950, loss[loss=0.168, simple_loss=0.2402, pruned_loss=0.04792, over 4859.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2246, pruned_loss=0.042, over 972373.59 frames.], batch size: 20, lr: 4.31e-04 +2022-05-05 01:56:50,985 INFO [train.py:715] (6/8) Epoch 4, batch 32000, 
loss[loss=0.138, simple_loss=0.1981, pruned_loss=0.03896, over 4754.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2242, pruned_loss=0.04146, over 972706.81 frames.], batch size: 16, lr: 4.31e-04 +2022-05-05 01:57:30,381 INFO [train.py:715] (6/8) Epoch 4, batch 32050, loss[loss=0.1348, simple_loss=0.2021, pruned_loss=0.03369, over 4968.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2227, pruned_loss=0.04097, over 972392.40 frames.], batch size: 15, lr: 4.31e-04 +2022-05-05 01:58:10,945 INFO [train.py:715] (6/8) Epoch 4, batch 32100, loss[loss=0.1746, simple_loss=0.2447, pruned_loss=0.05228, over 4707.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2231, pruned_loss=0.04116, over 972383.30 frames.], batch size: 15, lr: 4.31e-04 +2022-05-05 01:58:50,866 INFO [train.py:715] (6/8) Epoch 4, batch 32150, loss[loss=0.1321, simple_loss=0.1988, pruned_loss=0.03276, over 4907.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2238, pruned_loss=0.04179, over 972552.97 frames.], batch size: 38, lr: 4.31e-04 +2022-05-05 01:59:30,405 INFO [train.py:715] (6/8) Epoch 4, batch 32200, loss[loss=0.1583, simple_loss=0.2264, pruned_loss=0.04509, over 4845.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2242, pruned_loss=0.04207, over 971454.66 frames.], batch size: 34, lr: 4.31e-04 +2022-05-05 02:00:10,363 INFO [train.py:715] (6/8) Epoch 4, batch 32250, loss[loss=0.1432, simple_loss=0.2168, pruned_loss=0.03476, over 4815.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2243, pruned_loss=0.04217, over 971686.77 frames.], batch size: 25, lr: 4.31e-04 +2022-05-05 02:00:51,160 INFO [train.py:715] (6/8) Epoch 4, batch 32300, loss[loss=0.1497, simple_loss=0.215, pruned_loss=0.04216, over 4843.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2248, pruned_loss=0.04293, over 971488.23 frames.], batch size: 32, lr: 4.31e-04 +2022-05-05 02:01:31,945 INFO [train.py:715] (6/8) Epoch 4, batch 32350, loss[loss=0.2176, simple_loss=0.2811, pruned_loss=0.07706, over 4920.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2255, pruned_loss=0.04305, over 971872.97 frames.], batch size: 18, lr: 4.31e-04 +2022-05-05 02:02:12,277 INFO [train.py:715] (6/8) Epoch 4, batch 32400, loss[loss=0.1305, simple_loss=0.2024, pruned_loss=0.02934, over 4960.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2245, pruned_loss=0.04253, over 971764.70 frames.], batch size: 21, lr: 4.30e-04 +2022-05-05 02:02:52,624 INFO [train.py:715] (6/8) Epoch 4, batch 32450, loss[loss=0.1707, simple_loss=0.2371, pruned_loss=0.05215, over 4884.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2247, pruned_loss=0.04272, over 971819.77 frames.], batch size: 16, lr: 4.30e-04 +2022-05-05 02:03:31,866 INFO [train.py:715] (6/8) Epoch 4, batch 32500, loss[loss=0.1385, simple_loss=0.2045, pruned_loss=0.03626, over 4992.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2242, pruned_loss=0.04223, over 972475.69 frames.], batch size: 20, lr: 4.30e-04 +2022-05-05 02:04:11,775 INFO [train.py:715] (6/8) Epoch 4, batch 32550, loss[loss=0.1371, simple_loss=0.2136, pruned_loss=0.03031, over 4767.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2243, pruned_loss=0.04203, over 971609.08 frames.], batch size: 14, lr: 4.30e-04 +2022-05-05 02:04:50,741 INFO [train.py:715] (6/8) Epoch 4, batch 32600, loss[loss=0.1454, simple_loss=0.2242, pruned_loss=0.03329, over 4987.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2241, pruned_loss=0.04166, over 973244.73 frames.], batch size: 25, lr: 4.30e-04 +2022-05-05 02:05:30,806 INFO [train.py:715] (6/8) Epoch 4, batch 32650, loss[loss=0.1511, 
simple_loss=0.2192, pruned_loss=0.04152, over 4856.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2244, pruned_loss=0.04219, over 973881.82 frames.], batch size: 20, lr: 4.30e-04 +2022-05-05 02:06:09,917 INFO [train.py:715] (6/8) Epoch 4, batch 32700, loss[loss=0.1659, simple_loss=0.2285, pruned_loss=0.05165, over 4949.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2246, pruned_loss=0.04224, over 973216.01 frames.], batch size: 35, lr: 4.30e-04 +2022-05-05 02:06:49,546 INFO [train.py:715] (6/8) Epoch 4, batch 32750, loss[loss=0.1392, simple_loss=0.2039, pruned_loss=0.03729, over 4847.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2245, pruned_loss=0.04212, over 973996.17 frames.], batch size: 30, lr: 4.30e-04 +2022-05-05 02:07:29,261 INFO [train.py:715] (6/8) Epoch 4, batch 32800, loss[loss=0.1408, simple_loss=0.2094, pruned_loss=0.0361, over 4843.00 frames.], tot_loss[loss=0.1545, simple_loss=0.225, pruned_loss=0.04204, over 973293.52 frames.], batch size: 32, lr: 4.30e-04 +2022-05-05 02:08:09,358 INFO [train.py:715] (6/8) Epoch 4, batch 32850, loss[loss=0.1619, simple_loss=0.2411, pruned_loss=0.04132, over 4785.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2244, pruned_loss=0.04169, over 973812.38 frames.], batch size: 18, lr: 4.30e-04 +2022-05-05 02:08:49,852 INFO [train.py:715] (6/8) Epoch 4, batch 32900, loss[loss=0.1905, simple_loss=0.2515, pruned_loss=0.06474, over 4982.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2246, pruned_loss=0.04203, over 973484.82 frames.], batch size: 35, lr: 4.30e-04 +2022-05-05 02:09:30,079 INFO [train.py:715] (6/8) Epoch 4, batch 32950, loss[loss=0.1416, simple_loss=0.2083, pruned_loss=0.03746, over 4836.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2249, pruned_loss=0.04249, over 974034.30 frames.], batch size: 32, lr: 4.30e-04 +2022-05-05 02:10:10,325 INFO [train.py:715] (6/8) Epoch 4, batch 33000, loss[loss=0.1576, simple_loss=0.2271, pruned_loss=0.0441, over 4881.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2252, pruned_loss=0.04291, over 974488.31 frames.], batch size: 22, lr: 4.30e-04 +2022-05-05 02:10:10,326 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 02:10:20,091 INFO [train.py:742] (6/8) Epoch 4, validation: loss=0.1115, simple_loss=0.197, pruned_loss=0.01298, over 914524.00 frames. 
+2022-05-05 02:11:00,301 INFO [train.py:715] (6/8) Epoch 4, batch 33050, loss[loss=0.1508, simple_loss=0.2287, pruned_loss=0.03645, over 4898.00 frames.], tot_loss[loss=0.156, simple_loss=0.2259, pruned_loss=0.04308, over 973935.91 frames.], batch size: 19, lr: 4.30e-04 +2022-05-05 02:11:40,022 INFO [train.py:715] (6/8) Epoch 4, batch 33100, loss[loss=0.151, simple_loss=0.2166, pruned_loss=0.04272, over 4754.00 frames.], tot_loss[loss=0.156, simple_loss=0.2256, pruned_loss=0.04324, over 974001.67 frames.], batch size: 19, lr: 4.30e-04 +2022-05-05 02:12:20,029 INFO [train.py:715] (6/8) Epoch 4, batch 33150, loss[loss=0.1778, simple_loss=0.2341, pruned_loss=0.06069, over 4795.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2261, pruned_loss=0.04376, over 973476.65 frames.], batch size: 14, lr: 4.30e-04 +2022-05-05 02:13:00,226 INFO [train.py:715] (6/8) Epoch 4, batch 33200, loss[loss=0.1632, simple_loss=0.2318, pruned_loss=0.04728, over 4962.00 frames.], tot_loss[loss=0.1557, simple_loss=0.225, pruned_loss=0.04321, over 973083.16 frames.], batch size: 24, lr: 4.29e-04 +2022-05-05 02:13:40,205 INFO [train.py:715] (6/8) Epoch 4, batch 33250, loss[loss=0.1463, simple_loss=0.2119, pruned_loss=0.04038, over 4978.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2254, pruned_loss=0.04289, over 973357.84 frames.], batch size: 15, lr: 4.29e-04 +2022-05-05 02:14:20,218 INFO [train.py:715] (6/8) Epoch 4, batch 33300, loss[loss=0.1992, simple_loss=0.2644, pruned_loss=0.06695, over 4929.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2268, pruned_loss=0.04344, over 973290.86 frames.], batch size: 18, lr: 4.29e-04 +2022-05-05 02:14:59,208 INFO [train.py:715] (6/8) Epoch 4, batch 33350, loss[loss=0.1375, simple_loss=0.2226, pruned_loss=0.02626, over 4824.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2268, pruned_loss=0.0432, over 973440.92 frames.], batch size: 27, lr: 4.29e-04 +2022-05-05 02:15:38,984 INFO [train.py:715] (6/8) Epoch 4, batch 33400, loss[loss=0.1527, simple_loss=0.2331, pruned_loss=0.03613, over 4960.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2261, pruned_loss=0.04257, over 973470.38 frames.], batch size: 24, lr: 4.29e-04 +2022-05-05 02:16:18,846 INFO [train.py:715] (6/8) Epoch 4, batch 33450, loss[loss=0.1373, simple_loss=0.215, pruned_loss=0.02977, over 4898.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2255, pruned_loss=0.04259, over 972784.45 frames.], batch size: 19, lr: 4.29e-04 +2022-05-05 02:16:58,393 INFO [train.py:715] (6/8) Epoch 4, batch 33500, loss[loss=0.1671, simple_loss=0.2303, pruned_loss=0.05193, over 4692.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2255, pruned_loss=0.04308, over 971853.45 frames.], batch size: 15, lr: 4.29e-04 +2022-05-05 02:17:38,205 INFO [train.py:715] (6/8) Epoch 4, batch 33550, loss[loss=0.1657, simple_loss=0.2392, pruned_loss=0.04608, over 4921.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2257, pruned_loss=0.04294, over 971906.76 frames.], batch size: 39, lr: 4.29e-04 +2022-05-05 02:18:17,698 INFO [train.py:715] (6/8) Epoch 4, batch 33600, loss[loss=0.168, simple_loss=0.2245, pruned_loss=0.05579, over 4832.00 frames.], tot_loss[loss=0.1561, simple_loss=0.226, pruned_loss=0.04311, over 971936.62 frames.], batch size: 12, lr: 4.29e-04 +2022-05-05 02:18:57,440 INFO [train.py:715] (6/8) Epoch 4, batch 33650, loss[loss=0.1268, simple_loss=0.1963, pruned_loss=0.02866, over 4828.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2258, pruned_loss=0.04273, over 973216.74 frames.], batch size: 12, lr: 4.29e-04 +2022-05-05 02:19:36,831 
INFO [train.py:715] (6/8) Epoch 4, batch 33700, loss[loss=0.1286, simple_loss=0.2073, pruned_loss=0.02495, over 4849.00 frames.], tot_loss[loss=0.1556, simple_loss=0.226, pruned_loss=0.04257, over 973477.01 frames.], batch size: 20, lr: 4.29e-04 +2022-05-05 02:20:16,629 INFO [train.py:715] (6/8) Epoch 4, batch 33750, loss[loss=0.1513, simple_loss=0.2279, pruned_loss=0.03735, over 4945.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2256, pruned_loss=0.04253, over 973116.47 frames.], batch size: 21, lr: 4.29e-04 +2022-05-05 02:20:56,486 INFO [train.py:715] (6/8) Epoch 4, batch 33800, loss[loss=0.1305, simple_loss=0.1993, pruned_loss=0.03084, over 4918.00 frames.], tot_loss[loss=0.155, simple_loss=0.2254, pruned_loss=0.04227, over 973287.75 frames.], batch size: 19, lr: 4.29e-04 +2022-05-05 02:21:35,975 INFO [train.py:715] (6/8) Epoch 4, batch 33850, loss[loss=0.1204, simple_loss=0.1947, pruned_loss=0.02301, over 4696.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2257, pruned_loss=0.04263, over 972886.17 frames.], batch size: 15, lr: 4.29e-04 +2022-05-05 02:22:15,610 INFO [train.py:715] (6/8) Epoch 4, batch 33900, loss[loss=0.1765, simple_loss=0.2421, pruned_loss=0.05539, over 4842.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2253, pruned_loss=0.04249, over 973510.04 frames.], batch size: 15, lr: 4.29e-04 +2022-05-05 02:22:55,362 INFO [train.py:715] (6/8) Epoch 4, batch 33950, loss[loss=0.1341, simple_loss=0.2085, pruned_loss=0.02979, over 4935.00 frames.], tot_loss[loss=0.156, simple_loss=0.2263, pruned_loss=0.04284, over 973347.51 frames.], batch size: 29, lr: 4.29e-04 +2022-05-05 02:23:35,328 INFO [train.py:715] (6/8) Epoch 4, batch 34000, loss[loss=0.1341, simple_loss=0.2089, pruned_loss=0.02965, over 4816.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2257, pruned_loss=0.04227, over 972800.16 frames.], batch size: 26, lr: 4.28e-04 +2022-05-05 02:24:14,853 INFO [train.py:715] (6/8) Epoch 4, batch 34050, loss[loss=0.1643, simple_loss=0.2395, pruned_loss=0.04451, over 4875.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2259, pruned_loss=0.04219, over 972898.50 frames.], batch size: 39, lr: 4.28e-04 +2022-05-05 02:24:54,573 INFO [train.py:715] (6/8) Epoch 4, batch 34100, loss[loss=0.1465, simple_loss=0.2317, pruned_loss=0.03063, over 4755.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2264, pruned_loss=0.04242, over 973081.98 frames.], batch size: 19, lr: 4.28e-04 +2022-05-05 02:25:34,634 INFO [train.py:715] (6/8) Epoch 4, batch 34150, loss[loss=0.1432, simple_loss=0.2159, pruned_loss=0.03518, over 4766.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2253, pruned_loss=0.0418, over 972556.01 frames.], batch size: 18, lr: 4.28e-04 +2022-05-05 02:26:13,484 INFO [train.py:715] (6/8) Epoch 4, batch 34200, loss[loss=0.1369, simple_loss=0.2069, pruned_loss=0.03346, over 4840.00 frames.], tot_loss[loss=0.155, simple_loss=0.2257, pruned_loss=0.04212, over 972537.66 frames.], batch size: 20, lr: 4.28e-04 +2022-05-05 02:26:54,318 INFO [train.py:715] (6/8) Epoch 4, batch 34250, loss[loss=0.2056, simple_loss=0.2558, pruned_loss=0.07767, over 4793.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2261, pruned_loss=0.04233, over 972906.41 frames.], batch size: 17, lr: 4.28e-04 +2022-05-05 02:27:34,192 INFO [train.py:715] (6/8) Epoch 4, batch 34300, loss[loss=0.1864, simple_loss=0.2442, pruned_loss=0.06435, over 4982.00 frames.], tot_loss[loss=0.155, simple_loss=0.2256, pruned_loss=0.04224, over 972679.31 frames.], batch size: 35, lr: 4.28e-04 +2022-05-05 02:28:13,943 INFO [train.py:715] 
(6/8) Epoch 4, batch 34350, loss[loss=0.144, simple_loss=0.2247, pruned_loss=0.03162, over 4897.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2255, pruned_loss=0.04255, over 973252.10 frames.], batch size: 19, lr: 4.28e-04 +2022-05-05 02:28:53,978 INFO [train.py:715] (6/8) Epoch 4, batch 34400, loss[loss=0.1916, simple_loss=0.2592, pruned_loss=0.06206, over 4874.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2252, pruned_loss=0.0427, over 972615.37 frames.], batch size: 38, lr: 4.28e-04 +2022-05-05 02:29:33,808 INFO [train.py:715] (6/8) Epoch 4, batch 34450, loss[loss=0.1388, simple_loss=0.2177, pruned_loss=0.0299, over 4812.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2258, pruned_loss=0.043, over 971862.08 frames.], batch size: 25, lr: 4.28e-04 +2022-05-05 02:30:14,474 INFO [train.py:715] (6/8) Epoch 4, batch 34500, loss[loss=0.1578, simple_loss=0.2363, pruned_loss=0.03965, over 4782.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2269, pruned_loss=0.04372, over 972121.43 frames.], batch size: 18, lr: 4.28e-04 +2022-05-05 02:30:53,317 INFO [train.py:715] (6/8) Epoch 4, batch 34550, loss[loss=0.1251, simple_loss=0.2017, pruned_loss=0.02429, over 4960.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2263, pruned_loss=0.04327, over 972447.44 frames.], batch size: 24, lr: 4.28e-04 +2022-05-05 02:31:33,263 INFO [train.py:715] (6/8) Epoch 4, batch 34600, loss[loss=0.1761, simple_loss=0.2388, pruned_loss=0.0567, over 4915.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2262, pruned_loss=0.04342, over 971389.11 frames.], batch size: 29, lr: 4.28e-04 +2022-05-05 02:32:13,238 INFO [train.py:715] (6/8) Epoch 4, batch 34650, loss[loss=0.1688, simple_loss=0.2362, pruned_loss=0.05077, over 4958.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2261, pruned_loss=0.0437, over 971677.64 frames.], batch size: 39, lr: 4.28e-04 +2022-05-05 02:32:52,592 INFO [train.py:715] (6/8) Epoch 4, batch 34700, loss[loss=0.1718, simple_loss=0.2446, pruned_loss=0.04955, over 4979.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2258, pruned_loss=0.04367, over 971726.28 frames.], batch size: 25, lr: 4.28e-04 +2022-05-05 02:33:30,873 INFO [train.py:715] (6/8) Epoch 4, batch 34750, loss[loss=0.1269, simple_loss=0.2013, pruned_loss=0.02628, over 4918.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2253, pruned_loss=0.04303, over 972462.16 frames.], batch size: 29, lr: 4.28e-04 +2022-05-05 02:34:07,935 INFO [train.py:715] (6/8) Epoch 4, batch 34800, loss[loss=0.1961, simple_loss=0.2733, pruned_loss=0.05948, over 4888.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2265, pruned_loss=0.04336, over 974193.16 frames.], batch size: 19, lr: 4.27e-04 +2022-05-05 02:34:57,767 INFO [train.py:715] (6/8) Epoch 5, batch 0, loss[loss=0.1569, simple_loss=0.2187, pruned_loss=0.0475, over 4778.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2187, pruned_loss=0.0475, over 4778.00 frames.], batch size: 14, lr: 4.02e-04 +2022-05-05 02:35:38,101 INFO [train.py:715] (6/8) Epoch 5, batch 50, loss[loss=0.2041, simple_loss=0.2626, pruned_loss=0.07283, over 4773.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2243, pruned_loss=0.04364, over 218639.75 frames.], batch size: 14, lr: 4.02e-04 +2022-05-05 02:36:17,800 INFO [train.py:715] (6/8) Epoch 5, batch 100, loss[loss=0.1442, simple_loss=0.2248, pruned_loss=0.03183, over 4965.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2226, pruned_loss=0.04238, over 385721.19 frames.], batch size: 35, lr: 4.02e-04 +2022-05-05 02:36:57,768 INFO [train.py:715] (6/8) Epoch 5, batch 150, 
loss[loss=0.1576, simple_loss=0.2347, pruned_loss=0.04026, over 4989.00 frames.], tot_loss[loss=0.155, simple_loss=0.2241, pruned_loss=0.04289, over 516976.40 frames.], batch size: 28, lr: 4.02e-04 +2022-05-05 02:37:38,289 INFO [train.py:715] (6/8) Epoch 5, batch 200, loss[loss=0.1347, simple_loss=0.2101, pruned_loss=0.02959, over 4821.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2241, pruned_loss=0.04255, over 618403.07 frames.], batch size: 26, lr: 4.02e-04 +2022-05-05 02:38:17,741 INFO [train.py:715] (6/8) Epoch 5, batch 250, loss[loss=0.1452, simple_loss=0.2161, pruned_loss=0.03711, over 4924.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2244, pruned_loss=0.04193, over 696892.89 frames.], batch size: 21, lr: 4.02e-04 +2022-05-05 02:38:57,163 INFO [train.py:715] (6/8) Epoch 5, batch 300, loss[loss=0.1543, simple_loss=0.226, pruned_loss=0.04124, over 4988.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2254, pruned_loss=0.04289, over 758082.77 frames.], batch size: 31, lr: 4.01e-04 +2022-05-05 02:39:36,895 INFO [train.py:715] (6/8) Epoch 5, batch 350, loss[loss=0.167, simple_loss=0.2366, pruned_loss=0.04866, over 4780.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2251, pruned_loss=0.04254, over 805762.54 frames.], batch size: 18, lr: 4.01e-04 +2022-05-05 02:40:16,658 INFO [train.py:715] (6/8) Epoch 5, batch 400, loss[loss=0.1937, simple_loss=0.2587, pruned_loss=0.06434, over 4734.00 frames.], tot_loss[loss=0.155, simple_loss=0.2247, pruned_loss=0.04261, over 842899.67 frames.], batch size: 16, lr: 4.01e-04 +2022-05-05 02:40:56,048 INFO [train.py:715] (6/8) Epoch 5, batch 450, loss[loss=0.1446, simple_loss=0.2132, pruned_loss=0.03802, over 4751.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2244, pruned_loss=0.04234, over 871706.30 frames.], batch size: 16, lr: 4.01e-04 +2022-05-05 02:41:35,800 INFO [train.py:715] (6/8) Epoch 5, batch 500, loss[loss=0.1309, simple_loss=0.2097, pruned_loss=0.02601, over 4794.00 frames.], tot_loss[loss=0.1543, simple_loss=0.224, pruned_loss=0.0423, over 893756.98 frames.], batch size: 18, lr: 4.01e-04 +2022-05-05 02:42:15,656 INFO [train.py:715] (6/8) Epoch 5, batch 550, loss[loss=0.1522, simple_loss=0.2233, pruned_loss=0.0406, over 4976.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2238, pruned_loss=0.04193, over 911103.80 frames.], batch size: 28, lr: 4.01e-04 +2022-05-05 02:42:54,762 INFO [train.py:715] (6/8) Epoch 5, batch 600, loss[loss=0.1731, simple_loss=0.2311, pruned_loss=0.05755, over 4940.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2245, pruned_loss=0.0425, over 924240.22 frames.], batch size: 23, lr: 4.01e-04 +2022-05-05 02:43:34,144 INFO [train.py:715] (6/8) Epoch 5, batch 650, loss[loss=0.1413, simple_loss=0.2118, pruned_loss=0.03539, over 4828.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2235, pruned_loss=0.04185, over 935902.24 frames.], batch size: 26, lr: 4.01e-04 +2022-05-05 02:44:13,849 INFO [train.py:715] (6/8) Epoch 5, batch 700, loss[loss=0.1326, simple_loss=0.2077, pruned_loss=0.02874, over 4985.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2242, pruned_loss=0.04266, over 944241.85 frames.], batch size: 28, lr: 4.01e-04 +2022-05-05 02:44:53,909 INFO [train.py:715] (6/8) Epoch 5, batch 750, loss[loss=0.1789, simple_loss=0.2599, pruned_loss=0.04893, over 4757.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2253, pruned_loss=0.0429, over 950438.34 frames.], batch size: 14, lr: 4.01e-04 +2022-05-05 02:45:33,283 INFO [train.py:715] (6/8) Epoch 5, batch 800, loss[loss=0.1705, simple_loss=0.2427, 
pruned_loss=0.04917, over 4806.00 frames.], tot_loss[loss=0.156, simple_loss=0.2259, pruned_loss=0.04304, over 954546.75 frames.], batch size: 21, lr: 4.01e-04 +2022-05-05 02:46:12,790 INFO [train.py:715] (6/8) Epoch 5, batch 850, loss[loss=0.1756, simple_loss=0.2514, pruned_loss=0.04988, over 4765.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2248, pruned_loss=0.0425, over 958607.47 frames.], batch size: 16, lr: 4.01e-04 +2022-05-05 02:46:52,359 INFO [train.py:715] (6/8) Epoch 5, batch 900, loss[loss=0.1366, simple_loss=0.2154, pruned_loss=0.02897, over 4786.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2239, pruned_loss=0.04188, over 961328.99 frames.], batch size: 18, lr: 4.01e-04 +2022-05-05 02:47:31,846 INFO [train.py:715] (6/8) Epoch 5, batch 950, loss[loss=0.1252, simple_loss=0.1953, pruned_loss=0.02758, over 4892.00 frames.], tot_loss[loss=0.153, simple_loss=0.2232, pruned_loss=0.04141, over 963818.58 frames.], batch size: 17, lr: 4.01e-04 +2022-05-05 02:48:11,354 INFO [train.py:715] (6/8) Epoch 5, batch 1000, loss[loss=0.1608, simple_loss=0.2268, pruned_loss=0.04742, over 4827.00 frames.], tot_loss[loss=0.1528, simple_loss=0.223, pruned_loss=0.04129, over 965190.44 frames.], batch size: 15, lr: 4.01e-04 +2022-05-05 02:48:50,619 INFO [train.py:715] (6/8) Epoch 5, batch 1050, loss[loss=0.1631, simple_loss=0.2443, pruned_loss=0.04099, over 4959.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2228, pruned_loss=0.04141, over 966584.12 frames.], batch size: 21, lr: 4.01e-04 +2022-05-05 02:49:30,327 INFO [train.py:715] (6/8) Epoch 5, batch 1100, loss[loss=0.2049, simple_loss=0.2718, pruned_loss=0.06897, over 4893.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2244, pruned_loss=0.04238, over 968038.36 frames.], batch size: 17, lr: 4.01e-04 +2022-05-05 02:50:09,333 INFO [train.py:715] (6/8) Epoch 5, batch 1150, loss[loss=0.1536, simple_loss=0.2146, pruned_loss=0.04633, over 4848.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2246, pruned_loss=0.04239, over 968826.77 frames.], batch size: 30, lr: 4.00e-04 +2022-05-05 02:50:49,093 INFO [train.py:715] (6/8) Epoch 5, batch 1200, loss[loss=0.1172, simple_loss=0.1828, pruned_loss=0.02583, over 4816.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2241, pruned_loss=0.04186, over 968796.21 frames.], batch size: 21, lr: 4.00e-04 +2022-05-05 02:51:29,246 INFO [train.py:715] (6/8) Epoch 5, batch 1250, loss[loss=0.1311, simple_loss=0.2041, pruned_loss=0.02907, over 4798.00 frames.], tot_loss[loss=0.154, simple_loss=0.2244, pruned_loss=0.04182, over 968460.59 frames.], batch size: 21, lr: 4.00e-04 +2022-05-05 02:52:08,412 INFO [train.py:715] (6/8) Epoch 5, batch 1300, loss[loss=0.1745, simple_loss=0.2479, pruned_loss=0.05058, over 4776.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2246, pruned_loss=0.04242, over 968608.63 frames.], batch size: 17, lr: 4.00e-04 +2022-05-05 02:52:48,195 INFO [train.py:715] (6/8) Epoch 5, batch 1350, loss[loss=0.159, simple_loss=0.227, pruned_loss=0.04545, over 4796.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2244, pruned_loss=0.04233, over 968791.57 frames.], batch size: 21, lr: 4.00e-04 +2022-05-05 02:53:27,488 INFO [train.py:715] (6/8) Epoch 5, batch 1400, loss[loss=0.14, simple_loss=0.2027, pruned_loss=0.0387, over 4978.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2253, pruned_loss=0.04271, over 970416.45 frames.], batch size: 14, lr: 4.00e-04 +2022-05-05 02:54:07,303 INFO [train.py:715] (6/8) Epoch 5, batch 1450, loss[loss=0.1564, simple_loss=0.2304, pruned_loss=0.0412, over 4979.00 frames.], 
tot_loss[loss=0.1544, simple_loss=0.2246, pruned_loss=0.04212, over 971295.03 frames.], batch size: 28, lr: 4.00e-04 +2022-05-05 02:54:46,731 INFO [train.py:715] (6/8) Epoch 5, batch 1500, loss[loss=0.1405, simple_loss=0.2171, pruned_loss=0.03198, over 4894.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2243, pruned_loss=0.04178, over 971903.48 frames.], batch size: 16, lr: 4.00e-04 +2022-05-05 02:55:25,725 INFO [train.py:715] (6/8) Epoch 5, batch 1550, loss[loss=0.1728, simple_loss=0.2384, pruned_loss=0.05355, over 4866.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2246, pruned_loss=0.04234, over 972648.60 frames.], batch size: 32, lr: 4.00e-04 +2022-05-05 02:56:05,366 INFO [train.py:715] (6/8) Epoch 5, batch 1600, loss[loss=0.1178, simple_loss=0.1857, pruned_loss=0.02499, over 4715.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2246, pruned_loss=0.04225, over 972629.04 frames.], batch size: 15, lr: 4.00e-04 +2022-05-05 02:56:45,705 INFO [train.py:715] (6/8) Epoch 5, batch 1650, loss[loss=0.1606, simple_loss=0.2272, pruned_loss=0.04702, over 4886.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2249, pruned_loss=0.04247, over 972439.22 frames.], batch size: 16, lr: 4.00e-04 +2022-05-05 02:57:24,647 INFO [train.py:715] (6/8) Epoch 5, batch 1700, loss[loss=0.138, simple_loss=0.2079, pruned_loss=0.03401, over 4908.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2246, pruned_loss=0.04255, over 972670.62 frames.], batch size: 29, lr: 4.00e-04 +2022-05-05 02:58:05,304 INFO [train.py:715] (6/8) Epoch 5, batch 1750, loss[loss=0.1557, simple_loss=0.2372, pruned_loss=0.03707, over 4776.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2244, pruned_loss=0.04226, over 972201.47 frames.], batch size: 14, lr: 4.00e-04 +2022-05-05 02:58:45,441 INFO [train.py:715] (6/8) Epoch 5, batch 1800, loss[loss=0.1102, simple_loss=0.1864, pruned_loss=0.01705, over 4933.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2248, pruned_loss=0.04224, over 972924.55 frames.], batch size: 23, lr: 4.00e-04 +2022-05-05 02:59:25,897 INFO [train.py:715] (6/8) Epoch 5, batch 1850, loss[loss=0.1527, simple_loss=0.227, pruned_loss=0.03923, over 4763.00 frames.], tot_loss[loss=0.155, simple_loss=0.2253, pruned_loss=0.04235, over 972376.70 frames.], batch size: 14, lr: 4.00e-04 +2022-05-05 03:00:06,313 INFO [train.py:715] (6/8) Epoch 5, batch 1900, loss[loss=0.1485, simple_loss=0.2054, pruned_loss=0.0458, over 4848.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2255, pruned_loss=0.04263, over 972913.80 frames.], batch size: 13, lr: 4.00e-04 +2022-05-05 03:00:46,052 INFO [train.py:715] (6/8) Epoch 5, batch 1950, loss[loss=0.1904, simple_loss=0.2628, pruned_loss=0.05906, over 4791.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2261, pruned_loss=0.04347, over 972317.47 frames.], batch size: 17, lr: 4.00e-04 +2022-05-05 03:01:29,140 INFO [train.py:715] (6/8) Epoch 5, batch 2000, loss[loss=0.188, simple_loss=0.2625, pruned_loss=0.05675, over 4810.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2256, pruned_loss=0.04312, over 972161.75 frames.], batch size: 21, lr: 4.00e-04 +2022-05-05 03:02:09,175 INFO [train.py:715] (6/8) Epoch 5, batch 2050, loss[loss=0.1439, simple_loss=0.2278, pruned_loss=0.02998, over 4901.00 frames.], tot_loss[loss=0.156, simple_loss=0.2259, pruned_loss=0.04301, over 971628.31 frames.], batch size: 19, lr: 3.99e-04 +2022-05-05 03:02:49,515 INFO [train.py:715] (6/8) Epoch 5, batch 2100, loss[loss=0.1518, simple_loss=0.2296, pruned_loss=0.03702, over 4931.00 frames.], tot_loss[loss=0.1555, 
simple_loss=0.2255, pruned_loss=0.04274, over 971664.62 frames.], batch size: 21, lr: 3.99e-04 +2022-05-05 03:03:30,095 INFO [train.py:715] (6/8) Epoch 5, batch 2150, loss[loss=0.1163, simple_loss=0.1913, pruned_loss=0.02064, over 4928.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2251, pruned_loss=0.04242, over 972352.65 frames.], batch size: 23, lr: 3.99e-04 +2022-05-05 03:04:09,684 INFO [train.py:715] (6/8) Epoch 5, batch 2200, loss[loss=0.1427, simple_loss=0.2021, pruned_loss=0.0416, over 4972.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2254, pruned_loss=0.04261, over 972729.24 frames.], batch size: 14, lr: 3.99e-04 +2022-05-05 03:04:50,060 INFO [train.py:715] (6/8) Epoch 5, batch 2250, loss[loss=0.1517, simple_loss=0.2319, pruned_loss=0.03575, over 4938.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2255, pruned_loss=0.04251, over 973281.15 frames.], batch size: 35, lr: 3.99e-04 +2022-05-05 03:05:30,775 INFO [train.py:715] (6/8) Epoch 5, batch 2300, loss[loss=0.1526, simple_loss=0.2181, pruned_loss=0.04355, over 4979.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2253, pruned_loss=0.04255, over 972072.96 frames.], batch size: 15, lr: 3.99e-04 +2022-05-05 03:06:10,989 INFO [train.py:715] (6/8) Epoch 5, batch 2350, loss[loss=0.1737, simple_loss=0.2448, pruned_loss=0.05126, over 4767.00 frames.], tot_loss[loss=0.1557, simple_loss=0.226, pruned_loss=0.0427, over 971729.87 frames.], batch size: 19, lr: 3.99e-04 +2022-05-05 03:06:51,193 INFO [train.py:715] (6/8) Epoch 5, batch 2400, loss[loss=0.1516, simple_loss=0.2243, pruned_loss=0.03948, over 4742.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2256, pruned_loss=0.04252, over 971953.80 frames.], batch size: 16, lr: 3.99e-04 +2022-05-05 03:07:31,732 INFO [train.py:715] (6/8) Epoch 5, batch 2450, loss[loss=0.1687, simple_loss=0.2478, pruned_loss=0.04479, over 4875.00 frames.], tot_loss[loss=0.155, simple_loss=0.2254, pruned_loss=0.04232, over 972406.65 frames.], batch size: 32, lr: 3.99e-04 +2022-05-05 03:08:12,405 INFO [train.py:715] (6/8) Epoch 5, batch 2500, loss[loss=0.1456, simple_loss=0.2143, pruned_loss=0.03851, over 4884.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2239, pruned_loss=0.04158, over 972332.27 frames.], batch size: 22, lr: 3.99e-04 +2022-05-05 03:08:52,449 INFO [train.py:715] (6/8) Epoch 5, batch 2550, loss[loss=0.197, simple_loss=0.2537, pruned_loss=0.07014, over 4861.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2249, pruned_loss=0.04184, over 972579.54 frames.], batch size: 32, lr: 3.99e-04 +2022-05-05 03:09:33,389 INFO [train.py:715] (6/8) Epoch 5, batch 2600, loss[loss=0.1731, simple_loss=0.2328, pruned_loss=0.05666, over 4835.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2258, pruned_loss=0.04232, over 972082.74 frames.], batch size: 30, lr: 3.99e-04 +2022-05-05 03:10:13,556 INFO [train.py:715] (6/8) Epoch 5, batch 2650, loss[loss=0.1684, simple_loss=0.2393, pruned_loss=0.04869, over 4743.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2253, pruned_loss=0.04215, over 971916.07 frames.], batch size: 19, lr: 3.99e-04 +2022-05-05 03:10:54,131 INFO [train.py:715] (6/8) Epoch 5, batch 2700, loss[loss=0.1503, simple_loss=0.2159, pruned_loss=0.04236, over 4856.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2255, pruned_loss=0.0424, over 971847.06 frames.], batch size: 32, lr: 3.99e-04 +2022-05-05 03:11:34,322 INFO [train.py:715] (6/8) Epoch 5, batch 2750, loss[loss=0.1365, simple_loss=0.2117, pruned_loss=0.03065, over 4876.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2249, 
pruned_loss=0.04146, over 971568.65 frames.], batch size: 22, lr: 3.99e-04 +2022-05-05 03:12:14,290 INFO [train.py:715] (6/8) Epoch 5, batch 2800, loss[loss=0.1465, simple_loss=0.2141, pruned_loss=0.03943, over 4653.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2245, pruned_loss=0.0416, over 971631.05 frames.], batch size: 13, lr: 3.99e-04 +2022-05-05 03:12:54,881 INFO [train.py:715] (6/8) Epoch 5, batch 2850, loss[loss=0.1325, simple_loss=0.1951, pruned_loss=0.03498, over 4821.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2248, pruned_loss=0.0419, over 971660.46 frames.], batch size: 25, lr: 3.99e-04 +2022-05-05 03:13:35,009 INFO [train.py:715] (6/8) Epoch 5, batch 2900, loss[loss=0.1216, simple_loss=0.203, pruned_loss=0.02008, over 4750.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2238, pruned_loss=0.04165, over 972062.31 frames.], batch size: 12, lr: 3.99e-04 +2022-05-05 03:14:15,392 INFO [train.py:715] (6/8) Epoch 5, batch 2950, loss[loss=0.1652, simple_loss=0.2338, pruned_loss=0.0483, over 4870.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2225, pruned_loss=0.04101, over 971488.13 frames.], batch size: 16, lr: 3.98e-04 +2022-05-05 03:14:54,471 INFO [train.py:715] (6/8) Epoch 5, batch 3000, loss[loss=0.1306, simple_loss=0.198, pruned_loss=0.03164, over 4765.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2227, pruned_loss=0.04104, over 971796.82 frames.], batch size: 14, lr: 3.98e-04 +2022-05-05 03:14:54,472 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 03:15:03,921 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.1108, simple_loss=0.1962, pruned_loss=0.01274, over 914524.00 frames. +2022-05-05 03:15:42,396 INFO [train.py:715] (6/8) Epoch 5, batch 3050, loss[loss=0.1344, simple_loss=0.2026, pruned_loss=0.03307, over 4920.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2235, pruned_loss=0.04179, over 971967.45 frames.], batch size: 23, lr: 3.98e-04 +2022-05-05 03:16:21,554 INFO [train.py:715] (6/8) Epoch 5, batch 3100, loss[loss=0.171, simple_loss=0.2283, pruned_loss=0.05684, over 4845.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2241, pruned_loss=0.04176, over 971895.05 frames.], batch size: 20, lr: 3.98e-04 +2022-05-05 03:17:00,520 INFO [train.py:715] (6/8) Epoch 5, batch 3150, loss[loss=0.1595, simple_loss=0.2407, pruned_loss=0.03914, over 4987.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2239, pruned_loss=0.04145, over 972195.31 frames.], batch size: 14, lr: 3.98e-04 +2022-05-05 03:17:40,039 INFO [train.py:715] (6/8) Epoch 5, batch 3200, loss[loss=0.1311, simple_loss=0.2094, pruned_loss=0.02641, over 4889.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2246, pruned_loss=0.04149, over 972867.30 frames.], batch size: 22, lr: 3.98e-04 +2022-05-05 03:18:19,747 INFO [train.py:715] (6/8) Epoch 5, batch 3250, loss[loss=0.1323, simple_loss=0.2094, pruned_loss=0.02761, over 4923.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2242, pruned_loss=0.04111, over 972816.72 frames.], batch size: 29, lr: 3.98e-04 +2022-05-05 03:18:58,960 INFO [train.py:715] (6/8) Epoch 5, batch 3300, loss[loss=0.1898, simple_loss=0.2654, pruned_loss=0.05712, over 4893.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2242, pruned_loss=0.04112, over 972889.92 frames.], batch size: 19, lr: 3.98e-04 +2022-05-05 03:19:38,242 INFO [train.py:715] (6/8) Epoch 5, batch 3350, loss[loss=0.158, simple_loss=0.2288, pruned_loss=0.04359, over 4908.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2239, pruned_loss=0.04146, over 973220.21 frames.], batch size: 23, lr: 3.98e-04 
+2022-05-05 03:20:17,973 INFO [train.py:715] (6/8) Epoch 5, batch 3400, loss[loss=0.164, simple_loss=0.2376, pruned_loss=0.04517, over 4894.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2234, pruned_loss=0.04118, over 973762.57 frames.], batch size: 22, lr: 3.98e-04 +2022-05-05 03:20:57,514 INFO [train.py:715] (6/8) Epoch 5, batch 3450, loss[loss=0.2569, simple_loss=0.2912, pruned_loss=0.1113, over 4776.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2245, pruned_loss=0.04168, over 971759.48 frames.], batch size: 19, lr: 3.98e-04 +2022-05-05 03:21:36,806 INFO [train.py:715] (6/8) Epoch 5, batch 3500, loss[loss=0.151, simple_loss=0.223, pruned_loss=0.03949, over 4801.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2246, pruned_loss=0.04199, over 972918.28 frames.], batch size: 24, lr: 3.98e-04 +2022-05-05 03:22:16,033 INFO [train.py:715] (6/8) Epoch 5, batch 3550, loss[loss=0.1514, simple_loss=0.2302, pruned_loss=0.03627, over 4846.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2243, pruned_loss=0.04177, over 972471.21 frames.], batch size: 15, lr: 3.98e-04 +2022-05-05 03:22:55,534 INFO [train.py:715] (6/8) Epoch 5, batch 3600, loss[loss=0.1779, simple_loss=0.2403, pruned_loss=0.05771, over 4858.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2241, pruned_loss=0.04175, over 971519.06 frames.], batch size: 30, lr: 3.98e-04 +2022-05-05 03:23:34,524 INFO [train.py:715] (6/8) Epoch 5, batch 3650, loss[loss=0.1344, simple_loss=0.2096, pruned_loss=0.02965, over 4784.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2234, pruned_loss=0.04146, over 971424.66 frames.], batch size: 18, lr: 3.98e-04 +2022-05-05 03:24:13,764 INFO [train.py:715] (6/8) Epoch 5, batch 3700, loss[loss=0.1458, simple_loss=0.2192, pruned_loss=0.0362, over 4985.00 frames.], tot_loss[loss=0.153, simple_loss=0.2231, pruned_loss=0.04146, over 972115.80 frames.], batch size: 26, lr: 3.98e-04 +2022-05-05 03:24:53,921 INFO [train.py:715] (6/8) Epoch 5, batch 3750, loss[loss=0.1977, simple_loss=0.2654, pruned_loss=0.06503, over 4875.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2235, pruned_loss=0.04138, over 973206.33 frames.], batch size: 16, lr: 3.98e-04 +2022-05-05 03:25:33,702 INFO [train.py:715] (6/8) Epoch 5, batch 3800, loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03017, over 4899.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2236, pruned_loss=0.04135, over 973605.28 frames.], batch size: 19, lr: 3.97e-04 +2022-05-05 03:26:13,095 INFO [train.py:715] (6/8) Epoch 5, batch 3850, loss[loss=0.158, simple_loss=0.2295, pruned_loss=0.04326, over 4762.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2231, pruned_loss=0.04101, over 973405.96 frames.], batch size: 19, lr: 3.97e-04 +2022-05-05 03:26:52,957 INFO [train.py:715] (6/8) Epoch 5, batch 3900, loss[loss=0.1912, simple_loss=0.2499, pruned_loss=0.06626, over 4783.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2236, pruned_loss=0.04157, over 972908.92 frames.], batch size: 18, lr: 3.97e-04 +2022-05-05 03:27:32,998 INFO [train.py:715] (6/8) Epoch 5, batch 3950, loss[loss=0.1563, simple_loss=0.2261, pruned_loss=0.04323, over 4744.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2236, pruned_loss=0.04181, over 972485.10 frames.], batch size: 16, lr: 3.97e-04 +2022-05-05 03:28:13,084 INFO [train.py:715] (6/8) Epoch 5, batch 4000, loss[loss=0.176, simple_loss=0.2524, pruned_loss=0.04978, over 4791.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2236, pruned_loss=0.04156, over 971386.32 frames.], batch size: 17, lr: 3.97e-04 +2022-05-05 03:28:53,740 INFO 
[train.py:715] (6/8) Epoch 5, batch 4050, loss[loss=0.1747, simple_loss=0.2579, pruned_loss=0.04581, over 4932.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2237, pruned_loss=0.04139, over 971230.33 frames.], batch size: 21, lr: 3.97e-04 +2022-05-05 03:29:33,849 INFO [train.py:715] (6/8) Epoch 5, batch 4100, loss[loss=0.1363, simple_loss=0.2005, pruned_loss=0.03603, over 4982.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2234, pruned_loss=0.04115, over 971407.22 frames.], batch size: 14, lr: 3.97e-04 +2022-05-05 03:30:14,069 INFO [train.py:715] (6/8) Epoch 5, batch 4150, loss[loss=0.1687, simple_loss=0.2435, pruned_loss=0.04696, over 4850.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2233, pruned_loss=0.04104, over 971567.86 frames.], batch size: 20, lr: 3.97e-04 +2022-05-05 03:30:53,451 INFO [train.py:715] (6/8) Epoch 5, batch 4200, loss[loss=0.1613, simple_loss=0.2321, pruned_loss=0.04528, over 4784.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2233, pruned_loss=0.04103, over 971346.16 frames.], batch size: 14, lr: 3.97e-04 +2022-05-05 03:31:32,791 INFO [train.py:715] (6/8) Epoch 5, batch 4250, loss[loss=0.1291, simple_loss=0.1995, pruned_loss=0.02931, over 4678.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2232, pruned_loss=0.04095, over 970858.98 frames.], batch size: 15, lr: 3.97e-04 +2022-05-05 03:32:12,489 INFO [train.py:715] (6/8) Epoch 5, batch 4300, loss[loss=0.1221, simple_loss=0.1937, pruned_loss=0.02529, over 4814.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2227, pruned_loss=0.04078, over 971082.33 frames.], batch size: 12, lr: 3.97e-04 +2022-05-05 03:32:52,102 INFO [train.py:715] (6/8) Epoch 5, batch 4350, loss[loss=0.1276, simple_loss=0.203, pruned_loss=0.02612, over 4769.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2235, pruned_loss=0.04102, over 972049.86 frames.], batch size: 18, lr: 3.97e-04 +2022-05-05 03:33:32,072 INFO [train.py:715] (6/8) Epoch 5, batch 4400, loss[loss=0.1518, simple_loss=0.228, pruned_loss=0.03781, over 4732.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2236, pruned_loss=0.04107, over 971495.42 frames.], batch size: 16, lr: 3.97e-04 +2022-05-05 03:34:10,945 INFO [train.py:715] (6/8) Epoch 5, batch 4450, loss[loss=0.1438, simple_loss=0.2129, pruned_loss=0.03741, over 4900.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2223, pruned_loss=0.04073, over 971404.23 frames.], batch size: 19, lr: 3.97e-04 +2022-05-05 03:34:50,793 INFO [train.py:715] (6/8) Epoch 5, batch 4500, loss[loss=0.151, simple_loss=0.2208, pruned_loss=0.04057, over 4816.00 frames.], tot_loss[loss=0.1518, simple_loss=0.222, pruned_loss=0.0408, over 971618.28 frames.], batch size: 27, lr: 3.97e-04 +2022-05-05 03:35:30,125 INFO [train.py:715] (6/8) Epoch 5, batch 4550, loss[loss=0.1583, simple_loss=0.2234, pruned_loss=0.04662, over 4802.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2221, pruned_loss=0.04075, over 971456.04 frames.], batch size: 13, lr: 3.97e-04 +2022-05-05 03:36:09,740 INFO [train.py:715] (6/8) Epoch 5, batch 4600, loss[loss=0.176, simple_loss=0.2348, pruned_loss=0.05857, over 4979.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2232, pruned_loss=0.04124, over 970678.21 frames.], batch size: 33, lr: 3.97e-04 +2022-05-05 03:36:50,102 INFO [train.py:715] (6/8) Epoch 5, batch 4650, loss[loss=0.1722, simple_loss=0.2453, pruned_loss=0.04951, over 4932.00 frames.], tot_loss[loss=0.153, simple_loss=0.2234, pruned_loss=0.04126, over 970991.29 frames.], batch size: 21, lr: 3.97e-04 +2022-05-05 03:37:30,434 INFO [train.py:715] (6/8) Epoch 5, batch 
4700, loss[loss=0.1297, simple_loss=0.1973, pruned_loss=0.031, over 4844.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2237, pruned_loss=0.04151, over 971437.63 frames.], batch size: 15, lr: 3.96e-04 +2022-05-05 03:38:10,933 INFO [train.py:715] (6/8) Epoch 5, batch 4750, loss[loss=0.1902, simple_loss=0.2645, pruned_loss=0.05792, over 4960.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2231, pruned_loss=0.04098, over 971977.54 frames.], batch size: 39, lr: 3.96e-04 +2022-05-05 03:38:50,696 INFO [train.py:715] (6/8) Epoch 5, batch 4800, loss[loss=0.149, simple_loss=0.2295, pruned_loss=0.03426, over 4942.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2235, pruned_loss=0.0414, over 972004.39 frames.], batch size: 23, lr: 3.96e-04 +2022-05-05 03:39:31,186 INFO [train.py:715] (6/8) Epoch 5, batch 4850, loss[loss=0.1537, simple_loss=0.2304, pruned_loss=0.03845, over 4984.00 frames.], tot_loss[loss=0.1526, simple_loss=0.223, pruned_loss=0.04111, over 972465.73 frames.], batch size: 26, lr: 3.96e-04 +2022-05-05 03:40:11,790 INFO [train.py:715] (6/8) Epoch 5, batch 4900, loss[loss=0.1411, simple_loss=0.2182, pruned_loss=0.03201, over 4696.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2231, pruned_loss=0.0412, over 973088.00 frames.], batch size: 15, lr: 3.96e-04 +2022-05-05 03:40:51,920 INFO [train.py:715] (6/8) Epoch 5, batch 4950, loss[loss=0.1904, simple_loss=0.2469, pruned_loss=0.06692, over 4983.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2239, pruned_loss=0.04175, over 972556.60 frames.], batch size: 14, lr: 3.96e-04 +2022-05-05 03:41:32,226 INFO [train.py:715] (6/8) Epoch 5, batch 5000, loss[loss=0.1974, simple_loss=0.2702, pruned_loss=0.06231, over 4874.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2235, pruned_loss=0.04209, over 971993.22 frames.], batch size: 16, lr: 3.96e-04 +2022-05-05 03:42:13,231 INFO [train.py:715] (6/8) Epoch 5, batch 5050, loss[loss=0.1527, simple_loss=0.2178, pruned_loss=0.04373, over 4981.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2235, pruned_loss=0.04189, over 973084.53 frames.], batch size: 35, lr: 3.96e-04 +2022-05-05 03:42:52,852 INFO [train.py:715] (6/8) Epoch 5, batch 5100, loss[loss=0.1242, simple_loss=0.2096, pruned_loss=0.0194, over 4818.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2244, pruned_loss=0.04196, over 972477.82 frames.], batch size: 27, lr: 3.96e-04 +2022-05-05 03:43:32,136 INFO [train.py:715] (6/8) Epoch 5, batch 5150, loss[loss=0.1469, simple_loss=0.2188, pruned_loss=0.03754, over 4815.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2243, pruned_loss=0.04157, over 972969.22 frames.], batch size: 26, lr: 3.96e-04 +2022-05-05 03:44:11,859 INFO [train.py:715] (6/8) Epoch 5, batch 5200, loss[loss=0.1409, simple_loss=0.213, pruned_loss=0.03437, over 4916.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2237, pruned_loss=0.0408, over 972103.85 frames.], batch size: 18, lr: 3.96e-04 +2022-05-05 03:44:51,642 INFO [train.py:715] (6/8) Epoch 5, batch 5250, loss[loss=0.1361, simple_loss=0.2076, pruned_loss=0.03226, over 4892.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2227, pruned_loss=0.04054, over 972574.63 frames.], batch size: 16, lr: 3.96e-04 +2022-05-05 03:45:32,214 INFO [train.py:715] (6/8) Epoch 5, batch 5300, loss[loss=0.1333, simple_loss=0.2084, pruned_loss=0.02906, over 4804.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2224, pruned_loss=0.04061, over 971730.93 frames.], batch size: 24, lr: 3.96e-04 +2022-05-05 03:46:12,530 INFO [train.py:715] (6/8) Epoch 5, batch 5350, loss[loss=0.123, 
simple_loss=0.1946, pruned_loss=0.02565, over 4942.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2218, pruned_loss=0.04023, over 972529.42 frames.], batch size: 29, lr: 3.96e-04 +2022-05-05 03:46:52,884 INFO [train.py:715] (6/8) Epoch 5, batch 5400, loss[loss=0.1836, simple_loss=0.2411, pruned_loss=0.06303, over 4910.00 frames.], tot_loss[loss=0.1513, simple_loss=0.222, pruned_loss=0.04031, over 973183.06 frames.], batch size: 17, lr: 3.96e-04 +2022-05-05 03:47:32,580 INFO [train.py:715] (6/8) Epoch 5, batch 5450, loss[loss=0.1505, simple_loss=0.2228, pruned_loss=0.03916, over 4753.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2222, pruned_loss=0.04068, over 973306.69 frames.], batch size: 19, lr: 3.96e-04 +2022-05-05 03:48:12,699 INFO [train.py:715] (6/8) Epoch 5, batch 5500, loss[loss=0.1399, simple_loss=0.2186, pruned_loss=0.03057, over 4970.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2232, pruned_loss=0.04099, over 973831.97 frames.], batch size: 25, lr: 3.96e-04 +2022-05-05 03:48:53,029 INFO [train.py:715] (6/8) Epoch 5, batch 5550, loss[loss=0.1157, simple_loss=0.201, pruned_loss=0.0152, over 4817.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2238, pruned_loss=0.0413, over 973367.20 frames.], batch size: 26, lr: 3.96e-04 +2022-05-05 03:49:33,411 INFO [train.py:715] (6/8) Epoch 5, batch 5600, loss[loss=0.1488, simple_loss=0.2151, pruned_loss=0.04127, over 4893.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2241, pruned_loss=0.04179, over 973229.61 frames.], batch size: 19, lr: 3.95e-04 +2022-05-05 03:50:13,547 INFO [train.py:715] (6/8) Epoch 5, batch 5650, loss[loss=0.1535, simple_loss=0.2158, pruned_loss=0.04562, over 4770.00 frames.], tot_loss[loss=0.1527, simple_loss=0.223, pruned_loss=0.04116, over 973086.14 frames.], batch size: 17, lr: 3.95e-04 +2022-05-05 03:50:52,901 INFO [train.py:715] (6/8) Epoch 5, batch 5700, loss[loss=0.1529, simple_loss=0.2163, pruned_loss=0.04471, over 4946.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2231, pruned_loss=0.04151, over 973426.75 frames.], batch size: 21, lr: 3.95e-04 +2022-05-05 03:51:33,322 INFO [train.py:715] (6/8) Epoch 5, batch 5750, loss[loss=0.1545, simple_loss=0.2222, pruned_loss=0.04344, over 4762.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2227, pruned_loss=0.04156, over 973391.35 frames.], batch size: 16, lr: 3.95e-04 +2022-05-05 03:52:13,227 INFO [train.py:715] (6/8) Epoch 5, batch 5800, loss[loss=0.1717, simple_loss=0.2358, pruned_loss=0.05382, over 4759.00 frames.], tot_loss[loss=0.153, simple_loss=0.223, pruned_loss=0.0415, over 972641.62 frames.], batch size: 19, lr: 3.95e-04 +2022-05-05 03:52:53,762 INFO [train.py:715] (6/8) Epoch 5, batch 5850, loss[loss=0.1355, simple_loss=0.2032, pruned_loss=0.03391, over 4761.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2222, pruned_loss=0.04131, over 971982.65 frames.], batch size: 12, lr: 3.95e-04 +2022-05-05 03:53:33,398 INFO [train.py:715] (6/8) Epoch 5, batch 5900, loss[loss=0.1554, simple_loss=0.2414, pruned_loss=0.03465, over 4934.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2226, pruned_loss=0.0415, over 972196.12 frames.], batch size: 21, lr: 3.95e-04 +2022-05-05 03:54:13,788 INFO [train.py:715] (6/8) Epoch 5, batch 5950, loss[loss=0.1982, simple_loss=0.2607, pruned_loss=0.06785, over 4825.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2228, pruned_loss=0.04177, over 972206.54 frames.], batch size: 26, lr: 3.95e-04 +2022-05-05 03:54:53,620 INFO [train.py:715] (6/8) Epoch 5, batch 6000, loss[loss=0.1502, simple_loss=0.2135, pruned_loss=0.0435, 
over 4847.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2225, pruned_loss=0.04167, over 972878.50 frames.], batch size: 30, lr: 3.95e-04 +2022-05-05 03:54:53,621 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 03:55:03,071 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.1106, simple_loss=0.1959, pruned_loss=0.01263, over 914524.00 frames. +2022-05-05 03:55:42,940 INFO [train.py:715] (6/8) Epoch 5, batch 6050, loss[loss=0.1301, simple_loss=0.2091, pruned_loss=0.02549, over 4860.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2229, pruned_loss=0.0414, over 972952.88 frames.], batch size: 32, lr: 3.95e-04 +2022-05-05 03:56:22,015 INFO [train.py:715] (6/8) Epoch 5, batch 6100, loss[loss=0.1367, simple_loss=0.2154, pruned_loss=0.02899, over 4863.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2237, pruned_loss=0.04183, over 973925.52 frames.], batch size: 20, lr: 3.95e-04 +2022-05-05 03:57:01,853 INFO [train.py:715] (6/8) Epoch 5, batch 6150, loss[loss=0.1722, simple_loss=0.2391, pruned_loss=0.05264, over 4907.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2245, pruned_loss=0.04237, over 973405.76 frames.], batch size: 19, lr: 3.95e-04 +2022-05-05 03:57:40,838 INFO [train.py:715] (6/8) Epoch 5, batch 6200, loss[loss=0.1509, simple_loss=0.2236, pruned_loss=0.03917, over 4964.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2236, pruned_loss=0.04152, over 973839.95 frames.], batch size: 24, lr: 3.95e-04 +2022-05-05 03:58:21,090 INFO [train.py:715] (6/8) Epoch 5, batch 6250, loss[loss=0.1212, simple_loss=0.1955, pruned_loss=0.02349, over 4812.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2231, pruned_loss=0.04124, over 973339.54 frames.], batch size: 25, lr: 3.95e-04 +2022-05-05 03:58:59,727 INFO [train.py:715] (6/8) Epoch 5, batch 6300, loss[loss=0.1662, simple_loss=0.229, pruned_loss=0.05169, over 4700.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2237, pruned_loss=0.04179, over 973641.73 frames.], batch size: 15, lr: 3.95e-04 +2022-05-05 03:59:39,540 INFO [train.py:715] (6/8) Epoch 5, batch 6350, loss[loss=0.1473, simple_loss=0.2139, pruned_loss=0.04034, over 4977.00 frames.], tot_loss[loss=0.154, simple_loss=0.2243, pruned_loss=0.04186, over 973459.05 frames.], batch size: 14, lr: 3.95e-04 +2022-05-05 04:00:18,905 INFO [train.py:715] (6/8) Epoch 5, batch 6400, loss[loss=0.1781, simple_loss=0.2435, pruned_loss=0.05633, over 4639.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2244, pruned_loss=0.04166, over 973450.84 frames.], batch size: 13, lr: 3.95e-04 +2022-05-05 04:00:57,770 INFO [train.py:715] (6/8) Epoch 5, batch 6450, loss[loss=0.1611, simple_loss=0.2299, pruned_loss=0.04609, over 4867.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2232, pruned_loss=0.04133, over 972890.64 frames.], batch size: 20, lr: 3.95e-04 +2022-05-05 04:01:37,239 INFO [train.py:715] (6/8) Epoch 5, batch 6500, loss[loss=0.1435, simple_loss=0.2061, pruned_loss=0.04045, over 4973.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2239, pruned_loss=0.04136, over 973252.46 frames.], batch size: 14, lr: 3.95e-04 +2022-05-05 04:02:16,581 INFO [train.py:715] (6/8) Epoch 5, batch 6550, loss[loss=0.1592, simple_loss=0.2367, pruned_loss=0.04081, over 4946.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2231, pruned_loss=0.04089, over 972565.63 frames.], batch size: 21, lr: 3.94e-04 +2022-05-05 04:02:55,732 INFO [train.py:715] (6/8) Epoch 5, batch 6600, loss[loss=0.1664, simple_loss=0.2402, pruned_loss=0.04626, over 4686.00 frames.], tot_loss[loss=0.152, simple_loss=0.2228, 
pruned_loss=0.0406, over 972686.63 frames.], batch size: 15, lr: 3.94e-04 +2022-05-05 04:03:35,253 INFO [train.py:715] (6/8) Epoch 5, batch 6650, loss[loss=0.1428, simple_loss=0.2165, pruned_loss=0.03453, over 4796.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2235, pruned_loss=0.04093, over 972039.41 frames.], batch size: 25, lr: 3.94e-04 +2022-05-05 04:04:15,786 INFO [train.py:715] (6/8) Epoch 5, batch 6700, loss[loss=0.1481, simple_loss=0.2213, pruned_loss=0.03746, over 4976.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2231, pruned_loss=0.04082, over 972213.57 frames.], batch size: 24, lr: 3.94e-04 +2022-05-05 04:04:56,123 INFO [train.py:715] (6/8) Epoch 5, batch 6750, loss[loss=0.1565, simple_loss=0.2276, pruned_loss=0.04272, over 4964.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2238, pruned_loss=0.04134, over 972669.08 frames.], batch size: 39, lr: 3.94e-04 +2022-05-05 04:05:36,109 INFO [train.py:715] (6/8) Epoch 5, batch 6800, loss[loss=0.1553, simple_loss=0.2243, pruned_loss=0.04316, over 4911.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2246, pruned_loss=0.04188, over 972844.26 frames.], batch size: 39, lr: 3.94e-04 +2022-05-05 04:06:16,591 INFO [train.py:715] (6/8) Epoch 5, batch 6850, loss[loss=0.1577, simple_loss=0.2281, pruned_loss=0.04363, over 4953.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2243, pruned_loss=0.04163, over 972431.56 frames.], batch size: 29, lr: 3.94e-04 +2022-05-05 04:06:56,552 INFO [train.py:715] (6/8) Epoch 5, batch 6900, loss[loss=0.2076, simple_loss=0.268, pruned_loss=0.07363, over 4892.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2241, pruned_loss=0.04147, over 972545.38 frames.], batch size: 39, lr: 3.94e-04 +2022-05-05 04:07:37,128 INFO [train.py:715] (6/8) Epoch 5, batch 6950, loss[loss=0.1539, simple_loss=0.2246, pruned_loss=0.04161, over 4813.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2239, pruned_loss=0.0414, over 972713.96 frames.], batch size: 26, lr: 3.94e-04 +2022-05-05 04:08:16,568 INFO [train.py:715] (6/8) Epoch 5, batch 7000, loss[loss=0.1307, simple_loss=0.2114, pruned_loss=0.025, over 4984.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2239, pruned_loss=0.04163, over 972636.33 frames.], batch size: 26, lr: 3.94e-04 +2022-05-05 04:08:56,462 INFO [train.py:715] (6/8) Epoch 5, batch 7050, loss[loss=0.1386, simple_loss=0.2157, pruned_loss=0.03077, over 4883.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2235, pruned_loss=0.0415, over 972074.62 frames.], batch size: 22, lr: 3.94e-04 +2022-05-05 04:09:36,253 INFO [train.py:715] (6/8) Epoch 5, batch 7100, loss[loss=0.1479, simple_loss=0.2225, pruned_loss=0.03661, over 4898.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2239, pruned_loss=0.04166, over 973131.32 frames.], batch size: 39, lr: 3.94e-04 +2022-05-05 04:10:15,694 INFO [train.py:715] (6/8) Epoch 5, batch 7150, loss[loss=0.1579, simple_loss=0.2248, pruned_loss=0.04547, over 4920.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2246, pruned_loss=0.04202, over 973428.85 frames.], batch size: 23, lr: 3.94e-04 +2022-05-05 04:10:55,642 INFO [train.py:715] (6/8) Epoch 5, batch 7200, loss[loss=0.1403, simple_loss=0.2164, pruned_loss=0.03209, over 4972.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2253, pruned_loss=0.04221, over 973841.00 frames.], batch size: 28, lr: 3.94e-04 +2022-05-05 04:11:35,241 INFO [train.py:715] (6/8) Epoch 5, batch 7250, loss[loss=0.1645, simple_loss=0.2346, pruned_loss=0.04713, over 4830.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2238, pruned_loss=0.04152, over 973649.90 
frames.], batch size: 26, lr: 3.94e-04 +2022-05-05 04:12:15,758 INFO [train.py:715] (6/8) Epoch 5, batch 7300, loss[loss=0.1895, simple_loss=0.2592, pruned_loss=0.05985, over 4873.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2238, pruned_loss=0.0414, over 973879.80 frames.], batch size: 16, lr: 3.94e-04 +2022-05-05 04:12:55,316 INFO [train.py:715] (6/8) Epoch 5, batch 7350, loss[loss=0.149, simple_loss=0.2215, pruned_loss=0.03828, over 4683.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2239, pruned_loss=0.0415, over 972845.01 frames.], batch size: 15, lr: 3.94e-04 +2022-05-05 04:13:34,918 INFO [train.py:715] (6/8) Epoch 5, batch 7400, loss[loss=0.1662, simple_loss=0.2337, pruned_loss=0.04929, over 4987.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2246, pruned_loss=0.04154, over 972661.72 frames.], batch size: 26, lr: 3.94e-04 +2022-05-05 04:14:14,464 INFO [train.py:715] (6/8) Epoch 5, batch 7450, loss[loss=0.118, simple_loss=0.1905, pruned_loss=0.02276, over 4822.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2244, pruned_loss=0.04154, over 973718.70 frames.], batch size: 25, lr: 3.93e-04 +2022-05-05 04:14:53,553 INFO [train.py:715] (6/8) Epoch 5, batch 7500, loss[loss=0.1778, simple_loss=0.2571, pruned_loss=0.0492, over 4934.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2243, pruned_loss=0.0414, over 973331.08 frames.], batch size: 39, lr: 3.93e-04 +2022-05-05 04:15:33,689 INFO [train.py:715] (6/8) Epoch 5, batch 7550, loss[loss=0.1404, simple_loss=0.2138, pruned_loss=0.0335, over 4916.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2237, pruned_loss=0.04134, over 973271.39 frames.], batch size: 17, lr: 3.93e-04 +2022-05-05 04:16:13,350 INFO [train.py:715] (6/8) Epoch 5, batch 7600, loss[loss=0.1526, simple_loss=0.2152, pruned_loss=0.04495, over 4816.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2232, pruned_loss=0.04121, over 973435.17 frames.], batch size: 25, lr: 3.93e-04 +2022-05-05 04:16:53,612 INFO [train.py:715] (6/8) Epoch 5, batch 7650, loss[loss=0.158, simple_loss=0.2228, pruned_loss=0.04657, over 4795.00 frames.], tot_loss[loss=0.154, simple_loss=0.2245, pruned_loss=0.0417, over 973774.67 frames.], batch size: 18, lr: 3.93e-04 +2022-05-05 04:17:33,269 INFO [train.py:715] (6/8) Epoch 5, batch 7700, loss[loss=0.1391, simple_loss=0.213, pruned_loss=0.03257, over 4833.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2241, pruned_loss=0.04126, over 973075.65 frames.], batch size: 13, lr: 3.93e-04 +2022-05-05 04:18:12,779 INFO [train.py:715] (6/8) Epoch 5, batch 7750, loss[loss=0.1744, simple_loss=0.2518, pruned_loss=0.04846, over 4912.00 frames.], tot_loss[loss=0.1531, simple_loss=0.224, pruned_loss=0.04109, over 973530.52 frames.], batch size: 29, lr: 3.93e-04 +2022-05-05 04:18:52,928 INFO [train.py:715] (6/8) Epoch 5, batch 7800, loss[loss=0.1731, simple_loss=0.2433, pruned_loss=0.0514, over 4956.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2238, pruned_loss=0.04104, over 973014.12 frames.], batch size: 39, lr: 3.93e-04 +2022-05-05 04:19:32,132 INFO [train.py:715] (6/8) Epoch 5, batch 7850, loss[loss=0.1524, simple_loss=0.2218, pruned_loss=0.04153, over 4981.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2247, pruned_loss=0.04162, over 973375.34 frames.], batch size: 28, lr: 3.93e-04 +2022-05-05 04:20:12,359 INFO [train.py:715] (6/8) Epoch 5, batch 7900, loss[loss=0.1484, simple_loss=0.217, pruned_loss=0.03995, over 4731.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2238, pruned_loss=0.04131, over 973086.11 frames.], batch size: 16, lr: 3.93e-04 
+2022-05-05 04:20:51,915 INFO [train.py:715] (6/8) Epoch 5, batch 7950, loss[loss=0.1754, simple_loss=0.2477, pruned_loss=0.05151, over 4865.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2227, pruned_loss=0.0409, over 973081.57 frames.], batch size: 16, lr: 3.93e-04 +2022-05-05 04:21:32,116 INFO [train.py:715] (6/8) Epoch 5, batch 8000, loss[loss=0.1495, simple_loss=0.2186, pruned_loss=0.04016, over 4956.00 frames.], tot_loss[loss=0.1525, simple_loss=0.223, pruned_loss=0.04101, over 973265.70 frames.], batch size: 21, lr: 3.93e-04 +2022-05-05 04:22:11,573 INFO [train.py:715] (6/8) Epoch 5, batch 8050, loss[loss=0.1681, simple_loss=0.2409, pruned_loss=0.04761, over 4810.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2241, pruned_loss=0.04168, over 972666.91 frames.], batch size: 21, lr: 3.93e-04 +2022-05-05 04:22:51,025 INFO [train.py:715] (6/8) Epoch 5, batch 8100, loss[loss=0.1365, simple_loss=0.1988, pruned_loss=0.03705, over 4768.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2242, pruned_loss=0.04237, over 971957.87 frames.], batch size: 18, lr: 3.93e-04 +2022-05-05 04:23:30,814 INFO [train.py:715] (6/8) Epoch 5, batch 8150, loss[loss=0.2003, simple_loss=0.2629, pruned_loss=0.06884, over 4815.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2256, pruned_loss=0.04311, over 971662.40 frames.], batch size: 15, lr: 3.93e-04 +2022-05-05 04:24:09,997 INFO [train.py:715] (6/8) Epoch 5, batch 8200, loss[loss=0.1362, simple_loss=0.2124, pruned_loss=0.02995, over 4950.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2238, pruned_loss=0.04203, over 971620.21 frames.], batch size: 23, lr: 3.93e-04 +2022-05-05 04:24:50,014 INFO [train.py:715] (6/8) Epoch 5, batch 8250, loss[loss=0.1566, simple_loss=0.229, pruned_loss=0.04207, over 4900.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2238, pruned_loss=0.04195, over 972150.58 frames.], batch size: 19, lr: 3.93e-04 +2022-05-05 04:25:29,485 INFO [train.py:715] (6/8) Epoch 5, batch 8300, loss[loss=0.132, simple_loss=0.2041, pruned_loss=0.02991, over 4771.00 frames.], tot_loss[loss=0.154, simple_loss=0.2242, pruned_loss=0.04191, over 972541.30 frames.], batch size: 18, lr: 3.93e-04 +2022-05-05 04:26:09,423 INFO [train.py:715] (6/8) Epoch 5, batch 8350, loss[loss=0.1537, simple_loss=0.2429, pruned_loss=0.03224, over 4984.00 frames.], tot_loss[loss=0.153, simple_loss=0.2241, pruned_loss=0.04102, over 972635.77 frames.], batch size: 25, lr: 3.93e-04 +2022-05-05 04:26:48,505 INFO [train.py:715] (6/8) Epoch 5, batch 8400, loss[loss=0.1526, simple_loss=0.2277, pruned_loss=0.03874, over 4839.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2239, pruned_loss=0.0407, over 972117.38 frames.], batch size: 15, lr: 3.92e-04 +2022-05-05 04:27:27,554 INFO [train.py:715] (6/8) Epoch 5, batch 8450, loss[loss=0.1441, simple_loss=0.2194, pruned_loss=0.03441, over 4824.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2228, pruned_loss=0.03998, over 972445.47 frames.], batch size: 13, lr: 3.92e-04 +2022-05-05 04:28:06,815 INFO [train.py:715] (6/8) Epoch 5, batch 8500, loss[loss=0.1385, simple_loss=0.2082, pruned_loss=0.03436, over 4826.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2234, pruned_loss=0.0404, over 972399.13 frames.], batch size: 25, lr: 3.92e-04 +2022-05-05 04:28:45,805 INFO [train.py:715] (6/8) Epoch 5, batch 8550, loss[loss=0.1642, simple_loss=0.2383, pruned_loss=0.04504, over 4900.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2234, pruned_loss=0.04006, over 972521.26 frames.], batch size: 19, lr: 3.92e-04 +2022-05-05 04:29:25,251 INFO 
[train.py:715] (6/8) Epoch 5, batch 8600, loss[loss=0.1356, simple_loss=0.2186, pruned_loss=0.02627, over 4811.00 frames.], tot_loss[loss=0.1527, simple_loss=0.224, pruned_loss=0.04068, over 971542.35 frames.], batch size: 27, lr: 3.92e-04 +2022-05-05 04:30:04,410 INFO [train.py:715] (6/8) Epoch 5, batch 8650, loss[loss=0.1676, simple_loss=0.2214, pruned_loss=0.05693, over 4981.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2242, pruned_loss=0.0407, over 971541.40 frames.], batch size: 31, lr: 3.92e-04 +2022-05-05 04:30:43,888 INFO [train.py:715] (6/8) Epoch 5, batch 8700, loss[loss=0.1647, simple_loss=0.2421, pruned_loss=0.04368, over 4736.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2239, pruned_loss=0.04061, over 971469.25 frames.], batch size: 16, lr: 3.92e-04 +2022-05-05 04:31:23,275 INFO [train.py:715] (6/8) Epoch 5, batch 8750, loss[loss=0.1416, simple_loss=0.2093, pruned_loss=0.03698, over 4806.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2238, pruned_loss=0.04117, over 971516.24 frames.], batch size: 13, lr: 3.92e-04 +2022-05-05 04:32:02,280 INFO [train.py:715] (6/8) Epoch 5, batch 8800, loss[loss=0.1794, simple_loss=0.2574, pruned_loss=0.05076, over 4766.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2237, pruned_loss=0.041, over 971883.05 frames.], batch size: 16, lr: 3.92e-04 +2022-05-05 04:32:42,163 INFO [train.py:715] (6/8) Epoch 5, batch 8850, loss[loss=0.175, simple_loss=0.2339, pruned_loss=0.05802, over 4746.00 frames.], tot_loss[loss=0.1533, simple_loss=0.224, pruned_loss=0.04127, over 971647.50 frames.], batch size: 16, lr: 3.92e-04 +2022-05-05 04:33:20,887 INFO [train.py:715] (6/8) Epoch 5, batch 8900, loss[loss=0.1352, simple_loss=0.2113, pruned_loss=0.02959, over 4819.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2238, pruned_loss=0.0412, over 971272.92 frames.], batch size: 25, lr: 3.92e-04 +2022-05-05 04:33:59,749 INFO [train.py:715] (6/8) Epoch 5, batch 8950, loss[loss=0.1573, simple_loss=0.2211, pruned_loss=0.04674, over 4900.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2242, pruned_loss=0.04126, over 970525.30 frames.], batch size: 17, lr: 3.92e-04 +2022-05-05 04:34:39,030 INFO [train.py:715] (6/8) Epoch 5, batch 9000, loss[loss=0.1545, simple_loss=0.2289, pruned_loss=0.04007, over 4872.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2241, pruned_loss=0.04141, over 971153.46 frames.], batch size: 22, lr: 3.92e-04 +2022-05-05 04:34:39,031 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 04:34:48,553 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.1105, simple_loss=0.196, pruned_loss=0.01252, over 914524.00 frames. 
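The two entries just above ("Computing validation loss" followed by "validation: loss=0.1105, ..., over 914524.00 frames.") report a single dev-set figure weighted by frame count. The sketch below shows one way such a frame-weighted average could be accumulated; `dev_batches` and `compute_loss` are hypothetical stand-ins, not icefall functions.

    # Minimal sketch of a frame-weighted validation average, in the spirit of the
    # "validation: loss=... over 914524.00 frames." entries in this log.
    # `dev_batches` and `compute_loss` are hypothetical stand-ins, not icefall APIs.

    def validate(dev_batches, compute_loss):
        """Return the frame-weighted mean of each loss component over the dev set."""
        totals = {"loss": 0.0, "simple_loss": 0.0, "pruned_loss": 0.0}
        total_frames = 0.0
        for batch in dev_batches:
            # compute_loss is assumed to return per-batch loss *sums* plus the frame count.
            sums, frames = compute_loss(batch)
            for key in totals:
                totals[key] += sums[key]
            total_frames += frames
        return {key: value / total_frames for key, value in totals.items()}, total_frames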
+2022-05-05 04:35:28,197 INFO [train.py:715] (6/8) Epoch 5, batch 9050, loss[loss=0.186, simple_loss=0.2606, pruned_loss=0.05566, over 4935.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2245, pruned_loss=0.04117, over 970881.86 frames.], batch size: 23, lr: 3.92e-04 +2022-05-05 04:36:07,671 INFO [train.py:715] (6/8) Epoch 5, batch 9100, loss[loss=0.1232, simple_loss=0.1879, pruned_loss=0.02931, over 4859.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2251, pruned_loss=0.04203, over 970203.23 frames.], batch size: 13, lr: 3.92e-04 +2022-05-05 04:36:46,714 INFO [train.py:715] (6/8) Epoch 5, batch 9150, loss[loss=0.1362, simple_loss=0.208, pruned_loss=0.03217, over 4869.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2243, pruned_loss=0.0415, over 970228.17 frames.], batch size: 32, lr: 3.92e-04 +2022-05-05 04:37:26,202 INFO [train.py:715] (6/8) Epoch 5, batch 9200, loss[loss=0.1627, simple_loss=0.2327, pruned_loss=0.04634, over 4780.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2243, pruned_loss=0.04138, over 969942.41 frames.], batch size: 17, lr: 3.92e-04 +2022-05-05 04:38:06,417 INFO [train.py:715] (6/8) Epoch 5, batch 9250, loss[loss=0.1542, simple_loss=0.2332, pruned_loss=0.03759, over 4893.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2242, pruned_loss=0.04139, over 970638.24 frames.], batch size: 19, lr: 3.92e-04 +2022-05-05 04:38:45,292 INFO [train.py:715] (6/8) Epoch 5, batch 9300, loss[loss=0.1504, simple_loss=0.2214, pruned_loss=0.03968, over 4929.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2241, pruned_loss=0.04108, over 971323.42 frames.], batch size: 23, lr: 3.91e-04 +2022-05-05 04:39:24,932 INFO [train.py:715] (6/8) Epoch 5, batch 9350, loss[loss=0.1549, simple_loss=0.2246, pruned_loss=0.04262, over 4913.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2249, pruned_loss=0.04184, over 971231.79 frames.], batch size: 17, lr: 3.91e-04 +2022-05-05 04:40:04,423 INFO [train.py:715] (6/8) Epoch 5, batch 9400, loss[loss=0.1286, simple_loss=0.2047, pruned_loss=0.02628, over 4988.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2249, pruned_loss=0.04196, over 972542.68 frames.], batch size: 25, lr: 3.91e-04 +2022-05-05 04:40:43,715 INFO [train.py:715] (6/8) Epoch 5, batch 9450, loss[loss=0.1483, simple_loss=0.2225, pruned_loss=0.03703, over 4985.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2258, pruned_loss=0.04256, over 972923.00 frames.], batch size: 27, lr: 3.91e-04 +2022-05-05 04:41:22,595 INFO [train.py:715] (6/8) Epoch 5, batch 9500, loss[loss=0.1317, simple_loss=0.2102, pruned_loss=0.02666, over 4949.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2251, pruned_loss=0.0421, over 972340.04 frames.], batch size: 24, lr: 3.91e-04 +2022-05-05 04:42:02,156 INFO [train.py:715] (6/8) Epoch 5, batch 9550, loss[loss=0.1266, simple_loss=0.1931, pruned_loss=0.03004, over 4772.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2247, pruned_loss=0.0413, over 972363.48 frames.], batch size: 12, lr: 3.91e-04 +2022-05-05 04:42:41,921 INFO [train.py:715] (6/8) Epoch 5, batch 9600, loss[loss=0.1373, simple_loss=0.2103, pruned_loss=0.03212, over 4663.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2249, pruned_loss=0.04144, over 972330.82 frames.], batch size: 13, lr: 3.91e-04 +2022-05-05 04:43:21,154 INFO [train.py:715] (6/8) Epoch 5, batch 9650, loss[loss=0.1525, simple_loss=0.2165, pruned_loss=0.04424, over 4862.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2246, pruned_loss=0.04195, over 972598.89 frames.], batch size: 13, lr: 3.91e-04 +2022-05-05 04:44:00,809 INFO 
[train.py:715] (6/8) Epoch 5, batch 9700, loss[loss=0.1463, simple_loss=0.217, pruned_loss=0.03778, over 4865.00 frames.], tot_loss[loss=0.153, simple_loss=0.2233, pruned_loss=0.04135, over 972238.70 frames.], batch size: 20, lr: 3.91e-04 +2022-05-05 04:44:40,235 INFO [train.py:715] (6/8) Epoch 5, batch 9750, loss[loss=0.1409, simple_loss=0.2191, pruned_loss=0.03135, over 4852.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2229, pruned_loss=0.04088, over 972835.47 frames.], batch size: 20, lr: 3.91e-04 +2022-05-05 04:45:19,137 INFO [train.py:715] (6/8) Epoch 5, batch 9800, loss[loss=0.144, simple_loss=0.2186, pruned_loss=0.03475, over 4822.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2224, pruned_loss=0.04051, over 972276.29 frames.], batch size: 26, lr: 3.91e-04 +2022-05-05 04:45:58,976 INFO [train.py:715] (6/8) Epoch 5, batch 9850, loss[loss=0.1633, simple_loss=0.2361, pruned_loss=0.04526, over 4943.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2218, pruned_loss=0.04032, over 972898.96 frames.], batch size: 21, lr: 3.91e-04 +2022-05-05 04:46:38,175 INFO [train.py:715] (6/8) Epoch 5, batch 9900, loss[loss=0.1502, simple_loss=0.2215, pruned_loss=0.03944, over 4979.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2235, pruned_loss=0.04129, over 972596.49 frames.], batch size: 28, lr: 3.91e-04 +2022-05-05 04:47:17,941 INFO [train.py:715] (6/8) Epoch 5, batch 9950, loss[loss=0.1503, simple_loss=0.2139, pruned_loss=0.04339, over 4849.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2243, pruned_loss=0.04193, over 973154.53 frames.], batch size: 32, lr: 3.91e-04 +2022-05-05 04:47:59,852 INFO [train.py:715] (6/8) Epoch 5, batch 10000, loss[loss=0.1415, simple_loss=0.2126, pruned_loss=0.0352, over 4749.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2246, pruned_loss=0.04251, over 973700.76 frames.], batch size: 16, lr: 3.91e-04 +2022-05-05 04:48:39,811 INFO [train.py:715] (6/8) Epoch 5, batch 10050, loss[loss=0.1616, simple_loss=0.2268, pruned_loss=0.04823, over 4935.00 frames.], tot_loss[loss=0.154, simple_loss=0.224, pruned_loss=0.04194, over 973158.92 frames.], batch size: 21, lr: 3.91e-04 +2022-05-05 04:49:19,417 INFO [train.py:715] (6/8) Epoch 5, batch 10100, loss[loss=0.1455, simple_loss=0.2246, pruned_loss=0.03324, over 4760.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2242, pruned_loss=0.04168, over 971496.72 frames.], batch size: 19, lr: 3.91e-04 +2022-05-05 04:49:58,588 INFO [train.py:715] (6/8) Epoch 5, batch 10150, loss[loss=0.1583, simple_loss=0.2383, pruned_loss=0.0392, over 4750.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2235, pruned_loss=0.04107, over 971621.72 frames.], batch size: 19, lr: 3.91e-04 +2022-05-05 04:50:38,456 INFO [train.py:715] (6/8) Epoch 5, batch 10200, loss[loss=0.2045, simple_loss=0.2514, pruned_loss=0.07878, over 4694.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2229, pruned_loss=0.04072, over 971710.76 frames.], batch size: 15, lr: 3.91e-04 +2022-05-05 04:51:17,798 INFO [train.py:715] (6/8) Epoch 5, batch 10250, loss[loss=0.16, simple_loss=0.2229, pruned_loss=0.04856, over 4877.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2222, pruned_loss=0.04038, over 972207.80 frames.], batch size: 32, lr: 3.90e-04 +2022-05-05 04:51:56,804 INFO [train.py:715] (6/8) Epoch 5, batch 10300, loss[loss=0.1507, simple_loss=0.2238, pruned_loss=0.03876, over 4937.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2224, pruned_loss=0.04009, over 972105.81 frames.], batch size: 23, lr: 3.90e-04 +2022-05-05 04:52:36,629 INFO [train.py:715] (6/8) Epoch 5, 
batch 10350, loss[loss=0.1183, simple_loss=0.192, pruned_loss=0.02233, over 4947.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2224, pruned_loss=0.04029, over 971884.01 frames.], batch size: 21, lr: 3.90e-04 +2022-05-05 04:53:15,667 INFO [train.py:715] (6/8) Epoch 5, batch 10400, loss[loss=0.1397, simple_loss=0.2047, pruned_loss=0.03739, over 4978.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2223, pruned_loss=0.04032, over 972428.89 frames.], batch size: 28, lr: 3.90e-04 +2022-05-05 04:53:55,620 INFO [train.py:715] (6/8) Epoch 5, batch 10450, loss[loss=0.1332, simple_loss=0.1956, pruned_loss=0.0354, over 4952.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2215, pruned_loss=0.04014, over 972612.48 frames.], batch size: 29, lr: 3.90e-04 +2022-05-05 04:54:35,514 INFO [train.py:715] (6/8) Epoch 5, batch 10500, loss[loss=0.139, simple_loss=0.214, pruned_loss=0.03201, over 4902.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2208, pruned_loss=0.04024, over 972231.81 frames.], batch size: 18, lr: 3.90e-04 +2022-05-05 04:55:15,985 INFO [train.py:715] (6/8) Epoch 5, batch 10550, loss[loss=0.1697, simple_loss=0.2388, pruned_loss=0.05032, over 4858.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2219, pruned_loss=0.04043, over 971715.97 frames.], batch size: 32, lr: 3.90e-04 +2022-05-05 04:55:55,071 INFO [train.py:715] (6/8) Epoch 5, batch 10600, loss[loss=0.1488, simple_loss=0.2313, pruned_loss=0.03312, over 4935.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2221, pruned_loss=0.04011, over 973067.62 frames.], batch size: 21, lr: 3.90e-04 +2022-05-05 04:56:34,541 INFO [train.py:715] (6/8) Epoch 5, batch 10650, loss[loss=0.2025, simple_loss=0.271, pruned_loss=0.06702, over 4968.00 frames.], tot_loss[loss=0.1525, simple_loss=0.223, pruned_loss=0.04096, over 972702.20 frames.], batch size: 39, lr: 3.90e-04 +2022-05-05 04:57:14,073 INFO [train.py:715] (6/8) Epoch 5, batch 10700, loss[loss=0.1574, simple_loss=0.2225, pruned_loss=0.04619, over 4879.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2239, pruned_loss=0.0418, over 973409.21 frames.], batch size: 16, lr: 3.90e-04 +2022-05-05 04:57:53,024 INFO [train.py:715] (6/8) Epoch 5, batch 10750, loss[loss=0.1404, simple_loss=0.2241, pruned_loss=0.0284, over 4701.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2235, pruned_loss=0.04087, over 972828.20 frames.], batch size: 15, lr: 3.90e-04 +2022-05-05 04:58:32,278 INFO [train.py:715] (6/8) Epoch 5, batch 10800, loss[loss=0.1702, simple_loss=0.2277, pruned_loss=0.05632, over 4800.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2225, pruned_loss=0.04054, over 973010.97 frames.], batch size: 21, lr: 3.90e-04 +2022-05-05 04:59:11,507 INFO [train.py:715] (6/8) Epoch 5, batch 10850, loss[loss=0.1471, simple_loss=0.2269, pruned_loss=0.03367, over 4746.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2222, pruned_loss=0.04031, over 972366.89 frames.], batch size: 19, lr: 3.90e-04 +2022-05-05 04:59:51,501 INFO [train.py:715] (6/8) Epoch 5, batch 10900, loss[loss=0.1502, simple_loss=0.2272, pruned_loss=0.0366, over 4836.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2231, pruned_loss=0.0408, over 972702.41 frames.], batch size: 13, lr: 3.90e-04 +2022-05-05 05:00:30,697 INFO [train.py:715] (6/8) Epoch 5, batch 10950, loss[loss=0.1226, simple_loss=0.1986, pruned_loss=0.02332, over 4951.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2232, pruned_loss=0.04055, over 973229.46 frames.], batch size: 29, lr: 3.90e-04 +2022-05-05 05:01:10,468 INFO [train.py:715] (6/8) Epoch 5, batch 11000, 
loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.03287, over 4884.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2235, pruned_loss=0.04057, over 973133.88 frames.], batch size: 22, lr: 3.90e-04 +2022-05-05 05:01:49,965 INFO [train.py:715] (6/8) Epoch 5, batch 11050, loss[loss=0.1417, simple_loss=0.2097, pruned_loss=0.03681, over 4771.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2234, pruned_loss=0.04076, over 971694.44 frames.], batch size: 18, lr: 3.90e-04 +2022-05-05 05:02:29,387 INFO [train.py:715] (6/8) Epoch 5, batch 11100, loss[loss=0.1731, simple_loss=0.2467, pruned_loss=0.04978, over 4970.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2226, pruned_loss=0.04085, over 972373.37 frames.], batch size: 24, lr: 3.90e-04 +2022-05-05 05:03:08,927 INFO [train.py:715] (6/8) Epoch 5, batch 11150, loss[loss=0.1602, simple_loss=0.2393, pruned_loss=0.04059, over 4818.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04113, over 971587.72 frames.], batch size: 15, lr: 3.90e-04 +2022-05-05 05:03:48,022 INFO [train.py:715] (6/8) Epoch 5, batch 11200, loss[loss=0.161, simple_loss=0.2322, pruned_loss=0.04491, over 4765.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2223, pruned_loss=0.04093, over 972372.57 frames.], batch size: 17, lr: 3.89e-04 +2022-05-05 05:04:27,940 INFO [train.py:715] (6/8) Epoch 5, batch 11250, loss[loss=0.1241, simple_loss=0.1928, pruned_loss=0.02771, over 4771.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2217, pruned_loss=0.04075, over 972619.16 frames.], batch size: 17, lr: 3.89e-04 +2022-05-05 05:05:07,261 INFO [train.py:715] (6/8) Epoch 5, batch 11300, loss[loss=0.1548, simple_loss=0.2219, pruned_loss=0.04388, over 4773.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2218, pruned_loss=0.04076, over 971913.88 frames.], batch size: 14, lr: 3.89e-04 +2022-05-05 05:05:46,392 INFO [train.py:715] (6/8) Epoch 5, batch 11350, loss[loss=0.1408, simple_loss=0.2117, pruned_loss=0.03492, over 4818.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2227, pruned_loss=0.04119, over 972775.90 frames.], batch size: 21, lr: 3.89e-04 +2022-05-05 05:06:27,213 INFO [train.py:715] (6/8) Epoch 5, batch 11400, loss[loss=0.1342, simple_loss=0.2028, pruned_loss=0.03276, over 4961.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2219, pruned_loss=0.04046, over 972485.17 frames.], batch size: 15, lr: 3.89e-04 +2022-05-05 05:07:07,354 INFO [train.py:715] (6/8) Epoch 5, batch 11450, loss[loss=0.1362, simple_loss=0.2154, pruned_loss=0.0285, over 4769.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2217, pruned_loss=0.04061, over 972121.43 frames.], batch size: 19, lr: 3.89e-04 +2022-05-05 05:07:47,395 INFO [train.py:715] (6/8) Epoch 5, batch 11500, loss[loss=0.1458, simple_loss=0.2138, pruned_loss=0.0389, over 4834.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2214, pruned_loss=0.04039, over 972700.35 frames.], batch size: 30, lr: 3.89e-04 +2022-05-05 05:08:27,417 INFO [train.py:715] (6/8) Epoch 5, batch 11550, loss[loss=0.1367, simple_loss=0.2133, pruned_loss=0.03003, over 4902.00 frames.], tot_loss[loss=0.151, simple_loss=0.2214, pruned_loss=0.04034, over 972433.48 frames.], batch size: 19, lr: 3.89e-04 +2022-05-05 05:09:07,600 INFO [train.py:715] (6/8) Epoch 5, batch 11600, loss[loss=0.1496, simple_loss=0.2164, pruned_loss=0.04141, over 4950.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2221, pruned_loss=0.04079, over 972647.68 frames.], batch size: 21, lr: 3.89e-04 +2022-05-05 05:09:48,307 INFO [train.py:715] (6/8) Epoch 5, batch 11650, loss[loss=0.1667, 
simple_loss=0.234, pruned_loss=0.04974, over 4773.00 frames.], tot_loss[loss=0.1518, simple_loss=0.222, pruned_loss=0.04078, over 973144.52 frames.], batch size: 18, lr: 3.89e-04 +2022-05-05 05:10:28,057 INFO [train.py:715] (6/8) Epoch 5, batch 11700, loss[loss=0.1587, simple_loss=0.2365, pruned_loss=0.04046, over 4895.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2218, pruned_loss=0.04025, over 972638.53 frames.], batch size: 17, lr: 3.89e-04 +2022-05-05 05:11:08,776 INFO [train.py:715] (6/8) Epoch 5, batch 11750, loss[loss=0.1641, simple_loss=0.2269, pruned_loss=0.05064, over 4902.00 frames.], tot_loss[loss=0.1515, simple_loss=0.222, pruned_loss=0.04053, over 972122.04 frames.], batch size: 19, lr: 3.89e-04 +2022-05-05 05:11:48,920 INFO [train.py:715] (6/8) Epoch 5, batch 11800, loss[loss=0.152, simple_loss=0.2266, pruned_loss=0.03873, over 4795.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2213, pruned_loss=0.04052, over 971913.58 frames.], batch size: 21, lr: 3.89e-04 +2022-05-05 05:12:29,042 INFO [train.py:715] (6/8) Epoch 5, batch 11850, loss[loss=0.1465, simple_loss=0.2146, pruned_loss=0.03925, over 4958.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2215, pruned_loss=0.04065, over 970418.12 frames.], batch size: 24, lr: 3.89e-04 +2022-05-05 05:13:08,183 INFO [train.py:715] (6/8) Epoch 5, batch 11900, loss[loss=0.1588, simple_loss=0.2287, pruned_loss=0.04445, over 4874.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2212, pruned_loss=0.04081, over 970638.39 frames.], batch size: 22, lr: 3.89e-04 +2022-05-05 05:13:47,508 INFO [train.py:715] (6/8) Epoch 5, batch 11950, loss[loss=0.1814, simple_loss=0.255, pruned_loss=0.05391, over 4815.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2206, pruned_loss=0.04055, over 971453.70 frames.], batch size: 26, lr: 3.89e-04 +2022-05-05 05:14:27,513 INFO [train.py:715] (6/8) Epoch 5, batch 12000, loss[loss=0.1583, simple_loss=0.2235, pruned_loss=0.04658, over 4910.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2206, pruned_loss=0.04065, over 971122.89 frames.], batch size: 19, lr: 3.89e-04 +2022-05-05 05:14:27,514 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 05:14:37,327 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.1103, simple_loss=0.1957, pruned_loss=0.01243, over 914524.00 frames. 
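The tot_loss column above moves slowly and is always reported "over" roughly 970,000-974,000 frames, while individual batches contain only about 4,700-5,000 frames, so it is evidently a running, frame-weighted aggregate rather than a single-batch value. One mechanism that would produce numbers of this shape is an exponentially decayed frame-weighted sum, sketched below purely as an illustration (the decay factor and update rule are assumptions, not the actual bookkeeping in train.py); with ~4,900 frames per batch and a decay of 0.995, the effective window is about 4,900 / 0.005 ≈ 980,000 frames, close to the figures logged here.

    # Hypothetical running, frame-weighted loss aggregate.  Illustration only:
    # the decay factor and update rule are assumptions, not taken from train.py.

    class RunningLoss:
        def __init__(self, decay: float = 0.995):
            self.decay = decay      # how quickly older batches are forgotten
            self.loss_sum = 0.0     # decayed sum of (per-frame loss * frames)
            self.frames = 0.0       # decayed sum of frame counts

        def update(self, batch_loss: float, batch_frames: float) -> None:
            # batch_loss is the mean per-frame loss of the current batch.
            self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
            self.frames = self.decay * self.frames + batch_frames

        @property
        def value(self) -> float:
            return self.loss_sum / max(self.frames, 1.0)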
+2022-05-05 05:15:17,601 INFO [train.py:715] (6/8) Epoch 5, batch 12050, loss[loss=0.1567, simple_loss=0.2221, pruned_loss=0.04567, over 4850.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2214, pruned_loss=0.04089, over 972136.39 frames.], batch size: 20, lr: 3.89e-04 +2022-05-05 05:15:57,249 INFO [train.py:715] (6/8) Epoch 5, batch 12100, loss[loss=0.1683, simple_loss=0.2308, pruned_loss=0.05292, over 4770.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2215, pruned_loss=0.04073, over 972220.73 frames.], batch size: 18, lr: 3.89e-04 +2022-05-05 05:16:36,760 INFO [train.py:715] (6/8) Epoch 5, batch 12150, loss[loss=0.1342, simple_loss=0.2144, pruned_loss=0.02697, over 4767.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2216, pruned_loss=0.04033, over 972795.73 frames.], batch size: 12, lr: 3.88e-04 +2022-05-05 05:17:16,021 INFO [train.py:715] (6/8) Epoch 5, batch 12200, loss[loss=0.1257, simple_loss=0.1934, pruned_loss=0.02895, over 4911.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2209, pruned_loss=0.03987, over 972397.91 frames.], batch size: 19, lr: 3.88e-04 +2022-05-05 05:17:56,099 INFO [train.py:715] (6/8) Epoch 5, batch 12250, loss[loss=0.1362, simple_loss=0.2121, pruned_loss=0.03012, over 4748.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2208, pruned_loss=0.0394, over 972051.10 frames.], batch size: 16, lr: 3.88e-04 +2022-05-05 05:18:35,379 INFO [train.py:715] (6/8) Epoch 5, batch 12300, loss[loss=0.1517, simple_loss=0.2162, pruned_loss=0.04363, over 4805.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2217, pruned_loss=0.04004, over 972322.41 frames.], batch size: 21, lr: 3.88e-04 +2022-05-05 05:19:14,277 INFO [train.py:715] (6/8) Epoch 5, batch 12350, loss[loss=0.1622, simple_loss=0.2367, pruned_loss=0.04387, over 4845.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2227, pruned_loss=0.03997, over 971486.43 frames.], batch size: 30, lr: 3.88e-04 +2022-05-05 05:19:53,841 INFO [train.py:715] (6/8) Epoch 5, batch 12400, loss[loss=0.1246, simple_loss=0.1909, pruned_loss=0.0292, over 4749.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2233, pruned_loss=0.04018, over 971802.03 frames.], batch size: 16, lr: 3.88e-04 +2022-05-05 05:20:33,429 INFO [train.py:715] (6/8) Epoch 5, batch 12450, loss[loss=0.1467, simple_loss=0.2186, pruned_loss=0.0374, over 4766.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2237, pruned_loss=0.04025, over 972816.88 frames.], batch size: 19, lr: 3.88e-04 +2022-05-05 05:21:12,659 INFO [train.py:715] (6/8) Epoch 5, batch 12500, loss[loss=0.1669, simple_loss=0.2348, pruned_loss=0.04944, over 4734.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2238, pruned_loss=0.04066, over 972723.10 frames.], batch size: 16, lr: 3.88e-04 +2022-05-05 05:21:51,879 INFO [train.py:715] (6/8) Epoch 5, batch 12550, loss[loss=0.1365, simple_loss=0.2193, pruned_loss=0.02687, over 4898.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2237, pruned_loss=0.04073, over 971832.88 frames.], batch size: 17, lr: 3.88e-04 +2022-05-05 05:22:30,628 INFO [train.py:715] (6/8) Epoch 5, batch 12600, loss[loss=0.1725, simple_loss=0.2362, pruned_loss=0.05439, over 4865.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2237, pruned_loss=0.04128, over 972063.38 frames.], batch size: 30, lr: 3.88e-04 +2022-05-05 05:23:08,928 INFO [train.py:715] (6/8) Epoch 5, batch 12650, loss[loss=0.1527, simple_loss=0.2285, pruned_loss=0.03846, over 4975.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2248, pruned_loss=0.04189, over 971398.00 frames.], batch size: 28, lr: 3.88e-04 +2022-05-05 
05:23:47,148 INFO [train.py:715] (6/8) Epoch 5, batch 12700, loss[loss=0.1335, simple_loss=0.2101, pruned_loss=0.02844, over 4814.00 frames.], tot_loss[loss=0.154, simple_loss=0.2245, pruned_loss=0.04175, over 972302.77 frames.], batch size: 27, lr: 3.88e-04 +2022-05-05 05:24:27,046 INFO [train.py:715] (6/8) Epoch 5, batch 12750, loss[loss=0.1272, simple_loss=0.2023, pruned_loss=0.02604, over 4984.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2238, pruned_loss=0.04167, over 971716.86 frames.], batch size: 25, lr: 3.88e-04 +2022-05-05 05:25:06,592 INFO [train.py:715] (6/8) Epoch 5, batch 12800, loss[loss=0.149, simple_loss=0.2157, pruned_loss=0.04115, over 4770.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2236, pruned_loss=0.04147, over 971625.69 frames.], batch size: 16, lr: 3.88e-04 +2022-05-05 05:25:46,750 INFO [train.py:715] (6/8) Epoch 5, batch 12850, loss[loss=0.1529, simple_loss=0.2376, pruned_loss=0.03411, over 4809.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2236, pruned_loss=0.04108, over 972024.48 frames.], batch size: 21, lr: 3.88e-04 +2022-05-05 05:26:26,310 INFO [train.py:715] (6/8) Epoch 5, batch 12900, loss[loss=0.1431, simple_loss=0.214, pruned_loss=0.0361, over 4759.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2245, pruned_loss=0.04138, over 972698.38 frames.], batch size: 19, lr: 3.88e-04 +2022-05-05 05:27:06,309 INFO [train.py:715] (6/8) Epoch 5, batch 12950, loss[loss=0.1635, simple_loss=0.2345, pruned_loss=0.04623, over 4962.00 frames.], tot_loss[loss=0.153, simple_loss=0.2238, pruned_loss=0.04113, over 972654.90 frames.], batch size: 24, lr: 3.88e-04 +2022-05-05 05:27:45,735 INFO [train.py:715] (6/8) Epoch 5, batch 13000, loss[loss=0.1369, simple_loss=0.2022, pruned_loss=0.03574, over 4785.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2232, pruned_loss=0.04101, over 973363.42 frames.], batch size: 17, lr: 3.88e-04 +2022-05-05 05:28:25,602 INFO [train.py:715] (6/8) Epoch 5, batch 13050, loss[loss=0.1317, simple_loss=0.207, pruned_loss=0.0282, over 4894.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2231, pruned_loss=0.04117, over 973500.03 frames.], batch size: 19, lr: 3.88e-04 +2022-05-05 05:29:03,806 INFO [train.py:715] (6/8) Epoch 5, batch 13100, loss[loss=0.1556, simple_loss=0.2338, pruned_loss=0.03872, over 4812.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2228, pruned_loss=0.04101, over 973874.10 frames.], batch size: 27, lr: 3.87e-04 +2022-05-05 05:29:42,387 INFO [train.py:715] (6/8) Epoch 5, batch 13150, loss[loss=0.1731, simple_loss=0.233, pruned_loss=0.05659, over 4966.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2236, pruned_loss=0.04148, over 974296.94 frames.], batch size: 15, lr: 3.87e-04 +2022-05-05 05:30:20,476 INFO [train.py:715] (6/8) Epoch 5, batch 13200, loss[loss=0.1567, simple_loss=0.2281, pruned_loss=0.04265, over 4952.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2238, pruned_loss=0.04164, over 974784.76 frames.], batch size: 21, lr: 3.87e-04 +2022-05-05 05:30:58,487 INFO [train.py:715] (6/8) Epoch 5, batch 13250, loss[loss=0.1821, simple_loss=0.2454, pruned_loss=0.05945, over 4950.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2223, pruned_loss=0.04061, over 974581.55 frames.], batch size: 29, lr: 3.87e-04 +2022-05-05 05:31:37,092 INFO [train.py:715] (6/8) Epoch 5, batch 13300, loss[loss=0.1506, simple_loss=0.2067, pruned_loss=0.04729, over 4836.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2223, pruned_loss=0.04103, over 974021.37 frames.], batch size: 12, lr: 3.87e-04 +2022-05-05 05:32:14,953 INFO 
[train.py:715] (6/8) Epoch 5, batch 13350, loss[loss=0.2335, simple_loss=0.2832, pruned_loss=0.09197, over 4957.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2225, pruned_loss=0.04141, over 974722.77 frames.], batch size: 39, lr: 3.87e-04 +2022-05-05 05:32:53,085 INFO [train.py:715] (6/8) Epoch 5, batch 13400, loss[loss=0.1753, simple_loss=0.2445, pruned_loss=0.05307, over 4839.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2222, pruned_loss=0.04079, over 974112.81 frames.], batch size: 30, lr: 3.87e-04 +2022-05-05 05:33:30,830 INFO [train.py:715] (6/8) Epoch 5, batch 13450, loss[loss=0.1485, simple_loss=0.2167, pruned_loss=0.0401, over 4863.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2229, pruned_loss=0.04115, over 973207.32 frames.], batch size: 20, lr: 3.87e-04 +2022-05-05 05:34:09,166 INFO [train.py:715] (6/8) Epoch 5, batch 13500, loss[loss=0.1703, simple_loss=0.2374, pruned_loss=0.05164, over 4759.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2235, pruned_loss=0.04165, over 972689.34 frames.], batch size: 16, lr: 3.87e-04 +2022-05-05 05:34:47,070 INFO [train.py:715] (6/8) Epoch 5, batch 13550, loss[loss=0.1587, simple_loss=0.2297, pruned_loss=0.04382, over 4899.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2232, pruned_loss=0.04103, over 972801.61 frames.], batch size: 19, lr: 3.87e-04 +2022-05-05 05:35:24,567 INFO [train.py:715] (6/8) Epoch 5, batch 13600, loss[loss=0.1442, simple_loss=0.2103, pruned_loss=0.03911, over 4743.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2236, pruned_loss=0.04149, over 971895.79 frames.], batch size: 16, lr: 3.87e-04 +2022-05-05 05:36:03,224 INFO [train.py:715] (6/8) Epoch 5, batch 13650, loss[loss=0.1374, simple_loss=0.2113, pruned_loss=0.03169, over 4989.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2237, pruned_loss=0.04133, over 971892.12 frames.], batch size: 28, lr: 3.87e-04 +2022-05-05 05:36:41,018 INFO [train.py:715] (6/8) Epoch 5, batch 13700, loss[loss=0.1566, simple_loss=0.2275, pruned_loss=0.04287, over 4968.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2234, pruned_loss=0.04135, over 971982.99 frames.], batch size: 24, lr: 3.87e-04 +2022-05-05 05:37:19,075 INFO [train.py:715] (6/8) Epoch 5, batch 13750, loss[loss=0.1696, simple_loss=0.2363, pruned_loss=0.05151, over 4941.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04113, over 971981.91 frames.], batch size: 21, lr: 3.87e-04 +2022-05-05 05:37:56,881 INFO [train.py:715] (6/8) Epoch 5, batch 13800, loss[loss=0.1829, simple_loss=0.2439, pruned_loss=0.06095, over 4970.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2239, pruned_loss=0.04182, over 972334.75 frames.], batch size: 15, lr: 3.87e-04 +2022-05-05 05:38:35,345 INFO [train.py:715] (6/8) Epoch 5, batch 13850, loss[loss=0.1929, simple_loss=0.2629, pruned_loss=0.0614, over 4855.00 frames.], tot_loss[loss=0.1535, simple_loss=0.224, pruned_loss=0.04155, over 973732.69 frames.], batch size: 32, lr: 3.87e-04 +2022-05-05 05:39:13,571 INFO [train.py:715] (6/8) Epoch 5, batch 13900, loss[loss=0.1488, simple_loss=0.2161, pruned_loss=0.04073, over 4929.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2225, pruned_loss=0.04082, over 973570.39 frames.], batch size: 23, lr: 3.87e-04 +2022-05-05 05:39:51,057 INFO [train.py:715] (6/8) Epoch 5, batch 13950, loss[loss=0.1736, simple_loss=0.2446, pruned_loss=0.05128, over 4911.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2223, pruned_loss=0.04094, over 972676.28 frames.], batch size: 18, lr: 3.87e-04 +2022-05-05 05:40:29,788 INFO [train.py:715] (6/8) 
Epoch 5, batch 14000, loss[loss=0.168, simple_loss=0.2384, pruned_loss=0.04881, over 4823.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2223, pruned_loss=0.041, over 972494.98 frames.], batch size: 15, lr: 3.87e-04 +2022-05-05 05:41:07,816 INFO [train.py:715] (6/8) Epoch 5, batch 14050, loss[loss=0.1576, simple_loss=0.2308, pruned_loss=0.04213, over 4754.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2229, pruned_loss=0.04122, over 972381.44 frames.], batch size: 19, lr: 3.87e-04 +2022-05-05 05:41:45,578 INFO [train.py:715] (6/8) Epoch 5, batch 14100, loss[loss=0.1377, simple_loss=0.2073, pruned_loss=0.03407, over 4770.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2236, pruned_loss=0.04168, over 971869.34 frames.], batch size: 19, lr: 3.86e-04 +2022-05-05 05:42:23,455 INFO [train.py:715] (6/8) Epoch 5, batch 14150, loss[loss=0.1563, simple_loss=0.2226, pruned_loss=0.04502, over 4827.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2233, pruned_loss=0.0415, over 972021.98 frames.], batch size: 26, lr: 3.86e-04 +2022-05-05 05:43:01,799 INFO [train.py:715] (6/8) Epoch 5, batch 14200, loss[loss=0.1302, simple_loss=0.2081, pruned_loss=0.02618, over 4940.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2235, pruned_loss=0.04164, over 972428.46 frames.], batch size: 23, lr: 3.86e-04 +2022-05-05 05:43:40,051 INFO [train.py:715] (6/8) Epoch 5, batch 14250, loss[loss=0.1621, simple_loss=0.2281, pruned_loss=0.04808, over 4933.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2227, pruned_loss=0.04129, over 972390.53 frames.], batch size: 18, lr: 3.86e-04 +2022-05-05 05:44:18,050 INFO [train.py:715] (6/8) Epoch 5, batch 14300, loss[loss=0.1564, simple_loss=0.2287, pruned_loss=0.04207, over 4754.00 frames.], tot_loss[loss=0.152, simple_loss=0.2221, pruned_loss=0.04095, over 971358.96 frames.], batch size: 16, lr: 3.86e-04 +2022-05-05 05:44:56,436 INFO [train.py:715] (6/8) Epoch 5, batch 14350, loss[loss=0.1439, simple_loss=0.1981, pruned_loss=0.04482, over 4809.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2226, pruned_loss=0.04136, over 971467.33 frames.], batch size: 12, lr: 3.86e-04 +2022-05-05 05:45:34,230 INFO [train.py:715] (6/8) Epoch 5, batch 14400, loss[loss=0.1908, simple_loss=0.2522, pruned_loss=0.06469, over 4930.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2225, pruned_loss=0.04101, over 972289.49 frames.], batch size: 18, lr: 3.86e-04 +2022-05-05 05:46:11,863 INFO [train.py:715] (6/8) Epoch 5, batch 14450, loss[loss=0.1909, simple_loss=0.2727, pruned_loss=0.05453, over 4833.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2227, pruned_loss=0.04097, over 972842.33 frames.], batch size: 15, lr: 3.86e-04 +2022-05-05 05:46:49,661 INFO [train.py:715] (6/8) Epoch 5, batch 14500, loss[loss=0.1881, simple_loss=0.2619, pruned_loss=0.05718, over 4880.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2226, pruned_loss=0.04096, over 972531.28 frames.], batch size: 22, lr: 3.86e-04 +2022-05-05 05:47:27,996 INFO [train.py:715] (6/8) Epoch 5, batch 14550, loss[loss=0.1808, simple_loss=0.2624, pruned_loss=0.0496, over 4704.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2234, pruned_loss=0.04122, over 972040.01 frames.], batch size: 15, lr: 3.86e-04 +2022-05-05 05:48:06,093 INFO [train.py:715] (6/8) Epoch 5, batch 14600, loss[loss=0.1523, simple_loss=0.2231, pruned_loss=0.04073, over 4810.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2244, pruned_loss=0.04156, over 972026.67 frames.], batch size: 25, lr: 3.86e-04 +2022-05-05 05:48:44,025 INFO [train.py:715] (6/8) Epoch 5, batch 14650, 
loss[loss=0.1595, simple_loss=0.2253, pruned_loss=0.04681, over 4831.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2242, pruned_loss=0.04122, over 971811.93 frames.], batch size: 15, lr: 3.86e-04 +2022-05-05 05:49:22,272 INFO [train.py:715] (6/8) Epoch 5, batch 14700, loss[loss=0.1137, simple_loss=0.1862, pruned_loss=0.02064, over 4932.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2232, pruned_loss=0.04052, over 971910.20 frames.], batch size: 23, lr: 3.86e-04 +2022-05-05 05:49:59,645 INFO [train.py:715] (6/8) Epoch 5, batch 14750, loss[loss=0.1426, simple_loss=0.2155, pruned_loss=0.03486, over 4978.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2225, pruned_loss=0.04064, over 970851.88 frames.], batch size: 24, lr: 3.86e-04 +2022-05-05 05:50:37,675 INFO [train.py:715] (6/8) Epoch 5, batch 14800, loss[loss=0.1699, simple_loss=0.2476, pruned_loss=0.0461, over 4891.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2232, pruned_loss=0.04093, over 970611.56 frames.], batch size: 22, lr: 3.86e-04 +2022-05-05 05:51:15,493 INFO [train.py:715] (6/8) Epoch 5, batch 14850, loss[loss=0.1538, simple_loss=0.2322, pruned_loss=0.03775, over 4811.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2237, pruned_loss=0.04134, over 971650.51 frames.], batch size: 26, lr: 3.86e-04 +2022-05-05 05:51:54,089 INFO [train.py:715] (6/8) Epoch 5, batch 14900, loss[loss=0.1294, simple_loss=0.2017, pruned_loss=0.02852, over 4780.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2236, pruned_loss=0.04108, over 971404.09 frames.], batch size: 18, lr: 3.86e-04 +2022-05-05 05:52:32,749 INFO [train.py:715] (6/8) Epoch 5, batch 14950, loss[loss=0.1419, simple_loss=0.215, pruned_loss=0.03441, over 4918.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2235, pruned_loss=0.04102, over 971662.49 frames.], batch size: 29, lr: 3.86e-04 +2022-05-05 05:53:10,810 INFO [train.py:715] (6/8) Epoch 5, batch 15000, loss[loss=0.1619, simple_loss=0.239, pruned_loss=0.04243, over 4839.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2224, pruned_loss=0.04062, over 971979.25 frames.], batch size: 26, lr: 3.86e-04 +2022-05-05 05:53:10,811 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 05:53:21,083 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.1105, simple_loss=0.1958, pruned_loss=0.01261, over 914524.00 frames. 
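Every training entry in this log follows the same fixed pattern (timestamp, epoch, batch, per-batch loss[...], tot_loss[...], batch size, lr), so quantities such as the slow learning-rate decay visible here (3.94e-04 near batch 7300 down to 3.86e-04 around batch 14800) can be pulled out mechanically. Below is a small, self-contained parsing sketch using only the standard library; the choice of extracted fields is illustrative.

    import re

    # Extract (epoch, batch, tot_loss, lr) from log entries shaped like the ones above.
    PATTERN = re.compile(
        r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), "
        r"loss\[.*?\], tot_loss\[loss=(?P<tot_loss>[\d.]+), .*?\], "
        r"batch size: \d+, lr: (?P<lr>[\d.e-]+)"
    )

    def parse(lines):
        """Yield (epoch, batch, tot_loss, lr) tuples from training-log lines."""
        for line in lines:
            m = PATTERN.search(line)
            if m:
                yield (int(m["epoch"]), int(m["batch"]),
                       float(m["tot_loss"]), float(m["lr"]))

    # Example on one entry copied verbatim from above:
    sample = ("2022-05-05 05:50:37,675 INFO [train.py:715] (6/8) Epoch 5, batch 14800, "
              "loss[loss=0.1699, simple_loss=0.2476, pruned_loss=0.0461, over 4891.00 frames.], "
              "tot_loss[loss=0.1525, simple_loss=0.2232, pruned_loss=0.04093, over 970611.56 frames.], "
              "batch size: 22, lr: 3.86e-04")
    print(next(parse([sample])))   # -> (5, 14800, 0.1525, 0.000386)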
+2022-05-05 05:53:58,557 INFO [train.py:715] (6/8) Epoch 5, batch 15050, loss[loss=0.1695, simple_loss=0.2333, pruned_loss=0.05283, over 4986.00 frames.], tot_loss[loss=0.1515, simple_loss=0.222, pruned_loss=0.04051, over 971811.81 frames.], batch size: 31, lr: 3.85e-04 +2022-05-05 05:54:37,215 INFO [train.py:715] (6/8) Epoch 5, batch 15100, loss[loss=0.1476, simple_loss=0.2064, pruned_loss=0.04444, over 4781.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2219, pruned_loss=0.0403, over 970848.15 frames.], batch size: 12, lr: 3.85e-04 +2022-05-05 05:55:15,135 INFO [train.py:715] (6/8) Epoch 5, batch 15150, loss[loss=0.1873, simple_loss=0.2456, pruned_loss=0.06451, over 4946.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2228, pruned_loss=0.0409, over 971729.61 frames.], batch size: 40, lr: 3.85e-04 +2022-05-05 05:55:53,271 INFO [train.py:715] (6/8) Epoch 5, batch 15200, loss[loss=0.1708, simple_loss=0.2503, pruned_loss=0.04565, over 4900.00 frames.], tot_loss[loss=0.153, simple_loss=0.2234, pruned_loss=0.04127, over 971007.22 frames.], batch size: 19, lr: 3.85e-04 +2022-05-05 05:56:32,188 INFO [train.py:715] (6/8) Epoch 5, batch 15250, loss[loss=0.1586, simple_loss=0.2194, pruned_loss=0.04891, over 4848.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2231, pruned_loss=0.04083, over 971049.11 frames.], batch size: 32, lr: 3.85e-04 +2022-05-05 05:57:10,898 INFO [train.py:715] (6/8) Epoch 5, batch 15300, loss[loss=0.1678, simple_loss=0.2527, pruned_loss=0.04143, over 4805.00 frames.], tot_loss[loss=0.153, simple_loss=0.2238, pruned_loss=0.04105, over 971543.89 frames.], batch size: 21, lr: 3.85e-04 +2022-05-05 05:57:50,139 INFO [train.py:715] (6/8) Epoch 5, batch 15350, loss[loss=0.1515, simple_loss=0.2269, pruned_loss=0.03805, over 4877.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2235, pruned_loss=0.04087, over 971355.73 frames.], batch size: 22, lr: 3.85e-04 +2022-05-05 05:58:28,473 INFO [train.py:715] (6/8) Epoch 5, batch 15400, loss[loss=0.1414, simple_loss=0.2203, pruned_loss=0.03126, over 4744.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2241, pruned_loss=0.04136, over 971466.55 frames.], batch size: 16, lr: 3.85e-04 +2022-05-05 05:59:07,519 INFO [train.py:715] (6/8) Epoch 5, batch 15450, loss[loss=0.1305, simple_loss=0.2088, pruned_loss=0.02607, over 4921.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2238, pruned_loss=0.041, over 971012.31 frames.], batch size: 17, lr: 3.85e-04 +2022-05-05 05:59:46,049 INFO [train.py:715] (6/8) Epoch 5, batch 15500, loss[loss=0.1737, simple_loss=0.2398, pruned_loss=0.05375, over 4854.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2246, pruned_loss=0.0415, over 971431.94 frames.], batch size: 32, lr: 3.85e-04 +2022-05-05 06:00:25,317 INFO [train.py:715] (6/8) Epoch 5, batch 15550, loss[loss=0.1444, simple_loss=0.2146, pruned_loss=0.03716, over 4827.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2251, pruned_loss=0.04189, over 971595.39 frames.], batch size: 13, lr: 3.85e-04 +2022-05-05 06:01:03,324 INFO [train.py:715] (6/8) Epoch 5, batch 15600, loss[loss=0.1586, simple_loss=0.232, pruned_loss=0.04264, over 4932.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2254, pruned_loss=0.04215, over 971578.87 frames.], batch size: 21, lr: 3.85e-04 +2022-05-05 06:01:40,925 INFO [train.py:715] (6/8) Epoch 5, batch 15650, loss[loss=0.1558, simple_loss=0.2265, pruned_loss=0.04256, over 4970.00 frames.], tot_loss[loss=0.1544, simple_loss=0.225, pruned_loss=0.04191, over 971787.08 frames.], batch size: 35, lr: 3.85e-04 +2022-05-05 06:02:18,446 
INFO [train.py:715] (6/8) Epoch 5, batch 15700, loss[loss=0.1627, simple_loss=0.2363, pruned_loss=0.04452, over 4908.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2256, pruned_loss=0.04184, over 971907.63 frames.], batch size: 19, lr: 3.85e-04 +2022-05-05 06:02:56,463 INFO [train.py:715] (6/8) Epoch 5, batch 15750, loss[loss=0.1642, simple_loss=0.2261, pruned_loss=0.05121, over 4792.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2257, pruned_loss=0.04183, over 972756.96 frames.], batch size: 18, lr: 3.85e-04 +2022-05-05 06:03:34,888 INFO [train.py:715] (6/8) Epoch 5, batch 15800, loss[loss=0.1433, simple_loss=0.2261, pruned_loss=0.03024, over 4904.00 frames.], tot_loss[loss=0.154, simple_loss=0.2246, pruned_loss=0.04166, over 972761.77 frames.], batch size: 19, lr: 3.85e-04 +2022-05-05 06:04:12,956 INFO [train.py:715] (6/8) Epoch 5, batch 15850, loss[loss=0.1344, simple_loss=0.2008, pruned_loss=0.03397, over 4751.00 frames.], tot_loss[loss=0.1524, simple_loss=0.223, pruned_loss=0.04089, over 972499.61 frames.], batch size: 16, lr: 3.85e-04 +2022-05-05 06:04:50,528 INFO [train.py:715] (6/8) Epoch 5, batch 15900, loss[loss=0.2012, simple_loss=0.2395, pruned_loss=0.08145, over 4967.00 frames.], tot_loss[loss=0.1524, simple_loss=0.223, pruned_loss=0.04092, over 972009.07 frames.], batch size: 14, lr: 3.85e-04 +2022-05-05 06:05:28,343 INFO [train.py:715] (6/8) Epoch 5, batch 15950, loss[loss=0.1045, simple_loss=0.1719, pruned_loss=0.01856, over 4754.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2219, pruned_loss=0.04045, over 971977.43 frames.], batch size: 12, lr: 3.85e-04 +2022-05-05 06:06:05,813 INFO [train.py:715] (6/8) Epoch 5, batch 16000, loss[loss=0.1273, simple_loss=0.2033, pruned_loss=0.02564, over 4872.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2216, pruned_loss=0.04031, over 971489.22 frames.], batch size: 16, lr: 3.85e-04 +2022-05-05 06:06:43,534 INFO [train.py:715] (6/8) Epoch 5, batch 16050, loss[loss=0.1544, simple_loss=0.2208, pruned_loss=0.04395, over 4918.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2219, pruned_loss=0.04055, over 970859.14 frames.], batch size: 23, lr: 3.84e-04 +2022-05-05 06:07:21,601 INFO [train.py:715] (6/8) Epoch 5, batch 16100, loss[loss=0.1384, simple_loss=0.2091, pruned_loss=0.03384, over 4804.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2225, pruned_loss=0.04091, over 971053.83 frames.], batch size: 26, lr: 3.84e-04 +2022-05-05 06:08:00,773 INFO [train.py:715] (6/8) Epoch 5, batch 16150, loss[loss=0.1499, simple_loss=0.2173, pruned_loss=0.04122, over 4760.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2236, pruned_loss=0.0419, over 971468.97 frames.], batch size: 19, lr: 3.84e-04 +2022-05-05 06:08:39,728 INFO [train.py:715] (6/8) Epoch 5, batch 16200, loss[loss=0.1575, simple_loss=0.2201, pruned_loss=0.04743, over 4918.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2224, pruned_loss=0.04094, over 971221.75 frames.], batch size: 18, lr: 3.84e-04 +2022-05-05 06:09:18,291 INFO [train.py:715] (6/8) Epoch 5, batch 16250, loss[loss=0.1448, simple_loss=0.2102, pruned_loss=0.03971, over 4779.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2218, pruned_loss=0.04066, over 971608.13 frames.], batch size: 17, lr: 3.84e-04 +2022-05-05 06:09:56,099 INFO [train.py:715] (6/8) Epoch 5, batch 16300, loss[loss=0.1424, simple_loss=0.2028, pruned_loss=0.04101, over 4984.00 frames.], tot_loss[loss=0.152, simple_loss=0.2222, pruned_loss=0.04092, over 971722.92 frames.], batch size: 33, lr: 3.84e-04 +2022-05-05 06:10:34,111 INFO [train.py:715] 
(6/8) Epoch 5, batch 16350, loss[loss=0.1339, simple_loss=0.2016, pruned_loss=0.0331, over 4690.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2222, pruned_loss=0.04127, over 971152.77 frames.], batch size: 15, lr: 3.84e-04 +2022-05-05 06:11:12,494 INFO [train.py:715] (6/8) Epoch 5, batch 16400, loss[loss=0.1364, simple_loss=0.2222, pruned_loss=0.0253, over 4926.00 frames.], tot_loss[loss=0.153, simple_loss=0.2232, pruned_loss=0.04141, over 971564.09 frames.], batch size: 21, lr: 3.84e-04 +2022-05-05 06:11:50,949 INFO [train.py:715] (6/8) Epoch 5, batch 16450, loss[loss=0.1547, simple_loss=0.2209, pruned_loss=0.04423, over 4750.00 frames.], tot_loss[loss=0.153, simple_loss=0.2233, pruned_loss=0.04134, over 972197.13 frames.], batch size: 19, lr: 3.84e-04 +2022-05-05 06:12:30,301 INFO [train.py:715] (6/8) Epoch 5, batch 16500, loss[loss=0.1527, simple_loss=0.2114, pruned_loss=0.04705, over 4939.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2227, pruned_loss=0.04075, over 972185.30 frames.], batch size: 29, lr: 3.84e-04 +2022-05-05 06:13:08,220 INFO [train.py:715] (6/8) Epoch 5, batch 16550, loss[loss=0.1513, simple_loss=0.2205, pruned_loss=0.04102, over 4988.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2227, pruned_loss=0.04087, over 972773.92 frames.], batch size: 28, lr: 3.84e-04 +2022-05-05 06:13:46,905 INFO [train.py:715] (6/8) Epoch 5, batch 16600, loss[loss=0.1414, simple_loss=0.2111, pruned_loss=0.03588, over 4876.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2227, pruned_loss=0.04098, over 972224.84 frames.], batch size: 22, lr: 3.84e-04 +2022-05-05 06:14:25,620 INFO [train.py:715] (6/8) Epoch 5, batch 16650, loss[loss=0.2001, simple_loss=0.2603, pruned_loss=0.06997, over 4801.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2218, pruned_loss=0.04043, over 972940.86 frames.], batch size: 24, lr: 3.84e-04 +2022-05-05 06:15:04,298 INFO [train.py:715] (6/8) Epoch 5, batch 16700, loss[loss=0.1567, simple_loss=0.2263, pruned_loss=0.04356, over 4644.00 frames.], tot_loss[loss=0.1514, simple_loss=0.222, pruned_loss=0.04041, over 972788.55 frames.], batch size: 13, lr: 3.84e-04 +2022-05-05 06:15:42,485 INFO [train.py:715] (6/8) Epoch 5, batch 16750, loss[loss=0.1409, simple_loss=0.2081, pruned_loss=0.03688, over 4862.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2213, pruned_loss=0.0399, over 971892.47 frames.], batch size: 20, lr: 3.84e-04 +2022-05-05 06:16:20,936 INFO [train.py:715] (6/8) Epoch 5, batch 16800, loss[loss=0.12, simple_loss=0.1899, pruned_loss=0.02505, over 4921.00 frames.], tot_loss[loss=0.1496, simple_loss=0.22, pruned_loss=0.03954, over 972444.92 frames.], batch size: 18, lr: 3.84e-04 +2022-05-05 06:17:00,069 INFO [train.py:715] (6/8) Epoch 5, batch 16850, loss[loss=0.1228, simple_loss=0.1909, pruned_loss=0.02735, over 4726.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2202, pruned_loss=0.03986, over 972534.06 frames.], batch size: 12, lr: 3.84e-04 +2022-05-05 06:17:37,930 INFO [train.py:715] (6/8) Epoch 5, batch 16900, loss[loss=0.1869, simple_loss=0.2522, pruned_loss=0.06077, over 4750.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2211, pruned_loss=0.04027, over 972390.71 frames.], batch size: 19, lr: 3.84e-04 +2022-05-05 06:18:16,758 INFO [train.py:715] (6/8) Epoch 5, batch 16950, loss[loss=0.1504, simple_loss=0.217, pruned_loss=0.0419, over 4918.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2214, pruned_loss=0.04045, over 973596.65 frames.], batch size: 23, lr: 3.84e-04 +2022-05-05 06:18:55,162 INFO [train.py:715] (6/8) Epoch 5, batch 17000, 
loss[loss=0.1233, simple_loss=0.2048, pruned_loss=0.02089, over 4825.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2224, pruned_loss=0.04064, over 973023.95 frames.], batch size: 26, lr: 3.84e-04 +2022-05-05 06:19:33,550 INFO [train.py:715] (6/8) Epoch 5, batch 17050, loss[loss=0.193, simple_loss=0.2659, pruned_loss=0.06008, over 4738.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2226, pruned_loss=0.0405, over 973648.52 frames.], batch size: 16, lr: 3.83e-04 +2022-05-05 06:20:11,943 INFO [train.py:715] (6/8) Epoch 5, batch 17100, loss[loss=0.1883, simple_loss=0.2604, pruned_loss=0.05814, over 4781.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2224, pruned_loss=0.04062, over 972879.23 frames.], batch size: 18, lr: 3.83e-04 +2022-05-05 06:20:49,749 INFO [train.py:715] (6/8) Epoch 5, batch 17150, loss[loss=0.141, simple_loss=0.2226, pruned_loss=0.02969, over 4921.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2218, pruned_loss=0.03995, over 973220.50 frames.], batch size: 23, lr: 3.83e-04 +2022-05-05 06:21:27,631 INFO [train.py:715] (6/8) Epoch 5, batch 17200, loss[loss=0.1688, simple_loss=0.2343, pruned_loss=0.05168, over 4924.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2231, pruned_loss=0.04038, over 972685.94 frames.], batch size: 18, lr: 3.83e-04 +2022-05-05 06:22:04,736 INFO [train.py:715] (6/8) Epoch 5, batch 17250, loss[loss=0.2044, simple_loss=0.2716, pruned_loss=0.06859, over 4898.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2233, pruned_loss=0.04028, over 972569.96 frames.], batch size: 22, lr: 3.83e-04 +2022-05-05 06:22:42,998 INFO [train.py:715] (6/8) Epoch 5, batch 17300, loss[loss=0.1458, simple_loss=0.2306, pruned_loss=0.03044, over 4758.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2236, pruned_loss=0.0405, over 972180.30 frames.], batch size: 19, lr: 3.83e-04 +2022-05-05 06:23:22,498 INFO [train.py:715] (6/8) Epoch 5, batch 17350, loss[loss=0.1304, simple_loss=0.2051, pruned_loss=0.02787, over 4750.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2233, pruned_loss=0.0405, over 971775.93 frames.], batch size: 19, lr: 3.83e-04 +2022-05-05 06:24:00,864 INFO [train.py:715] (6/8) Epoch 5, batch 17400, loss[loss=0.1568, simple_loss=0.2341, pruned_loss=0.03981, over 4832.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2235, pruned_loss=0.04067, over 971579.89 frames.], batch size: 30, lr: 3.83e-04 +2022-05-05 06:24:39,482 INFO [train.py:715] (6/8) Epoch 5, batch 17450, loss[loss=0.1314, simple_loss=0.2033, pruned_loss=0.02972, over 4815.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2237, pruned_loss=0.04063, over 971261.08 frames.], batch size: 13, lr: 3.83e-04 +2022-05-05 06:25:17,956 INFO [train.py:715] (6/8) Epoch 5, batch 17500, loss[loss=0.1902, simple_loss=0.2457, pruned_loss=0.06737, over 4797.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2232, pruned_loss=0.04055, over 971341.80 frames.], batch size: 18, lr: 3.83e-04 +2022-05-05 06:25:56,805 INFO [train.py:715] (6/8) Epoch 5, batch 17550, loss[loss=0.1501, simple_loss=0.2171, pruned_loss=0.04156, over 4854.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2227, pruned_loss=0.04033, over 971408.04 frames.], batch size: 16, lr: 3.83e-04 +2022-05-05 06:26:35,458 INFO [train.py:715] (6/8) Epoch 5, batch 17600, loss[loss=0.1907, simple_loss=0.2625, pruned_loss=0.05947, over 4900.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2233, pruned_loss=0.04098, over 971322.28 frames.], batch size: 17, lr: 3.83e-04 +2022-05-05 06:27:14,170 INFO [train.py:715] (6/8) Epoch 5, batch 17650, loss[loss=0.1395, 
simple_loss=0.2168, pruned_loss=0.0311, over 4770.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2226, pruned_loss=0.04085, over 971802.54 frames.], batch size: 19, lr: 3.83e-04 +2022-05-05 06:27:52,811 INFO [train.py:715] (6/8) Epoch 5, batch 17700, loss[loss=0.164, simple_loss=0.2431, pruned_loss=0.04238, over 4947.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2218, pruned_loss=0.04046, over 971795.50 frames.], batch size: 29, lr: 3.83e-04 +2022-05-05 06:28:31,728 INFO [train.py:715] (6/8) Epoch 5, batch 17750, loss[loss=0.1457, simple_loss=0.2208, pruned_loss=0.03529, over 4755.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2217, pruned_loss=0.04029, over 971990.21 frames.], batch size: 19, lr: 3.83e-04 +2022-05-05 06:29:09,753 INFO [train.py:715] (6/8) Epoch 5, batch 17800, loss[loss=0.1465, simple_loss=0.2209, pruned_loss=0.03603, over 4685.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2226, pruned_loss=0.0412, over 971233.88 frames.], batch size: 15, lr: 3.83e-04 +2022-05-05 06:29:48,585 INFO [train.py:715] (6/8) Epoch 5, batch 17850, loss[loss=0.1712, simple_loss=0.2459, pruned_loss=0.04825, over 4839.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2224, pruned_loss=0.04111, over 971457.52 frames.], batch size: 15, lr: 3.83e-04 +2022-05-05 06:30:27,678 INFO [train.py:715] (6/8) Epoch 5, batch 17900, loss[loss=0.188, simple_loss=0.2716, pruned_loss=0.05218, over 4915.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2228, pruned_loss=0.04078, over 971549.47 frames.], batch size: 23, lr: 3.83e-04 +2022-05-05 06:31:06,329 INFO [train.py:715] (6/8) Epoch 5, batch 17950, loss[loss=0.1723, simple_loss=0.2309, pruned_loss=0.05686, over 4755.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2231, pruned_loss=0.04086, over 972593.04 frames.], batch size: 19, lr: 3.83e-04 +2022-05-05 06:31:47,057 INFO [train.py:715] (6/8) Epoch 5, batch 18000, loss[loss=0.1328, simple_loss=0.203, pruned_loss=0.03132, over 4930.00 frames.], tot_loss[loss=0.1524, simple_loss=0.223, pruned_loss=0.04087, over 972526.17 frames.], batch size: 29, lr: 3.83e-04 +2022-05-05 06:31:47,057 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 06:31:59,753 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.1102, simple_loss=0.1955, pruned_loss=0.01245, over 914524.00 frames. 
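The four validation checks in this stretch come out at 0.1105, 0.1103, 0.1105 and 0.1102, i.e. essentially flat, while the training tot_loss stays around 0.15. A tiny best-so-far tracker like the hypothetical one below (not the bookkeeping train.py itself uses) makes such plateaus easy to flag when scanning a log like this.

    # Hypothetical best-so-far tracker for the validation losses reported above
    # (0.1105, 0.1103, 0.1105, 0.1102); not the bookkeeping used by train.py itself.

    def track_best(valid_losses, min_delta=1e-3):
        """For each validation check, report whether it beat the best so far by min_delta."""
        best = float("inf")
        for step, loss in enumerate(valid_losses, 1):
            improved = loss < best - min_delta
            best = min(best, loss)
            yield step, loss, improved

    for step, loss, improved in track_best([0.1105, 0.1103, 0.1105, 0.1102]):
        print(f"check {step}: valid loss {loss:.4f}  beat best by >1e-3: {improved}")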
+2022-05-05 06:32:38,353 INFO [train.py:715] (6/8) Epoch 5, batch 18050, loss[loss=0.1337, simple_loss=0.2039, pruned_loss=0.03171, over 4979.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2234, pruned_loss=0.04115, over 972987.74 frames.], batch size: 25, lr: 3.82e-04 +2022-05-05 06:33:17,598 INFO [train.py:715] (6/8) Epoch 5, batch 18100, loss[loss=0.1555, simple_loss=0.2396, pruned_loss=0.0357, over 4782.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2229, pruned_loss=0.04076, over 972856.67 frames.], batch size: 17, lr: 3.82e-04 +2022-05-05 06:33:56,333 INFO [train.py:715] (6/8) Epoch 5, batch 18150, loss[loss=0.1501, simple_loss=0.2163, pruned_loss=0.04195, over 4787.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2226, pruned_loss=0.04041, over 973083.02 frames.], batch size: 14, lr: 3.82e-04 +2022-05-05 06:34:34,861 INFO [train.py:715] (6/8) Epoch 5, batch 18200, loss[loss=0.1407, simple_loss=0.2149, pruned_loss=0.03328, over 4824.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2225, pruned_loss=0.04053, over 972791.14 frames.], batch size: 26, lr: 3.82e-04 +2022-05-05 06:35:14,235 INFO [train.py:715] (6/8) Epoch 5, batch 18250, loss[loss=0.2018, simple_loss=0.2485, pruned_loss=0.0775, over 4943.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2226, pruned_loss=0.04079, over 972749.21 frames.], batch size: 21, lr: 3.82e-04 +2022-05-05 06:35:53,135 INFO [train.py:715] (6/8) Epoch 5, batch 18300, loss[loss=0.1552, simple_loss=0.2388, pruned_loss=0.03584, over 4815.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2225, pruned_loss=0.04051, over 972068.68 frames.], batch size: 21, lr: 3.82e-04 +2022-05-05 06:36:31,710 INFO [train.py:715] (6/8) Epoch 5, batch 18350, loss[loss=0.163, simple_loss=0.2301, pruned_loss=0.04795, over 4957.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2234, pruned_loss=0.04118, over 972571.71 frames.], batch size: 28, lr: 3.82e-04 +2022-05-05 06:37:09,999 INFO [train.py:715] (6/8) Epoch 5, batch 18400, loss[loss=0.1524, simple_loss=0.222, pruned_loss=0.04146, over 4873.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2233, pruned_loss=0.04118, over 972737.66 frames.], batch size: 16, lr: 3.82e-04 +2022-05-05 06:37:49,154 INFO [train.py:715] (6/8) Epoch 5, batch 18450, loss[loss=0.125, simple_loss=0.1989, pruned_loss=0.02554, over 4747.00 frames.], tot_loss[loss=0.1524, simple_loss=0.223, pruned_loss=0.04088, over 972488.21 frames.], batch size: 16, lr: 3.82e-04 +2022-05-05 06:38:27,816 INFO [train.py:715] (6/8) Epoch 5, batch 18500, loss[loss=0.1462, simple_loss=0.2218, pruned_loss=0.03528, over 4750.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2227, pruned_loss=0.04017, over 972715.38 frames.], batch size: 16, lr: 3.82e-04 +2022-05-05 06:39:06,127 INFO [train.py:715] (6/8) Epoch 5, batch 18550, loss[loss=0.1472, simple_loss=0.2135, pruned_loss=0.04045, over 4960.00 frames.], tot_loss[loss=0.1508, simple_loss=0.222, pruned_loss=0.03987, over 973160.25 frames.], batch size: 39, lr: 3.82e-04 +2022-05-05 06:39:45,171 INFO [train.py:715] (6/8) Epoch 5, batch 18600, loss[loss=0.1392, simple_loss=0.2148, pruned_loss=0.03181, over 4831.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2217, pruned_loss=0.04004, over 972690.00 frames.], batch size: 26, lr: 3.82e-04 +2022-05-05 06:40:23,781 INFO [train.py:715] (6/8) Epoch 5, batch 18650, loss[loss=0.1395, simple_loss=0.2073, pruned_loss=0.03584, over 4742.00 frames.], tot_loss[loss=0.15, simple_loss=0.2208, pruned_loss=0.03964, over 972527.25 frames.], batch size: 16, lr: 3.82e-04 +2022-05-05 06:41:01,938 
INFO [train.py:715] (6/8) Epoch 5, batch 18700, loss[loss=0.14, simple_loss=0.2182, pruned_loss=0.03097, over 4897.00 frames.], tot_loss[loss=0.1515, simple_loss=0.222, pruned_loss=0.04051, over 972649.13 frames.], batch size: 17, lr: 3.82e-04 +2022-05-05 06:41:40,677 INFO [train.py:715] (6/8) Epoch 5, batch 18750, loss[loss=0.2029, simple_loss=0.2583, pruned_loss=0.07372, over 4756.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2229, pruned_loss=0.04134, over 971882.95 frames.], batch size: 19, lr: 3.82e-04 +2022-05-05 06:42:19,972 INFO [train.py:715] (6/8) Epoch 5, batch 18800, loss[loss=0.1587, simple_loss=0.2271, pruned_loss=0.04514, over 4943.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2234, pruned_loss=0.04143, over 972073.29 frames.], batch size: 29, lr: 3.82e-04 +2022-05-05 06:42:59,679 INFO [train.py:715] (6/8) Epoch 5, batch 18850, loss[loss=0.1502, simple_loss=0.219, pruned_loss=0.04072, over 4820.00 frames.], tot_loss[loss=0.1525, simple_loss=0.223, pruned_loss=0.04097, over 972242.18 frames.], batch size: 27, lr: 3.82e-04 +2022-05-05 06:43:38,448 INFO [train.py:715] (6/8) Epoch 5, batch 18900, loss[loss=0.1337, simple_loss=0.2063, pruned_loss=0.03054, over 4787.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2236, pruned_loss=0.04129, over 972746.12 frames.], batch size: 17, lr: 3.82e-04 +2022-05-05 06:44:16,645 INFO [train.py:715] (6/8) Epoch 5, batch 18950, loss[loss=0.1628, simple_loss=0.2312, pruned_loss=0.04717, over 4748.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2232, pruned_loss=0.0411, over 972803.52 frames.], batch size: 19, lr: 3.82e-04 +2022-05-05 06:44:56,116 INFO [train.py:715] (6/8) Epoch 5, batch 19000, loss[loss=0.1207, simple_loss=0.1862, pruned_loss=0.02759, over 4759.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2235, pruned_loss=0.04144, over 972809.87 frames.], batch size: 16, lr: 3.82e-04 +2022-05-05 06:45:34,090 INFO [train.py:715] (6/8) Epoch 5, batch 19050, loss[loss=0.124, simple_loss=0.2007, pruned_loss=0.02359, over 4958.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2239, pruned_loss=0.0418, over 973047.84 frames.], batch size: 24, lr: 3.81e-04 +2022-05-05 06:46:13,033 INFO [train.py:715] (6/8) Epoch 5, batch 19100, loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03145, over 4830.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2244, pruned_loss=0.04185, over 972401.26 frames.], batch size: 26, lr: 3.81e-04 +2022-05-05 06:46:52,736 INFO [train.py:715] (6/8) Epoch 5, batch 19150, loss[loss=0.1704, simple_loss=0.2342, pruned_loss=0.05325, over 4938.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2228, pruned_loss=0.04086, over 973049.64 frames.], batch size: 23, lr: 3.81e-04 +2022-05-05 06:47:31,315 INFO [train.py:715] (6/8) Epoch 5, batch 19200, loss[loss=0.169, simple_loss=0.2317, pruned_loss=0.05317, over 4803.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2222, pruned_loss=0.04046, over 973296.09 frames.], batch size: 21, lr: 3.81e-04 +2022-05-05 06:48:10,846 INFO [train.py:715] (6/8) Epoch 5, batch 19250, loss[loss=0.1356, simple_loss=0.2063, pruned_loss=0.03241, over 4962.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2234, pruned_loss=0.04072, over 973367.25 frames.], batch size: 24, lr: 3.81e-04 +2022-05-05 06:48:48,906 INFO [train.py:715] (6/8) Epoch 5, batch 19300, loss[loss=0.1972, simple_loss=0.2726, pruned_loss=0.06088, over 4976.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2232, pruned_loss=0.04087, over 974113.00 frames.], batch size: 33, lr: 3.81e-04 +2022-05-05 06:49:28,001 INFO [train.py:715] (6/8) 
Epoch 5, batch 19350, loss[loss=0.1271, simple_loss=0.197, pruned_loss=0.02858, over 4905.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2228, pruned_loss=0.04074, over 973229.72 frames.], batch size: 19, lr: 3.81e-04 +2022-05-05 06:50:06,778 INFO [train.py:715] (6/8) Epoch 5, batch 19400, loss[loss=0.1823, simple_loss=0.2471, pruned_loss=0.05871, over 4781.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2227, pruned_loss=0.04085, over 972935.98 frames.], batch size: 17, lr: 3.81e-04 +2022-05-05 06:50:45,414 INFO [train.py:715] (6/8) Epoch 5, batch 19450, loss[loss=0.1198, simple_loss=0.2006, pruned_loss=0.01949, over 4959.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2222, pruned_loss=0.0407, over 972827.26 frames.], batch size: 24, lr: 3.81e-04 +2022-05-05 06:51:25,048 INFO [train.py:715] (6/8) Epoch 5, batch 19500, loss[loss=0.1871, simple_loss=0.2605, pruned_loss=0.05684, over 4921.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2216, pruned_loss=0.0403, over 972497.21 frames.], batch size: 23, lr: 3.81e-04 +2022-05-05 06:52:03,849 INFO [train.py:715] (6/8) Epoch 5, batch 19550, loss[loss=0.1513, simple_loss=0.2242, pruned_loss=0.03918, over 4760.00 frames.], tot_loss[loss=0.1517, simple_loss=0.222, pruned_loss=0.04067, over 972092.90 frames.], batch size: 19, lr: 3.81e-04 +2022-05-05 06:52:42,735 INFO [train.py:715] (6/8) Epoch 5, batch 19600, loss[loss=0.1691, simple_loss=0.232, pruned_loss=0.05316, over 4783.00 frames.], tot_loss[loss=0.1515, simple_loss=0.222, pruned_loss=0.04049, over 972548.34 frames.], batch size: 17, lr: 3.81e-04 +2022-05-05 06:53:21,192 INFO [train.py:715] (6/8) Epoch 5, batch 19650, loss[loss=0.1374, simple_loss=0.2087, pruned_loss=0.03305, over 4772.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2222, pruned_loss=0.04048, over 972869.84 frames.], batch size: 17, lr: 3.81e-04 +2022-05-05 06:54:00,678 INFO [train.py:715] (6/8) Epoch 5, batch 19700, loss[loss=0.1574, simple_loss=0.2276, pruned_loss=0.04358, over 4962.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2223, pruned_loss=0.04027, over 973073.52 frames.], batch size: 14, lr: 3.81e-04 +2022-05-05 06:54:39,904 INFO [train.py:715] (6/8) Epoch 5, batch 19750, loss[loss=0.1737, simple_loss=0.2429, pruned_loss=0.05222, over 4915.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2214, pruned_loss=0.0396, over 973014.31 frames.], batch size: 17, lr: 3.81e-04 +2022-05-05 06:55:17,845 INFO [train.py:715] (6/8) Epoch 5, batch 19800, loss[loss=0.1501, simple_loss=0.2342, pruned_loss=0.03301, over 4921.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2213, pruned_loss=0.03945, over 973511.72 frames.], batch size: 29, lr: 3.81e-04 +2022-05-05 06:55:56,847 INFO [train.py:715] (6/8) Epoch 5, batch 19850, loss[loss=0.1354, simple_loss=0.2001, pruned_loss=0.03531, over 4891.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2226, pruned_loss=0.04036, over 973006.78 frames.], batch size: 19, lr: 3.81e-04 +2022-05-05 06:56:35,744 INFO [train.py:715] (6/8) Epoch 5, batch 19900, loss[loss=0.1538, simple_loss=0.2263, pruned_loss=0.04068, over 4775.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2226, pruned_loss=0.04009, over 973234.27 frames.], batch size: 19, lr: 3.81e-04 +2022-05-05 06:57:14,678 INFO [train.py:715] (6/8) Epoch 5, batch 19950, loss[loss=0.1466, simple_loss=0.2211, pruned_loss=0.03604, over 4843.00 frames.], tot_loss[loss=0.1514, simple_loss=0.223, pruned_loss=0.03995, over 972258.61 frames.], batch size: 32, lr: 3.81e-04 +2022-05-05 06:57:53,093 INFO [train.py:715] (6/8) Epoch 5, batch 20000, 
loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03074, over 4988.00 frames.], tot_loss[loss=0.1519, simple_loss=0.223, pruned_loss=0.04044, over 972906.56 frames.], batch size: 24, lr: 3.81e-04 +2022-05-05 06:58:32,600 INFO [train.py:715] (6/8) Epoch 5, batch 20050, loss[loss=0.151, simple_loss=0.2153, pruned_loss=0.04333, over 4964.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2227, pruned_loss=0.04042, over 973179.58 frames.], batch size: 15, lr: 3.81e-04 +2022-05-05 06:59:12,131 INFO [train.py:715] (6/8) Epoch 5, batch 20100, loss[loss=0.1701, simple_loss=0.2353, pruned_loss=0.05242, over 4847.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2225, pruned_loss=0.04045, over 973192.07 frames.], batch size: 20, lr: 3.80e-04 +2022-05-05 06:59:50,437 INFO [train.py:715] (6/8) Epoch 5, batch 20150, loss[loss=0.1226, simple_loss=0.2001, pruned_loss=0.02258, over 4963.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2217, pruned_loss=0.04034, over 973080.52 frames.], batch size: 15, lr: 3.80e-04 +2022-05-05 07:00:30,260 INFO [train.py:715] (6/8) Epoch 5, batch 20200, loss[loss=0.1707, simple_loss=0.2415, pruned_loss=0.04997, over 4783.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2222, pruned_loss=0.04033, over 972904.96 frames.], batch size: 14, lr: 3.80e-04 +2022-05-05 07:01:09,277 INFO [train.py:715] (6/8) Epoch 5, batch 20250, loss[loss=0.1653, simple_loss=0.2322, pruned_loss=0.04916, over 4941.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2222, pruned_loss=0.04042, over 971987.50 frames.], batch size: 21, lr: 3.80e-04 +2022-05-05 07:01:47,788 INFO [train.py:715] (6/8) Epoch 5, batch 20300, loss[loss=0.1374, simple_loss=0.206, pruned_loss=0.0344, over 4826.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2225, pruned_loss=0.0405, over 971350.28 frames.], batch size: 13, lr: 3.80e-04 +2022-05-05 07:02:25,749 INFO [train.py:715] (6/8) Epoch 5, batch 20350, loss[loss=0.1264, simple_loss=0.2026, pruned_loss=0.02509, over 4753.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2225, pruned_loss=0.04097, over 971146.07 frames.], batch size: 19, lr: 3.80e-04 +2022-05-05 07:03:04,302 INFO [train.py:715] (6/8) Epoch 5, batch 20400, loss[loss=0.1692, simple_loss=0.2358, pruned_loss=0.05134, over 4934.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2229, pruned_loss=0.04119, over 971047.04 frames.], batch size: 23, lr: 3.80e-04 +2022-05-05 07:03:43,174 INFO [train.py:715] (6/8) Epoch 5, batch 20450, loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03141, over 4860.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2236, pruned_loss=0.04127, over 970733.15 frames.], batch size: 20, lr: 3.80e-04 +2022-05-05 07:04:21,315 INFO [train.py:715] (6/8) Epoch 5, batch 20500, loss[loss=0.1368, simple_loss=0.2081, pruned_loss=0.0327, over 4992.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2231, pruned_loss=0.0411, over 971447.72 frames.], batch size: 14, lr: 3.80e-04 +2022-05-05 07:05:00,716 INFO [train.py:715] (6/8) Epoch 5, batch 20550, loss[loss=0.1433, simple_loss=0.2135, pruned_loss=0.03656, over 4980.00 frames.], tot_loss[loss=0.153, simple_loss=0.2236, pruned_loss=0.04124, over 971927.39 frames.], batch size: 35, lr: 3.80e-04 +2022-05-05 07:05:39,978 INFO [train.py:715] (6/8) Epoch 5, batch 20600, loss[loss=0.1617, simple_loss=0.2343, pruned_loss=0.04453, over 4861.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2237, pruned_loss=0.04124, over 972962.74 frames.], batch size: 20, lr: 3.80e-04 +2022-05-05 07:06:18,991 INFO [train.py:715] (6/8) Epoch 5, batch 20650, loss[loss=0.145, 
simple_loss=0.2232, pruned_loss=0.0334, over 4890.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2234, pruned_loss=0.04084, over 972987.96 frames.], batch size: 22, lr: 3.80e-04 +2022-05-05 07:06:58,193 INFO [train.py:715] (6/8) Epoch 5, batch 20700, loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03346, over 4882.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2228, pruned_loss=0.04045, over 972908.86 frames.], batch size: 16, lr: 3.80e-04 +2022-05-05 07:07:36,970 INFO [train.py:715] (6/8) Epoch 5, batch 20750, loss[loss=0.1407, simple_loss=0.2072, pruned_loss=0.03709, over 4903.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2232, pruned_loss=0.04061, over 974227.21 frames.], batch size: 19, lr: 3.80e-04 +2022-05-05 07:08:16,384 INFO [train.py:715] (6/8) Epoch 5, batch 20800, loss[loss=0.1414, simple_loss=0.2148, pruned_loss=0.03398, over 4920.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2237, pruned_loss=0.0409, over 973886.41 frames.], batch size: 23, lr: 3.80e-04 +2022-05-05 07:08:55,024 INFO [train.py:715] (6/8) Epoch 5, batch 20850, loss[loss=0.158, simple_loss=0.2327, pruned_loss=0.04162, over 4948.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2236, pruned_loss=0.04082, over 973276.60 frames.], batch size: 29, lr: 3.80e-04 +2022-05-05 07:09:34,326 INFO [train.py:715] (6/8) Epoch 5, batch 20900, loss[loss=0.1364, simple_loss=0.214, pruned_loss=0.02941, over 4758.00 frames.], tot_loss[loss=0.152, simple_loss=0.2228, pruned_loss=0.04062, over 972867.41 frames.], batch size: 19, lr: 3.80e-04 +2022-05-05 07:10:12,904 INFO [train.py:715] (6/8) Epoch 5, batch 20950, loss[loss=0.1303, simple_loss=0.2022, pruned_loss=0.02919, over 4847.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2221, pruned_loss=0.04011, over 972484.72 frames.], batch size: 13, lr: 3.80e-04 +2022-05-05 07:10:51,487 INFO [train.py:715] (6/8) Epoch 5, batch 21000, loss[loss=0.1223, simple_loss=0.1888, pruned_loss=0.02786, over 4812.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2213, pruned_loss=0.03946, over 971915.74 frames.], batch size: 12, lr: 3.80e-04 +2022-05-05 07:10:51,487 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 07:11:01,470 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.1101, simple_loss=0.1954, pruned_loss=0.01242, over 914524.00 frames. 
+2022-05-05 07:11:40,514 INFO [train.py:715] (6/8) Epoch 5, batch 21050, loss[loss=0.147, simple_loss=0.2124, pruned_loss=0.04078, over 4691.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2218, pruned_loss=0.03971, over 972367.35 frames.], batch size: 15, lr: 3.80e-04 +2022-05-05 07:12:19,700 INFO [train.py:715] (6/8) Epoch 5, batch 21100, loss[loss=0.157, simple_loss=0.2339, pruned_loss=0.04003, over 4857.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2221, pruned_loss=0.04033, over 973056.80 frames.], batch size: 20, lr: 3.79e-04 +2022-05-05 07:12:58,336 INFO [train.py:715] (6/8) Epoch 5, batch 21150, loss[loss=0.1856, simple_loss=0.2433, pruned_loss=0.06397, over 4779.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2226, pruned_loss=0.04065, over 973433.78 frames.], batch size: 18, lr: 3.79e-04 +2022-05-05 07:13:37,166 INFO [train.py:715] (6/8) Epoch 5, batch 21200, loss[loss=0.1497, simple_loss=0.2189, pruned_loss=0.04023, over 4855.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2219, pruned_loss=0.04047, over 973664.62 frames.], batch size: 20, lr: 3.79e-04 +2022-05-05 07:14:15,842 INFO [train.py:715] (6/8) Epoch 5, batch 21250, loss[loss=0.1638, simple_loss=0.2312, pruned_loss=0.04815, over 4947.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2216, pruned_loss=0.04051, over 972844.65 frames.], batch size: 21, lr: 3.79e-04 +2022-05-05 07:14:54,662 INFO [train.py:715] (6/8) Epoch 5, batch 21300, loss[loss=0.1599, simple_loss=0.2372, pruned_loss=0.04134, over 4784.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2216, pruned_loss=0.0404, over 973043.26 frames.], batch size: 18, lr: 3.79e-04 +2022-05-05 07:15:33,336 INFO [train.py:715] (6/8) Epoch 5, batch 21350, loss[loss=0.1241, simple_loss=0.195, pruned_loss=0.02657, over 4839.00 frames.], tot_loss[loss=0.1515, simple_loss=0.222, pruned_loss=0.04045, over 972756.82 frames.], batch size: 15, lr: 3.79e-04 +2022-05-05 07:16:11,915 INFO [train.py:715] (6/8) Epoch 5, batch 21400, loss[loss=0.1559, simple_loss=0.2298, pruned_loss=0.04099, over 4922.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2221, pruned_loss=0.04064, over 971546.19 frames.], batch size: 29, lr: 3.79e-04 +2022-05-05 07:16:50,972 INFO [train.py:715] (6/8) Epoch 5, batch 21450, loss[loss=0.129, simple_loss=0.1999, pruned_loss=0.02901, over 4834.00 frames.], tot_loss[loss=0.152, simple_loss=0.2226, pruned_loss=0.04065, over 972126.11 frames.], batch size: 25, lr: 3.79e-04 +2022-05-05 07:17:29,100 INFO [train.py:715] (6/8) Epoch 5, batch 21500, loss[loss=0.1523, simple_loss=0.2342, pruned_loss=0.03519, over 4936.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2219, pruned_loss=0.04033, over 971365.97 frames.], batch size: 35, lr: 3.79e-04 +2022-05-05 07:18:08,223 INFO [train.py:715] (6/8) Epoch 5, batch 21550, loss[loss=0.1605, simple_loss=0.2372, pruned_loss=0.04193, over 4951.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2217, pruned_loss=0.03999, over 971236.31 frames.], batch size: 35, lr: 3.79e-04 +2022-05-05 07:18:46,743 INFO [train.py:715] (6/8) Epoch 5, batch 21600, loss[loss=0.1578, simple_loss=0.2274, pruned_loss=0.04404, over 4689.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2221, pruned_loss=0.04023, over 971843.47 frames.], batch size: 15, lr: 3.79e-04 +2022-05-05 07:19:25,824 INFO [train.py:715] (6/8) Epoch 5, batch 21650, loss[loss=0.1555, simple_loss=0.2284, pruned_loss=0.04135, over 4883.00 frames.], tot_loss[loss=0.151, simple_loss=0.2219, pruned_loss=0.04004, over 971376.93 frames.], batch size: 32, lr: 3.79e-04 +2022-05-05 07:20:04,070 
INFO [train.py:715] (6/8) Epoch 5, batch 21700, loss[loss=0.1444, simple_loss=0.2224, pruned_loss=0.03323, over 4950.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2224, pruned_loss=0.04045, over 972281.58 frames.], batch size: 23, lr: 3.79e-04 +2022-05-05 07:20:42,464 INFO [train.py:715] (6/8) Epoch 5, batch 21750, loss[loss=0.1306, simple_loss=0.2037, pruned_loss=0.02874, over 4684.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2224, pruned_loss=0.04043, over 971820.98 frames.], batch size: 15, lr: 3.79e-04 +2022-05-05 07:21:20,818 INFO [train.py:715] (6/8) Epoch 5, batch 21800, loss[loss=0.1391, simple_loss=0.2069, pruned_loss=0.03562, over 4868.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2224, pruned_loss=0.04045, over 971814.70 frames.], batch size: 22, lr: 3.79e-04 +2022-05-05 07:22:00,030 INFO [train.py:715] (6/8) Epoch 5, batch 21850, loss[loss=0.1754, simple_loss=0.2563, pruned_loss=0.04727, over 4936.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2231, pruned_loss=0.04078, over 971844.90 frames.], batch size: 23, lr: 3.79e-04 +2022-05-05 07:22:38,261 INFO [train.py:715] (6/8) Epoch 5, batch 21900, loss[loss=0.1492, simple_loss=0.2111, pruned_loss=0.04369, over 4831.00 frames.], tot_loss[loss=0.1522, simple_loss=0.223, pruned_loss=0.04072, over 971366.60 frames.], batch size: 26, lr: 3.79e-04 +2022-05-05 07:23:16,806 INFO [train.py:715] (6/8) Epoch 5, batch 21950, loss[loss=0.1571, simple_loss=0.2287, pruned_loss=0.04272, over 4949.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2229, pruned_loss=0.04092, over 971763.92 frames.], batch size: 21, lr: 3.79e-04 +2022-05-05 07:23:55,216 INFO [train.py:715] (6/8) Epoch 5, batch 22000, loss[loss=0.1235, simple_loss=0.2025, pruned_loss=0.02229, over 4792.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2227, pruned_loss=0.04087, over 972007.61 frames.], batch size: 14, lr: 3.79e-04 +2022-05-05 07:24:34,725 INFO [train.py:715] (6/8) Epoch 5, batch 22050, loss[loss=0.1383, simple_loss=0.2133, pruned_loss=0.03167, over 4810.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2232, pruned_loss=0.04077, over 972166.60 frames.], batch size: 25, lr: 3.79e-04 +2022-05-05 07:25:13,188 INFO [train.py:715] (6/8) Epoch 5, batch 22100, loss[loss=0.1705, simple_loss=0.251, pruned_loss=0.04493, over 4811.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2231, pruned_loss=0.04072, over 972055.96 frames.], batch size: 21, lr: 3.79e-04 +2022-05-05 07:25:52,416 INFO [train.py:715] (6/8) Epoch 5, batch 22150, loss[loss=0.1624, simple_loss=0.2262, pruned_loss=0.04925, over 4809.00 frames.], tot_loss[loss=0.151, simple_loss=0.2218, pruned_loss=0.0401, over 972434.96 frames.], batch size: 13, lr: 3.78e-04 +2022-05-05 07:26:31,447 INFO [train.py:715] (6/8) Epoch 5, batch 22200, loss[loss=0.1871, simple_loss=0.2444, pruned_loss=0.06488, over 4808.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2226, pruned_loss=0.04102, over 972535.54 frames.], batch size: 26, lr: 3.78e-04 +2022-05-05 07:27:11,167 INFO [train.py:715] (6/8) Epoch 5, batch 22250, loss[loss=0.1744, simple_loss=0.24, pruned_loss=0.05441, over 4752.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2238, pruned_loss=0.04137, over 971694.17 frames.], batch size: 19, lr: 3.78e-04 +2022-05-05 07:27:50,339 INFO [train.py:715] (6/8) Epoch 5, batch 22300, loss[loss=0.1275, simple_loss=0.1937, pruned_loss=0.03066, over 4870.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2231, pruned_loss=0.04128, over 972600.52 frames.], batch size: 16, lr: 3.78e-04 +2022-05-05 07:28:28,460 INFO [train.py:715] 
(6/8) Epoch 5, batch 22350, loss[loss=0.2084, simple_loss=0.27, pruned_loss=0.07339, over 4871.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2226, pruned_loss=0.04091, over 972618.83 frames.], batch size: 32, lr: 3.78e-04 +2022-05-05 07:29:06,834 INFO [train.py:715] (6/8) Epoch 5, batch 22400, loss[loss=0.1835, simple_loss=0.2597, pruned_loss=0.0536, over 4979.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2222, pruned_loss=0.04081, over 972308.37 frames.], batch size: 15, lr: 3.78e-04 +2022-05-05 07:29:45,744 INFO [train.py:715] (6/8) Epoch 5, batch 22450, loss[loss=0.1221, simple_loss=0.2063, pruned_loss=0.019, over 4939.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2215, pruned_loss=0.04037, over 971628.95 frames.], batch size: 21, lr: 3.78e-04 +2022-05-05 07:30:25,212 INFO [train.py:715] (6/8) Epoch 5, batch 22500, loss[loss=0.1495, simple_loss=0.2171, pruned_loss=0.04088, over 4870.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2216, pruned_loss=0.04011, over 971516.17 frames.], batch size: 20, lr: 3.78e-04 +2022-05-05 07:31:03,489 INFO [train.py:715] (6/8) Epoch 5, batch 22550, loss[loss=0.1229, simple_loss=0.1949, pruned_loss=0.02544, over 4949.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2213, pruned_loss=0.04006, over 972506.73 frames.], batch size: 14, lr: 3.78e-04 +2022-05-05 07:31:42,560 INFO [train.py:715] (6/8) Epoch 5, batch 22600, loss[loss=0.1499, simple_loss=0.2222, pruned_loss=0.03884, over 4653.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2212, pruned_loss=0.04005, over 971891.59 frames.], batch size: 13, lr: 3.78e-04 +2022-05-05 07:32:21,689 INFO [train.py:715] (6/8) Epoch 5, batch 22650, loss[loss=0.1701, simple_loss=0.2329, pruned_loss=0.05367, over 4794.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2221, pruned_loss=0.04074, over 972415.53 frames.], batch size: 13, lr: 3.78e-04 +2022-05-05 07:33:00,846 INFO [train.py:715] (6/8) Epoch 5, batch 22700, loss[loss=0.139, simple_loss=0.2099, pruned_loss=0.03405, over 4973.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2215, pruned_loss=0.04011, over 972360.30 frames.], batch size: 15, lr: 3.78e-04 +2022-05-05 07:33:39,168 INFO [train.py:715] (6/8) Epoch 5, batch 22750, loss[loss=0.1341, simple_loss=0.2157, pruned_loss=0.0263, over 4978.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2221, pruned_loss=0.04078, over 972714.34 frames.], batch size: 28, lr: 3.78e-04 +2022-05-05 07:34:18,365 INFO [train.py:715] (6/8) Epoch 5, batch 22800, loss[loss=0.1474, simple_loss=0.2126, pruned_loss=0.04113, over 4790.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2226, pruned_loss=0.04114, over 972628.15 frames.], batch size: 18, lr: 3.78e-04 +2022-05-05 07:34:57,943 INFO [train.py:715] (6/8) Epoch 5, batch 22850, loss[loss=0.159, simple_loss=0.2367, pruned_loss=0.04069, over 4904.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2233, pruned_loss=0.04103, over 972039.82 frames.], batch size: 19, lr: 3.78e-04 +2022-05-05 07:35:36,332 INFO [train.py:715] (6/8) Epoch 5, batch 22900, loss[loss=0.1513, simple_loss=0.2166, pruned_loss=0.043, over 4908.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2233, pruned_loss=0.04113, over 973149.97 frames.], batch size: 18, lr: 3.78e-04 +2022-05-05 07:36:15,065 INFO [train.py:715] (6/8) Epoch 5, batch 22950, loss[loss=0.1539, simple_loss=0.2297, pruned_loss=0.03905, over 4766.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2233, pruned_loss=0.04076, over 972951.22 frames.], batch size: 19, lr: 3.78e-04 +2022-05-05 07:36:54,410 INFO [train.py:715] (6/8) Epoch 5, batch 23000, 
loss[loss=0.1571, simple_loss=0.2314, pruned_loss=0.04146, over 4792.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2234, pruned_loss=0.04112, over 973343.36 frames.], batch size: 21, lr: 3.78e-04 +2022-05-05 07:37:33,567 INFO [train.py:715] (6/8) Epoch 5, batch 23050, loss[loss=0.1202, simple_loss=0.198, pruned_loss=0.02123, over 4883.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2232, pruned_loss=0.04094, over 972697.39 frames.], batch size: 22, lr: 3.78e-04 +2022-05-05 07:38:12,018 INFO [train.py:715] (6/8) Epoch 5, batch 23100, loss[loss=0.1634, simple_loss=0.2332, pruned_loss=0.0468, over 4804.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2239, pruned_loss=0.04159, over 973566.26 frames.], batch size: 21, lr: 3.78e-04 +2022-05-05 07:38:51,178 INFO [train.py:715] (6/8) Epoch 5, batch 23150, loss[loss=0.1588, simple_loss=0.2299, pruned_loss=0.04386, over 4813.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2235, pruned_loss=0.04135, over 973192.18 frames.], batch size: 26, lr: 3.78e-04 +2022-05-05 07:39:30,784 INFO [train.py:715] (6/8) Epoch 5, batch 23200, loss[loss=0.1526, simple_loss=0.2271, pruned_loss=0.03902, over 4951.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2237, pruned_loss=0.04155, over 972624.27 frames.], batch size: 35, lr: 3.77e-04 +2022-05-05 07:40:09,161 INFO [train.py:715] (6/8) Epoch 5, batch 23250, loss[loss=0.1575, simple_loss=0.2329, pruned_loss=0.04107, over 4878.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2227, pruned_loss=0.04106, over 972052.71 frames.], batch size: 22, lr: 3.77e-04 +2022-05-05 07:40:47,783 INFO [train.py:715] (6/8) Epoch 5, batch 23300, loss[loss=0.1296, simple_loss=0.1972, pruned_loss=0.03102, over 4742.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2226, pruned_loss=0.04121, over 972016.00 frames.], batch size: 12, lr: 3.77e-04 +2022-05-05 07:41:27,168 INFO [train.py:715] (6/8) Epoch 5, batch 23350, loss[loss=0.1557, simple_loss=0.2154, pruned_loss=0.04796, over 4923.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2228, pruned_loss=0.04099, over 971754.23 frames.], batch size: 23, lr: 3.77e-04 +2022-05-05 07:42:05,802 INFO [train.py:715] (6/8) Epoch 5, batch 23400, loss[loss=0.16, simple_loss=0.2253, pruned_loss=0.04731, over 4808.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2226, pruned_loss=0.04085, over 971314.09 frames.], batch size: 21, lr: 3.77e-04 +2022-05-05 07:42:44,245 INFO [train.py:715] (6/8) Epoch 5, batch 23450, loss[loss=0.1336, simple_loss=0.1999, pruned_loss=0.0336, over 4931.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2224, pruned_loss=0.04087, over 971763.88 frames.], batch size: 29, lr: 3.77e-04 +2022-05-05 07:43:22,955 INFO [train.py:715] (6/8) Epoch 5, batch 23500, loss[loss=0.1514, simple_loss=0.22, pruned_loss=0.04139, over 4819.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2234, pruned_loss=0.04116, over 972540.65 frames.], batch size: 25, lr: 3.77e-04 +2022-05-05 07:44:02,013 INFO [train.py:715] (6/8) Epoch 5, batch 23550, loss[loss=0.1356, simple_loss=0.2165, pruned_loss=0.0273, over 4806.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2236, pruned_loss=0.04153, over 971865.90 frames.], batch size: 25, lr: 3.77e-04 +2022-05-05 07:44:40,898 INFO [train.py:715] (6/8) Epoch 5, batch 23600, loss[loss=0.1737, simple_loss=0.2402, pruned_loss=0.0536, over 4918.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2244, pruned_loss=0.04213, over 972002.22 frames.], batch size: 19, lr: 3.77e-04 +2022-05-05 07:45:19,392 INFO [train.py:715] (6/8) Epoch 5, batch 23650, loss[loss=0.1582, 
simple_loss=0.2284, pruned_loss=0.04397, over 4848.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2233, pruned_loss=0.04183, over 971848.63 frames.], batch size: 20, lr: 3.77e-04 +2022-05-05 07:45:58,897 INFO [train.py:715] (6/8) Epoch 5, batch 23700, loss[loss=0.1156, simple_loss=0.1858, pruned_loss=0.02276, over 4654.00 frames.], tot_loss[loss=0.1533, simple_loss=0.223, pruned_loss=0.04181, over 971320.85 frames.], batch size: 13, lr: 3.77e-04 +2022-05-05 07:46:37,475 INFO [train.py:715] (6/8) Epoch 5, batch 23750, loss[loss=0.158, simple_loss=0.227, pruned_loss=0.04455, over 4966.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2219, pruned_loss=0.04135, over 972613.32 frames.], batch size: 31, lr: 3.77e-04 +2022-05-05 07:47:16,505 INFO [train.py:715] (6/8) Epoch 5, batch 23800, loss[loss=0.1271, simple_loss=0.2057, pruned_loss=0.0243, over 4927.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2218, pruned_loss=0.04128, over 973049.52 frames.], batch size: 23, lr: 3.77e-04 +2022-05-05 07:47:55,209 INFO [train.py:715] (6/8) Epoch 5, batch 23850, loss[loss=0.1061, simple_loss=0.17, pruned_loss=0.02111, over 4799.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2229, pruned_loss=0.04147, over 972644.61 frames.], batch size: 12, lr: 3.77e-04 +2022-05-05 07:48:34,418 INFO [train.py:715] (6/8) Epoch 5, batch 23900, loss[loss=0.129, simple_loss=0.2022, pruned_loss=0.02787, over 4782.00 frames.], tot_loss[loss=0.153, simple_loss=0.2232, pruned_loss=0.04147, over 972342.14 frames.], batch size: 18, lr: 3.77e-04 +2022-05-05 07:49:13,372 INFO [train.py:715] (6/8) Epoch 5, batch 23950, loss[loss=0.1437, simple_loss=0.2114, pruned_loss=0.03805, over 4846.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2234, pruned_loss=0.04153, over 972667.26 frames.], batch size: 30, lr: 3.77e-04 +2022-05-05 07:49:51,754 INFO [train.py:715] (6/8) Epoch 5, batch 24000, loss[loss=0.1908, simple_loss=0.2647, pruned_loss=0.05841, over 4959.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2235, pruned_loss=0.0416, over 971880.46 frames.], batch size: 24, lr: 3.77e-04 +2022-05-05 07:49:51,754 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 07:50:02,183 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.11, simple_loss=0.1955, pruned_loss=0.0123, over 914524.00 frames. 
+2022-05-05 07:50:40,723 INFO [train.py:715] (6/8) Epoch 5, batch 24050, loss[loss=0.1476, simple_loss=0.2081, pruned_loss=0.04354, over 4744.00 frames.], tot_loss[loss=0.1532, simple_loss=0.223, pruned_loss=0.04164, over 972264.70 frames.], batch size: 16, lr: 3.77e-04 +2022-05-05 07:51:20,434 INFO [train.py:715] (6/8) Epoch 5, batch 24100, loss[loss=0.1415, simple_loss=0.2076, pruned_loss=0.03768, over 4983.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2225, pruned_loss=0.04135, over 972417.35 frames.], batch size: 15, lr: 3.77e-04 +2022-05-05 07:51:59,182 INFO [train.py:715] (6/8) Epoch 5, batch 24150, loss[loss=0.1324, simple_loss=0.1974, pruned_loss=0.03377, over 4980.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2212, pruned_loss=0.04072, over 972949.87 frames.], batch size: 14, lr: 3.77e-04 +2022-05-05 07:52:37,494 INFO [train.py:715] (6/8) Epoch 5, batch 24200, loss[loss=0.2129, simple_loss=0.2715, pruned_loss=0.07715, over 4985.00 frames.], tot_loss[loss=0.1511, simple_loss=0.221, pruned_loss=0.04064, over 973412.05 frames.], batch size: 15, lr: 3.77e-04 +2022-05-05 07:53:16,810 INFO [train.py:715] (6/8) Epoch 5, batch 24250, loss[loss=0.1715, simple_loss=0.2505, pruned_loss=0.0463, over 4968.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2212, pruned_loss=0.04065, over 973410.02 frames.], batch size: 35, lr: 3.76e-04 +2022-05-05 07:53:55,923 INFO [train.py:715] (6/8) Epoch 5, batch 24300, loss[loss=0.1455, simple_loss=0.2155, pruned_loss=0.03776, over 4823.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2211, pruned_loss=0.04055, over 973735.39 frames.], batch size: 13, lr: 3.76e-04 +2022-05-05 07:54:34,803 INFO [train.py:715] (6/8) Epoch 5, batch 24350, loss[loss=0.1302, simple_loss=0.1959, pruned_loss=0.03228, over 4841.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2207, pruned_loss=0.04028, over 973586.69 frames.], batch size: 30, lr: 3.76e-04 +2022-05-05 07:55:13,057 INFO [train.py:715] (6/8) Epoch 5, batch 24400, loss[loss=0.1356, simple_loss=0.2033, pruned_loss=0.03397, over 4861.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2215, pruned_loss=0.04061, over 973060.91 frames.], batch size: 16, lr: 3.76e-04 +2022-05-05 07:55:52,739 INFO [train.py:715] (6/8) Epoch 5, batch 24450, loss[loss=0.1574, simple_loss=0.2194, pruned_loss=0.04774, over 4745.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2213, pruned_loss=0.04045, over 972557.48 frames.], batch size: 16, lr: 3.76e-04 +2022-05-05 07:56:30,708 INFO [train.py:715] (6/8) Epoch 5, batch 24500, loss[loss=0.1687, simple_loss=0.2395, pruned_loss=0.04896, over 4909.00 frames.], tot_loss[loss=0.1509, simple_loss=0.221, pruned_loss=0.04038, over 972351.99 frames.], batch size: 18, lr: 3.76e-04 +2022-05-05 07:57:09,366 INFO [train.py:715] (6/8) Epoch 5, batch 24550, loss[loss=0.1506, simple_loss=0.2288, pruned_loss=0.03619, over 4818.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2214, pruned_loss=0.04047, over 971767.09 frames.], batch size: 21, lr: 3.76e-04 +2022-05-05 07:57:48,724 INFO [train.py:715] (6/8) Epoch 5, batch 24600, loss[loss=0.1144, simple_loss=0.1881, pruned_loss=0.02039, over 4958.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2213, pruned_loss=0.04049, over 972342.38 frames.], batch size: 21, lr: 3.76e-04 +2022-05-05 07:58:27,793 INFO [train.py:715] (6/8) Epoch 5, batch 24650, loss[loss=0.1422, simple_loss=0.2175, pruned_loss=0.03345, over 4942.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2215, pruned_loss=0.04082, over 972637.04 frames.], batch size: 29, lr: 3.76e-04 +2022-05-05 
07:59:06,982 INFO [train.py:715] (6/8) Epoch 5, batch 24700, loss[loss=0.1511, simple_loss=0.2279, pruned_loss=0.03718, over 4891.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2225, pruned_loss=0.04108, over 972230.78 frames.], batch size: 16, lr: 3.76e-04 +2022-05-05 07:59:45,118 INFO [train.py:715] (6/8) Epoch 5, batch 24750, loss[loss=0.1266, simple_loss=0.2062, pruned_loss=0.02352, over 4832.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2225, pruned_loss=0.04068, over 972946.43 frames.], batch size: 26, lr: 3.76e-04 +2022-05-05 08:00:24,684 INFO [train.py:715] (6/8) Epoch 5, batch 24800, loss[loss=0.1397, simple_loss=0.2133, pruned_loss=0.03312, over 4945.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2223, pruned_loss=0.0401, over 973006.28 frames.], batch size: 21, lr: 3.76e-04 +2022-05-05 08:01:03,111 INFO [train.py:715] (6/8) Epoch 5, batch 24850, loss[loss=0.132, simple_loss=0.2196, pruned_loss=0.02216, over 4831.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2223, pruned_loss=0.04014, over 973356.28 frames.], batch size: 26, lr: 3.76e-04 +2022-05-05 08:01:41,875 INFO [train.py:715] (6/8) Epoch 5, batch 24900, loss[loss=0.1734, simple_loss=0.2392, pruned_loss=0.05382, over 4939.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2222, pruned_loss=0.04022, over 973501.00 frames.], batch size: 29, lr: 3.76e-04 +2022-05-05 08:02:21,425 INFO [train.py:715] (6/8) Epoch 5, batch 24950, loss[loss=0.1559, simple_loss=0.2309, pruned_loss=0.04044, over 4874.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2217, pruned_loss=0.03964, over 973227.99 frames.], batch size: 22, lr: 3.76e-04 +2022-05-05 08:03:00,472 INFO [train.py:715] (6/8) Epoch 5, batch 25000, loss[loss=0.1555, simple_loss=0.2222, pruned_loss=0.04441, over 4778.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2232, pruned_loss=0.04023, over 972576.91 frames.], batch size: 18, lr: 3.76e-04 +2022-05-05 08:03:39,041 INFO [train.py:715] (6/8) Epoch 5, batch 25050, loss[loss=0.156, simple_loss=0.2249, pruned_loss=0.04355, over 4966.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2227, pruned_loss=0.03999, over 973053.32 frames.], batch size: 39, lr: 3.76e-04 +2022-05-05 08:04:17,301 INFO [train.py:715] (6/8) Epoch 5, batch 25100, loss[loss=0.1536, simple_loss=0.226, pruned_loss=0.04062, over 4861.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2227, pruned_loss=0.04012, over 974238.32 frames.], batch size: 30, lr: 3.76e-04 +2022-05-05 08:04:57,543 INFO [train.py:715] (6/8) Epoch 5, batch 25150, loss[loss=0.1363, simple_loss=0.2034, pruned_loss=0.03464, over 4850.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2232, pruned_loss=0.04046, over 973842.21 frames.], batch size: 20, lr: 3.76e-04 +2022-05-05 08:05:35,728 INFO [train.py:715] (6/8) Epoch 5, batch 25200, loss[loss=0.1631, simple_loss=0.2361, pruned_loss=0.04509, over 4943.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2247, pruned_loss=0.04148, over 973755.99 frames.], batch size: 35, lr: 3.76e-04 +2022-05-05 08:06:14,578 INFO [train.py:715] (6/8) Epoch 5, batch 25250, loss[loss=0.1448, simple_loss=0.2136, pruned_loss=0.03797, over 4815.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2221, pruned_loss=0.04038, over 972603.77 frames.], batch size: 25, lr: 3.76e-04 +2022-05-05 08:06:53,405 INFO [train.py:715] (6/8) Epoch 5, batch 25300, loss[loss=0.1418, simple_loss=0.2196, pruned_loss=0.03199, over 4779.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2216, pruned_loss=0.03981, over 972875.04 frames.], batch size: 19, lr: 3.75e-04 +2022-05-05 08:07:31,748 INFO 
[train.py:715] (6/8) Epoch 5, batch 25350, loss[loss=0.1498, simple_loss=0.2262, pruned_loss=0.03674, over 4925.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2215, pruned_loss=0.03999, over 973422.56 frames.], batch size: 29, lr: 3.75e-04 +2022-05-05 08:08:10,247 INFO [train.py:715] (6/8) Epoch 5, batch 25400, loss[loss=0.1604, simple_loss=0.231, pruned_loss=0.04491, over 4746.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2215, pruned_loss=0.03995, over 972816.75 frames.], batch size: 16, lr: 3.75e-04 +2022-05-05 08:08:49,164 INFO [train.py:715] (6/8) Epoch 5, batch 25450, loss[loss=0.1467, simple_loss=0.2137, pruned_loss=0.03986, over 4791.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2232, pruned_loss=0.04102, over 971757.44 frames.], batch size: 12, lr: 3.75e-04 +2022-05-05 08:09:28,362 INFO [train.py:715] (6/8) Epoch 5, batch 25500, loss[loss=0.1308, simple_loss=0.2043, pruned_loss=0.02863, over 4801.00 frames.], tot_loss[loss=0.1525, simple_loss=0.223, pruned_loss=0.04096, over 971935.01 frames.], batch size: 13, lr: 3.75e-04 +2022-05-05 08:10:07,143 INFO [train.py:715] (6/8) Epoch 5, batch 25550, loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.03367, over 4956.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2237, pruned_loss=0.04082, over 972757.81 frames.], batch size: 24, lr: 3.75e-04 +2022-05-05 08:10:45,632 INFO [train.py:715] (6/8) Epoch 5, batch 25600, loss[loss=0.1361, simple_loss=0.2055, pruned_loss=0.03334, over 4829.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2228, pruned_loss=0.04045, over 972653.01 frames.], batch size: 12, lr: 3.75e-04 +2022-05-05 08:11:24,705 INFO [train.py:715] (6/8) Epoch 5, batch 25650, loss[loss=0.1441, simple_loss=0.2212, pruned_loss=0.03356, over 4829.00 frames.], tot_loss[loss=0.152, simple_loss=0.2229, pruned_loss=0.04052, over 971879.47 frames.], batch size: 27, lr: 3.75e-04 +2022-05-05 08:12:03,094 INFO [train.py:715] (6/8) Epoch 5, batch 25700, loss[loss=0.1971, simple_loss=0.2547, pruned_loss=0.0697, over 4931.00 frames.], tot_loss[loss=0.152, simple_loss=0.2226, pruned_loss=0.04072, over 972025.02 frames.], batch size: 39, lr: 3.75e-04 +2022-05-05 08:12:41,256 INFO [train.py:715] (6/8) Epoch 5, batch 25750, loss[loss=0.1331, simple_loss=0.1968, pruned_loss=0.03469, over 4760.00 frames.], tot_loss[loss=0.152, simple_loss=0.2227, pruned_loss=0.04062, over 973066.70 frames.], batch size: 19, lr: 3.75e-04 +2022-05-05 08:13:20,738 INFO [train.py:715] (6/8) Epoch 5, batch 25800, loss[loss=0.1583, simple_loss=0.2256, pruned_loss=0.04546, over 4789.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2226, pruned_loss=0.04016, over 972123.72 frames.], batch size: 14, lr: 3.75e-04 +2022-05-05 08:13:59,833 INFO [train.py:715] (6/8) Epoch 5, batch 25850, loss[loss=0.1527, simple_loss=0.2248, pruned_loss=0.04029, over 4893.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2224, pruned_loss=0.04021, over 973054.99 frames.], batch size: 22, lr: 3.75e-04 +2022-05-05 08:14:38,583 INFO [train.py:715] (6/8) Epoch 5, batch 25900, loss[loss=0.1499, simple_loss=0.232, pruned_loss=0.0339, over 4778.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2235, pruned_loss=0.04057, over 973111.09 frames.], batch size: 14, lr: 3.75e-04 +2022-05-05 08:15:17,124 INFO [train.py:715] (6/8) Epoch 5, batch 25950, loss[loss=0.171, simple_loss=0.242, pruned_loss=0.04994, over 4965.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2241, pruned_loss=0.04064, over 973332.76 frames.], batch size: 15, lr: 3.75e-04 +2022-05-05 08:15:58,603 INFO [train.py:715] (6/8) Epoch 
5, batch 26000, loss[loss=0.1574, simple_loss=0.2224, pruned_loss=0.04616, over 4911.00 frames.], tot_loss[loss=0.153, simple_loss=0.2245, pruned_loss=0.04069, over 973902.73 frames.], batch size: 17, lr: 3.75e-04 +2022-05-05 08:16:37,295 INFO [train.py:715] (6/8) Epoch 5, batch 26050, loss[loss=0.1411, simple_loss=0.2136, pruned_loss=0.03432, over 4897.00 frames.], tot_loss[loss=0.153, simple_loss=0.2247, pruned_loss=0.04068, over 973635.74 frames.], batch size: 22, lr: 3.75e-04 +2022-05-05 08:17:15,756 INFO [train.py:715] (6/8) Epoch 5, batch 26100, loss[loss=0.1401, simple_loss=0.2163, pruned_loss=0.03191, over 4783.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2234, pruned_loss=0.04015, over 973756.77 frames.], batch size: 23, lr: 3.75e-04 +2022-05-05 08:17:54,716 INFO [train.py:715] (6/8) Epoch 5, batch 26150, loss[loss=0.1408, simple_loss=0.2149, pruned_loss=0.03332, over 4810.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2239, pruned_loss=0.04066, over 974030.91 frames.], batch size: 25, lr: 3.75e-04 +2022-05-05 08:18:33,050 INFO [train.py:715] (6/8) Epoch 5, batch 26200, loss[loss=0.09842, simple_loss=0.1667, pruned_loss=0.01508, over 4884.00 frames.], tot_loss[loss=0.1509, simple_loss=0.222, pruned_loss=0.03987, over 973098.73 frames.], batch size: 22, lr: 3.75e-04 +2022-05-05 08:19:12,123 INFO [train.py:715] (6/8) Epoch 5, batch 26250, loss[loss=0.15, simple_loss=0.2322, pruned_loss=0.03392, over 4867.00 frames.], tot_loss[loss=0.1509, simple_loss=0.222, pruned_loss=0.03992, over 973365.81 frames.], batch size: 22, lr: 3.75e-04 +2022-05-05 08:19:51,347 INFO [train.py:715] (6/8) Epoch 5, batch 26300, loss[loss=0.148, simple_loss=0.2219, pruned_loss=0.03699, over 4776.00 frames.], tot_loss[loss=0.151, simple_loss=0.2215, pruned_loss=0.0402, over 973552.29 frames.], batch size: 18, lr: 3.75e-04 +2022-05-05 08:20:30,627 INFO [train.py:715] (6/8) Epoch 5, batch 26350, loss[loss=0.146, simple_loss=0.2223, pruned_loss=0.03488, over 4892.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2214, pruned_loss=0.0401, over 972554.26 frames.], batch size: 22, lr: 3.74e-04 +2022-05-05 08:21:09,424 INFO [train.py:715] (6/8) Epoch 5, batch 26400, loss[loss=0.1288, simple_loss=0.2053, pruned_loss=0.02617, over 4805.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2208, pruned_loss=0.04013, over 972265.21 frames.], batch size: 21, lr: 3.74e-04 +2022-05-05 08:21:48,026 INFO [train.py:715] (6/8) Epoch 5, batch 26450, loss[loss=0.1782, simple_loss=0.2436, pruned_loss=0.05643, over 4884.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2208, pruned_loss=0.03982, over 972084.76 frames.], batch size: 32, lr: 3.74e-04 +2022-05-05 08:22:26,946 INFO [train.py:715] (6/8) Epoch 5, batch 26500, loss[loss=0.1464, simple_loss=0.2143, pruned_loss=0.03924, over 4738.00 frames.], tot_loss[loss=0.1501, simple_loss=0.221, pruned_loss=0.03956, over 971417.87 frames.], batch size: 16, lr: 3.74e-04 +2022-05-05 08:23:06,040 INFO [train.py:715] (6/8) Epoch 5, batch 26550, loss[loss=0.1432, simple_loss=0.217, pruned_loss=0.03475, over 4929.00 frames.], tot_loss[loss=0.15, simple_loss=0.2212, pruned_loss=0.03942, over 971344.20 frames.], batch size: 21, lr: 3.74e-04 +2022-05-05 08:23:44,742 INFO [train.py:715] (6/8) Epoch 5, batch 26600, loss[loss=0.1691, simple_loss=0.2456, pruned_loss=0.04635, over 4954.00 frames.], tot_loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03946, over 970550.15 frames.], batch size: 39, lr: 3.74e-04 +2022-05-05 08:24:24,176 INFO [train.py:715] (6/8) Epoch 5, batch 26650, loss[loss=0.1675, 
simple_loss=0.2332, pruned_loss=0.05092, over 4868.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2212, pruned_loss=0.03959, over 971405.92 frames.], batch size: 39, lr: 3.74e-04 +2022-05-05 08:25:02,985 INFO [train.py:715] (6/8) Epoch 5, batch 26700, loss[loss=0.1267, simple_loss=0.2053, pruned_loss=0.0241, over 4966.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2213, pruned_loss=0.04013, over 972034.85 frames.], batch size: 24, lr: 3.74e-04 +2022-05-05 08:25:41,811 INFO [train.py:715] (6/8) Epoch 5, batch 26750, loss[loss=0.1528, simple_loss=0.2262, pruned_loss=0.03973, over 4904.00 frames.], tot_loss[loss=0.1511, simple_loss=0.222, pruned_loss=0.0401, over 973136.55 frames.], batch size: 19, lr: 3.74e-04 +2022-05-05 08:26:20,195 INFO [train.py:715] (6/8) Epoch 5, batch 26800, loss[loss=0.126, simple_loss=0.1996, pruned_loss=0.02619, over 4987.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2224, pruned_loss=0.0403, over 973767.08 frames.], batch size: 28, lr: 3.74e-04 +2022-05-05 08:26:59,360 INFO [train.py:715] (6/8) Epoch 5, batch 26850, loss[loss=0.1784, simple_loss=0.247, pruned_loss=0.05491, over 4740.00 frames.], tot_loss[loss=0.152, simple_loss=0.2225, pruned_loss=0.04071, over 973112.47 frames.], batch size: 16, lr: 3.74e-04 +2022-05-05 08:27:38,353 INFO [train.py:715] (6/8) Epoch 5, batch 26900, loss[loss=0.1889, simple_loss=0.2626, pruned_loss=0.05761, over 4847.00 frames.], tot_loss[loss=0.153, simple_loss=0.2233, pruned_loss=0.04131, over 972598.90 frames.], batch size: 15, lr: 3.74e-04 +2022-05-05 08:28:17,272 INFO [train.py:715] (6/8) Epoch 5, batch 26950, loss[loss=0.1033, simple_loss=0.1744, pruned_loss=0.01611, over 4802.00 frames.], tot_loss[loss=0.153, simple_loss=0.2239, pruned_loss=0.04112, over 972641.99 frames.], batch size: 12, lr: 3.74e-04 +2022-05-05 08:28:55,974 INFO [train.py:715] (6/8) Epoch 5, batch 27000, loss[loss=0.1447, simple_loss=0.2206, pruned_loss=0.03439, over 4959.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2241, pruned_loss=0.04122, over 972472.69 frames.], batch size: 21, lr: 3.74e-04 +2022-05-05 08:28:55,975 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 08:29:05,775 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.1098, simple_loss=0.195, pruned_loss=0.01232, over 914524.00 frames. 
+2022-05-05 08:29:45,301 INFO [train.py:715] (6/8) Epoch 5, batch 27050, loss[loss=0.1605, simple_loss=0.2246, pruned_loss=0.04816, over 4982.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2232, pruned_loss=0.04093, over 972213.98 frames.], batch size: 39, lr: 3.74e-04 +2022-05-05 08:30:24,754 INFO [train.py:715] (6/8) Epoch 5, batch 27100, loss[loss=0.1716, simple_loss=0.2362, pruned_loss=0.05346, over 4882.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2233, pruned_loss=0.0409, over 973092.40 frames.], batch size: 32, lr: 3.74e-04 +2022-05-05 08:31:04,146 INFO [train.py:715] (6/8) Epoch 5, batch 27150, loss[loss=0.1401, simple_loss=0.214, pruned_loss=0.03312, over 4808.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2234, pruned_loss=0.04113, over 972341.50 frames.], batch size: 25, lr: 3.74e-04 +2022-05-05 08:31:42,963 INFO [train.py:715] (6/8) Epoch 5, batch 27200, loss[loss=0.196, simple_loss=0.2755, pruned_loss=0.05823, over 4864.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2239, pruned_loss=0.04137, over 972968.69 frames.], batch size: 20, lr: 3.74e-04 +2022-05-05 08:32:22,589 INFO [train.py:715] (6/8) Epoch 5, batch 27250, loss[loss=0.1963, simple_loss=0.2447, pruned_loss=0.07396, over 4922.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2242, pruned_loss=0.04148, over 972556.92 frames.], batch size: 18, lr: 3.74e-04 +2022-05-05 08:33:01,564 INFO [train.py:715] (6/8) Epoch 5, batch 27300, loss[loss=0.1591, simple_loss=0.2335, pruned_loss=0.04239, over 4945.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2241, pruned_loss=0.04128, over 973307.96 frames.], batch size: 21, lr: 3.74e-04 +2022-05-05 08:33:40,119 INFO [train.py:715] (6/8) Epoch 5, batch 27350, loss[loss=0.1297, simple_loss=0.2131, pruned_loss=0.02314, over 4815.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2232, pruned_loss=0.04061, over 972430.10 frames.], batch size: 25, lr: 3.74e-04 +2022-05-05 08:34:18,999 INFO [train.py:715] (6/8) Epoch 5, batch 27400, loss[loss=0.1513, simple_loss=0.2137, pruned_loss=0.04447, over 4852.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2226, pruned_loss=0.04032, over 972787.53 frames.], batch size: 30, lr: 3.74e-04 +2022-05-05 08:34:58,264 INFO [train.py:715] (6/8) Epoch 5, batch 27450, loss[loss=0.1525, simple_loss=0.2183, pruned_loss=0.04335, over 4934.00 frames.], tot_loss[loss=0.152, simple_loss=0.2227, pruned_loss=0.04065, over 972914.45 frames.], batch size: 23, lr: 3.73e-04 +2022-05-05 08:35:38,041 INFO [train.py:715] (6/8) Epoch 5, batch 27500, loss[loss=0.1536, simple_loss=0.236, pruned_loss=0.03559, over 4977.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2215, pruned_loss=0.03994, over 971821.95 frames.], batch size: 24, lr: 3.73e-04 +2022-05-05 08:36:16,527 INFO [train.py:715] (6/8) Epoch 5, batch 27550, loss[loss=0.1687, simple_loss=0.2395, pruned_loss=0.04889, over 4731.00 frames.], tot_loss[loss=0.151, simple_loss=0.222, pruned_loss=0.04005, over 972046.83 frames.], batch size: 16, lr: 3.73e-04 +2022-05-05 08:36:55,893 INFO [train.py:715] (6/8) Epoch 5, batch 27600, loss[loss=0.1469, simple_loss=0.2162, pruned_loss=0.03884, over 4879.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2207, pruned_loss=0.03886, over 971681.34 frames.], batch size: 19, lr: 3.73e-04 +2022-05-05 08:37:34,978 INFO [train.py:715] (6/8) Epoch 5, batch 27650, loss[loss=0.1959, simple_loss=0.262, pruned_loss=0.06488, over 4980.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2214, pruned_loss=0.03973, over 971791.46 frames.], batch size: 31, lr: 3.73e-04 +2022-05-05 08:38:13,249 
INFO [train.py:715] (6/8) Epoch 5, batch 27700, loss[loss=0.1595, simple_loss=0.2278, pruned_loss=0.04559, over 4980.00 frames.], tot_loss[loss=0.1505, simple_loss=0.221, pruned_loss=0.03999, over 971924.91 frames.], batch size: 15, lr: 3.73e-04 +2022-05-05 08:38:52,830 INFO [train.py:715] (6/8) Epoch 5, batch 27750, loss[loss=0.233, simple_loss=0.2856, pruned_loss=0.09016, over 4834.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2208, pruned_loss=0.03997, over 972511.70 frames.], batch size: 15, lr: 3.73e-04 +2022-05-05 08:39:32,592 INFO [train.py:715] (6/8) Epoch 5, batch 27800, loss[loss=0.1589, simple_loss=0.2235, pruned_loss=0.04713, over 4977.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2219, pruned_loss=0.0406, over 973974.57 frames.], batch size: 15, lr: 3.73e-04 +2022-05-05 08:40:11,945 INFO [train.py:715] (6/8) Epoch 5, batch 27850, loss[loss=0.1784, simple_loss=0.2427, pruned_loss=0.05704, over 4987.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2218, pruned_loss=0.0405, over 973770.83 frames.], batch size: 28, lr: 3.73e-04 +2022-05-05 08:40:50,650 INFO [train.py:715] (6/8) Epoch 5, batch 27900, loss[loss=0.1512, simple_loss=0.2186, pruned_loss=0.04194, over 4943.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2219, pruned_loss=0.04082, over 973811.66 frames.], batch size: 24, lr: 3.73e-04 +2022-05-05 08:41:29,601 INFO [train.py:715] (6/8) Epoch 5, batch 27950, loss[loss=0.1591, simple_loss=0.2275, pruned_loss=0.04531, over 4876.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2221, pruned_loss=0.04066, over 973230.96 frames.], batch size: 22, lr: 3.73e-04 +2022-05-05 08:42:09,042 INFO [train.py:715] (6/8) Epoch 5, batch 28000, loss[loss=0.1385, simple_loss=0.2088, pruned_loss=0.03415, over 4799.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2217, pruned_loss=0.04022, over 972273.70 frames.], batch size: 17, lr: 3.73e-04 +2022-05-05 08:42:47,128 INFO [train.py:715] (6/8) Epoch 5, batch 28050, loss[loss=0.1462, simple_loss=0.22, pruned_loss=0.03621, over 4969.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2221, pruned_loss=0.04016, over 972029.28 frames.], batch size: 35, lr: 3.73e-04 +2022-05-05 08:43:25,856 INFO [train.py:715] (6/8) Epoch 5, batch 28100, loss[loss=0.1488, simple_loss=0.2099, pruned_loss=0.04383, over 4852.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2228, pruned_loss=0.04094, over 972020.58 frames.], batch size: 20, lr: 3.73e-04 +2022-05-05 08:44:04,994 INFO [train.py:715] (6/8) Epoch 5, batch 28150, loss[loss=0.1568, simple_loss=0.2357, pruned_loss=0.03893, over 4793.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2232, pruned_loss=0.04114, over 971132.22 frames.], batch size: 24, lr: 3.73e-04 +2022-05-05 08:44:43,938 INFO [train.py:715] (6/8) Epoch 5, batch 28200, loss[loss=0.161, simple_loss=0.2278, pruned_loss=0.04711, over 4963.00 frames.], tot_loss[loss=0.1522, simple_loss=0.223, pruned_loss=0.04071, over 971162.85 frames.], batch size: 15, lr: 3.73e-04 +2022-05-05 08:45:22,616 INFO [train.py:715] (6/8) Epoch 5, batch 28250, loss[loss=0.1434, simple_loss=0.2173, pruned_loss=0.0348, over 4930.00 frames.], tot_loss[loss=0.1533, simple_loss=0.224, pruned_loss=0.04135, over 971872.32 frames.], batch size: 39, lr: 3.73e-04 +2022-05-05 08:46:01,489 INFO [train.py:715] (6/8) Epoch 5, batch 28300, loss[loss=0.1555, simple_loss=0.2263, pruned_loss=0.04238, over 4932.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2238, pruned_loss=0.04131, over 971279.38 frames.], batch size: 29, lr: 3.73e-04 +2022-05-05 08:46:39,905 INFO [train.py:715] (6/8) 
Epoch 5, batch 28350, loss[loss=0.1524, simple_loss=0.2208, pruned_loss=0.04205, over 4822.00 frames.], tot_loss[loss=0.154, simple_loss=0.2244, pruned_loss=0.04177, over 971579.01 frames.], batch size: 21, lr: 3.73e-04 +2022-05-05 08:47:18,560 INFO [train.py:715] (6/8) Epoch 5, batch 28400, loss[loss=0.1799, simple_loss=0.2542, pruned_loss=0.05282, over 4799.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2232, pruned_loss=0.04077, over 971487.64 frames.], batch size: 24, lr: 3.73e-04 +2022-05-05 08:47:57,680 INFO [train.py:715] (6/8) Epoch 5, batch 28450, loss[loss=0.1406, simple_loss=0.2285, pruned_loss=0.02634, over 4822.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2238, pruned_loss=0.04077, over 971284.46 frames.], batch size: 15, lr: 3.73e-04 +2022-05-05 08:48:36,727 INFO [train.py:715] (6/8) Epoch 5, batch 28500, loss[loss=0.1258, simple_loss=0.2031, pruned_loss=0.02428, over 4942.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2226, pruned_loss=0.04037, over 971523.71 frames.], batch size: 29, lr: 3.72e-04 +2022-05-05 08:49:15,939 INFO [train.py:715] (6/8) Epoch 5, batch 28550, loss[loss=0.1809, simple_loss=0.2495, pruned_loss=0.05614, over 4984.00 frames.], tot_loss[loss=0.1512, simple_loss=0.222, pruned_loss=0.04019, over 971546.72 frames.], batch size: 15, lr: 3.72e-04 +2022-05-05 08:49:54,627 INFO [train.py:715] (6/8) Epoch 5, batch 28600, loss[loss=0.1533, simple_loss=0.2351, pruned_loss=0.03574, over 4778.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2219, pruned_loss=0.03997, over 971477.10 frames.], batch size: 18, lr: 3.72e-04 +2022-05-05 08:50:34,061 INFO [train.py:715] (6/8) Epoch 5, batch 28650, loss[loss=0.142, simple_loss=0.2125, pruned_loss=0.03576, over 4953.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2222, pruned_loss=0.04008, over 972686.37 frames.], batch size: 24, lr: 3.72e-04 +2022-05-05 08:51:12,502 INFO [train.py:715] (6/8) Epoch 5, batch 28700, loss[loss=0.1488, simple_loss=0.2254, pruned_loss=0.0361, over 4918.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2218, pruned_loss=0.03989, over 972596.85 frames.], batch size: 19, lr: 3.72e-04 +2022-05-05 08:51:51,352 INFO [train.py:715] (6/8) Epoch 5, batch 28750, loss[loss=0.1597, simple_loss=0.2385, pruned_loss=0.04041, over 4780.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2214, pruned_loss=0.03957, over 972677.24 frames.], batch size: 17, lr: 3.72e-04 +2022-05-05 08:52:30,122 INFO [train.py:715] (6/8) Epoch 5, batch 28800, loss[loss=0.1493, simple_loss=0.2361, pruned_loss=0.03128, over 4864.00 frames.], tot_loss[loss=0.151, simple_loss=0.2223, pruned_loss=0.03983, over 972196.80 frames.], batch size: 20, lr: 3.72e-04 +2022-05-05 08:53:09,039 INFO [train.py:715] (6/8) Epoch 5, batch 28850, loss[loss=0.1427, simple_loss=0.2206, pruned_loss=0.03241, over 4929.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2217, pruned_loss=0.03968, over 971464.79 frames.], batch size: 29, lr: 3.72e-04 +2022-05-05 08:53:47,807 INFO [train.py:715] (6/8) Epoch 5, batch 28900, loss[loss=0.1111, simple_loss=0.1927, pruned_loss=0.01478, over 4949.00 frames.], tot_loss[loss=0.1508, simple_loss=0.222, pruned_loss=0.03987, over 971090.74 frames.], batch size: 21, lr: 3.72e-04 +2022-05-05 08:54:26,495 INFO [train.py:715] (6/8) Epoch 5, batch 28950, loss[loss=0.1387, simple_loss=0.2227, pruned_loss=0.02735, over 4831.00 frames.], tot_loss[loss=0.1519, simple_loss=0.223, pruned_loss=0.04039, over 970971.16 frames.], batch size: 26, lr: 3.72e-04 +2022-05-05 08:55:05,612 INFO [train.py:715] (6/8) Epoch 5, batch 29000, 
loss[loss=0.1134, simple_loss=0.1848, pruned_loss=0.02098, over 4854.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2226, pruned_loss=0.04005, over 970997.89 frames.], batch size: 12, lr: 3.72e-04 +2022-05-05 08:55:43,854 INFO [train.py:715] (6/8) Epoch 5, batch 29050, loss[loss=0.1667, simple_loss=0.2302, pruned_loss=0.05161, over 4900.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2223, pruned_loss=0.03994, over 972650.65 frames.], batch size: 16, lr: 3.72e-04 +2022-05-05 08:56:22,924 INFO [train.py:715] (6/8) Epoch 5, batch 29100, loss[loss=0.1521, simple_loss=0.226, pruned_loss=0.03905, over 4746.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2213, pruned_loss=0.03954, over 972578.60 frames.], batch size: 19, lr: 3.72e-04 +2022-05-05 08:57:01,739 INFO [train.py:715] (6/8) Epoch 5, batch 29150, loss[loss=0.1852, simple_loss=0.2324, pruned_loss=0.06902, over 4913.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2214, pruned_loss=0.03977, over 972588.70 frames.], batch size: 17, lr: 3.72e-04 +2022-05-05 08:57:40,489 INFO [train.py:715] (6/8) Epoch 5, batch 29200, loss[loss=0.1427, simple_loss=0.2187, pruned_loss=0.03331, over 4817.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2214, pruned_loss=0.03993, over 972744.86 frames.], batch size: 21, lr: 3.72e-04 +2022-05-05 08:58:19,233 INFO [train.py:715] (6/8) Epoch 5, batch 29250, loss[loss=0.1561, simple_loss=0.2284, pruned_loss=0.04184, over 4944.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2215, pruned_loss=0.04015, over 972241.83 frames.], batch size: 21, lr: 3.72e-04 +2022-05-05 08:58:57,802 INFO [train.py:715] (6/8) Epoch 5, batch 29300, loss[loss=0.1593, simple_loss=0.223, pruned_loss=0.04775, over 4913.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2217, pruned_loss=0.04031, over 971569.60 frames.], batch size: 29, lr: 3.72e-04 +2022-05-05 08:59:37,058 INFO [train.py:715] (6/8) Epoch 5, batch 29350, loss[loss=0.1426, simple_loss=0.2193, pruned_loss=0.03299, over 4793.00 frames.], tot_loss[loss=0.151, simple_loss=0.2217, pruned_loss=0.04017, over 972096.72 frames.], batch size: 24, lr: 3.72e-04 +2022-05-05 09:00:15,739 INFO [train.py:715] (6/8) Epoch 5, batch 29400, loss[loss=0.1721, simple_loss=0.2492, pruned_loss=0.04751, over 4835.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2219, pruned_loss=0.03993, over 972867.17 frames.], batch size: 13, lr: 3.72e-04 +2022-05-05 09:00:54,489 INFO [train.py:715] (6/8) Epoch 5, batch 29450, loss[loss=0.1414, simple_loss=0.2091, pruned_loss=0.03685, over 4967.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2216, pruned_loss=0.04007, over 972935.63 frames.], batch size: 15, lr: 3.72e-04 +2022-05-05 09:01:34,121 INFO [train.py:715] (6/8) Epoch 5, batch 29500, loss[loss=0.15, simple_loss=0.2185, pruned_loss=0.0407, over 4785.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2218, pruned_loss=0.03995, over 973284.22 frames.], batch size: 18, lr: 3.72e-04 +2022-05-05 09:02:13,206 INFO [train.py:715] (6/8) Epoch 5, batch 29550, loss[loss=0.2152, simple_loss=0.2946, pruned_loss=0.06783, over 4781.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2215, pruned_loss=0.03985, over 972289.07 frames.], batch size: 17, lr: 3.72e-04 +2022-05-05 09:02:52,389 INFO [train.py:715] (6/8) Epoch 5, batch 29600, loss[loss=0.1421, simple_loss=0.2224, pruned_loss=0.03093, over 4940.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2221, pruned_loss=0.04008, over 972695.27 frames.], batch size: 29, lr: 3.71e-04 +2022-05-05 09:03:31,059 INFO [train.py:715] (6/8) Epoch 5, batch 29650, loss[loss=0.1498, 
simple_loss=0.215, pruned_loss=0.04228, over 4788.00 frames.], tot_loss[loss=0.152, simple_loss=0.2226, pruned_loss=0.04072, over 971661.91 frames.], batch size: 14, lr: 3.71e-04 +2022-05-05 09:04:09,889 INFO [train.py:715] (6/8) Epoch 5, batch 29700, loss[loss=0.1217, simple_loss=0.1953, pruned_loss=0.0241, over 4642.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2226, pruned_loss=0.04049, over 972259.00 frames.], batch size: 13, lr: 3.71e-04 +2022-05-05 09:04:48,811 INFO [train.py:715] (6/8) Epoch 5, batch 29750, loss[loss=0.1295, simple_loss=0.212, pruned_loss=0.02349, over 4840.00 frames.], tot_loss[loss=0.1522, simple_loss=0.223, pruned_loss=0.04073, over 972447.35 frames.], batch size: 32, lr: 3.71e-04 +2022-05-05 09:05:27,385 INFO [train.py:715] (6/8) Epoch 5, batch 29800, loss[loss=0.1372, simple_loss=0.2067, pruned_loss=0.03381, over 4844.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2224, pruned_loss=0.04006, over 972310.35 frames.], batch size: 15, lr: 3.71e-04 +2022-05-05 09:06:05,622 INFO [train.py:715] (6/8) Epoch 5, batch 29850, loss[loss=0.1568, simple_loss=0.2334, pruned_loss=0.04009, over 4969.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2214, pruned_loss=0.03965, over 972426.42 frames.], batch size: 21, lr: 3.71e-04 +2022-05-05 09:06:44,669 INFO [train.py:715] (6/8) Epoch 5, batch 29900, loss[loss=0.1124, simple_loss=0.1764, pruned_loss=0.02425, over 4973.00 frames.], tot_loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03943, over 972979.83 frames.], batch size: 28, lr: 3.71e-04 +2022-05-05 09:07:24,016 INFO [train.py:715] (6/8) Epoch 5, batch 29950, loss[loss=0.1532, simple_loss=0.2258, pruned_loss=0.04032, over 4753.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2215, pruned_loss=0.03945, over 973254.32 frames.], batch size: 19, lr: 3.71e-04 +2022-05-05 09:08:02,566 INFO [train.py:715] (6/8) Epoch 5, batch 30000, loss[loss=0.181, simple_loss=0.2439, pruned_loss=0.05904, over 4920.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2213, pruned_loss=0.03929, over 972669.38 frames.], batch size: 29, lr: 3.71e-04 +2022-05-05 09:08:02,567 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 09:08:12,296 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.11, simple_loss=0.1953, pruned_loss=0.01241, over 914524.00 frames. 
+2022-05-05 09:08:51,326 INFO [train.py:715] (6/8) Epoch 5, batch 30050, loss[loss=0.1619, simple_loss=0.2524, pruned_loss=0.0357, over 4788.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2223, pruned_loss=0.03978, over 972476.32 frames.], batch size: 17, lr: 3.71e-04 +2022-05-05 09:09:31,494 INFO [train.py:715] (6/8) Epoch 5, batch 30100, loss[loss=0.1349, simple_loss=0.2152, pruned_loss=0.02729, over 4752.00 frames.], tot_loss[loss=0.151, simple_loss=0.2226, pruned_loss=0.03969, over 971790.69 frames.], batch size: 19, lr: 3.71e-04 +2022-05-05 09:10:10,293 INFO [train.py:715] (6/8) Epoch 5, batch 30150, loss[loss=0.1495, simple_loss=0.226, pruned_loss=0.03655, over 4942.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2216, pruned_loss=0.03907, over 971607.51 frames.], batch size: 21, lr: 3.71e-04 +2022-05-05 09:10:48,822 INFO [train.py:715] (6/8) Epoch 5, batch 30200, loss[loss=0.1589, simple_loss=0.2204, pruned_loss=0.04866, over 4971.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2213, pruned_loss=0.03947, over 971606.73 frames.], batch size: 15, lr: 3.71e-04 +2022-05-05 09:11:27,807 INFO [train.py:715] (6/8) Epoch 5, batch 30250, loss[loss=0.1459, simple_loss=0.2192, pruned_loss=0.0363, over 4888.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2218, pruned_loss=0.03958, over 971552.06 frames.], batch size: 19, lr: 3.71e-04 +2022-05-05 09:12:06,781 INFO [train.py:715] (6/8) Epoch 5, batch 30300, loss[loss=0.1541, simple_loss=0.2307, pruned_loss=0.03878, over 4988.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2221, pruned_loss=0.03958, over 971587.42 frames.], batch size: 28, lr: 3.71e-04 +2022-05-05 09:12:45,785 INFO [train.py:715] (6/8) Epoch 5, batch 30350, loss[loss=0.1613, simple_loss=0.2394, pruned_loss=0.04162, over 4985.00 frames.], tot_loss[loss=0.1506, simple_loss=0.222, pruned_loss=0.03963, over 971910.75 frames.], batch size: 14, lr: 3.71e-04 +2022-05-05 09:13:24,286 INFO [train.py:715] (6/8) Epoch 5, batch 30400, loss[loss=0.1436, simple_loss=0.2183, pruned_loss=0.03451, over 4739.00 frames.], tot_loss[loss=0.15, simple_loss=0.2214, pruned_loss=0.03933, over 971102.03 frames.], batch size: 16, lr: 3.71e-04 +2022-05-05 09:14:03,373 INFO [train.py:715] (6/8) Epoch 5, batch 30450, loss[loss=0.1494, simple_loss=0.2195, pruned_loss=0.03969, over 4909.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2219, pruned_loss=0.03955, over 971473.42 frames.], batch size: 38, lr: 3.71e-04 +2022-05-05 09:14:42,251 INFO [train.py:715] (6/8) Epoch 5, batch 30500, loss[loss=0.1726, simple_loss=0.2273, pruned_loss=0.0589, over 4820.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2218, pruned_loss=0.03978, over 971956.88 frames.], batch size: 15, lr: 3.71e-04 +2022-05-05 09:15:20,922 INFO [train.py:715] (6/8) Epoch 5, batch 30550, loss[loss=0.1608, simple_loss=0.2301, pruned_loss=0.04569, over 4693.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2207, pruned_loss=0.03958, over 972162.87 frames.], batch size: 15, lr: 3.71e-04 +2022-05-05 09:15:58,937 INFO [train.py:715] (6/8) Epoch 5, batch 30600, loss[loss=0.1427, simple_loss=0.2136, pruned_loss=0.03591, over 4838.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2209, pruned_loss=0.04007, over 971221.86 frames.], batch size: 15, lr: 3.71e-04 +2022-05-05 09:16:37,780 INFO [train.py:715] (6/8) Epoch 5, batch 30650, loss[loss=0.1494, simple_loss=0.2217, pruned_loss=0.03856, over 4817.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2209, pruned_loss=0.04001, over 971351.68 frames.], batch size: 26, lr: 3.71e-04 +2022-05-05 09:17:16,921 
INFO [train.py:715] (6/8) Epoch 5, batch 30700, loss[loss=0.1728, simple_loss=0.2429, pruned_loss=0.05133, over 4778.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2208, pruned_loss=0.03979, over 971024.06 frames.], batch size: 18, lr: 3.70e-04 +2022-05-05 09:17:55,174 INFO [train.py:715] (6/8) Epoch 5, batch 30750, loss[loss=0.1319, simple_loss=0.1981, pruned_loss=0.03289, over 4812.00 frames.], tot_loss[loss=0.1503, simple_loss=0.221, pruned_loss=0.03981, over 972324.79 frames.], batch size: 25, lr: 3.70e-04 +2022-05-05 09:18:33,968 INFO [train.py:715] (6/8) Epoch 5, batch 30800, loss[loss=0.1157, simple_loss=0.1908, pruned_loss=0.02029, over 4823.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2211, pruned_loss=0.03975, over 972041.50 frames.], batch size: 26, lr: 3.70e-04 +2022-05-05 09:19:12,986 INFO [train.py:715] (6/8) Epoch 5, batch 30850, loss[loss=0.1466, simple_loss=0.2226, pruned_loss=0.03529, over 4866.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2215, pruned_loss=0.0402, over 972039.23 frames.], batch size: 16, lr: 3.70e-04 +2022-05-05 09:19:51,001 INFO [train.py:715] (6/8) Epoch 5, batch 30900, loss[loss=0.1331, simple_loss=0.2188, pruned_loss=0.02375, over 4828.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2215, pruned_loss=0.04016, over 972245.06 frames.], batch size: 25, lr: 3.70e-04 +2022-05-05 09:20:29,875 INFO [train.py:715] (6/8) Epoch 5, batch 30950, loss[loss=0.1834, simple_loss=0.2391, pruned_loss=0.06387, over 4849.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2219, pruned_loss=0.04063, over 971989.88 frames.], batch size: 15, lr: 3.70e-04 +2022-05-05 09:21:09,512 INFO [train.py:715] (6/8) Epoch 5, batch 31000, loss[loss=0.1721, simple_loss=0.2336, pruned_loss=0.05533, over 4877.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2223, pruned_loss=0.04076, over 971856.24 frames.], batch size: 16, lr: 3.70e-04 +2022-05-05 09:21:48,974 INFO [train.py:715] (6/8) Epoch 5, batch 31050, loss[loss=0.1691, simple_loss=0.2371, pruned_loss=0.05061, over 4979.00 frames.], tot_loss[loss=0.152, simple_loss=0.2227, pruned_loss=0.04068, over 972306.01 frames.], batch size: 33, lr: 3.70e-04 +2022-05-05 09:22:27,593 INFO [train.py:715] (6/8) Epoch 5, batch 31100, loss[loss=0.1661, simple_loss=0.2464, pruned_loss=0.0429, over 4965.00 frames.], tot_loss[loss=0.1526, simple_loss=0.223, pruned_loss=0.04112, over 972020.87 frames.], batch size: 24, lr: 3.70e-04 +2022-05-05 09:23:06,676 INFO [train.py:715] (6/8) Epoch 5, batch 31150, loss[loss=0.17, simple_loss=0.2334, pruned_loss=0.05335, over 4927.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2228, pruned_loss=0.0412, over 971573.81 frames.], batch size: 18, lr: 3.70e-04 +2022-05-05 09:23:45,588 INFO [train.py:715] (6/8) Epoch 5, batch 31200, loss[loss=0.1389, simple_loss=0.2218, pruned_loss=0.02799, over 4888.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2221, pruned_loss=0.04058, over 971594.70 frames.], batch size: 22, lr: 3.70e-04 +2022-05-05 09:24:24,057 INFO [train.py:715] (6/8) Epoch 5, batch 31250, loss[loss=0.1377, simple_loss=0.212, pruned_loss=0.03163, over 4809.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2219, pruned_loss=0.04084, over 971605.29 frames.], batch size: 21, lr: 3.70e-04 +2022-05-05 09:25:02,648 INFO [train.py:715] (6/8) Epoch 5, batch 31300, loss[loss=0.1479, simple_loss=0.2058, pruned_loss=0.04502, over 4980.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2223, pruned_loss=0.04063, over 972737.19 frames.], batch size: 35, lr: 3.70e-04 +2022-05-05 09:25:41,534 INFO [train.py:715] (6/8) 
Epoch 5, batch 31350, loss[loss=0.1462, simple_loss=0.2226, pruned_loss=0.03488, over 4954.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2222, pruned_loss=0.04063, over 972679.86 frames.], batch size: 24, lr: 3.70e-04 +2022-05-05 09:26:20,318 INFO [train.py:715] (6/8) Epoch 5, batch 31400, loss[loss=0.1735, simple_loss=0.2424, pruned_loss=0.05226, over 4817.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2218, pruned_loss=0.04048, over 971677.94 frames.], batch size: 27, lr: 3.70e-04 +2022-05-05 09:26:59,040 INFO [train.py:715] (6/8) Epoch 5, batch 31450, loss[loss=0.1467, simple_loss=0.2222, pruned_loss=0.03559, over 4793.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2215, pruned_loss=0.04017, over 972103.07 frames.], batch size: 18, lr: 3.70e-04 +2022-05-05 09:27:37,868 INFO [train.py:715] (6/8) Epoch 5, batch 31500, loss[loss=0.1665, simple_loss=0.2252, pruned_loss=0.05387, over 4851.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2222, pruned_loss=0.0405, over 972401.73 frames.], batch size: 13, lr: 3.70e-04 +2022-05-05 09:28:16,801 INFO [train.py:715] (6/8) Epoch 5, batch 31550, loss[loss=0.1408, simple_loss=0.2217, pruned_loss=0.02992, over 4838.00 frames.], tot_loss[loss=0.1513, simple_loss=0.222, pruned_loss=0.04034, over 973363.63 frames.], batch size: 15, lr: 3.70e-04 +2022-05-05 09:28:55,564 INFO [train.py:715] (6/8) Epoch 5, batch 31600, loss[loss=0.1527, simple_loss=0.2331, pruned_loss=0.03618, over 4887.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2221, pruned_loss=0.04042, over 973156.73 frames.], batch size: 22, lr: 3.70e-04 +2022-05-05 09:29:34,425 INFO [train.py:715] (6/8) Epoch 5, batch 31650, loss[loss=0.1377, simple_loss=0.2221, pruned_loss=0.02663, over 4802.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2221, pruned_loss=0.04024, over 972924.14 frames.], batch size: 21, lr: 3.70e-04 +2022-05-05 09:30:13,323 INFO [train.py:715] (6/8) Epoch 5, batch 31700, loss[loss=0.1507, simple_loss=0.2233, pruned_loss=0.03901, over 4843.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2227, pruned_loss=0.04079, over 972670.95 frames.], batch size: 15, lr: 3.70e-04 +2022-05-05 09:30:52,062 INFO [train.py:715] (6/8) Epoch 5, batch 31750, loss[loss=0.134, simple_loss=0.2095, pruned_loss=0.02928, over 4978.00 frames.], tot_loss[loss=0.1526, simple_loss=0.223, pruned_loss=0.04107, over 972897.82 frames.], batch size: 28, lr: 3.70e-04 +2022-05-05 09:31:31,167 INFO [train.py:715] (6/8) Epoch 5, batch 31800, loss[loss=0.1298, simple_loss=0.2039, pruned_loss=0.02784, over 4944.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2226, pruned_loss=0.04128, over 972738.03 frames.], batch size: 29, lr: 3.69e-04 +2022-05-05 09:32:09,899 INFO [train.py:715] (6/8) Epoch 5, batch 31850, loss[loss=0.1372, simple_loss=0.2083, pruned_loss=0.03306, over 4984.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2228, pruned_loss=0.04078, over 973470.52 frames.], batch size: 15, lr: 3.69e-04 +2022-05-05 09:32:49,447 INFO [train.py:715] (6/8) Epoch 5, batch 31900, loss[loss=0.1558, simple_loss=0.2105, pruned_loss=0.05051, over 4854.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2222, pruned_loss=0.04058, over 972979.86 frames.], batch size: 32, lr: 3.69e-04 +2022-05-05 09:33:28,132 INFO [train.py:715] (6/8) Epoch 5, batch 31950, loss[loss=0.1483, simple_loss=0.2214, pruned_loss=0.03757, over 4875.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2217, pruned_loss=0.04007, over 973840.91 frames.], batch size: 22, lr: 3.69e-04 +2022-05-05 09:34:06,675 INFO [train.py:715] (6/8) Epoch 5, batch 32000, 
loss[loss=0.1549, simple_loss=0.2291, pruned_loss=0.04036, over 4830.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2223, pruned_loss=0.04042, over 973523.26 frames.], batch size: 13, lr: 3.69e-04 +2022-05-05 09:34:45,046 INFO [train.py:715] (6/8) Epoch 5, batch 32050, loss[loss=0.152, simple_loss=0.2175, pruned_loss=0.04327, over 4830.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2213, pruned_loss=0.03992, over 972275.24 frames.], batch size: 13, lr: 3.69e-04 +2022-05-05 09:35:24,096 INFO [train.py:715] (6/8) Epoch 5, batch 32100, loss[loss=0.1457, simple_loss=0.223, pruned_loss=0.0342, over 4801.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2215, pruned_loss=0.0394, over 972893.20 frames.], batch size: 21, lr: 3.69e-04 +2022-05-05 09:36:02,961 INFO [train.py:715] (6/8) Epoch 5, batch 32150, loss[loss=0.1262, simple_loss=0.1971, pruned_loss=0.02766, over 4767.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2204, pruned_loss=0.03929, over 972766.37 frames.], batch size: 16, lr: 3.69e-04 +2022-05-05 09:36:41,523 INFO [train.py:715] (6/8) Epoch 5, batch 32200, loss[loss=0.1539, simple_loss=0.2236, pruned_loss=0.0421, over 4920.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2213, pruned_loss=0.04004, over 973139.06 frames.], batch size: 17, lr: 3.69e-04 +2022-05-05 09:37:20,073 INFO [train.py:715] (6/8) Epoch 5, batch 32250, loss[loss=0.1617, simple_loss=0.2323, pruned_loss=0.04557, over 4864.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2217, pruned_loss=0.04032, over 973012.56 frames.], batch size: 39, lr: 3.69e-04 +2022-05-05 09:37:59,209 INFO [train.py:715] (6/8) Epoch 5, batch 32300, loss[loss=0.1483, simple_loss=0.2271, pruned_loss=0.03482, over 4969.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2216, pruned_loss=0.04006, over 973208.83 frames.], batch size: 14, lr: 3.69e-04 +2022-05-05 09:38:37,805 INFO [train.py:715] (6/8) Epoch 5, batch 32350, loss[loss=0.1547, simple_loss=0.2239, pruned_loss=0.04274, over 4808.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2219, pruned_loss=0.03983, over 973092.68 frames.], batch size: 25, lr: 3.69e-04 +2022-05-05 09:39:16,504 INFO [train.py:715] (6/8) Epoch 5, batch 32400, loss[loss=0.1786, simple_loss=0.2523, pruned_loss=0.05244, over 4809.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2215, pruned_loss=0.03966, over 972928.88 frames.], batch size: 25, lr: 3.69e-04 +2022-05-05 09:39:55,118 INFO [train.py:715] (6/8) Epoch 5, batch 32450, loss[loss=0.1346, simple_loss=0.2061, pruned_loss=0.03154, over 4797.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2216, pruned_loss=0.03996, over 972610.14 frames.], batch size: 25, lr: 3.69e-04 +2022-05-05 09:40:33,931 INFO [train.py:715] (6/8) Epoch 5, batch 32500, loss[loss=0.1275, simple_loss=0.1948, pruned_loss=0.03009, over 4788.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2204, pruned_loss=0.03969, over 971295.55 frames.], batch size: 17, lr: 3.69e-04 +2022-05-05 09:41:13,491 INFO [train.py:715] (6/8) Epoch 5, batch 32550, loss[loss=0.1554, simple_loss=0.2239, pruned_loss=0.04347, over 4860.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2206, pruned_loss=0.03995, over 971460.11 frames.], batch size: 32, lr: 3.69e-04 +2022-05-05 09:41:51,932 INFO [train.py:715] (6/8) Epoch 5, batch 32600, loss[loss=0.1503, simple_loss=0.2158, pruned_loss=0.04236, over 4750.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2208, pruned_loss=0.03978, over 971832.71 frames.], batch size: 16, lr: 3.69e-04 +2022-05-05 09:42:30,727 INFO [train.py:715] (6/8) Epoch 5, batch 32650, loss[loss=0.1453, 
simple_loss=0.2276, pruned_loss=0.03147, over 4785.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2212, pruned_loss=0.04016, over 971902.66 frames.], batch size: 18, lr: 3.69e-04 +2022-05-05 09:43:09,273 INFO [train.py:715] (6/8) Epoch 5, batch 32700, loss[loss=0.1557, simple_loss=0.22, pruned_loss=0.04563, over 4841.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2212, pruned_loss=0.03999, over 972287.29 frames.], batch size: 34, lr: 3.69e-04 +2022-05-05 09:43:47,572 INFO [train.py:715] (6/8) Epoch 5, batch 32750, loss[loss=0.1384, simple_loss=0.2077, pruned_loss=0.03461, over 4831.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2209, pruned_loss=0.03982, over 971854.58 frames.], batch size: 25, lr: 3.69e-04 +2022-05-05 09:44:26,277 INFO [train.py:715] (6/8) Epoch 5, batch 32800, loss[loss=0.1366, simple_loss=0.2127, pruned_loss=0.03028, over 4910.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2214, pruned_loss=0.04026, over 971515.45 frames.], batch size: 19, lr: 3.69e-04 +2022-05-05 09:45:05,106 INFO [train.py:715] (6/8) Epoch 5, batch 32850, loss[loss=0.1538, simple_loss=0.2321, pruned_loss=0.0377, over 4910.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2216, pruned_loss=0.04085, over 972137.82 frames.], batch size: 17, lr: 3.69e-04 +2022-05-05 09:45:44,066 INFO [train.py:715] (6/8) Epoch 5, batch 32900, loss[loss=0.168, simple_loss=0.2307, pruned_loss=0.05268, over 4871.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2219, pruned_loss=0.04065, over 971876.54 frames.], batch size: 38, lr: 3.69e-04 +2022-05-05 09:46:22,918 INFO [train.py:715] (6/8) Epoch 5, batch 32950, loss[loss=0.1478, simple_loss=0.2165, pruned_loss=0.03949, over 4881.00 frames.], tot_loss[loss=0.1512, simple_loss=0.222, pruned_loss=0.0402, over 971996.19 frames.], batch size: 12, lr: 3.68e-04 +2022-05-05 09:47:01,973 INFO [train.py:715] (6/8) Epoch 5, batch 33000, loss[loss=0.2046, simple_loss=0.2591, pruned_loss=0.07502, over 4800.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2224, pruned_loss=0.04057, over 971535.98 frames.], batch size: 18, lr: 3.68e-04 +2022-05-05 09:47:01,973 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 09:47:11,685 INFO [train.py:742] (6/8) Epoch 5, validation: loss=0.1099, simple_loss=0.1951, pruned_loss=0.01236, over 914524.00 frames. 
+2022-05-05 09:47:50,706 INFO [train.py:715] (6/8) Epoch 5, batch 33050, loss[loss=0.1304, simple_loss=0.2078, pruned_loss=0.02648, over 4938.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2227, pruned_loss=0.04056, over 971912.65 frames.], batch size: 23, lr: 3.68e-04 +2022-05-05 09:48:29,614 INFO [train.py:715] (6/8) Epoch 5, batch 33100, loss[loss=0.1606, simple_loss=0.2288, pruned_loss=0.04621, over 4907.00 frames.], tot_loss[loss=0.153, simple_loss=0.2236, pruned_loss=0.04121, over 971136.26 frames.], batch size: 19, lr: 3.68e-04 +2022-05-05 09:49:07,623 INFO [train.py:715] (6/8) Epoch 5, batch 33150, loss[loss=0.1528, simple_loss=0.2297, pruned_loss=0.03798, over 4981.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2239, pruned_loss=0.04151, over 971386.73 frames.], batch size: 15, lr: 3.68e-04 +2022-05-05 09:49:46,221 INFO [train.py:715] (6/8) Epoch 5, batch 33200, loss[loss=0.1258, simple_loss=0.1986, pruned_loss=0.02646, over 4987.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2234, pruned_loss=0.041, over 971715.64 frames.], batch size: 28, lr: 3.68e-04 +2022-05-05 09:50:25,072 INFO [train.py:715] (6/8) Epoch 5, batch 33250, loss[loss=0.1825, simple_loss=0.2454, pruned_loss=0.0598, over 4802.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2225, pruned_loss=0.04058, over 971987.18 frames.], batch size: 21, lr: 3.68e-04 +2022-05-05 09:51:03,572 INFO [train.py:715] (6/8) Epoch 5, batch 33300, loss[loss=0.1518, simple_loss=0.2267, pruned_loss=0.03844, over 4817.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2227, pruned_loss=0.04005, over 971506.04 frames.], batch size: 27, lr: 3.68e-04 +2022-05-05 09:51:41,938 INFO [train.py:715] (6/8) Epoch 5, batch 33350, loss[loss=0.1589, simple_loss=0.2296, pruned_loss=0.04407, over 4833.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2222, pruned_loss=0.04013, over 971781.73 frames.], batch size: 30, lr: 3.68e-04 +2022-05-05 09:52:21,208 INFO [train.py:715] (6/8) Epoch 5, batch 33400, loss[loss=0.1446, simple_loss=0.2172, pruned_loss=0.036, over 4918.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2219, pruned_loss=0.0398, over 972121.96 frames.], batch size: 23, lr: 3.68e-04 +2022-05-05 09:52:59,901 INFO [train.py:715] (6/8) Epoch 5, batch 33450, loss[loss=0.1373, simple_loss=0.2053, pruned_loss=0.0346, over 4869.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2219, pruned_loss=0.03946, over 971817.96 frames.], batch size: 16, lr: 3.68e-04 +2022-05-05 09:53:38,245 INFO [train.py:715] (6/8) Epoch 5, batch 33500, loss[loss=0.1635, simple_loss=0.2267, pruned_loss=0.05015, over 4986.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2227, pruned_loss=0.03948, over 971932.58 frames.], batch size: 14, lr: 3.68e-04 +2022-05-05 09:54:16,984 INFO [train.py:715] (6/8) Epoch 5, batch 33550, loss[loss=0.1589, simple_loss=0.2213, pruned_loss=0.04824, over 4758.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2223, pruned_loss=0.03957, over 972678.13 frames.], batch size: 19, lr: 3.68e-04 +2022-05-05 09:54:55,688 INFO [train.py:715] (6/8) Epoch 5, batch 33600, loss[loss=0.1656, simple_loss=0.2322, pruned_loss=0.04951, over 4876.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2232, pruned_loss=0.04022, over 972318.76 frames.], batch size: 16, lr: 3.68e-04 +2022-05-05 09:55:34,355 INFO [train.py:715] (6/8) Epoch 5, batch 33650, loss[loss=0.1585, simple_loss=0.2354, pruned_loss=0.04079, over 4928.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2227, pruned_loss=0.0401, over 972695.26 frames.], batch size: 18, lr: 3.68e-04 +2022-05-05 09:56:12,633 
INFO [train.py:715] (6/8) Epoch 5, batch 33700, loss[loss=0.1429, simple_loss=0.2181, pruned_loss=0.0339, over 4945.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2218, pruned_loss=0.03991, over 973525.89 frames.], batch size: 21, lr: 3.68e-04 +2022-05-05 09:56:51,516 INFO [train.py:715] (6/8) Epoch 5, batch 33750, loss[loss=0.1797, simple_loss=0.2372, pruned_loss=0.06108, over 4823.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2213, pruned_loss=0.03978, over 973059.06 frames.], batch size: 13, lr: 3.68e-04 +2022-05-05 09:57:30,148 INFO [train.py:715] (6/8) Epoch 5, batch 33800, loss[loss=0.1795, simple_loss=0.2466, pruned_loss=0.05626, over 4934.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2214, pruned_loss=0.04006, over 972835.04 frames.], batch size: 29, lr: 3.68e-04 +2022-05-05 09:58:09,141 INFO [train.py:715] (6/8) Epoch 5, batch 33850, loss[loss=0.1491, simple_loss=0.2144, pruned_loss=0.04195, over 4842.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2202, pruned_loss=0.03954, over 973961.75 frames.], batch size: 30, lr: 3.68e-04 +2022-05-05 09:58:47,623 INFO [train.py:715] (6/8) Epoch 5, batch 33900, loss[loss=0.1258, simple_loss=0.1911, pruned_loss=0.03029, over 4912.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2201, pruned_loss=0.03973, over 973295.62 frames.], batch size: 17, lr: 3.68e-04 +2022-05-05 09:59:25,968 INFO [train.py:715] (6/8) Epoch 5, batch 33950, loss[loss=0.1405, simple_loss=0.2048, pruned_loss=0.03811, over 4850.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2214, pruned_loss=0.04074, over 972232.66 frames.], batch size: 32, lr: 3.68e-04 +2022-05-05 10:00:06,981 INFO [train.py:715] (6/8) Epoch 5, batch 34000, loss[loss=0.1472, simple_loss=0.2241, pruned_loss=0.03519, over 4842.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2209, pruned_loss=0.0403, over 973268.39 frames.], batch size: 15, lr: 3.68e-04 +2022-05-05 10:00:45,229 INFO [train.py:715] (6/8) Epoch 5, batch 34050, loss[loss=0.1498, simple_loss=0.2198, pruned_loss=0.03989, over 4793.00 frames.], tot_loss[loss=0.151, simple_loss=0.2216, pruned_loss=0.04021, over 973079.08 frames.], batch size: 21, lr: 3.67e-04 +2022-05-05 10:01:23,919 INFO [train.py:715] (6/8) Epoch 5, batch 34100, loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03018, over 4783.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2217, pruned_loss=0.04, over 972486.52 frames.], batch size: 18, lr: 3.67e-04 +2022-05-05 10:02:02,747 INFO [train.py:715] (6/8) Epoch 5, batch 34150, loss[loss=0.1264, simple_loss=0.1878, pruned_loss=0.03252, over 4788.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2213, pruned_loss=0.0398, over 973257.08 frames.], batch size: 12, lr: 3.67e-04 +2022-05-05 10:02:41,106 INFO [train.py:715] (6/8) Epoch 5, batch 34200, loss[loss=0.148, simple_loss=0.2139, pruned_loss=0.04104, over 4870.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2217, pruned_loss=0.04008, over 972781.59 frames.], batch size: 22, lr: 3.67e-04 +2022-05-05 10:03:20,095 INFO [train.py:715] (6/8) Epoch 5, batch 34250, loss[loss=0.155, simple_loss=0.2298, pruned_loss=0.04014, over 4844.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2216, pruned_loss=0.03995, over 972713.12 frames.], batch size: 15, lr: 3.67e-04 +2022-05-05 10:03:58,248 INFO [train.py:715] (6/8) Epoch 5, batch 34300, loss[loss=0.1408, simple_loss=0.2085, pruned_loss=0.0366, over 4978.00 frames.], tot_loss[loss=0.151, simple_loss=0.2219, pruned_loss=0.04008, over 973010.24 frames.], batch size: 24, lr: 3.67e-04 +2022-05-05 10:04:36,910 INFO [train.py:715] (6/8) 
Epoch 5, batch 34350, loss[loss=0.1449, simple_loss=0.225, pruned_loss=0.03241, over 4963.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2221, pruned_loss=0.04028, over 972668.00 frames.], batch size: 28, lr: 3.67e-04 +2022-05-05 10:05:14,794 INFO [train.py:715] (6/8) Epoch 5, batch 34400, loss[loss=0.1667, simple_loss=0.2324, pruned_loss=0.05056, over 4884.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2223, pruned_loss=0.04038, over 972663.54 frames.], batch size: 22, lr: 3.67e-04 +2022-05-05 10:05:53,764 INFO [train.py:715] (6/8) Epoch 5, batch 34450, loss[loss=0.1628, simple_loss=0.2435, pruned_loss=0.04101, over 4931.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2229, pruned_loss=0.04064, over 972605.79 frames.], batch size: 21, lr: 3.67e-04 +2022-05-05 10:06:32,733 INFO [train.py:715] (6/8) Epoch 5, batch 34500, loss[loss=0.1118, simple_loss=0.1794, pruned_loss=0.02207, over 4811.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2226, pruned_loss=0.04034, over 972435.85 frames.], batch size: 12, lr: 3.67e-04 +2022-05-05 10:07:11,202 INFO [train.py:715] (6/8) Epoch 5, batch 34550, loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02896, over 4893.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2223, pruned_loss=0.0403, over 971412.63 frames.], batch size: 19, lr: 3.67e-04 +2022-05-05 10:07:49,950 INFO [train.py:715] (6/8) Epoch 5, batch 34600, loss[loss=0.1348, simple_loss=0.2074, pruned_loss=0.03111, over 4976.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2216, pruned_loss=0.03967, over 971573.21 frames.], batch size: 25, lr: 3.67e-04 +2022-05-05 10:08:28,671 INFO [train.py:715] (6/8) Epoch 5, batch 34650, loss[loss=0.1749, simple_loss=0.2485, pruned_loss=0.05062, over 4909.00 frames.], tot_loss[loss=0.1514, simple_loss=0.222, pruned_loss=0.04034, over 971735.92 frames.], batch size: 18, lr: 3.67e-04 +2022-05-05 10:09:07,597 INFO [train.py:715] (6/8) Epoch 5, batch 34700, loss[loss=0.1325, simple_loss=0.2004, pruned_loss=0.03237, over 4979.00 frames.], tot_loss[loss=0.151, simple_loss=0.2219, pruned_loss=0.04006, over 972462.25 frames.], batch size: 28, lr: 3.67e-04 +2022-05-05 10:09:44,909 INFO [train.py:715] (6/8) Epoch 5, batch 34750, loss[loss=0.1683, simple_loss=0.2431, pruned_loss=0.04672, over 4794.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2214, pruned_loss=0.03964, over 971686.08 frames.], batch size: 21, lr: 3.67e-04 +2022-05-05 10:10:21,600 INFO [train.py:715] (6/8) Epoch 5, batch 34800, loss[loss=0.1115, simple_loss=0.1828, pruned_loss=0.02013, over 4766.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2205, pruned_loss=0.03988, over 969566.33 frames.], batch size: 12, lr: 3.67e-04 +2022-05-05 10:11:11,224 INFO [train.py:715] (6/8) Epoch 6, batch 0, loss[loss=0.1188, simple_loss=0.1946, pruned_loss=0.02146, over 4971.00 frames.], tot_loss[loss=0.1188, simple_loss=0.1946, pruned_loss=0.02146, over 4971.00 frames.], batch size: 25, lr: 3.46e-04 +2022-05-05 10:11:50,184 INFO [train.py:715] (6/8) Epoch 6, batch 50, loss[loss=0.1539, simple_loss=0.2245, pruned_loss=0.04163, over 4958.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2161, pruned_loss=0.03816, over 219627.43 frames.], batch size: 15, lr: 3.46e-04 +2022-05-05 10:12:29,110 INFO [train.py:715] (6/8) Epoch 6, batch 100, loss[loss=0.1777, simple_loss=0.2505, pruned_loss=0.05246, over 4806.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2208, pruned_loss=0.04005, over 385755.22 frames.], batch size: 21, lr: 3.46e-04 +2022-05-05 10:13:08,347 INFO [train.py:715] (6/8) Epoch 6, batch 150, 
loss[loss=0.1587, simple_loss=0.2226, pruned_loss=0.04737, over 4929.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2207, pruned_loss=0.04002, over 515886.63 frames.], batch size: 29, lr: 3.46e-04 +2022-05-05 10:13:47,628 INFO [train.py:715] (6/8) Epoch 6, batch 200, loss[loss=0.1527, simple_loss=0.2144, pruned_loss=0.04547, over 4830.00 frames.], tot_loss[loss=0.15, simple_loss=0.2208, pruned_loss=0.03963, over 616954.90 frames.], batch size: 27, lr: 3.45e-04 +2022-05-05 10:14:26,641 INFO [train.py:715] (6/8) Epoch 6, batch 250, loss[loss=0.1281, simple_loss=0.2104, pruned_loss=0.02286, over 4982.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2214, pruned_loss=0.03997, over 695710.90 frames.], batch size: 24, lr: 3.45e-04 +2022-05-05 10:15:05,464 INFO [train.py:715] (6/8) Epoch 6, batch 300, loss[loss=0.1477, simple_loss=0.2211, pruned_loss=0.03714, over 4881.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2218, pruned_loss=0.04017, over 756726.43 frames.], batch size: 22, lr: 3.45e-04 +2022-05-05 10:15:44,447 INFO [train.py:715] (6/8) Epoch 6, batch 350, loss[loss=0.1519, simple_loss=0.2331, pruned_loss=0.03529, over 4715.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2225, pruned_loss=0.04057, over 803887.40 frames.], batch size: 15, lr: 3.45e-04 +2022-05-05 10:16:23,654 INFO [train.py:715] (6/8) Epoch 6, batch 400, loss[loss=0.1365, simple_loss=0.2114, pruned_loss=0.03077, over 4856.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2223, pruned_loss=0.0401, over 841368.71 frames.], batch size: 30, lr: 3.45e-04 +2022-05-05 10:17:02,411 INFO [train.py:715] (6/8) Epoch 6, batch 450, loss[loss=0.163, simple_loss=0.2222, pruned_loss=0.05191, over 4839.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2227, pruned_loss=0.04006, over 870777.98 frames.], batch size: 13, lr: 3.45e-04 +2022-05-05 10:17:41,008 INFO [train.py:715] (6/8) Epoch 6, batch 500, loss[loss=0.147, simple_loss=0.2143, pruned_loss=0.03982, over 4820.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2205, pruned_loss=0.03919, over 893691.34 frames.], batch size: 25, lr: 3.45e-04 +2022-05-05 10:18:20,501 INFO [train.py:715] (6/8) Epoch 6, batch 550, loss[loss=0.1364, simple_loss=0.2135, pruned_loss=0.02964, over 4821.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2212, pruned_loss=0.03976, over 910987.79 frames.], batch size: 24, lr: 3.45e-04 +2022-05-05 10:18:59,383 INFO [train.py:715] (6/8) Epoch 6, batch 600, loss[loss=0.1527, simple_loss=0.2311, pruned_loss=0.0371, over 4942.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2213, pruned_loss=0.03971, over 924013.50 frames.], batch size: 29, lr: 3.45e-04 +2022-05-05 10:19:38,401 INFO [train.py:715] (6/8) Epoch 6, batch 650, loss[loss=0.1711, simple_loss=0.2515, pruned_loss=0.04536, over 4809.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2219, pruned_loss=0.04024, over 934330.18 frames.], batch size: 21, lr: 3.45e-04 +2022-05-05 10:20:17,486 INFO [train.py:715] (6/8) Epoch 6, batch 700, loss[loss=0.1536, simple_loss=0.2255, pruned_loss=0.04082, over 4894.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2215, pruned_loss=0.03997, over 942752.04 frames.], batch size: 22, lr: 3.45e-04 +2022-05-05 10:20:57,080 INFO [train.py:715] (6/8) Epoch 6, batch 750, loss[loss=0.1449, simple_loss=0.2179, pruned_loss=0.03591, over 4897.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2205, pruned_loss=0.03961, over 949115.03 frames.], batch size: 22, lr: 3.45e-04 +2022-05-05 10:21:35,856 INFO [train.py:715] (6/8) Epoch 6, batch 800, loss[loss=0.1371, simple_loss=0.2064, 
pruned_loss=0.03392, over 4786.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2204, pruned_loss=0.03945, over 954654.78 frames.], batch size: 21, lr: 3.45e-04 +2022-05-05 10:22:14,569 INFO [train.py:715] (6/8) Epoch 6, batch 850, loss[loss=0.1407, simple_loss=0.2129, pruned_loss=0.03426, over 4911.00 frames.], tot_loss[loss=0.149, simple_loss=0.2196, pruned_loss=0.03917, over 958656.74 frames.], batch size: 18, lr: 3.45e-04 +2022-05-05 10:22:54,100 INFO [train.py:715] (6/8) Epoch 6, batch 900, loss[loss=0.136, simple_loss=0.2181, pruned_loss=0.02698, over 4798.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2186, pruned_loss=0.03852, over 961596.52 frames.], batch size: 24, lr: 3.45e-04 +2022-05-05 10:23:33,400 INFO [train.py:715] (6/8) Epoch 6, batch 950, loss[loss=0.1461, simple_loss=0.2216, pruned_loss=0.03536, over 4815.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2192, pruned_loss=0.03882, over 963881.59 frames.], batch size: 26, lr: 3.45e-04 +2022-05-05 10:24:12,116 INFO [train.py:715] (6/8) Epoch 6, batch 1000, loss[loss=0.1797, simple_loss=0.2519, pruned_loss=0.05372, over 4803.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2194, pruned_loss=0.03863, over 965578.85 frames.], batch size: 24, lr: 3.45e-04 +2022-05-05 10:24:51,182 INFO [train.py:715] (6/8) Epoch 6, batch 1050, loss[loss=0.1459, simple_loss=0.213, pruned_loss=0.03945, over 4908.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2212, pruned_loss=0.03984, over 966642.52 frames.], batch size: 18, lr: 3.45e-04 +2022-05-05 10:25:30,704 INFO [train.py:715] (6/8) Epoch 6, batch 1100, loss[loss=0.1624, simple_loss=0.2314, pruned_loss=0.04663, over 4832.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2209, pruned_loss=0.03971, over 967824.47 frames.], batch size: 15, lr: 3.45e-04 +2022-05-05 10:26:09,923 INFO [train.py:715] (6/8) Epoch 6, batch 1150, loss[loss=0.1848, simple_loss=0.2517, pruned_loss=0.0589, over 4808.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2209, pruned_loss=0.03948, over 968279.29 frames.], batch size: 15, lr: 3.45e-04 +2022-05-05 10:26:48,493 INFO [train.py:715] (6/8) Epoch 6, batch 1200, loss[loss=0.1627, simple_loss=0.2294, pruned_loss=0.04804, over 4780.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2219, pruned_loss=0.04018, over 968551.46 frames.], batch size: 18, lr: 3.45e-04 +2022-05-05 10:27:28,195 INFO [train.py:715] (6/8) Epoch 6, batch 1250, loss[loss=0.1559, simple_loss=0.2182, pruned_loss=0.04683, over 4787.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2215, pruned_loss=0.03977, over 969907.82 frames.], batch size: 18, lr: 3.45e-04 +2022-05-05 10:28:07,472 INFO [train.py:715] (6/8) Epoch 6, batch 1300, loss[loss=0.1456, simple_loss=0.2191, pruned_loss=0.03603, over 4815.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2207, pruned_loss=0.03943, over 970156.54 frames.], batch size: 27, lr: 3.45e-04 +2022-05-05 10:28:46,066 INFO [train.py:715] (6/8) Epoch 6, batch 1350, loss[loss=0.1666, simple_loss=0.2378, pruned_loss=0.04769, over 4857.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2206, pruned_loss=0.03925, over 970646.82 frames.], batch size: 32, lr: 3.45e-04 +2022-05-05 10:29:24,987 INFO [train.py:715] (6/8) Epoch 6, batch 1400, loss[loss=0.1565, simple_loss=0.2352, pruned_loss=0.0389, over 4803.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2206, pruned_loss=0.0389, over 970988.94 frames.], batch size: 21, lr: 3.45e-04 +2022-05-05 10:30:04,138 INFO [train.py:715] (6/8) Epoch 6, batch 1450, loss[loss=0.1446, simple_loss=0.2201, pruned_loss=0.03462, over 4953.00 
frames.], tot_loss[loss=0.1488, simple_loss=0.2202, pruned_loss=0.03868, over 970910.97 frames.], batch size: 24, lr: 3.44e-04 +2022-05-05 10:30:42,811 INFO [train.py:715] (6/8) Epoch 6, batch 1500, loss[loss=0.1385, simple_loss=0.2125, pruned_loss=0.03219, over 4775.00 frames.], tot_loss[loss=0.1498, simple_loss=0.221, pruned_loss=0.03934, over 972094.76 frames.], batch size: 18, lr: 3.44e-04 +2022-05-05 10:31:21,207 INFO [train.py:715] (6/8) Epoch 6, batch 1550, loss[loss=0.1934, simple_loss=0.2592, pruned_loss=0.06376, over 4636.00 frames.], tot_loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03942, over 972473.32 frames.], batch size: 13, lr: 3.44e-04 +2022-05-05 10:32:00,468 INFO [train.py:715] (6/8) Epoch 6, batch 1600, loss[loss=0.1586, simple_loss=0.2402, pruned_loss=0.03852, over 4973.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2205, pruned_loss=0.03933, over 972601.15 frames.], batch size: 24, lr: 3.44e-04 +2022-05-05 10:32:40,012 INFO [train.py:715] (6/8) Epoch 6, batch 1650, loss[loss=0.14, simple_loss=0.2189, pruned_loss=0.03061, over 4834.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2213, pruned_loss=0.03916, over 972293.02 frames.], batch size: 26, lr: 3.44e-04 +2022-05-05 10:33:18,412 INFO [train.py:715] (6/8) Epoch 6, batch 1700, loss[loss=0.1444, simple_loss=0.2189, pruned_loss=0.03494, over 4790.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2207, pruned_loss=0.03886, over 971754.86 frames.], batch size: 24, lr: 3.44e-04 +2022-05-05 10:33:57,728 INFO [train.py:715] (6/8) Epoch 6, batch 1750, loss[loss=0.1588, simple_loss=0.2251, pruned_loss=0.04624, over 4835.00 frames.], tot_loss[loss=0.15, simple_loss=0.2213, pruned_loss=0.03935, over 972139.30 frames.], batch size: 30, lr: 3.44e-04 +2022-05-05 10:34:37,315 INFO [train.py:715] (6/8) Epoch 6, batch 1800, loss[loss=0.1409, simple_loss=0.2061, pruned_loss=0.03792, over 4810.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2207, pruned_loss=0.03912, over 972259.44 frames.], batch size: 24, lr: 3.44e-04 +2022-05-05 10:35:16,404 INFO [train.py:715] (6/8) Epoch 6, batch 1850, loss[loss=0.1196, simple_loss=0.19, pruned_loss=0.02465, over 4976.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2207, pruned_loss=0.03931, over 972896.65 frames.], batch size: 25, lr: 3.44e-04 +2022-05-05 10:35:54,730 INFO [train.py:715] (6/8) Epoch 6, batch 1900, loss[loss=0.1181, simple_loss=0.1863, pruned_loss=0.02491, over 4853.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2193, pruned_loss=0.03884, over 972562.40 frames.], batch size: 30, lr: 3.44e-04 +2022-05-05 10:36:34,277 INFO [train.py:715] (6/8) Epoch 6, batch 1950, loss[loss=0.1533, simple_loss=0.2258, pruned_loss=0.04039, over 4962.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2199, pruned_loss=0.03895, over 972894.28 frames.], batch size: 15, lr: 3.44e-04 +2022-05-05 10:37:13,033 INFO [train.py:715] (6/8) Epoch 6, batch 2000, loss[loss=0.1414, simple_loss=0.212, pruned_loss=0.03539, over 4811.00 frames.], tot_loss[loss=0.149, simple_loss=0.2201, pruned_loss=0.03896, over 973510.11 frames.], batch size: 25, lr: 3.44e-04 +2022-05-05 10:37:52,079 INFO [train.py:715] (6/8) Epoch 6, batch 2050, loss[loss=0.1624, simple_loss=0.2348, pruned_loss=0.04498, over 4839.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2205, pruned_loss=0.03927, over 973173.35 frames.], batch size: 30, lr: 3.44e-04 +2022-05-05 10:38:30,928 INFO [train.py:715] (6/8) Epoch 6, batch 2100, loss[loss=0.1511, simple_loss=0.2268, pruned_loss=0.03773, over 4891.00 frames.], tot_loss[loss=0.1497, 
simple_loss=0.2204, pruned_loss=0.03951, over 972549.05 frames.], batch size: 22, lr: 3.44e-04 +2022-05-05 10:39:10,112 INFO [train.py:715] (6/8) Epoch 6, batch 2150, loss[loss=0.1316, simple_loss=0.2026, pruned_loss=0.03034, over 4800.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2197, pruned_loss=0.03909, over 972712.09 frames.], batch size: 21, lr: 3.44e-04 +2022-05-05 10:39:49,070 INFO [train.py:715] (6/8) Epoch 6, batch 2200, loss[loss=0.1588, simple_loss=0.2235, pruned_loss=0.04711, over 4915.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2199, pruned_loss=0.03935, over 972862.08 frames.], batch size: 18, lr: 3.44e-04 +2022-05-05 10:40:27,528 INFO [train.py:715] (6/8) Epoch 6, batch 2250, loss[loss=0.1638, simple_loss=0.2311, pruned_loss=0.04824, over 4848.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2193, pruned_loss=0.03886, over 972629.50 frames.], batch size: 30, lr: 3.44e-04 +2022-05-05 10:41:06,874 INFO [train.py:715] (6/8) Epoch 6, batch 2300, loss[loss=0.1505, simple_loss=0.2171, pruned_loss=0.04191, over 4873.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2189, pruned_loss=0.03872, over 972322.93 frames.], batch size: 22, lr: 3.44e-04 +2022-05-05 10:41:45,980 INFO [train.py:715] (6/8) Epoch 6, batch 2350, loss[loss=0.1483, simple_loss=0.2197, pruned_loss=0.03839, over 4979.00 frames.], tot_loss[loss=0.147, simple_loss=0.2181, pruned_loss=0.03797, over 972197.59 frames.], batch size: 35, lr: 3.44e-04 +2022-05-05 10:42:24,702 INFO [train.py:715] (6/8) Epoch 6, batch 2400, loss[loss=0.1666, simple_loss=0.2345, pruned_loss=0.04933, over 4935.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2195, pruned_loss=0.03911, over 972340.70 frames.], batch size: 21, lr: 3.44e-04 +2022-05-05 10:43:03,444 INFO [train.py:715] (6/8) Epoch 6, batch 2450, loss[loss=0.1543, simple_loss=0.2369, pruned_loss=0.03591, over 4992.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2195, pruned_loss=0.03901, over 971927.51 frames.], batch size: 15, lr: 3.44e-04 +2022-05-05 10:43:42,678 INFO [train.py:715] (6/8) Epoch 6, batch 2500, loss[loss=0.148, simple_loss=0.2271, pruned_loss=0.03445, over 4830.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2193, pruned_loss=0.03866, over 972461.29 frames.], batch size: 15, lr: 3.44e-04 +2022-05-05 10:44:21,858 INFO [train.py:715] (6/8) Epoch 6, batch 2550, loss[loss=0.1355, simple_loss=0.1925, pruned_loss=0.03922, over 4785.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2197, pruned_loss=0.03908, over 971821.45 frames.], batch size: 12, lr: 3.44e-04 +2022-05-05 10:45:00,760 INFO [train.py:715] (6/8) Epoch 6, batch 2600, loss[loss=0.1612, simple_loss=0.2228, pruned_loss=0.04978, over 4941.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2195, pruned_loss=0.03907, over 972534.17 frames.], batch size: 21, lr: 3.44e-04 +2022-05-05 10:45:40,392 INFO [train.py:715] (6/8) Epoch 6, batch 2650, loss[loss=0.1222, simple_loss=0.1886, pruned_loss=0.02787, over 4753.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2194, pruned_loss=0.0388, over 972537.79 frames.], batch size: 12, lr: 3.43e-04 +2022-05-05 10:46:19,965 INFO [train.py:715] (6/8) Epoch 6, batch 2700, loss[loss=0.1462, simple_loss=0.2181, pruned_loss=0.03714, over 4970.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03867, over 972408.21 frames.], batch size: 24, lr: 3.43e-04 +2022-05-05 10:46:58,104 INFO [train.py:715] (6/8) Epoch 6, batch 2750, loss[loss=0.1541, simple_loss=0.2265, pruned_loss=0.04083, over 4943.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2203, 
pruned_loss=0.03927, over 972158.96 frames.], batch size: 35, lr: 3.43e-04 +2022-05-05 10:47:37,113 INFO [train.py:715] (6/8) Epoch 6, batch 2800, loss[loss=0.1783, simple_loss=0.254, pruned_loss=0.05125, over 4892.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2201, pruned_loss=0.03887, over 971932.89 frames.], batch size: 17, lr: 3.43e-04 +2022-05-05 10:48:16,468 INFO [train.py:715] (6/8) Epoch 6, batch 2850, loss[loss=0.143, simple_loss=0.205, pruned_loss=0.04051, over 4970.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2203, pruned_loss=0.03904, over 971833.12 frames.], batch size: 24, lr: 3.43e-04 +2022-05-05 10:48:55,295 INFO [train.py:715] (6/8) Epoch 6, batch 2900, loss[loss=0.1456, simple_loss=0.2299, pruned_loss=0.03063, over 4975.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2219, pruned_loss=0.0397, over 971286.71 frames.], batch size: 15, lr: 3.43e-04 +2022-05-05 10:49:33,640 INFO [train.py:715] (6/8) Epoch 6, batch 2950, loss[loss=0.1295, simple_loss=0.2085, pruned_loss=0.02523, over 4763.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2223, pruned_loss=0.04014, over 972209.13 frames.], batch size: 19, lr: 3.43e-04 +2022-05-05 10:50:12,860 INFO [train.py:715] (6/8) Epoch 6, batch 3000, loss[loss=0.165, simple_loss=0.2228, pruned_loss=0.05364, over 4846.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2221, pruned_loss=0.04014, over 972009.99 frames.], batch size: 15, lr: 3.43e-04 +2022-05-05 10:50:12,860 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 10:50:22,539 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1095, simple_loss=0.1945, pruned_loss=0.01223, over 914524.00 frames. +2022-05-05 10:51:02,174 INFO [train.py:715] (6/8) Epoch 6, batch 3050, loss[loss=0.122, simple_loss=0.1897, pruned_loss=0.02716, over 4819.00 frames.], tot_loss[loss=0.1509, simple_loss=0.222, pruned_loss=0.03994, over 972037.42 frames.], batch size: 13, lr: 3.43e-04 +2022-05-05 10:51:41,566 INFO [train.py:715] (6/8) Epoch 6, batch 3100, loss[loss=0.1808, simple_loss=0.248, pruned_loss=0.05681, over 4921.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2221, pruned_loss=0.0403, over 972214.68 frames.], batch size: 23, lr: 3.43e-04 +2022-05-05 10:52:20,136 INFO [train.py:715] (6/8) Epoch 6, batch 3150, loss[loss=0.2192, simple_loss=0.2858, pruned_loss=0.07627, over 4881.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2221, pruned_loss=0.04022, over 972133.03 frames.], batch size: 16, lr: 3.43e-04 +2022-05-05 10:52:58,781 INFO [train.py:715] (6/8) Epoch 6, batch 3200, loss[loss=0.1493, simple_loss=0.2188, pruned_loss=0.0399, over 4681.00 frames.], tot_loss[loss=0.1502, simple_loss=0.221, pruned_loss=0.03973, over 971538.38 frames.], batch size: 15, lr: 3.43e-04 +2022-05-05 10:53:38,607 INFO [train.py:715] (6/8) Epoch 6, batch 3250, loss[loss=0.1463, simple_loss=0.2041, pruned_loss=0.0443, over 4804.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2212, pruned_loss=0.03971, over 971118.24 frames.], batch size: 12, lr: 3.43e-04 +2022-05-05 10:54:17,328 INFO [train.py:715] (6/8) Epoch 6, batch 3300, loss[loss=0.166, simple_loss=0.2304, pruned_loss=0.05082, over 4915.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2222, pruned_loss=0.04005, over 971052.12 frames.], batch size: 18, lr: 3.43e-04 +2022-05-05 10:54:55,862 INFO [train.py:715] (6/8) Epoch 6, batch 3350, loss[loss=0.1633, simple_loss=0.2387, pruned_loss=0.0439, over 4941.00 frames.], tot_loss[loss=0.1503, simple_loss=0.222, pruned_loss=0.03934, over 971443.51 frames.], batch size: 21, lr: 3.43e-04 +2022-05-05 
10:55:35,257 INFO [train.py:715] (6/8) Epoch 6, batch 3400, loss[loss=0.128, simple_loss=0.2004, pruned_loss=0.02773, over 4812.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2241, pruned_loss=0.0408, over 971697.37 frames.], batch size: 25, lr: 3.43e-04 +2022-05-05 10:56:14,434 INFO [train.py:715] (6/8) Epoch 6, batch 3450, loss[loss=0.1631, simple_loss=0.2408, pruned_loss=0.04267, over 4834.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2225, pruned_loss=0.04005, over 972056.15 frames.], batch size: 30, lr: 3.43e-04 +2022-05-05 10:56:52,541 INFO [train.py:715] (6/8) Epoch 6, batch 3500, loss[loss=0.1485, simple_loss=0.2162, pruned_loss=0.0404, over 4921.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2217, pruned_loss=0.03972, over 972402.38 frames.], batch size: 21, lr: 3.43e-04 +2022-05-05 10:57:31,371 INFO [train.py:715] (6/8) Epoch 6, batch 3550, loss[loss=0.1312, simple_loss=0.2085, pruned_loss=0.02694, over 4769.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2223, pruned_loss=0.03992, over 972004.63 frames.], batch size: 14, lr: 3.43e-04 +2022-05-05 10:58:10,830 INFO [train.py:715] (6/8) Epoch 6, batch 3600, loss[loss=0.1711, simple_loss=0.2321, pruned_loss=0.05508, over 4765.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2209, pruned_loss=0.03937, over 972282.73 frames.], batch size: 14, lr: 3.43e-04 +2022-05-05 10:58:49,772 INFO [train.py:715] (6/8) Epoch 6, batch 3650, loss[loss=0.163, simple_loss=0.2317, pruned_loss=0.0471, over 4901.00 frames.], tot_loss[loss=0.1503, simple_loss=0.221, pruned_loss=0.03976, over 972279.62 frames.], batch size: 39, lr: 3.43e-04 +2022-05-05 10:59:27,963 INFO [train.py:715] (6/8) Epoch 6, batch 3700, loss[loss=0.1069, simple_loss=0.179, pruned_loss=0.01741, over 4792.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2204, pruned_loss=0.03951, over 971795.04 frames.], batch size: 18, lr: 3.43e-04 +2022-05-05 11:00:07,228 INFO [train.py:715] (6/8) Epoch 6, batch 3750, loss[loss=0.153, simple_loss=0.2166, pruned_loss=0.04466, over 4773.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2205, pruned_loss=0.03954, over 971575.23 frames.], batch size: 12, lr: 3.43e-04 +2022-05-05 11:00:46,317 INFO [train.py:715] (6/8) Epoch 6, batch 3800, loss[loss=0.1538, simple_loss=0.219, pruned_loss=0.0443, over 4781.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2211, pruned_loss=0.04007, over 971810.92 frames.], batch size: 14, lr: 3.43e-04 +2022-05-05 11:01:24,434 INFO [train.py:715] (6/8) Epoch 6, batch 3850, loss[loss=0.1271, simple_loss=0.1922, pruned_loss=0.031, over 4792.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2217, pruned_loss=0.04028, over 972107.19 frames.], batch size: 12, lr: 3.43e-04 +2022-05-05 11:02:03,350 INFO [train.py:715] (6/8) Epoch 6, batch 3900, loss[loss=0.1644, simple_loss=0.2355, pruned_loss=0.04668, over 4971.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2224, pruned_loss=0.04051, over 972352.55 frames.], batch size: 15, lr: 3.42e-04 +2022-05-05 11:02:42,645 INFO [train.py:715] (6/8) Epoch 6, batch 3950, loss[loss=0.1463, simple_loss=0.2223, pruned_loss=0.03514, over 4987.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2215, pruned_loss=0.0398, over 972949.17 frames.], batch size: 14, lr: 3.42e-04 +2022-05-05 11:03:21,701 INFO [train.py:715] (6/8) Epoch 6, batch 4000, loss[loss=0.1357, simple_loss=0.2036, pruned_loss=0.03385, over 4959.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2216, pruned_loss=0.03979, over 972671.01 frames.], batch size: 15, lr: 3.42e-04 +2022-05-05 11:04:00,014 INFO [train.py:715] (6/8) Epoch 
6, batch 4050, loss[loss=0.1222, simple_loss=0.1889, pruned_loss=0.02769, over 4922.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2222, pruned_loss=0.03983, over 972902.13 frames.], batch size: 29, lr: 3.42e-04 +2022-05-05 11:04:39,114 INFO [train.py:715] (6/8) Epoch 6, batch 4100, loss[loss=0.1705, simple_loss=0.237, pruned_loss=0.052, over 4905.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2226, pruned_loss=0.03977, over 972972.47 frames.], batch size: 19, lr: 3.42e-04 +2022-05-05 11:05:17,849 INFO [train.py:715] (6/8) Epoch 6, batch 4150, loss[loss=0.1577, simple_loss=0.2312, pruned_loss=0.04209, over 4809.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2223, pruned_loss=0.04002, over 972712.20 frames.], batch size: 21, lr: 3.42e-04 +2022-05-05 11:05:56,005 INFO [train.py:715] (6/8) Epoch 6, batch 4200, loss[loss=0.1519, simple_loss=0.2335, pruned_loss=0.03511, over 4831.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2218, pruned_loss=0.03986, over 972198.99 frames.], batch size: 30, lr: 3.42e-04 +2022-05-05 11:06:34,725 INFO [train.py:715] (6/8) Epoch 6, batch 4250, loss[loss=0.1336, simple_loss=0.2084, pruned_loss=0.02942, over 4919.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2227, pruned_loss=0.04021, over 972063.85 frames.], batch size: 23, lr: 3.42e-04 +2022-05-05 11:07:13,787 INFO [train.py:715] (6/8) Epoch 6, batch 4300, loss[loss=0.1709, simple_loss=0.242, pruned_loss=0.04989, over 4968.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2227, pruned_loss=0.04036, over 972612.50 frames.], batch size: 14, lr: 3.42e-04 +2022-05-05 11:07:52,580 INFO [train.py:715] (6/8) Epoch 6, batch 4350, loss[loss=0.1382, simple_loss=0.2191, pruned_loss=0.02863, over 4864.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2223, pruned_loss=0.04015, over 972040.23 frames.], batch size: 20, lr: 3.42e-04 +2022-05-05 11:08:30,488 INFO [train.py:715] (6/8) Epoch 6, batch 4400, loss[loss=0.1451, simple_loss=0.215, pruned_loss=0.03757, over 4745.00 frames.], tot_loss[loss=0.1514, simple_loss=0.222, pruned_loss=0.04036, over 971564.61 frames.], batch size: 19, lr: 3.42e-04 +2022-05-05 11:09:08,936 INFO [train.py:715] (6/8) Epoch 6, batch 4450, loss[loss=0.1559, simple_loss=0.2297, pruned_loss=0.04107, over 4872.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2218, pruned_loss=0.04033, over 970940.12 frames.], batch size: 19, lr: 3.42e-04 +2022-05-05 11:09:48,072 INFO [train.py:715] (6/8) Epoch 6, batch 4500, loss[loss=0.1527, simple_loss=0.2244, pruned_loss=0.04048, over 4961.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2213, pruned_loss=0.03976, over 971403.26 frames.], batch size: 14, lr: 3.42e-04 +2022-05-05 11:10:26,352 INFO [train.py:715] (6/8) Epoch 6, batch 4550, loss[loss=0.1402, simple_loss=0.2064, pruned_loss=0.03699, over 4742.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2203, pruned_loss=0.03924, over 971602.65 frames.], batch size: 16, lr: 3.42e-04 +2022-05-05 11:11:04,821 INFO [train.py:715] (6/8) Epoch 6, batch 4600, loss[loss=0.1338, simple_loss=0.2028, pruned_loss=0.03236, over 4919.00 frames.], tot_loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03941, over 972130.19 frames.], batch size: 18, lr: 3.42e-04 +2022-05-05 11:11:44,224 INFO [train.py:715] (6/8) Epoch 6, batch 4650, loss[loss=0.1382, simple_loss=0.2062, pruned_loss=0.03511, over 4862.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2206, pruned_loss=0.03932, over 972239.05 frames.], batch size: 32, lr: 3.42e-04 +2022-05-05 11:12:23,350 INFO [train.py:715] (6/8) Epoch 6, batch 4700, loss[loss=0.1647, 
simple_loss=0.2351, pruned_loss=0.04713, over 4774.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2209, pruned_loss=0.03961, over 972345.90 frames.], batch size: 18, lr: 3.42e-04 +2022-05-05 11:13:01,631 INFO [train.py:715] (6/8) Epoch 6, batch 4750, loss[loss=0.1251, simple_loss=0.1981, pruned_loss=0.02599, over 4813.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2199, pruned_loss=0.03851, over 972970.95 frames.], batch size: 24, lr: 3.42e-04 +2022-05-05 11:13:40,646 INFO [train.py:715] (6/8) Epoch 6, batch 4800, loss[loss=0.1391, simple_loss=0.2113, pruned_loss=0.03349, over 4816.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2194, pruned_loss=0.039, over 972945.01 frames.], batch size: 25, lr: 3.42e-04 +2022-05-05 11:14:19,739 INFO [train.py:715] (6/8) Epoch 6, batch 4850, loss[loss=0.168, simple_loss=0.2371, pruned_loss=0.04946, over 4794.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2204, pruned_loss=0.03954, over 972250.18 frames.], batch size: 21, lr: 3.42e-04 +2022-05-05 11:14:58,279 INFO [train.py:715] (6/8) Epoch 6, batch 4900, loss[loss=0.148, simple_loss=0.2281, pruned_loss=0.03393, over 4704.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2208, pruned_loss=0.03995, over 972030.70 frames.], batch size: 15, lr: 3.42e-04 +2022-05-05 11:15:37,163 INFO [train.py:715] (6/8) Epoch 6, batch 4950, loss[loss=0.1354, simple_loss=0.2105, pruned_loss=0.03015, over 4852.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2202, pruned_loss=0.03949, over 972527.28 frames.], batch size: 32, lr: 3.42e-04 +2022-05-05 11:16:16,919 INFO [train.py:715] (6/8) Epoch 6, batch 5000, loss[loss=0.1467, simple_loss=0.2228, pruned_loss=0.03524, over 4978.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2199, pruned_loss=0.03944, over 972801.17 frames.], batch size: 25, lr: 3.42e-04 +2022-05-05 11:16:55,993 INFO [train.py:715] (6/8) Epoch 6, batch 5050, loss[loss=0.1652, simple_loss=0.2353, pruned_loss=0.04756, over 4975.00 frames.], tot_loss[loss=0.149, simple_loss=0.22, pruned_loss=0.03897, over 973463.97 frames.], batch size: 24, lr: 3.42e-04 +2022-05-05 11:17:34,329 INFO [train.py:715] (6/8) Epoch 6, batch 5100, loss[loss=0.1541, simple_loss=0.2222, pruned_loss=0.04297, over 4777.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2199, pruned_loss=0.03889, over 972882.22 frames.], batch size: 14, lr: 3.42e-04 +2022-05-05 11:18:13,254 INFO [train.py:715] (6/8) Epoch 6, batch 5150, loss[loss=0.1376, simple_loss=0.2091, pruned_loss=0.03305, over 4967.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2202, pruned_loss=0.03899, over 973034.11 frames.], batch size: 35, lr: 3.41e-04 +2022-05-05 11:18:52,359 INFO [train.py:715] (6/8) Epoch 6, batch 5200, loss[loss=0.1314, simple_loss=0.2053, pruned_loss=0.02874, over 4817.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2196, pruned_loss=0.03903, over 973445.13 frames.], batch size: 27, lr: 3.41e-04 +2022-05-05 11:19:30,492 INFO [train.py:715] (6/8) Epoch 6, batch 5250, loss[loss=0.1465, simple_loss=0.2141, pruned_loss=0.03943, over 4785.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2192, pruned_loss=0.03862, over 973319.30 frames.], batch size: 18, lr: 3.41e-04 +2022-05-05 11:20:09,574 INFO [train.py:715] (6/8) Epoch 6, batch 5300, loss[loss=0.1451, simple_loss=0.2228, pruned_loss=0.03369, over 4726.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2185, pruned_loss=0.03831, over 972174.22 frames.], batch size: 16, lr: 3.41e-04 +2022-05-05 11:20:48,895 INFO [train.py:715] (6/8) Epoch 6, batch 5350, loss[loss=0.1895, simple_loss=0.2585, 
pruned_loss=0.06022, over 4921.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2195, pruned_loss=0.03892, over 972951.09 frames.], batch size: 18, lr: 3.41e-04 +2022-05-05 11:21:27,941 INFO [train.py:715] (6/8) Epoch 6, batch 5400, loss[loss=0.1729, simple_loss=0.2358, pruned_loss=0.05503, over 4979.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2219, pruned_loss=0.04037, over 972419.99 frames.], batch size: 15, lr: 3.41e-04 +2022-05-05 11:22:06,516 INFO [train.py:715] (6/8) Epoch 6, batch 5450, loss[loss=0.1564, simple_loss=0.2211, pruned_loss=0.04589, over 4939.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2208, pruned_loss=0.03946, over 973114.87 frames.], batch size: 21, lr: 3.41e-04 +2022-05-05 11:22:45,325 INFO [train.py:715] (6/8) Epoch 6, batch 5500, loss[loss=0.1497, simple_loss=0.2167, pruned_loss=0.04135, over 4839.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2203, pruned_loss=0.03911, over 972735.43 frames.], batch size: 15, lr: 3.41e-04 +2022-05-05 11:23:24,194 INFO [train.py:715] (6/8) Epoch 6, batch 5550, loss[loss=0.1736, simple_loss=0.2335, pruned_loss=0.05691, over 4872.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2201, pruned_loss=0.03933, over 972844.80 frames.], batch size: 16, lr: 3.41e-04 +2022-05-05 11:24:02,782 INFO [train.py:715] (6/8) Epoch 6, batch 5600, loss[loss=0.1664, simple_loss=0.2333, pruned_loss=0.04976, over 4919.00 frames.], tot_loss[loss=0.1502, simple_loss=0.221, pruned_loss=0.03972, over 973691.07 frames.], batch size: 18, lr: 3.41e-04 +2022-05-05 11:24:42,275 INFO [train.py:715] (6/8) Epoch 6, batch 5650, loss[loss=0.1399, simple_loss=0.2105, pruned_loss=0.03467, over 4816.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2208, pruned_loss=0.03946, over 972682.40 frames.], batch size: 27, lr: 3.41e-04 +2022-05-05 11:25:21,628 INFO [train.py:715] (6/8) Epoch 6, batch 5700, loss[loss=0.2003, simple_loss=0.2587, pruned_loss=0.071, over 4901.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2203, pruned_loss=0.039, over 972477.93 frames.], batch size: 18, lr: 3.41e-04 +2022-05-05 11:26:00,234 INFO [train.py:715] (6/8) Epoch 6, batch 5750, loss[loss=0.1325, simple_loss=0.2114, pruned_loss=0.02684, over 4926.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2207, pruned_loss=0.03899, over 972474.93 frames.], batch size: 29, lr: 3.41e-04 +2022-05-05 11:26:38,645 INFO [train.py:715] (6/8) Epoch 6, batch 5800, loss[loss=0.1674, simple_loss=0.2281, pruned_loss=0.05338, over 4874.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2212, pruned_loss=0.03927, over 972371.37 frames.], batch size: 16, lr: 3.41e-04 +2022-05-05 11:27:17,532 INFO [train.py:715] (6/8) Epoch 6, batch 5850, loss[loss=0.1535, simple_loss=0.2238, pruned_loss=0.04158, over 4968.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2211, pruned_loss=0.03931, over 972902.68 frames.], batch size: 15, lr: 3.41e-04 +2022-05-05 11:27:56,995 INFO [train.py:715] (6/8) Epoch 6, batch 5900, loss[loss=0.1355, simple_loss=0.2081, pruned_loss=0.03147, over 4905.00 frames.], tot_loss[loss=0.149, simple_loss=0.2201, pruned_loss=0.03895, over 972351.31 frames.], batch size: 29, lr: 3.41e-04 +2022-05-05 11:28:34,913 INFO [train.py:715] (6/8) Epoch 6, batch 5950, loss[loss=0.1651, simple_loss=0.2361, pruned_loss=0.04704, over 4827.00 frames.], tot_loss[loss=0.1492, simple_loss=0.22, pruned_loss=0.03919, over 972371.62 frames.], batch size: 13, lr: 3.41e-04 +2022-05-05 11:29:14,286 INFO [train.py:715] (6/8) Epoch 6, batch 6000, loss[loss=0.1608, simple_loss=0.2097, pruned_loss=0.05592, over 4823.00 
frames.], tot_loss[loss=0.1489, simple_loss=0.2198, pruned_loss=0.03899, over 972793.18 frames.], batch size: 12, lr: 3.41e-04 +2022-05-05 11:29:14,287 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 11:29:24,854 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1095, simple_loss=0.1945, pruned_loss=0.01229, over 914524.00 frames. +2022-05-05 11:30:04,469 INFO [train.py:715] (6/8) Epoch 6, batch 6050, loss[loss=0.161, simple_loss=0.233, pruned_loss=0.04448, over 4916.00 frames.], tot_loss[loss=0.149, simple_loss=0.2202, pruned_loss=0.03896, over 971346.68 frames.], batch size: 39, lr: 3.41e-04 +2022-05-05 11:30:43,726 INFO [train.py:715] (6/8) Epoch 6, batch 6100, loss[loss=0.1561, simple_loss=0.2248, pruned_loss=0.04369, over 4915.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2212, pruned_loss=0.03956, over 971025.62 frames.], batch size: 17, lr: 3.41e-04 +2022-05-05 11:31:23,121 INFO [train.py:715] (6/8) Epoch 6, batch 6150, loss[loss=0.1273, simple_loss=0.1939, pruned_loss=0.03041, over 4846.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2219, pruned_loss=0.03993, over 970940.04 frames.], batch size: 12, lr: 3.41e-04 +2022-05-05 11:32:01,615 INFO [train.py:715] (6/8) Epoch 6, batch 6200, loss[loss=0.147, simple_loss=0.2104, pruned_loss=0.04177, over 4885.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2226, pruned_loss=0.04003, over 970588.91 frames.], batch size: 32, lr: 3.41e-04 +2022-05-05 11:32:40,935 INFO [train.py:715] (6/8) Epoch 6, batch 6250, loss[loss=0.1363, simple_loss=0.2106, pruned_loss=0.03098, over 4919.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2216, pruned_loss=0.03953, over 971068.29 frames.], batch size: 23, lr: 3.41e-04 +2022-05-05 11:33:20,233 INFO [train.py:715] (6/8) Epoch 6, batch 6300, loss[loss=0.1765, simple_loss=0.2401, pruned_loss=0.05647, over 4967.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2215, pruned_loss=0.03919, over 971665.83 frames.], batch size: 28, lr: 3.41e-04 +2022-05-05 11:33:58,709 INFO [train.py:715] (6/8) Epoch 6, batch 6350, loss[loss=0.1287, simple_loss=0.2001, pruned_loss=0.02864, over 4934.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2211, pruned_loss=0.03922, over 971950.60 frames.], batch size: 29, lr: 3.41e-04 +2022-05-05 11:34:37,339 INFO [train.py:715] (6/8) Epoch 6, batch 6400, loss[loss=0.1409, simple_loss=0.2151, pruned_loss=0.03331, over 4955.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2201, pruned_loss=0.03874, over 972011.95 frames.], batch size: 21, lr: 3.40e-04 +2022-05-05 11:35:16,565 INFO [train.py:715] (6/8) Epoch 6, batch 6450, loss[loss=0.1649, simple_loss=0.2218, pruned_loss=0.05402, over 4957.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2208, pruned_loss=0.03889, over 971762.22 frames.], batch size: 15, lr: 3.40e-04 +2022-05-05 11:35:55,390 INFO [train.py:715] (6/8) Epoch 6, batch 6500, loss[loss=0.1338, simple_loss=0.2142, pruned_loss=0.02672, over 4805.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2205, pruned_loss=0.03887, over 971907.42 frames.], batch size: 24, lr: 3.40e-04 +2022-05-05 11:36:33,974 INFO [train.py:715] (6/8) Epoch 6, batch 6550, loss[loss=0.1447, simple_loss=0.2143, pruned_loss=0.03759, over 4847.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2216, pruned_loss=0.03939, over 972068.00 frames.], batch size: 15, lr: 3.40e-04 +2022-05-05 11:37:12,777 INFO [train.py:715] (6/8) Epoch 6, batch 6600, loss[loss=0.1361, simple_loss=0.2063, pruned_loss=0.033, over 4919.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2214, pruned_loss=0.03957, over 
972139.60 frames.], batch size: 17, lr: 3.40e-04 +2022-05-05 11:37:52,973 INFO [train.py:715] (6/8) Epoch 6, batch 6650, loss[loss=0.1258, simple_loss=0.2031, pruned_loss=0.02424, over 4946.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2205, pruned_loss=0.03929, over 972539.76 frames.], batch size: 29, lr: 3.40e-04 +2022-05-05 11:38:31,783 INFO [train.py:715] (6/8) Epoch 6, batch 6700, loss[loss=0.1742, simple_loss=0.243, pruned_loss=0.05274, over 4930.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2211, pruned_loss=0.03935, over 972583.69 frames.], batch size: 39, lr: 3.40e-04 +2022-05-05 11:39:10,525 INFO [train.py:715] (6/8) Epoch 6, batch 6750, loss[loss=0.153, simple_loss=0.2221, pruned_loss=0.04194, over 4852.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2216, pruned_loss=0.03958, over 973021.78 frames.], batch size: 15, lr: 3.40e-04 +2022-05-05 11:39:49,800 INFO [train.py:715] (6/8) Epoch 6, batch 6800, loss[loss=0.1998, simple_loss=0.254, pruned_loss=0.07275, over 4815.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2218, pruned_loss=0.03961, over 973003.73 frames.], batch size: 26, lr: 3.40e-04 +2022-05-05 11:40:28,791 INFO [train.py:715] (6/8) Epoch 6, batch 6850, loss[loss=0.1499, simple_loss=0.2338, pruned_loss=0.03302, over 4799.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2215, pruned_loss=0.03913, over 972913.90 frames.], batch size: 21, lr: 3.40e-04 +2022-05-05 11:41:06,845 INFO [train.py:715] (6/8) Epoch 6, batch 6900, loss[loss=0.1471, simple_loss=0.2183, pruned_loss=0.03797, over 4780.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2206, pruned_loss=0.03865, over 972448.54 frames.], batch size: 17, lr: 3.40e-04 +2022-05-05 11:41:45,924 INFO [train.py:715] (6/8) Epoch 6, batch 6950, loss[loss=0.1216, simple_loss=0.1945, pruned_loss=0.02439, over 4642.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2204, pruned_loss=0.03858, over 971513.08 frames.], batch size: 13, lr: 3.40e-04 +2022-05-05 11:42:25,622 INFO [train.py:715] (6/8) Epoch 6, batch 7000, loss[loss=0.1517, simple_loss=0.2262, pruned_loss=0.03859, over 4896.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2203, pruned_loss=0.03831, over 971196.30 frames.], batch size: 22, lr: 3.40e-04 +2022-05-05 11:43:04,217 INFO [train.py:715] (6/8) Epoch 6, batch 7050, loss[loss=0.1465, simple_loss=0.2252, pruned_loss=0.03386, over 4943.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2199, pruned_loss=0.03866, over 971393.09 frames.], batch size: 29, lr: 3.40e-04 +2022-05-05 11:43:42,733 INFO [train.py:715] (6/8) Epoch 6, batch 7100, loss[loss=0.1221, simple_loss=0.2002, pruned_loss=0.022, over 4869.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2199, pruned_loss=0.03839, over 972018.54 frames.], batch size: 22, lr: 3.40e-04 +2022-05-05 11:44:25,533 INFO [train.py:715] (6/8) Epoch 6, batch 7150, loss[loss=0.14, simple_loss=0.2082, pruned_loss=0.03589, over 4755.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2198, pruned_loss=0.03845, over 971657.16 frames.], batch size: 19, lr: 3.40e-04 +2022-05-05 11:45:04,231 INFO [train.py:715] (6/8) Epoch 6, batch 7200, loss[loss=0.1479, simple_loss=0.2174, pruned_loss=0.03923, over 4704.00 frames.], tot_loss[loss=0.1486, simple_loss=0.22, pruned_loss=0.03854, over 971472.49 frames.], batch size: 15, lr: 3.40e-04 +2022-05-05 11:45:42,692 INFO [train.py:715] (6/8) Epoch 6, batch 7250, loss[loss=0.1299, simple_loss=0.2062, pruned_loss=0.02679, over 4912.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2206, pruned_loss=0.03912, over 972448.98 frames.], batch size: 17, lr: 
3.40e-04 +2022-05-05 11:46:21,451 INFO [train.py:715] (6/8) Epoch 6, batch 7300, loss[loss=0.1544, simple_loss=0.2231, pruned_loss=0.04285, over 4968.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2201, pruned_loss=0.0389, over 973326.30 frames.], batch size: 15, lr: 3.40e-04 +2022-05-05 11:47:01,049 INFO [train.py:715] (6/8) Epoch 6, batch 7350, loss[loss=0.1775, simple_loss=0.2418, pruned_loss=0.05654, over 4961.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2199, pruned_loss=0.03916, over 973538.97 frames.], batch size: 35, lr: 3.40e-04 +2022-05-05 11:47:38,868 INFO [train.py:715] (6/8) Epoch 6, batch 7400, loss[loss=0.1408, simple_loss=0.2072, pruned_loss=0.03722, over 4791.00 frames.], tot_loss[loss=0.149, simple_loss=0.2201, pruned_loss=0.03895, over 972656.02 frames.], batch size: 18, lr: 3.40e-04 +2022-05-05 11:48:18,378 INFO [train.py:715] (6/8) Epoch 6, batch 7450, loss[loss=0.1508, simple_loss=0.2232, pruned_loss=0.03917, over 4848.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2203, pruned_loss=0.03903, over 972174.92 frames.], batch size: 20, lr: 3.40e-04 +2022-05-05 11:48:56,995 INFO [train.py:715] (6/8) Epoch 6, batch 7500, loss[loss=0.1382, simple_loss=0.2076, pruned_loss=0.03446, over 4884.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2205, pruned_loss=0.0388, over 971552.63 frames.], batch size: 22, lr: 3.40e-04 +2022-05-05 11:49:35,691 INFO [train.py:715] (6/8) Epoch 6, batch 7550, loss[loss=0.172, simple_loss=0.2517, pruned_loss=0.04619, over 4919.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2213, pruned_loss=0.03952, over 971610.33 frames.], batch size: 18, lr: 3.40e-04 +2022-05-05 11:50:14,633 INFO [train.py:715] (6/8) Epoch 6, batch 7600, loss[loss=0.1564, simple_loss=0.2203, pruned_loss=0.04627, over 4880.00 frames.], tot_loss[loss=0.15, simple_loss=0.2212, pruned_loss=0.03939, over 972176.12 frames.], batch size: 16, lr: 3.40e-04 +2022-05-05 11:50:53,763 INFO [train.py:715] (6/8) Epoch 6, batch 7650, loss[loss=0.1589, simple_loss=0.2292, pruned_loss=0.04429, over 4988.00 frames.], tot_loss[loss=0.1499, simple_loss=0.221, pruned_loss=0.03937, over 972601.63 frames.], batch size: 15, lr: 3.40e-04 +2022-05-05 11:51:33,382 INFO [train.py:715] (6/8) Epoch 6, batch 7700, loss[loss=0.1517, simple_loss=0.2215, pruned_loss=0.04097, over 4962.00 frames.], tot_loss[loss=0.149, simple_loss=0.2204, pruned_loss=0.03878, over 972549.27 frames.], batch size: 24, lr: 3.39e-04 +2022-05-05 11:52:11,583 INFO [train.py:715] (6/8) Epoch 6, batch 7750, loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.03497, over 4825.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2209, pruned_loss=0.03923, over 973148.21 frames.], batch size: 27, lr: 3.39e-04 +2022-05-05 11:52:51,083 INFO [train.py:715] (6/8) Epoch 6, batch 7800, loss[loss=0.1445, simple_loss=0.2266, pruned_loss=0.0312, over 4682.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2215, pruned_loss=0.03942, over 972655.29 frames.], batch size: 15, lr: 3.39e-04 +2022-05-05 11:53:30,018 INFO [train.py:715] (6/8) Epoch 6, batch 7850, loss[loss=0.1519, simple_loss=0.2306, pruned_loss=0.0366, over 4770.00 frames.], tot_loss[loss=0.1505, simple_loss=0.222, pruned_loss=0.03956, over 973098.67 frames.], batch size: 17, lr: 3.39e-04 +2022-05-05 11:54:08,584 INFO [train.py:715] (6/8) Epoch 6, batch 7900, loss[loss=0.1543, simple_loss=0.2205, pruned_loss=0.04407, over 4690.00 frames.], tot_loss[loss=0.1496, simple_loss=0.221, pruned_loss=0.03913, over 971849.20 frames.], batch size: 15, lr: 3.39e-04 +2022-05-05 11:54:47,343 INFO 
[train.py:715] (6/8) Epoch 6, batch 7950, loss[loss=0.1132, simple_loss=0.1873, pruned_loss=0.01954, over 4913.00 frames.], tot_loss[loss=0.151, simple_loss=0.2219, pruned_loss=0.04005, over 971417.53 frames.], batch size: 18, lr: 3.39e-04 +2022-05-05 11:55:26,517 INFO [train.py:715] (6/8) Epoch 6, batch 8000, loss[loss=0.175, simple_loss=0.2426, pruned_loss=0.05368, over 4812.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2213, pruned_loss=0.0397, over 971404.23 frames.], batch size: 21, lr: 3.39e-04 +2022-05-05 11:56:05,893 INFO [train.py:715] (6/8) Epoch 6, batch 8050, loss[loss=0.1482, simple_loss=0.2206, pruned_loss=0.03791, over 4843.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2214, pruned_loss=0.03977, over 972133.56 frames.], batch size: 26, lr: 3.39e-04 +2022-05-05 11:56:43,892 INFO [train.py:715] (6/8) Epoch 6, batch 8100, loss[loss=0.1253, simple_loss=0.209, pruned_loss=0.02085, over 4962.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2221, pruned_loss=0.03982, over 972992.34 frames.], batch size: 21, lr: 3.39e-04 +2022-05-05 11:57:22,884 INFO [train.py:715] (6/8) Epoch 6, batch 8150, loss[loss=0.1598, simple_loss=0.2206, pruned_loss=0.04948, over 4794.00 frames.], tot_loss[loss=0.15, simple_loss=0.2214, pruned_loss=0.03931, over 972758.89 frames.], batch size: 14, lr: 3.39e-04 +2022-05-05 11:58:01,956 INFO [train.py:715] (6/8) Epoch 6, batch 8200, loss[loss=0.1539, simple_loss=0.2283, pruned_loss=0.03975, over 4808.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2209, pruned_loss=0.03932, over 971922.12 frames.], batch size: 21, lr: 3.39e-04 +2022-05-05 11:58:41,281 INFO [train.py:715] (6/8) Epoch 6, batch 8250, loss[loss=0.1401, simple_loss=0.2085, pruned_loss=0.03588, over 4892.00 frames.], tot_loss[loss=0.15, simple_loss=0.221, pruned_loss=0.03947, over 971949.43 frames.], batch size: 22, lr: 3.39e-04 +2022-05-05 11:59:19,577 INFO [train.py:715] (6/8) Epoch 6, batch 8300, loss[loss=0.1465, simple_loss=0.2131, pruned_loss=0.03992, over 4950.00 frames.], tot_loss[loss=0.15, simple_loss=0.221, pruned_loss=0.03951, over 972411.16 frames.], batch size: 14, lr: 3.39e-04 +2022-05-05 11:59:58,762 INFO [train.py:715] (6/8) Epoch 6, batch 8350, loss[loss=0.1398, simple_loss=0.2158, pruned_loss=0.03186, over 4923.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2211, pruned_loss=0.03919, over 973023.11 frames.], batch size: 17, lr: 3.39e-04 +2022-05-05 12:00:37,621 INFO [train.py:715] (6/8) Epoch 6, batch 8400, loss[loss=0.1632, simple_loss=0.2281, pruned_loss=0.0492, over 4841.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2215, pruned_loss=0.03949, over 972914.24 frames.], batch size: 30, lr: 3.39e-04 +2022-05-05 12:01:15,840 INFO [train.py:715] (6/8) Epoch 6, batch 8450, loss[loss=0.1588, simple_loss=0.2196, pruned_loss=0.04897, over 4781.00 frames.], tot_loss[loss=0.1496, simple_loss=0.221, pruned_loss=0.03914, over 973178.71 frames.], batch size: 17, lr: 3.39e-04 +2022-05-05 12:01:54,988 INFO [train.py:715] (6/8) Epoch 6, batch 8500, loss[loss=0.1376, simple_loss=0.2103, pruned_loss=0.03241, over 4912.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2207, pruned_loss=0.03918, over 972343.78 frames.], batch size: 19, lr: 3.39e-04 +2022-05-05 12:02:33,547 INFO [train.py:715] (6/8) Epoch 6, batch 8550, loss[loss=0.1572, simple_loss=0.2349, pruned_loss=0.03981, over 4844.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2211, pruned_loss=0.03939, over 972095.20 frames.], batch size: 20, lr: 3.39e-04 +2022-05-05 12:03:12,439 INFO [train.py:715] (6/8) Epoch 6, batch 8600, 
loss[loss=0.1649, simple_loss=0.2424, pruned_loss=0.04369, over 4794.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2206, pruned_loss=0.03906, over 971486.17 frames.], batch size: 18, lr: 3.39e-04 +2022-05-05 12:03:50,310 INFO [train.py:715] (6/8) Epoch 6, batch 8650, loss[loss=0.1324, simple_loss=0.2084, pruned_loss=0.02821, over 4822.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2203, pruned_loss=0.03921, over 971700.72 frames.], batch size: 25, lr: 3.39e-04 +2022-05-05 12:04:29,733 INFO [train.py:715] (6/8) Epoch 6, batch 8700, loss[loss=0.1485, simple_loss=0.2356, pruned_loss=0.03073, over 4707.00 frames.], tot_loss[loss=0.149, simple_loss=0.2201, pruned_loss=0.03894, over 972133.45 frames.], batch size: 15, lr: 3.39e-04 +2022-05-05 12:05:08,431 INFO [train.py:715] (6/8) Epoch 6, batch 8750, loss[loss=0.1645, simple_loss=0.2387, pruned_loss=0.04511, over 4887.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2202, pruned_loss=0.03908, over 972427.79 frames.], batch size: 32, lr: 3.39e-04 +2022-05-05 12:05:46,858 INFO [train.py:715] (6/8) Epoch 6, batch 8800, loss[loss=0.1523, simple_loss=0.2233, pruned_loss=0.04064, over 4990.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2213, pruned_loss=0.03968, over 971966.72 frames.], batch size: 14, lr: 3.39e-04 +2022-05-05 12:06:25,686 INFO [train.py:715] (6/8) Epoch 6, batch 8850, loss[loss=0.2051, simple_loss=0.2523, pruned_loss=0.07899, over 4784.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2223, pruned_loss=0.03997, over 971295.43 frames.], batch size: 17, lr: 3.39e-04 +2022-05-05 12:07:04,756 INFO [train.py:715] (6/8) Epoch 6, batch 8900, loss[loss=0.1602, simple_loss=0.2286, pruned_loss=0.0459, over 4844.00 frames.], tot_loss[loss=0.1502, simple_loss=0.221, pruned_loss=0.03969, over 971346.87 frames.], batch size: 34, lr: 3.39e-04 +2022-05-05 12:07:43,998 INFO [train.py:715] (6/8) Epoch 6, batch 8950, loss[loss=0.1277, simple_loss=0.1909, pruned_loss=0.03222, over 4793.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2213, pruned_loss=0.03988, over 971593.16 frames.], batch size: 12, lr: 3.38e-04 +2022-05-05 12:08:22,492 INFO [train.py:715] (6/8) Epoch 6, batch 9000, loss[loss=0.1431, simple_loss=0.2103, pruned_loss=0.03797, over 4978.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2206, pruned_loss=0.0395, over 971842.97 frames.], batch size: 14, lr: 3.38e-04 +2022-05-05 12:08:22,493 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 12:08:35,890 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1094, simple_loss=0.1946, pruned_loss=0.01213, over 914524.00 frames. 
+2022-05-05 12:09:14,902 INFO [train.py:715] (6/8) Epoch 6, batch 9050, loss[loss=0.1293, simple_loss=0.1989, pruned_loss=0.02983, over 4826.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2202, pruned_loss=0.03923, over 971797.84 frames.], batch size: 13, lr: 3.38e-04 +2022-05-05 12:09:53,934 INFO [train.py:715] (6/8) Epoch 6, batch 9100, loss[loss=0.1215, simple_loss=0.1893, pruned_loss=0.02688, over 4642.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2187, pruned_loss=0.03829, over 972537.01 frames.], batch size: 13, lr: 3.38e-04 +2022-05-05 12:10:33,368 INFO [train.py:715] (6/8) Epoch 6, batch 9150, loss[loss=0.1594, simple_loss=0.2362, pruned_loss=0.04129, over 4872.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2184, pruned_loss=0.03807, over 972608.74 frames.], batch size: 22, lr: 3.38e-04 +2022-05-05 12:11:11,394 INFO [train.py:715] (6/8) Epoch 6, batch 9200, loss[loss=0.1202, simple_loss=0.1924, pruned_loss=0.02395, over 4940.00 frames.], tot_loss[loss=0.1479, simple_loss=0.219, pruned_loss=0.03839, over 972566.99 frames.], batch size: 21, lr: 3.38e-04 +2022-05-05 12:11:50,797 INFO [train.py:715] (6/8) Epoch 6, batch 9250, loss[loss=0.1436, simple_loss=0.2114, pruned_loss=0.03792, over 4956.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2195, pruned_loss=0.03854, over 972825.58 frames.], batch size: 35, lr: 3.38e-04 +2022-05-05 12:12:29,886 INFO [train.py:715] (6/8) Epoch 6, batch 9300, loss[loss=0.1601, simple_loss=0.2207, pruned_loss=0.04976, over 4808.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2193, pruned_loss=0.03844, over 972190.52 frames.], batch size: 26, lr: 3.38e-04 +2022-05-05 12:13:08,401 INFO [train.py:715] (6/8) Epoch 6, batch 9350, loss[loss=0.1184, simple_loss=0.2004, pruned_loss=0.01819, over 4949.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2191, pruned_loss=0.03839, over 972478.88 frames.], batch size: 21, lr: 3.38e-04 +2022-05-05 12:13:47,629 INFO [train.py:715] (6/8) Epoch 6, batch 9400, loss[loss=0.1695, simple_loss=0.2427, pruned_loss=0.04818, over 4927.00 frames.], tot_loss[loss=0.1489, simple_loss=0.22, pruned_loss=0.0389, over 972686.24 frames.], batch size: 18, lr: 3.38e-04 +2022-05-05 12:14:26,437 INFO [train.py:715] (6/8) Epoch 6, batch 9450, loss[loss=0.1276, simple_loss=0.2111, pruned_loss=0.02204, over 4825.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2203, pruned_loss=0.03903, over 971940.43 frames.], batch size: 13, lr: 3.38e-04 +2022-05-05 12:15:05,764 INFO [train.py:715] (6/8) Epoch 6, batch 9500, loss[loss=0.143, simple_loss=0.2188, pruned_loss=0.03363, over 4961.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2197, pruned_loss=0.03828, over 972630.06 frames.], batch size: 24, lr: 3.38e-04 +2022-05-05 12:15:44,435 INFO [train.py:715] (6/8) Epoch 6, batch 9550, loss[loss=0.1626, simple_loss=0.2496, pruned_loss=0.03776, over 4949.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03864, over 971454.84 frames.], batch size: 21, lr: 3.38e-04 +2022-05-05 12:16:23,398 INFO [train.py:715] (6/8) Epoch 6, batch 9600, loss[loss=0.1677, simple_loss=0.2359, pruned_loss=0.0497, over 4843.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2205, pruned_loss=0.03924, over 971873.99 frames.], batch size: 15, lr: 3.38e-04 +2022-05-05 12:17:02,130 INFO [train.py:715] (6/8) Epoch 6, batch 9650, loss[loss=0.1518, simple_loss=0.2298, pruned_loss=0.03688, over 4907.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2201, pruned_loss=0.03913, over 972076.63 frames.], batch size: 19, lr: 3.38e-04 +2022-05-05 12:17:40,451 INFO 
[train.py:715] (6/8) Epoch 6, batch 9700, loss[loss=0.1537, simple_loss=0.2208, pruned_loss=0.04331, over 4905.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2205, pruned_loss=0.03927, over 971442.82 frames.], batch size: 17, lr: 3.38e-04 +2022-05-05 12:18:19,758 INFO [train.py:715] (6/8) Epoch 6, batch 9750, loss[loss=0.143, simple_loss=0.2161, pruned_loss=0.03496, over 4940.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2206, pruned_loss=0.03922, over 971547.98 frames.], batch size: 23, lr: 3.38e-04 +2022-05-05 12:18:59,479 INFO [train.py:715] (6/8) Epoch 6, batch 9800, loss[loss=0.1652, simple_loss=0.2438, pruned_loss=0.04331, over 4961.00 frames.], tot_loss[loss=0.1489, simple_loss=0.22, pruned_loss=0.03892, over 972053.71 frames.], batch size: 21, lr: 3.38e-04 +2022-05-05 12:19:39,848 INFO [train.py:715] (6/8) Epoch 6, batch 9850, loss[loss=0.1106, simple_loss=0.1868, pruned_loss=0.01725, over 4825.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2203, pruned_loss=0.03898, over 972822.85 frames.], batch size: 26, lr: 3.38e-04 +2022-05-05 12:20:19,001 INFO [train.py:715] (6/8) Epoch 6, batch 9900, loss[loss=0.1569, simple_loss=0.233, pruned_loss=0.04034, over 4988.00 frames.], tot_loss[loss=0.149, simple_loss=0.2202, pruned_loss=0.0389, over 972595.15 frames.], batch size: 24, lr: 3.38e-04 +2022-05-05 12:20:59,134 INFO [train.py:715] (6/8) Epoch 6, batch 9950, loss[loss=0.1592, simple_loss=0.2292, pruned_loss=0.04458, over 4791.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2204, pruned_loss=0.03896, over 972754.98 frames.], batch size: 18, lr: 3.38e-04 +2022-05-05 12:21:39,154 INFO [train.py:715] (6/8) Epoch 6, batch 10000, loss[loss=0.1225, simple_loss=0.2015, pruned_loss=0.02174, over 4755.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2205, pruned_loss=0.03895, over 972563.39 frames.], batch size: 19, lr: 3.38e-04 +2022-05-05 12:22:17,403 INFO [train.py:715] (6/8) Epoch 6, batch 10050, loss[loss=0.1461, simple_loss=0.2134, pruned_loss=0.03935, over 4815.00 frames.], tot_loss[loss=0.1498, simple_loss=0.221, pruned_loss=0.03929, over 972683.43 frames.], batch size: 15, lr: 3.38e-04 +2022-05-05 12:22:56,769 INFO [train.py:715] (6/8) Epoch 6, batch 10100, loss[loss=0.1797, simple_loss=0.2442, pruned_loss=0.05758, over 4695.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2205, pruned_loss=0.03886, over 971770.20 frames.], batch size: 15, lr: 3.38e-04 +2022-05-05 12:23:34,992 INFO [train.py:715] (6/8) Epoch 6, batch 10150, loss[loss=0.1405, simple_loss=0.2163, pruned_loss=0.0324, over 4838.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2205, pruned_loss=0.03864, over 972254.21 frames.], batch size: 15, lr: 3.38e-04 +2022-05-05 12:24:14,025 INFO [train.py:715] (6/8) Epoch 6, batch 10200, loss[loss=0.1774, simple_loss=0.2335, pruned_loss=0.06063, over 4867.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2208, pruned_loss=0.03935, over 972148.32 frames.], batch size: 32, lr: 3.38e-04 +2022-05-05 12:24:52,555 INFO [train.py:715] (6/8) Epoch 6, batch 10250, loss[loss=0.1604, simple_loss=0.2355, pruned_loss=0.04263, over 4988.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2218, pruned_loss=0.03974, over 972817.72 frames.], batch size: 14, lr: 3.37e-04 +2022-05-05 12:25:31,643 INFO [train.py:715] (6/8) Epoch 6, batch 10300, loss[loss=0.114, simple_loss=0.1775, pruned_loss=0.02529, over 4778.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2212, pruned_loss=0.03921, over 971835.46 frames.], batch size: 12, lr: 3.37e-04 +2022-05-05 12:26:10,145 INFO [train.py:715] (6/8) Epoch 6, 
batch 10350, loss[loss=0.1478, simple_loss=0.2169, pruned_loss=0.03934, over 4968.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2209, pruned_loss=0.03905, over 972291.63 frames.], batch size: 39, lr: 3.37e-04 +2022-05-05 12:26:49,278 INFO [train.py:715] (6/8) Epoch 6, batch 10400, loss[loss=0.1826, simple_loss=0.2427, pruned_loss=0.06122, over 4885.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2212, pruned_loss=0.03933, over 972717.08 frames.], batch size: 32, lr: 3.37e-04 +2022-05-05 12:27:27,710 INFO [train.py:715] (6/8) Epoch 6, batch 10450, loss[loss=0.164, simple_loss=0.2359, pruned_loss=0.04609, over 4929.00 frames.], tot_loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03949, over 972196.27 frames.], batch size: 39, lr: 3.37e-04 +2022-05-05 12:28:06,363 INFO [train.py:715] (6/8) Epoch 6, batch 10500, loss[loss=0.1403, simple_loss=0.2159, pruned_loss=0.03237, over 4877.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2206, pruned_loss=0.03909, over 971755.47 frames.], batch size: 16, lr: 3.37e-04 +2022-05-05 12:28:45,431 INFO [train.py:715] (6/8) Epoch 6, batch 10550, loss[loss=0.1165, simple_loss=0.1804, pruned_loss=0.02632, over 4843.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2199, pruned_loss=0.0386, over 971465.69 frames.], batch size: 13, lr: 3.37e-04 +2022-05-05 12:29:23,699 INFO [train.py:715] (6/8) Epoch 6, batch 10600, loss[loss=0.1189, simple_loss=0.1913, pruned_loss=0.02328, over 4973.00 frames.], tot_loss[loss=0.1487, simple_loss=0.22, pruned_loss=0.03871, over 970757.62 frames.], batch size: 28, lr: 3.37e-04 +2022-05-05 12:30:02,901 INFO [train.py:715] (6/8) Epoch 6, batch 10650, loss[loss=0.1582, simple_loss=0.2294, pruned_loss=0.04348, over 4781.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2194, pruned_loss=0.03821, over 971465.80 frames.], batch size: 14, lr: 3.37e-04 +2022-05-05 12:30:41,618 INFO [train.py:715] (6/8) Epoch 6, batch 10700, loss[loss=0.1503, simple_loss=0.2315, pruned_loss=0.03455, over 4948.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2203, pruned_loss=0.03862, over 971714.89 frames.], batch size: 29, lr: 3.37e-04 +2022-05-05 12:31:20,571 INFO [train.py:715] (6/8) Epoch 6, batch 10750, loss[loss=0.1302, simple_loss=0.2059, pruned_loss=0.02725, over 4828.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2192, pruned_loss=0.03798, over 973045.91 frames.], batch size: 26, lr: 3.37e-04 +2022-05-05 12:31:59,032 INFO [train.py:715] (6/8) Epoch 6, batch 10800, loss[loss=0.1824, simple_loss=0.2461, pruned_loss=0.05934, over 4973.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2192, pruned_loss=0.03769, over 973156.40 frames.], batch size: 31, lr: 3.37e-04 +2022-05-05 12:32:37,568 INFO [train.py:715] (6/8) Epoch 6, batch 10850, loss[loss=0.1233, simple_loss=0.1947, pruned_loss=0.02599, over 4749.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2194, pruned_loss=0.03775, over 972579.70 frames.], batch size: 12, lr: 3.37e-04 +2022-05-05 12:33:15,996 INFO [train.py:715] (6/8) Epoch 6, batch 10900, loss[loss=0.1426, simple_loss=0.2221, pruned_loss=0.03151, over 4809.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2208, pruned_loss=0.0387, over 973210.77 frames.], batch size: 21, lr: 3.37e-04 +2022-05-05 12:33:54,113 INFO [train.py:715] (6/8) Epoch 6, batch 10950, loss[loss=0.1503, simple_loss=0.2203, pruned_loss=0.04017, over 4833.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2211, pruned_loss=0.03872, over 974067.38 frames.], batch size: 15, lr: 3.37e-04 +2022-05-05 12:34:33,261 INFO [train.py:715] (6/8) Epoch 6, batch 11000, 
loss[loss=0.1523, simple_loss=0.217, pruned_loss=0.04383, over 4881.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2207, pruned_loss=0.03882, over 973665.28 frames.], batch size: 22, lr: 3.37e-04 +2022-05-05 12:35:11,624 INFO [train.py:715] (6/8) Epoch 6, batch 11050, loss[loss=0.1465, simple_loss=0.2214, pruned_loss=0.03581, over 4978.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2206, pruned_loss=0.03889, over 973438.05 frames.], batch size: 35, lr: 3.37e-04 +2022-05-05 12:35:50,631 INFO [train.py:715] (6/8) Epoch 6, batch 11100, loss[loss=0.1432, simple_loss=0.2165, pruned_loss=0.03496, over 4806.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2203, pruned_loss=0.03917, over 972779.88 frames.], batch size: 24, lr: 3.37e-04 +2022-05-05 12:36:29,029 INFO [train.py:715] (6/8) Epoch 6, batch 11150, loss[loss=0.1587, simple_loss=0.227, pruned_loss=0.04519, over 4905.00 frames.], tot_loss[loss=0.148, simple_loss=0.2196, pruned_loss=0.03821, over 971830.21 frames.], batch size: 17, lr: 3.37e-04 +2022-05-05 12:37:07,407 INFO [train.py:715] (6/8) Epoch 6, batch 11200, loss[loss=0.1456, simple_loss=0.2063, pruned_loss=0.04245, over 4834.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2189, pruned_loss=0.03814, over 971913.56 frames.], batch size: 30, lr: 3.37e-04 +2022-05-05 12:37:45,843 INFO [train.py:715] (6/8) Epoch 6, batch 11250, loss[loss=0.1621, simple_loss=0.2435, pruned_loss=0.04034, over 4864.00 frames.], tot_loss[loss=0.1479, simple_loss=0.219, pruned_loss=0.03836, over 971376.62 frames.], batch size: 20, lr: 3.37e-04 +2022-05-05 12:38:24,405 INFO [train.py:715] (6/8) Epoch 6, batch 11300, loss[loss=0.1849, simple_loss=0.2579, pruned_loss=0.05596, over 4840.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2189, pruned_loss=0.03882, over 971539.76 frames.], batch size: 13, lr: 3.37e-04 +2022-05-05 12:39:03,682 INFO [train.py:715] (6/8) Epoch 6, batch 11350, loss[loss=0.1837, simple_loss=0.2585, pruned_loss=0.05449, over 4949.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2197, pruned_loss=0.03898, over 971948.25 frames.], batch size: 21, lr: 3.37e-04 +2022-05-05 12:39:42,621 INFO [train.py:715] (6/8) Epoch 6, batch 11400, loss[loss=0.1507, simple_loss=0.2199, pruned_loss=0.04073, over 4961.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2192, pruned_loss=0.03896, over 972105.95 frames.], batch size: 24, lr: 3.37e-04 +2022-05-05 12:40:21,680 INFO [train.py:715] (6/8) Epoch 6, batch 11450, loss[loss=0.1555, simple_loss=0.2207, pruned_loss=0.04517, over 4940.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2192, pruned_loss=0.03884, over 972135.81 frames.], batch size: 21, lr: 3.37e-04 +2022-05-05 12:40:59,949 INFO [train.py:715] (6/8) Epoch 6, batch 11500, loss[loss=0.1178, simple_loss=0.1881, pruned_loss=0.02372, over 4952.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2189, pruned_loss=0.03848, over 972466.68 frames.], batch size: 14, lr: 3.37e-04 +2022-05-05 12:41:38,301 INFO [train.py:715] (6/8) Epoch 6, batch 11550, loss[loss=0.1214, simple_loss=0.2035, pruned_loss=0.0197, over 4828.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2188, pruned_loss=0.03837, over 972170.52 frames.], batch size: 26, lr: 3.36e-04 +2022-05-05 12:42:17,677 INFO [train.py:715] (6/8) Epoch 6, batch 11600, loss[loss=0.147, simple_loss=0.2164, pruned_loss=0.03882, over 4797.00 frames.], tot_loss[loss=0.148, simple_loss=0.2193, pruned_loss=0.03836, over 972323.71 frames.], batch size: 14, lr: 3.36e-04 +2022-05-05 12:42:56,131 INFO [train.py:715] (6/8) Epoch 6, batch 11650, loss[loss=0.1301, 
simple_loss=0.2062, pruned_loss=0.02701, over 4819.00 frames.], tot_loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03765, over 972146.86 frames.], batch size: 21, lr: 3.36e-04 +2022-05-05 12:43:34,999 INFO [train.py:715] (6/8) Epoch 6, batch 11700, loss[loss=0.171, simple_loss=0.2547, pruned_loss=0.04363, over 4792.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2195, pruned_loss=0.03809, over 971964.04 frames.], batch size: 18, lr: 3.36e-04 +2022-05-05 12:44:13,936 INFO [train.py:715] (6/8) Epoch 6, batch 11750, loss[loss=0.1592, simple_loss=0.2238, pruned_loss=0.04724, over 4923.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2208, pruned_loss=0.03903, over 971201.84 frames.], batch size: 18, lr: 3.36e-04 +2022-05-05 12:44:53,165 INFO [train.py:715] (6/8) Epoch 6, batch 11800, loss[loss=0.1318, simple_loss=0.2012, pruned_loss=0.03119, over 4976.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2194, pruned_loss=0.03847, over 971612.24 frames.], batch size: 35, lr: 3.36e-04 +2022-05-05 12:45:31,841 INFO [train.py:715] (6/8) Epoch 6, batch 11850, loss[loss=0.1597, simple_loss=0.2429, pruned_loss=0.03826, over 4904.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2195, pruned_loss=0.03835, over 972173.99 frames.], batch size: 19, lr: 3.36e-04 +2022-05-05 12:46:10,413 INFO [train.py:715] (6/8) Epoch 6, batch 11900, loss[loss=0.1366, simple_loss=0.2118, pruned_loss=0.0307, over 4763.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2191, pruned_loss=0.03772, over 972045.08 frames.], batch size: 18, lr: 3.36e-04 +2022-05-05 12:46:49,724 INFO [train.py:715] (6/8) Epoch 6, batch 11950, loss[loss=0.12, simple_loss=0.1959, pruned_loss=0.02203, over 4797.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2189, pruned_loss=0.0377, over 971183.07 frames.], batch size: 12, lr: 3.36e-04 +2022-05-05 12:47:28,221 INFO [train.py:715] (6/8) Epoch 6, batch 12000, loss[loss=0.1171, simple_loss=0.1876, pruned_loss=0.02328, over 4843.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2198, pruned_loss=0.03784, over 971511.43 frames.], batch size: 12, lr: 3.36e-04 +2022-05-05 12:47:28,221 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 12:47:37,945 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1091, simple_loss=0.1942, pruned_loss=0.01199, over 914524.00 frames. 
+2022-05-05 12:48:16,695 INFO [train.py:715] (6/8) Epoch 6, batch 12050, loss[loss=0.1701, simple_loss=0.2311, pruned_loss=0.0546, over 4748.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2202, pruned_loss=0.03864, over 971713.34 frames.], batch size: 16, lr: 3.36e-04 +2022-05-05 12:48:56,376 INFO [train.py:715] (6/8) Epoch 6, batch 12100, loss[loss=0.1368, simple_loss=0.209, pruned_loss=0.03234, over 4832.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2202, pruned_loss=0.03857, over 972097.41 frames.], batch size: 13, lr: 3.36e-04 +2022-05-05 12:49:35,322 INFO [train.py:715] (6/8) Epoch 6, batch 12150, loss[loss=0.1414, simple_loss=0.19, pruned_loss=0.04646, over 4870.00 frames.], tot_loss[loss=0.149, simple_loss=0.2205, pruned_loss=0.03879, over 972445.35 frames.], batch size: 13, lr: 3.36e-04 +2022-05-05 12:50:14,104 INFO [train.py:715] (6/8) Epoch 6, batch 12200, loss[loss=0.1212, simple_loss=0.2019, pruned_loss=0.02023, over 4814.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2214, pruned_loss=0.03939, over 972165.47 frames.], batch size: 13, lr: 3.36e-04 +2022-05-05 12:50:53,315 INFO [train.py:715] (6/8) Epoch 6, batch 12250, loss[loss=0.1227, simple_loss=0.1945, pruned_loss=0.02541, over 4955.00 frames.], tot_loss[loss=0.151, simple_loss=0.2224, pruned_loss=0.03987, over 971987.35 frames.], batch size: 21, lr: 3.36e-04 +2022-05-05 12:51:32,109 INFO [train.py:715] (6/8) Epoch 6, batch 12300, loss[loss=0.1452, simple_loss=0.2252, pruned_loss=0.03265, over 4987.00 frames.], tot_loss[loss=0.1499, simple_loss=0.221, pruned_loss=0.03936, over 972128.00 frames.], batch size: 31, lr: 3.36e-04 +2022-05-05 12:52:11,888 INFO [train.py:715] (6/8) Epoch 6, batch 12350, loss[loss=0.1547, simple_loss=0.2372, pruned_loss=0.03613, over 4768.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2208, pruned_loss=0.03902, over 971417.93 frames.], batch size: 18, lr: 3.36e-04 +2022-05-05 12:52:50,523 INFO [train.py:715] (6/8) Epoch 6, batch 12400, loss[loss=0.1406, simple_loss=0.2168, pruned_loss=0.03223, over 4991.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2203, pruned_loss=0.03854, over 971472.72 frames.], batch size: 25, lr: 3.36e-04 +2022-05-05 12:53:29,626 INFO [train.py:715] (6/8) Epoch 6, batch 12450, loss[loss=0.1137, simple_loss=0.1891, pruned_loss=0.01915, over 4927.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2201, pruned_loss=0.03818, over 972318.83 frames.], batch size: 29, lr: 3.36e-04 +2022-05-05 12:54:08,747 INFO [train.py:715] (6/8) Epoch 6, batch 12500, loss[loss=0.1376, simple_loss=0.2081, pruned_loss=0.0336, over 4814.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2193, pruned_loss=0.03796, over 972302.82 frames.], batch size: 27, lr: 3.36e-04 +2022-05-05 12:54:47,051 INFO [train.py:715] (6/8) Epoch 6, batch 12550, loss[loss=0.1541, simple_loss=0.2372, pruned_loss=0.03551, over 4807.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2211, pruned_loss=0.03917, over 972246.55 frames.], batch size: 25, lr: 3.36e-04 +2022-05-05 12:55:26,405 INFO [train.py:715] (6/8) Epoch 6, batch 12600, loss[loss=0.1306, simple_loss=0.1974, pruned_loss=0.03195, over 4893.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2209, pruned_loss=0.03868, over 972183.89 frames.], batch size: 19, lr: 3.36e-04 +2022-05-05 12:56:05,093 INFO [train.py:715] (6/8) Epoch 6, batch 12650, loss[loss=0.1234, simple_loss=0.2063, pruned_loss=0.02022, over 4768.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2206, pruned_loss=0.03889, over 970646.67 frames.], batch size: 18, lr: 3.36e-04 +2022-05-05 12:56:43,909 
INFO [train.py:715] (6/8) Epoch 6, batch 12700, loss[loss=0.1375, simple_loss=0.2008, pruned_loss=0.03715, over 4871.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2199, pruned_loss=0.03849, over 971143.38 frames.], batch size: 20, lr: 3.36e-04 +2022-05-05 12:57:22,047 INFO [train.py:715] (6/8) Epoch 6, batch 12750, loss[loss=0.1442, simple_loss=0.2147, pruned_loss=0.03685, over 4970.00 frames.], tot_loss[loss=0.148, simple_loss=0.2199, pruned_loss=0.03807, over 971979.02 frames.], batch size: 15, lr: 3.36e-04 +2022-05-05 12:58:01,006 INFO [train.py:715] (6/8) Epoch 6, batch 12800, loss[loss=0.1754, simple_loss=0.2367, pruned_loss=0.05702, over 4819.00 frames.], tot_loss[loss=0.1484, simple_loss=0.22, pruned_loss=0.03842, over 971782.91 frames.], batch size: 13, lr: 3.36e-04 +2022-05-05 12:58:39,734 INFO [train.py:715] (6/8) Epoch 6, batch 12850, loss[loss=0.1712, simple_loss=0.242, pruned_loss=0.05018, over 4889.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2198, pruned_loss=0.03853, over 972118.18 frames.], batch size: 16, lr: 3.35e-04 +2022-05-05 12:59:18,385 INFO [train.py:715] (6/8) Epoch 6, batch 12900, loss[loss=0.1531, simple_loss=0.2306, pruned_loss=0.03779, over 4802.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2211, pruned_loss=0.03926, over 971656.97 frames.], batch size: 24, lr: 3.35e-04 +2022-05-05 12:59:58,331 INFO [train.py:715] (6/8) Epoch 6, batch 12950, loss[loss=0.1536, simple_loss=0.2203, pruned_loss=0.04344, over 4874.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2206, pruned_loss=0.03915, over 972042.30 frames.], batch size: 22, lr: 3.35e-04 +2022-05-05 13:00:37,483 INFO [train.py:715] (6/8) Epoch 6, batch 13000, loss[loss=0.1683, simple_loss=0.2401, pruned_loss=0.04824, over 4879.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2206, pruned_loss=0.03885, over 972175.15 frames.], batch size: 22, lr: 3.35e-04 +2022-05-05 13:01:16,479 INFO [train.py:715] (6/8) Epoch 6, batch 13050, loss[loss=0.1347, simple_loss=0.2099, pruned_loss=0.0298, over 4830.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2208, pruned_loss=0.03923, over 972110.18 frames.], batch size: 15, lr: 3.35e-04 +2022-05-05 13:01:54,767 INFO [train.py:715] (6/8) Epoch 6, batch 13100, loss[loss=0.1532, simple_loss=0.2189, pruned_loss=0.0438, over 4927.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2203, pruned_loss=0.03915, over 971190.63 frames.], batch size: 23, lr: 3.35e-04 +2022-05-05 13:02:34,347 INFO [train.py:715] (6/8) Epoch 6, batch 13150, loss[loss=0.1396, simple_loss=0.1983, pruned_loss=0.04045, over 4791.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2207, pruned_loss=0.03934, over 971719.86 frames.], batch size: 12, lr: 3.35e-04 +2022-05-05 13:03:12,925 INFO [train.py:715] (6/8) Epoch 6, batch 13200, loss[loss=0.154, simple_loss=0.2165, pruned_loss=0.04575, over 4738.00 frames.], tot_loss[loss=0.151, simple_loss=0.222, pruned_loss=0.04001, over 971877.53 frames.], batch size: 16, lr: 3.35e-04 +2022-05-05 13:03:51,769 INFO [train.py:715] (6/8) Epoch 6, batch 13250, loss[loss=0.1814, simple_loss=0.2439, pruned_loss=0.05942, over 4744.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2227, pruned_loss=0.04036, over 971118.95 frames.], batch size: 16, lr: 3.35e-04 +2022-05-05 13:04:30,643 INFO [train.py:715] (6/8) Epoch 6, batch 13300, loss[loss=0.1335, simple_loss=0.2085, pruned_loss=0.02926, over 4819.00 frames.], tot_loss[loss=0.1512, simple_loss=0.222, pruned_loss=0.04019, over 971399.23 frames.], batch size: 24, lr: 3.35e-04 +2022-05-05 13:05:09,759 INFO [train.py:715] (6/8) 
Epoch 6, batch 13350, loss[loss=0.1753, simple_loss=0.2433, pruned_loss=0.05369, over 4841.00 frames.], tot_loss[loss=0.15, simple_loss=0.221, pruned_loss=0.0395, over 972222.51 frames.], batch size: 13, lr: 3.35e-04 +2022-05-05 13:05:48,895 INFO [train.py:715] (6/8) Epoch 6, batch 13400, loss[loss=0.1765, simple_loss=0.2504, pruned_loss=0.05126, over 4781.00 frames.], tot_loss[loss=0.1501, simple_loss=0.221, pruned_loss=0.03957, over 971815.62 frames.], batch size: 18, lr: 3.35e-04 +2022-05-05 13:06:27,484 INFO [train.py:715] (6/8) Epoch 6, batch 13450, loss[loss=0.1282, simple_loss=0.192, pruned_loss=0.03219, over 4835.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2209, pruned_loss=0.03933, over 972047.12 frames.], batch size: 15, lr: 3.35e-04 +2022-05-05 13:07:07,012 INFO [train.py:715] (6/8) Epoch 6, batch 13500, loss[loss=0.1333, simple_loss=0.1994, pruned_loss=0.03365, over 4957.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2208, pruned_loss=0.0388, over 972039.00 frames.], batch size: 15, lr: 3.35e-04 +2022-05-05 13:07:45,023 INFO [train.py:715] (6/8) Epoch 6, batch 13550, loss[loss=0.1423, simple_loss=0.2217, pruned_loss=0.03149, over 4812.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2212, pruned_loss=0.03913, over 972807.16 frames.], batch size: 25, lr: 3.35e-04 +2022-05-05 13:08:23,968 INFO [train.py:715] (6/8) Epoch 6, batch 13600, loss[loss=0.1543, simple_loss=0.2288, pruned_loss=0.03991, over 4973.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2213, pruned_loss=0.03913, over 972553.40 frames.], batch size: 35, lr: 3.35e-04 +2022-05-05 13:09:03,112 INFO [train.py:715] (6/8) Epoch 6, batch 13650, loss[loss=0.1711, simple_loss=0.2322, pruned_loss=0.05495, over 4800.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2209, pruned_loss=0.03885, over 971339.48 frames.], batch size: 25, lr: 3.35e-04 +2022-05-05 13:09:42,438 INFO [train.py:715] (6/8) Epoch 6, batch 13700, loss[loss=0.1474, simple_loss=0.2178, pruned_loss=0.0385, over 4879.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2214, pruned_loss=0.03911, over 971831.90 frames.], batch size: 16, lr: 3.35e-04 +2022-05-05 13:10:21,546 INFO [train.py:715] (6/8) Epoch 6, batch 13750, loss[loss=0.1699, simple_loss=0.2423, pruned_loss=0.04874, over 4908.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2205, pruned_loss=0.03855, over 971353.64 frames.], batch size: 19, lr: 3.35e-04 +2022-05-05 13:11:00,146 INFO [train.py:715] (6/8) Epoch 6, batch 13800, loss[loss=0.1295, simple_loss=0.2048, pruned_loss=0.02712, over 4902.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2197, pruned_loss=0.0381, over 971755.14 frames.], batch size: 17, lr: 3.35e-04 +2022-05-05 13:11:40,117 INFO [train.py:715] (6/8) Epoch 6, batch 13850, loss[loss=0.1311, simple_loss=0.1977, pruned_loss=0.03221, over 4885.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2197, pruned_loss=0.03826, over 972888.29 frames.], batch size: 22, lr: 3.35e-04 +2022-05-05 13:12:18,448 INFO [train.py:715] (6/8) Epoch 6, batch 13900, loss[loss=0.1324, simple_loss=0.2138, pruned_loss=0.02551, over 4985.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2191, pruned_loss=0.03801, over 972220.24 frames.], batch size: 15, lr: 3.35e-04 +2022-05-05 13:12:57,457 INFO [train.py:715] (6/8) Epoch 6, batch 13950, loss[loss=0.147, simple_loss=0.2189, pruned_loss=0.03752, over 4767.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2188, pruned_loss=0.03789, over 971600.91 frames.], batch size: 19, lr: 3.35e-04 +2022-05-05 13:13:36,064 INFO [train.py:715] (6/8) Epoch 6, batch 14000, 
loss[loss=0.1407, simple_loss=0.2094, pruned_loss=0.03603, over 4974.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2186, pruned_loss=0.03827, over 971494.02 frames.], batch size: 25, lr: 3.35e-04 +2022-05-05 13:14:15,127 INFO [train.py:715] (6/8) Epoch 6, batch 14050, loss[loss=0.1497, simple_loss=0.215, pruned_loss=0.0422, over 4838.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2188, pruned_loss=0.03795, over 971932.59 frames.], batch size: 32, lr: 3.35e-04 +2022-05-05 13:14:53,532 INFO [train.py:715] (6/8) Epoch 6, batch 14100, loss[loss=0.1605, simple_loss=0.2393, pruned_loss=0.04088, over 4872.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2192, pruned_loss=0.03801, over 971784.84 frames.], batch size: 16, lr: 3.35e-04 +2022-05-05 13:15:32,013 INFO [train.py:715] (6/8) Epoch 6, batch 14150, loss[loss=0.1284, simple_loss=0.2109, pruned_loss=0.02293, over 4891.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2201, pruned_loss=0.03814, over 972412.76 frames.], batch size: 16, lr: 3.35e-04 +2022-05-05 13:16:11,445 INFO [train.py:715] (6/8) Epoch 6, batch 14200, loss[loss=0.1431, simple_loss=0.2125, pruned_loss=0.03682, over 4966.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2204, pruned_loss=0.03849, over 972521.28 frames.], batch size: 29, lr: 3.34e-04 +2022-05-05 13:16:50,086 INFO [train.py:715] (6/8) Epoch 6, batch 14250, loss[loss=0.1244, simple_loss=0.1944, pruned_loss=0.02716, over 4824.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2194, pruned_loss=0.03817, over 973034.10 frames.], batch size: 26, lr: 3.34e-04 +2022-05-05 13:17:29,121 INFO [train.py:715] (6/8) Epoch 6, batch 14300, loss[loss=0.1669, simple_loss=0.234, pruned_loss=0.04987, over 4832.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2201, pruned_loss=0.03839, over 973236.29 frames.], batch size: 15, lr: 3.34e-04 +2022-05-05 13:18:07,580 INFO [train.py:715] (6/8) Epoch 6, batch 14350, loss[loss=0.1609, simple_loss=0.2273, pruned_loss=0.04723, over 4876.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2207, pruned_loss=0.03924, over 973250.92 frames.], batch size: 22, lr: 3.34e-04 +2022-05-05 13:18:47,508 INFO [train.py:715] (6/8) Epoch 6, batch 14400, loss[loss=0.159, simple_loss=0.2314, pruned_loss=0.04326, over 4875.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2215, pruned_loss=0.03953, over 972451.75 frames.], batch size: 22, lr: 3.34e-04 +2022-05-05 13:19:25,857 INFO [train.py:715] (6/8) Epoch 6, batch 14450, loss[loss=0.1523, simple_loss=0.2183, pruned_loss=0.04313, over 4849.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2222, pruned_loss=0.03984, over 972487.23 frames.], batch size: 15, lr: 3.34e-04 +2022-05-05 13:20:04,246 INFO [train.py:715] (6/8) Epoch 6, batch 14500, loss[loss=0.1476, simple_loss=0.2223, pruned_loss=0.03646, over 4891.00 frames.], tot_loss[loss=0.1506, simple_loss=0.222, pruned_loss=0.03959, over 972155.59 frames.], batch size: 17, lr: 3.34e-04 +2022-05-05 13:20:43,927 INFO [train.py:715] (6/8) Epoch 6, batch 14550, loss[loss=0.1727, simple_loss=0.2347, pruned_loss=0.05531, over 4812.00 frames.], tot_loss[loss=0.1508, simple_loss=0.222, pruned_loss=0.03977, over 972289.45 frames.], batch size: 26, lr: 3.34e-04 +2022-05-05 13:21:22,652 INFO [train.py:715] (6/8) Epoch 6, batch 14600, loss[loss=0.1256, simple_loss=0.1959, pruned_loss=0.02768, over 4793.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2234, pruned_loss=0.04009, over 971494.75 frames.], batch size: 12, lr: 3.34e-04 +2022-05-05 13:22:01,119 INFO [train.py:715] (6/8) Epoch 6, batch 14650, loss[loss=0.1417, 
simple_loss=0.2103, pruned_loss=0.03649, over 4850.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2224, pruned_loss=0.03946, over 972051.81 frames.], batch size: 32, lr: 3.34e-04 +2022-05-05 13:22:40,130 INFO [train.py:715] (6/8) Epoch 6, batch 14700, loss[loss=0.1585, simple_loss=0.2324, pruned_loss=0.04231, over 4981.00 frames.], tot_loss[loss=0.15, simple_loss=0.2217, pruned_loss=0.03912, over 972627.11 frames.], batch size: 28, lr: 3.34e-04 +2022-05-05 13:23:19,674 INFO [train.py:715] (6/8) Epoch 6, batch 14750, loss[loss=0.1449, simple_loss=0.2261, pruned_loss=0.03185, over 4780.00 frames.], tot_loss[loss=0.1497, simple_loss=0.221, pruned_loss=0.03925, over 972560.18 frames.], batch size: 14, lr: 3.34e-04 +2022-05-05 13:23:57,829 INFO [train.py:715] (6/8) Epoch 6, batch 14800, loss[loss=0.165, simple_loss=0.2307, pruned_loss=0.0496, over 4960.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2208, pruned_loss=0.03932, over 972668.76 frames.], batch size: 39, lr: 3.34e-04 +2022-05-05 13:24:35,997 INFO [train.py:715] (6/8) Epoch 6, batch 14850, loss[loss=0.1845, simple_loss=0.2443, pruned_loss=0.06232, over 4768.00 frames.], tot_loss[loss=0.1499, simple_loss=0.221, pruned_loss=0.03938, over 973255.30 frames.], batch size: 17, lr: 3.34e-04 +2022-05-05 13:25:15,101 INFO [train.py:715] (6/8) Epoch 6, batch 14900, loss[loss=0.1729, simple_loss=0.2446, pruned_loss=0.05056, over 4817.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2214, pruned_loss=0.03939, over 972041.26 frames.], batch size: 25, lr: 3.34e-04 +2022-05-05 13:25:53,360 INFO [train.py:715] (6/8) Epoch 6, batch 14950, loss[loss=0.1304, simple_loss=0.1985, pruned_loss=0.03119, over 4928.00 frames.], tot_loss[loss=0.1501, simple_loss=0.221, pruned_loss=0.03963, over 971846.07 frames.], batch size: 23, lr: 3.34e-04 +2022-05-05 13:26:32,021 INFO [train.py:715] (6/8) Epoch 6, batch 15000, loss[loss=0.1421, simple_loss=0.2105, pruned_loss=0.03678, over 4691.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2213, pruned_loss=0.03958, over 972057.37 frames.], batch size: 15, lr: 3.34e-04 +2022-05-05 13:26:32,022 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 13:26:41,818 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1091, simple_loss=0.1941, pruned_loss=0.01202, over 914524.00 frames. 
+2022-05-05 13:27:20,604 INFO [train.py:715] (6/8) Epoch 6, batch 15050, loss[loss=0.1424, simple_loss=0.2198, pruned_loss=0.03251, over 4878.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2211, pruned_loss=0.03916, over 972510.33 frames.], batch size: 22, lr: 3.34e-04 +2022-05-05 13:27:59,349 INFO [train.py:715] (6/8) Epoch 6, batch 15100, loss[loss=0.1649, simple_loss=0.2323, pruned_loss=0.04878, over 4831.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2209, pruned_loss=0.03907, over 972288.57 frames.], batch size: 15, lr: 3.34e-04 +2022-05-05 13:28:41,260 INFO [train.py:715] (6/8) Epoch 6, batch 15150, loss[loss=0.1675, simple_loss=0.238, pruned_loss=0.04856, over 4785.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2212, pruned_loss=0.03922, over 972112.74 frames.], batch size: 18, lr: 3.34e-04 +2022-05-05 13:29:19,832 INFO [train.py:715] (6/8) Epoch 6, batch 15200, loss[loss=0.1393, simple_loss=0.2155, pruned_loss=0.03158, over 4964.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2204, pruned_loss=0.03912, over 971216.37 frames.], batch size: 28, lr: 3.34e-04 +2022-05-05 13:29:58,374 INFO [train.py:715] (6/8) Epoch 6, batch 15250, loss[loss=0.1441, simple_loss=0.2211, pruned_loss=0.03352, over 4814.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2209, pruned_loss=0.03923, over 970987.77 frames.], batch size: 25, lr: 3.34e-04 +2022-05-05 13:30:37,907 INFO [train.py:715] (6/8) Epoch 6, batch 15300, loss[loss=0.1421, simple_loss=0.2033, pruned_loss=0.04045, over 4969.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2208, pruned_loss=0.03906, over 971350.94 frames.], batch size: 28, lr: 3.34e-04 +2022-05-05 13:31:15,932 INFO [train.py:715] (6/8) Epoch 6, batch 15350, loss[loss=0.1632, simple_loss=0.2309, pruned_loss=0.0477, over 4839.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2214, pruned_loss=0.03963, over 971698.58 frames.], batch size: 15, lr: 3.34e-04 +2022-05-05 13:31:54,939 INFO [train.py:715] (6/8) Epoch 6, batch 15400, loss[loss=0.1409, simple_loss=0.2169, pruned_loss=0.03242, over 4934.00 frames.], tot_loss[loss=0.151, simple_loss=0.2217, pruned_loss=0.04013, over 972335.07 frames.], batch size: 29, lr: 3.34e-04 +2022-05-05 13:32:33,865 INFO [train.py:715] (6/8) Epoch 6, batch 15450, loss[loss=0.1365, simple_loss=0.2251, pruned_loss=0.02396, over 4962.00 frames.], tot_loss[loss=0.1512, simple_loss=0.222, pruned_loss=0.04024, over 973141.54 frames.], batch size: 15, lr: 3.34e-04 +2022-05-05 13:33:13,326 INFO [train.py:715] (6/8) Epoch 6, batch 15500, loss[loss=0.1505, simple_loss=0.2231, pruned_loss=0.03891, over 4916.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2218, pruned_loss=0.04001, over 972744.84 frames.], batch size: 23, lr: 3.34e-04 +2022-05-05 13:33:51,505 INFO [train.py:715] (6/8) Epoch 6, batch 15550, loss[loss=0.1374, simple_loss=0.2144, pruned_loss=0.03018, over 4810.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2206, pruned_loss=0.03927, over 972549.86 frames.], batch size: 21, lr: 3.33e-04 +2022-05-05 13:34:30,394 INFO [train.py:715] (6/8) Epoch 6, batch 15600, loss[loss=0.176, simple_loss=0.253, pruned_loss=0.04955, over 4919.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2212, pruned_loss=0.0402, over 973014.17 frames.], batch size: 39, lr: 3.33e-04 +2022-05-05 13:35:09,326 INFO [train.py:715] (6/8) Epoch 6, batch 15650, loss[loss=0.1513, simple_loss=0.2249, pruned_loss=0.03888, over 4945.00 frames.], tot_loss[loss=0.1502, simple_loss=0.221, pruned_loss=0.03973, over 973538.74 frames.], batch size: 23, lr: 3.33e-04 +2022-05-05 13:35:47,371 
INFO [train.py:715] (6/8) Epoch 6, batch 15700, loss[loss=0.1557, simple_loss=0.2202, pruned_loss=0.04566, over 4807.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2201, pruned_loss=0.03953, over 973785.23 frames.], batch size: 21, lr: 3.33e-04 +2022-05-05 13:36:26,052 INFO [train.py:715] (6/8) Epoch 6, batch 15750, loss[loss=0.129, simple_loss=0.2076, pruned_loss=0.02521, over 4812.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2209, pruned_loss=0.03993, over 972215.32 frames.], batch size: 26, lr: 3.33e-04 +2022-05-05 13:37:04,792 INFO [train.py:715] (6/8) Epoch 6, batch 15800, loss[loss=0.1309, simple_loss=0.2043, pruned_loss=0.02874, over 4867.00 frames.], tot_loss[loss=0.15, simple_loss=0.2207, pruned_loss=0.0396, over 972334.09 frames.], batch size: 20, lr: 3.33e-04 +2022-05-05 13:37:43,839 INFO [train.py:715] (6/8) Epoch 6, batch 15850, loss[loss=0.1366, simple_loss=0.2153, pruned_loss=0.02897, over 4805.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2201, pruned_loss=0.03915, over 972843.55 frames.], batch size: 25, lr: 3.33e-04 +2022-05-05 13:38:22,282 INFO [train.py:715] (6/8) Epoch 6, batch 15900, loss[loss=0.1476, simple_loss=0.2149, pruned_loss=0.04016, over 4896.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2194, pruned_loss=0.03847, over 972904.56 frames.], batch size: 19, lr: 3.33e-04 +2022-05-05 13:39:00,648 INFO [train.py:715] (6/8) Epoch 6, batch 15950, loss[loss=0.1648, simple_loss=0.2265, pruned_loss=0.05153, over 4789.00 frames.], tot_loss[loss=0.148, simple_loss=0.2192, pruned_loss=0.03842, over 972747.91 frames.], batch size: 18, lr: 3.33e-04 +2022-05-05 13:39:39,975 INFO [train.py:715] (6/8) Epoch 6, batch 16000, loss[loss=0.1256, simple_loss=0.1979, pruned_loss=0.0267, over 4816.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2198, pruned_loss=0.03834, over 972993.78 frames.], batch size: 26, lr: 3.33e-04 +2022-05-05 13:40:18,430 INFO [train.py:715] (6/8) Epoch 6, batch 16050, loss[loss=0.1583, simple_loss=0.2308, pruned_loss=0.04293, over 4959.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2201, pruned_loss=0.03871, over 972885.78 frames.], batch size: 35, lr: 3.33e-04 +2022-05-05 13:40:56,899 INFO [train.py:715] (6/8) Epoch 6, batch 16100, loss[loss=0.154, simple_loss=0.218, pruned_loss=0.045, over 4962.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2206, pruned_loss=0.03879, over 972622.16 frames.], batch size: 35, lr: 3.33e-04 +2022-05-05 13:41:35,293 INFO [train.py:715] (6/8) Epoch 6, batch 16150, loss[loss=0.1436, simple_loss=0.2143, pruned_loss=0.03647, over 4976.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2209, pruned_loss=0.03893, over 973564.00 frames.], batch size: 14, lr: 3.33e-04 +2022-05-05 13:42:14,792 INFO [train.py:715] (6/8) Epoch 6, batch 16200, loss[loss=0.1745, simple_loss=0.2234, pruned_loss=0.06275, over 4811.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2204, pruned_loss=0.03902, over 973113.69 frames.], batch size: 13, lr: 3.33e-04 +2022-05-05 13:42:53,110 INFO [train.py:715] (6/8) Epoch 6, batch 16250, loss[loss=0.2474, simple_loss=0.3064, pruned_loss=0.09423, over 4954.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2207, pruned_loss=0.03915, over 972445.68 frames.], batch size: 21, lr: 3.33e-04 +2022-05-05 13:43:31,726 INFO [train.py:715] (6/8) Epoch 6, batch 16300, loss[loss=0.1317, simple_loss=0.2113, pruned_loss=0.02611, over 4943.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2205, pruned_loss=0.03832, over 972789.15 frames.], batch size: 21, lr: 3.33e-04 +2022-05-05 13:44:11,199 INFO [train.py:715] (6/8) 
Epoch 6, batch 16350, loss[loss=0.1594, simple_loss=0.2286, pruned_loss=0.04512, over 4929.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2203, pruned_loss=0.03844, over 972852.29 frames.], batch size: 18, lr: 3.33e-04 +2022-05-05 13:44:49,508 INFO [train.py:715] (6/8) Epoch 6, batch 16400, loss[loss=0.136, simple_loss=0.2073, pruned_loss=0.03237, over 4787.00 frames.], tot_loss[loss=0.149, simple_loss=0.2208, pruned_loss=0.03863, over 972153.72 frames.], batch size: 14, lr: 3.33e-04 +2022-05-05 13:45:28,821 INFO [train.py:715] (6/8) Epoch 6, batch 16450, loss[loss=0.1606, simple_loss=0.2335, pruned_loss=0.04387, over 4833.00 frames.], tot_loss[loss=0.149, simple_loss=0.2202, pruned_loss=0.03892, over 972080.17 frames.], batch size: 15, lr: 3.33e-04 +2022-05-05 13:46:07,626 INFO [train.py:715] (6/8) Epoch 6, batch 16500, loss[loss=0.1397, simple_loss=0.2122, pruned_loss=0.03356, over 4978.00 frames.], tot_loss[loss=0.149, simple_loss=0.2205, pruned_loss=0.03882, over 972219.84 frames.], batch size: 15, lr: 3.33e-04 +2022-05-05 13:46:46,577 INFO [train.py:715] (6/8) Epoch 6, batch 16550, loss[loss=0.148, simple_loss=0.2163, pruned_loss=0.03983, over 4838.00 frames.], tot_loss[loss=0.1477, simple_loss=0.219, pruned_loss=0.03818, over 973061.60 frames.], batch size: 15, lr: 3.33e-04 +2022-05-05 13:47:24,408 INFO [train.py:715] (6/8) Epoch 6, batch 16600, loss[loss=0.1519, simple_loss=0.2161, pruned_loss=0.04386, over 4821.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2195, pruned_loss=0.03848, over 973445.58 frames.], batch size: 13, lr: 3.33e-04 +2022-05-05 13:48:03,149 INFO [train.py:715] (6/8) Epoch 6, batch 16650, loss[loss=0.1314, simple_loss=0.207, pruned_loss=0.0279, over 4969.00 frames.], tot_loss[loss=0.148, simple_loss=0.2196, pruned_loss=0.03818, over 973362.72 frames.], batch size: 24, lr: 3.33e-04 +2022-05-05 13:48:42,811 INFO [train.py:715] (6/8) Epoch 6, batch 16700, loss[loss=0.1512, simple_loss=0.224, pruned_loss=0.03916, over 4767.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2206, pruned_loss=0.039, over 971964.33 frames.], batch size: 18, lr: 3.33e-04 +2022-05-05 13:49:21,220 INFO [train.py:715] (6/8) Epoch 6, batch 16750, loss[loss=0.137, simple_loss=0.2148, pruned_loss=0.02964, over 4773.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2208, pruned_loss=0.03943, over 972654.99 frames.], batch size: 14, lr: 3.33e-04 +2022-05-05 13:50:00,120 INFO [train.py:715] (6/8) Epoch 6, batch 16800, loss[loss=0.1256, simple_loss=0.1934, pruned_loss=0.0289, over 4957.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2208, pruned_loss=0.03927, over 973062.11 frames.], batch size: 15, lr: 3.33e-04 +2022-05-05 13:50:39,327 INFO [train.py:715] (6/8) Epoch 6, batch 16850, loss[loss=0.1324, simple_loss=0.209, pruned_loss=0.02792, over 4748.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2201, pruned_loss=0.03902, over 971597.82 frames.], batch size: 19, lr: 3.33e-04 +2022-05-05 13:51:19,120 INFO [train.py:715] (6/8) Epoch 6, batch 16900, loss[loss=0.1427, simple_loss=0.2264, pruned_loss=0.0295, over 4896.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2208, pruned_loss=0.03911, over 972146.21 frames.], batch size: 19, lr: 3.32e-04 +2022-05-05 13:51:57,175 INFO [train.py:715] (6/8) Epoch 6, batch 16950, loss[loss=0.1394, simple_loss=0.2096, pruned_loss=0.03456, over 4985.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2207, pruned_loss=0.03881, over 972088.39 frames.], batch size: 25, lr: 3.32e-04 +2022-05-05 13:52:36,225 INFO [train.py:715] (6/8) Epoch 6, batch 17000, 
loss[loss=0.1489, simple_loss=0.2304, pruned_loss=0.03373, over 4686.00 frames.], tot_loss[loss=0.1497, simple_loss=0.221, pruned_loss=0.03919, over 972580.31 frames.], batch size: 15, lr: 3.32e-04 +2022-05-05 13:53:15,742 INFO [train.py:715] (6/8) Epoch 6, batch 17050, loss[loss=0.1717, simple_loss=0.2459, pruned_loss=0.04876, over 4916.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2205, pruned_loss=0.03887, over 971964.99 frames.], batch size: 29, lr: 3.32e-04 +2022-05-05 13:53:53,896 INFO [train.py:715] (6/8) Epoch 6, batch 17100, loss[loss=0.1379, simple_loss=0.2116, pruned_loss=0.03214, over 4913.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2208, pruned_loss=0.03897, over 972030.55 frames.], batch size: 18, lr: 3.32e-04 +2022-05-05 13:54:32,774 INFO [train.py:715] (6/8) Epoch 6, batch 17150, loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.0311, over 4920.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2206, pruned_loss=0.0392, over 973120.52 frames.], batch size: 23, lr: 3.32e-04 +2022-05-05 13:55:11,750 INFO [train.py:715] (6/8) Epoch 6, batch 17200, loss[loss=0.1613, simple_loss=0.2319, pruned_loss=0.04534, over 4964.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2202, pruned_loss=0.03879, over 972502.98 frames.], batch size: 14, lr: 3.32e-04 +2022-05-05 13:55:51,108 INFO [train.py:715] (6/8) Epoch 6, batch 17250, loss[loss=0.1623, simple_loss=0.2251, pruned_loss=0.04974, over 4826.00 frames.], tot_loss[loss=0.1488, simple_loss=0.22, pruned_loss=0.03877, over 972000.74 frames.], batch size: 15, lr: 3.32e-04 +2022-05-05 13:56:29,075 INFO [train.py:715] (6/8) Epoch 6, batch 17300, loss[loss=0.1303, simple_loss=0.2201, pruned_loss=0.02025, over 4791.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2196, pruned_loss=0.038, over 971540.90 frames.], batch size: 18, lr: 3.32e-04 +2022-05-05 13:57:07,891 INFO [train.py:715] (6/8) Epoch 6, batch 17350, loss[loss=0.1711, simple_loss=0.229, pruned_loss=0.05664, over 4818.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2198, pruned_loss=0.03827, over 971666.33 frames.], batch size: 25, lr: 3.32e-04 +2022-05-05 13:57:47,273 INFO [train.py:715] (6/8) Epoch 6, batch 17400, loss[loss=0.1575, simple_loss=0.225, pruned_loss=0.04502, over 4785.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2209, pruned_loss=0.0385, over 972175.11 frames.], batch size: 18, lr: 3.32e-04 +2022-05-05 13:58:26,213 INFO [train.py:715] (6/8) Epoch 6, batch 17450, loss[loss=0.1379, simple_loss=0.2096, pruned_loss=0.03313, over 4785.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2196, pruned_loss=0.03801, over 971844.74 frames.], batch size: 17, lr: 3.32e-04 +2022-05-05 13:59:04,830 INFO [train.py:715] (6/8) Epoch 6, batch 17500, loss[loss=0.1411, simple_loss=0.2262, pruned_loss=0.02799, over 4796.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2194, pruned_loss=0.03849, over 972335.21 frames.], batch size: 24, lr: 3.32e-04 +2022-05-05 13:59:43,983 INFO [train.py:715] (6/8) Epoch 6, batch 17550, loss[loss=0.1309, simple_loss=0.1994, pruned_loss=0.03119, over 4760.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2192, pruned_loss=0.03844, over 971957.97 frames.], batch size: 12, lr: 3.32e-04 +2022-05-05 14:00:23,863 INFO [train.py:715] (6/8) Epoch 6, batch 17600, loss[loss=0.1419, simple_loss=0.2172, pruned_loss=0.03326, over 4814.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2193, pruned_loss=0.03844, over 971684.25 frames.], batch size: 27, lr: 3.32e-04 +2022-05-05 14:01:01,424 INFO [train.py:715] (6/8) Epoch 6, batch 17650, loss[loss=0.14, 
simple_loss=0.2103, pruned_loss=0.03488, over 4834.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03822, over 972286.29 frames.], batch size: 26, lr: 3.32e-04 +2022-05-05 14:01:40,864 INFO [train.py:715] (6/8) Epoch 6, batch 17700, loss[loss=0.1202, simple_loss=0.1804, pruned_loss=0.02997, over 4816.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2179, pruned_loss=0.03789, over 972368.43 frames.], batch size: 12, lr: 3.32e-04 +2022-05-05 14:02:20,251 INFO [train.py:715] (6/8) Epoch 6, batch 17750, loss[loss=0.1524, simple_loss=0.2174, pruned_loss=0.04363, over 4817.00 frames.], tot_loss[loss=0.147, simple_loss=0.218, pruned_loss=0.038, over 973433.77 frames.], batch size: 13, lr: 3.32e-04 +2022-05-05 14:02:58,608 INFO [train.py:715] (6/8) Epoch 6, batch 17800, loss[loss=0.1655, simple_loss=0.2413, pruned_loss=0.04483, over 4709.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2184, pruned_loss=0.0383, over 973145.38 frames.], batch size: 15, lr: 3.32e-04 +2022-05-05 14:03:37,539 INFO [train.py:715] (6/8) Epoch 6, batch 17850, loss[loss=0.1448, simple_loss=0.2257, pruned_loss=0.03195, over 4865.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2175, pruned_loss=0.03774, over 973288.56 frames.], batch size: 32, lr: 3.32e-04 +2022-05-05 14:04:16,749 INFO [train.py:715] (6/8) Epoch 6, batch 17900, loss[loss=0.1359, simple_loss=0.2164, pruned_loss=0.02769, over 4807.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2171, pruned_loss=0.03732, over 972824.99 frames.], batch size: 26, lr: 3.32e-04 +2022-05-05 14:04:56,310 INFO [train.py:715] (6/8) Epoch 6, batch 17950, loss[loss=0.1449, simple_loss=0.2195, pruned_loss=0.03519, over 4914.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2171, pruned_loss=0.03708, over 972313.83 frames.], batch size: 17, lr: 3.32e-04 +2022-05-05 14:05:34,137 INFO [train.py:715] (6/8) Epoch 6, batch 18000, loss[loss=0.1406, simple_loss=0.1989, pruned_loss=0.04117, over 4769.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2169, pruned_loss=0.03767, over 973008.14 frames.], batch size: 19, lr: 3.32e-04 +2022-05-05 14:05:34,138 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 14:05:43,883 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1087, simple_loss=0.1939, pruned_loss=0.0118, over 914524.00 frames. 
+2022-05-05 14:06:22,340 INFO [train.py:715] (6/8) Epoch 6, batch 18050, loss[loss=0.1344, simple_loss=0.2, pruned_loss=0.03438, over 4818.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2176, pruned_loss=0.03761, over 973409.75 frames.], batch size: 12, lr: 3.32e-04 +2022-05-05 14:07:01,819 INFO [train.py:715] (6/8) Epoch 6, batch 18100, loss[loss=0.1149, simple_loss=0.186, pruned_loss=0.02189, over 4811.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2177, pruned_loss=0.03766, over 973203.04 frames.], batch size: 25, lr: 3.32e-04 +2022-05-05 14:07:41,267 INFO [train.py:715] (6/8) Epoch 6, batch 18150, loss[loss=0.168, simple_loss=0.2336, pruned_loss=0.05117, over 4910.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03862, over 973060.99 frames.], batch size: 18, lr: 3.32e-04 +2022-05-05 14:08:19,364 INFO [train.py:715] (6/8) Epoch 6, batch 18200, loss[loss=0.1394, simple_loss=0.2117, pruned_loss=0.03351, over 4772.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2192, pruned_loss=0.0383, over 972703.43 frames.], batch size: 18, lr: 3.32e-04 +2022-05-05 14:08:58,862 INFO [train.py:715] (6/8) Epoch 6, batch 18250, loss[loss=0.1596, simple_loss=0.2365, pruned_loss=0.04136, over 4754.00 frames.], tot_loss[loss=0.148, simple_loss=0.2188, pruned_loss=0.03856, over 973360.67 frames.], batch size: 19, lr: 3.31e-04 +2022-05-05 14:09:38,213 INFO [train.py:715] (6/8) Epoch 6, batch 18300, loss[loss=0.1325, simple_loss=0.2083, pruned_loss=0.02836, over 4815.00 frames.], tot_loss[loss=0.148, simple_loss=0.2191, pruned_loss=0.03847, over 972529.36 frames.], batch size: 27, lr: 3.31e-04 +2022-05-05 14:10:17,260 INFO [train.py:715] (6/8) Epoch 6, batch 18350, loss[loss=0.1278, simple_loss=0.1964, pruned_loss=0.02963, over 4883.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2194, pruned_loss=0.03902, over 973287.84 frames.], batch size: 16, lr: 3.31e-04 +2022-05-05 14:10:55,595 INFO [train.py:715] (6/8) Epoch 6, batch 18400, loss[loss=0.1296, simple_loss=0.209, pruned_loss=0.02512, over 4863.00 frames.], tot_loss[loss=0.149, simple_loss=0.22, pruned_loss=0.03899, over 973223.40 frames.], batch size: 20, lr: 3.31e-04 +2022-05-05 14:11:34,885 INFO [train.py:715] (6/8) Epoch 6, batch 18450, loss[loss=0.1455, simple_loss=0.2188, pruned_loss=0.0361, over 4913.00 frames.], tot_loss[loss=0.148, simple_loss=0.2192, pruned_loss=0.03839, over 972404.06 frames.], batch size: 17, lr: 3.31e-04 +2022-05-05 14:12:14,311 INFO [train.py:715] (6/8) Epoch 6, batch 18500, loss[loss=0.144, simple_loss=0.2164, pruned_loss=0.03584, over 4870.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2199, pruned_loss=0.03872, over 972657.69 frames.], batch size: 16, lr: 3.31e-04 +2022-05-05 14:12:52,316 INFO [train.py:715] (6/8) Epoch 6, batch 18550, loss[loss=0.1418, simple_loss=0.2133, pruned_loss=0.03518, over 4949.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2196, pruned_loss=0.03873, over 972229.80 frames.], batch size: 29, lr: 3.31e-04 +2022-05-05 14:13:31,752 INFO [train.py:715] (6/8) Epoch 6, batch 18600, loss[loss=0.1263, simple_loss=0.1882, pruned_loss=0.03215, over 4920.00 frames.], tot_loss[loss=0.1478, simple_loss=0.219, pruned_loss=0.03834, over 971302.37 frames.], batch size: 23, lr: 3.31e-04 +2022-05-05 14:14:10,849 INFO [train.py:715] (6/8) Epoch 6, batch 18650, loss[loss=0.1554, simple_loss=0.2139, pruned_loss=0.04849, over 4978.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2199, pruned_loss=0.03901, over 971656.85 frames.], batch size: 31, lr: 3.31e-04 +2022-05-05 14:14:50,389 INFO 
[train.py:715] (6/8) Epoch 6, batch 18700, loss[loss=0.1426, simple_loss=0.2116, pruned_loss=0.03685, over 4905.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2201, pruned_loss=0.03877, over 971680.77 frames.], batch size: 17, lr: 3.31e-04 +2022-05-05 14:15:28,531 INFO [train.py:715] (6/8) Epoch 6, batch 18750, loss[loss=0.1546, simple_loss=0.2195, pruned_loss=0.04481, over 4853.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2196, pruned_loss=0.03835, over 971613.11 frames.], batch size: 30, lr: 3.31e-04 +2022-05-05 14:16:07,702 INFO [train.py:715] (6/8) Epoch 6, batch 18800, loss[loss=0.1618, simple_loss=0.2381, pruned_loss=0.04274, over 4917.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2201, pruned_loss=0.03858, over 971835.95 frames.], batch size: 18, lr: 3.31e-04 +2022-05-05 14:16:47,208 INFO [train.py:715] (6/8) Epoch 6, batch 18850, loss[loss=0.1466, simple_loss=0.2206, pruned_loss=0.03629, over 4915.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2202, pruned_loss=0.03847, over 972714.55 frames.], batch size: 23, lr: 3.31e-04 +2022-05-05 14:17:25,255 INFO [train.py:715] (6/8) Epoch 6, batch 18900, loss[loss=0.1381, simple_loss=0.2055, pruned_loss=0.03537, over 4836.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2204, pruned_loss=0.03842, over 972364.47 frames.], batch size: 15, lr: 3.31e-04 +2022-05-05 14:18:04,839 INFO [train.py:715] (6/8) Epoch 6, batch 18950, loss[loss=0.2051, simple_loss=0.2631, pruned_loss=0.07359, over 4895.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2209, pruned_loss=0.03838, over 972704.28 frames.], batch size: 16, lr: 3.31e-04 +2022-05-05 14:18:43,967 INFO [train.py:715] (6/8) Epoch 6, batch 19000, loss[loss=0.1516, simple_loss=0.2371, pruned_loss=0.03307, over 4902.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2203, pruned_loss=0.03831, over 972665.59 frames.], batch size: 19, lr: 3.31e-04 +2022-05-05 14:19:23,156 INFO [train.py:715] (6/8) Epoch 6, batch 19050, loss[loss=0.1777, simple_loss=0.2506, pruned_loss=0.05242, over 4892.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2206, pruned_loss=0.03866, over 972860.12 frames.], batch size: 22, lr: 3.31e-04 +2022-05-05 14:20:01,541 INFO [train.py:715] (6/8) Epoch 6, batch 19100, loss[loss=0.1484, simple_loss=0.2149, pruned_loss=0.04092, over 4691.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2207, pruned_loss=0.03891, over 972279.26 frames.], batch size: 15, lr: 3.31e-04 +2022-05-05 14:20:40,515 INFO [train.py:715] (6/8) Epoch 6, batch 19150, loss[loss=0.16, simple_loss=0.2357, pruned_loss=0.04216, over 4901.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2205, pruned_loss=0.03866, over 971993.60 frames.], batch size: 38, lr: 3.31e-04 +2022-05-05 14:21:20,172 INFO [train.py:715] (6/8) Epoch 6, batch 19200, loss[loss=0.1507, simple_loss=0.2178, pruned_loss=0.04176, over 4894.00 frames.], tot_loss[loss=0.149, simple_loss=0.2203, pruned_loss=0.03886, over 972310.64 frames.], batch size: 39, lr: 3.31e-04 +2022-05-05 14:21:58,237 INFO [train.py:715] (6/8) Epoch 6, batch 19250, loss[loss=0.1413, simple_loss=0.2022, pruned_loss=0.0402, over 4783.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2193, pruned_loss=0.03873, over 972064.66 frames.], batch size: 17, lr: 3.31e-04 +2022-05-05 14:22:37,141 INFO [train.py:715] (6/8) Epoch 6, batch 19300, loss[loss=0.1646, simple_loss=0.247, pruned_loss=0.04105, over 4779.00 frames.], tot_loss[loss=0.148, simple_loss=0.2189, pruned_loss=0.03856, over 971589.68 frames.], batch size: 17, lr: 3.31e-04 +2022-05-05 14:23:16,401 INFO [train.py:715] (6/8) 
Epoch 6, batch 19350, loss[loss=0.1444, simple_loss=0.217, pruned_loss=0.03592, over 4973.00 frames.], tot_loss[loss=0.1487, simple_loss=0.22, pruned_loss=0.03875, over 972159.57 frames.], batch size: 24, lr: 3.31e-04 +2022-05-05 14:23:54,986 INFO [train.py:715] (6/8) Epoch 6, batch 19400, loss[loss=0.19, simple_loss=0.25, pruned_loss=0.06502, over 4781.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2195, pruned_loss=0.03879, over 971501.30 frames.], batch size: 14, lr: 3.31e-04 +2022-05-05 14:24:33,670 INFO [train.py:715] (6/8) Epoch 6, batch 19450, loss[loss=0.1421, simple_loss=0.2222, pruned_loss=0.03105, over 4927.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03868, over 972182.57 frames.], batch size: 29, lr: 3.31e-04 +2022-05-05 14:25:13,065 INFO [train.py:715] (6/8) Epoch 6, batch 19500, loss[loss=0.1598, simple_loss=0.2391, pruned_loss=0.04025, over 4843.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2187, pruned_loss=0.0383, over 972757.81 frames.], batch size: 15, lr: 3.31e-04 +2022-05-05 14:25:51,973 INFO [train.py:715] (6/8) Epoch 6, batch 19550, loss[loss=0.1424, simple_loss=0.2228, pruned_loss=0.03097, over 4981.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2193, pruned_loss=0.03867, over 972226.13 frames.], batch size: 26, lr: 3.31e-04 +2022-05-05 14:26:30,328 INFO [train.py:715] (6/8) Epoch 6, batch 19600, loss[loss=0.1219, simple_loss=0.1895, pruned_loss=0.02713, over 4982.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2195, pruned_loss=0.0389, over 972380.68 frames.], batch size: 28, lr: 3.31e-04 +2022-05-05 14:27:09,234 INFO [train.py:715] (6/8) Epoch 6, batch 19650, loss[loss=0.1487, simple_loss=0.2124, pruned_loss=0.04248, over 4780.00 frames.], tot_loss[loss=0.148, simple_loss=0.2191, pruned_loss=0.03839, over 972001.85 frames.], batch size: 18, lr: 3.30e-04 +2022-05-05 14:27:48,352 INFO [train.py:715] (6/8) Epoch 6, batch 19700, loss[loss=0.1637, simple_loss=0.2351, pruned_loss=0.04619, over 4812.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2194, pruned_loss=0.03839, over 972080.39 frames.], batch size: 27, lr: 3.30e-04 +2022-05-05 14:28:27,136 INFO [train.py:715] (6/8) Epoch 6, batch 19750, loss[loss=0.1555, simple_loss=0.2264, pruned_loss=0.0423, over 4954.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2208, pruned_loss=0.03887, over 972133.27 frames.], batch size: 21, lr: 3.30e-04 +2022-05-05 14:29:05,245 INFO [train.py:715] (6/8) Epoch 6, batch 19800, loss[loss=0.1442, simple_loss=0.201, pruned_loss=0.04373, over 4642.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2206, pruned_loss=0.03884, over 971472.87 frames.], batch size: 13, lr: 3.30e-04 +2022-05-05 14:29:44,604 INFO [train.py:715] (6/8) Epoch 6, batch 19850, loss[loss=0.1528, simple_loss=0.2177, pruned_loss=0.04397, over 4776.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2203, pruned_loss=0.03877, over 970916.69 frames.], batch size: 17, lr: 3.30e-04 +2022-05-05 14:30:24,343 INFO [train.py:715] (6/8) Epoch 6, batch 19900, loss[loss=0.1327, simple_loss=0.2113, pruned_loss=0.02707, over 4954.00 frames.], tot_loss[loss=0.1492, simple_loss=0.22, pruned_loss=0.03924, over 971731.24 frames.], batch size: 14, lr: 3.30e-04 +2022-05-05 14:31:02,424 INFO [train.py:715] (6/8) Epoch 6, batch 19950, loss[loss=0.1359, simple_loss=0.2057, pruned_loss=0.03305, over 4982.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2198, pruned_loss=0.03916, over 972519.48 frames.], batch size: 14, lr: 3.30e-04 +2022-05-05 14:31:41,551 INFO [train.py:715] (6/8) Epoch 6, batch 20000, 
loss[loss=0.1541, simple_loss=0.2206, pruned_loss=0.04387, over 4796.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2192, pruned_loss=0.03923, over 972796.36 frames.], batch size: 21, lr: 3.30e-04 +2022-05-05 14:32:21,019 INFO [train.py:715] (6/8) Epoch 6, batch 20050, loss[loss=0.1484, simple_loss=0.2167, pruned_loss=0.04009, over 4982.00 frames.], tot_loss[loss=0.1488, simple_loss=0.219, pruned_loss=0.03927, over 972976.54 frames.], batch size: 14, lr: 3.30e-04 +2022-05-05 14:32:59,452 INFO [train.py:715] (6/8) Epoch 6, batch 20100, loss[loss=0.1233, simple_loss=0.1939, pruned_loss=0.0263, over 4797.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2196, pruned_loss=0.03931, over 972750.21 frames.], batch size: 24, lr: 3.30e-04 +2022-05-05 14:33:38,527 INFO [train.py:715] (6/8) Epoch 6, batch 20150, loss[loss=0.159, simple_loss=0.2288, pruned_loss=0.04462, over 4977.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2201, pruned_loss=0.03911, over 973374.14 frames.], batch size: 14, lr: 3.30e-04 +2022-05-05 14:34:17,810 INFO [train.py:715] (6/8) Epoch 6, batch 20200, loss[loss=0.1414, simple_loss=0.1958, pruned_loss=0.04349, over 4987.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2205, pruned_loss=0.03921, over 973281.10 frames.], batch size: 14, lr: 3.30e-04 +2022-05-05 14:34:56,736 INFO [train.py:715] (6/8) Epoch 6, batch 20250, loss[loss=0.2193, simple_loss=0.2815, pruned_loss=0.07854, over 4778.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2208, pruned_loss=0.03925, over 973347.89 frames.], batch size: 17, lr: 3.30e-04 +2022-05-05 14:35:35,498 INFO [train.py:715] (6/8) Epoch 6, batch 20300, loss[loss=0.1578, simple_loss=0.2344, pruned_loss=0.04058, over 4745.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2213, pruned_loss=0.03918, over 973922.05 frames.], batch size: 16, lr: 3.30e-04 +2022-05-05 14:36:14,863 INFO [train.py:715] (6/8) Epoch 6, batch 20350, loss[loss=0.1819, simple_loss=0.2343, pruned_loss=0.06475, over 4956.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2223, pruned_loss=0.03977, over 973800.71 frames.], batch size: 35, lr: 3.30e-04 +2022-05-05 14:36:54,306 INFO [train.py:715] (6/8) Epoch 6, batch 20400, loss[loss=0.168, simple_loss=0.2329, pruned_loss=0.05152, over 4776.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2221, pruned_loss=0.03933, over 973967.81 frames.], batch size: 18, lr: 3.30e-04 +2022-05-05 14:37:32,665 INFO [train.py:715] (6/8) Epoch 6, batch 20450, loss[loss=0.1551, simple_loss=0.2284, pruned_loss=0.04091, over 4870.00 frames.], tot_loss[loss=0.15, simple_loss=0.2215, pruned_loss=0.03926, over 973708.41 frames.], batch size: 34, lr: 3.30e-04 +2022-05-05 14:38:11,469 INFO [train.py:715] (6/8) Epoch 6, batch 20500, loss[loss=0.1301, simple_loss=0.2051, pruned_loss=0.02751, over 4934.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2217, pruned_loss=0.03945, over 973717.60 frames.], batch size: 23, lr: 3.30e-04 +2022-05-05 14:38:50,521 INFO [train.py:715] (6/8) Epoch 6, batch 20550, loss[loss=0.1733, simple_loss=0.2397, pruned_loss=0.05345, over 4877.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2218, pruned_loss=0.03968, over 972108.71 frames.], batch size: 32, lr: 3.30e-04 +2022-05-05 14:39:29,697 INFO [train.py:715] (6/8) Epoch 6, batch 20600, loss[loss=0.178, simple_loss=0.2518, pruned_loss=0.05212, over 4966.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2221, pruned_loss=0.03976, over 972701.48 frames.], batch size: 35, lr: 3.30e-04 +2022-05-05 14:40:07,965 INFO [train.py:715] (6/8) Epoch 6, batch 20650, loss[loss=0.1472, 
simple_loss=0.2182, pruned_loss=0.03815, over 4816.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2207, pruned_loss=0.03914, over 973093.85 frames.], batch size: 13, lr: 3.30e-04 +2022-05-05 14:40:46,653 INFO [train.py:715] (6/8) Epoch 6, batch 20700, loss[loss=0.1416, simple_loss=0.2189, pruned_loss=0.03215, over 4894.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2207, pruned_loss=0.03913, over 972101.47 frames.], batch size: 19, lr: 3.30e-04 +2022-05-05 14:41:25,989 INFO [train.py:715] (6/8) Epoch 6, batch 20750, loss[loss=0.187, simple_loss=0.2551, pruned_loss=0.05946, over 4880.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2202, pruned_loss=0.03863, over 971572.94 frames.], batch size: 32, lr: 3.30e-04 +2022-05-05 14:42:04,387 INFO [train.py:715] (6/8) Epoch 6, batch 20800, loss[loss=0.1074, simple_loss=0.1785, pruned_loss=0.01812, over 4946.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2201, pruned_loss=0.03867, over 971650.64 frames.], batch size: 21, lr: 3.30e-04 +2022-05-05 14:42:43,608 INFO [train.py:715] (6/8) Epoch 6, batch 20850, loss[loss=0.1607, simple_loss=0.2166, pruned_loss=0.05235, over 4862.00 frames.], tot_loss[loss=0.148, simple_loss=0.219, pruned_loss=0.03844, over 971118.42 frames.], batch size: 32, lr: 3.30e-04 +2022-05-05 14:43:22,883 INFO [train.py:715] (6/8) Epoch 6, batch 20900, loss[loss=0.1246, simple_loss=0.1967, pruned_loss=0.02622, over 4767.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2191, pruned_loss=0.03873, over 971192.46 frames.], batch size: 19, lr: 3.30e-04 +2022-05-05 14:44:02,109 INFO [train.py:715] (6/8) Epoch 6, batch 20950, loss[loss=0.1374, simple_loss=0.2071, pruned_loss=0.0338, over 4969.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2196, pruned_loss=0.03894, over 970547.99 frames.], batch size: 40, lr: 3.30e-04 +2022-05-05 14:44:40,092 INFO [train.py:715] (6/8) Epoch 6, batch 21000, loss[loss=0.1542, simple_loss=0.2328, pruned_loss=0.03777, over 4766.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2195, pruned_loss=0.03882, over 970563.80 frames.], batch size: 18, lr: 3.29e-04 +2022-05-05 14:44:40,093 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 14:44:51,876 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1089, simple_loss=0.1939, pruned_loss=0.01192, over 914524.00 frames. 
+2022-05-05 14:45:30,121 INFO [train.py:715] (6/8) Epoch 6, batch 21050, loss[loss=0.2021, simple_loss=0.2776, pruned_loss=0.06333, over 4974.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2198, pruned_loss=0.03837, over 971700.28 frames.], batch size: 35, lr: 3.29e-04 +2022-05-05 14:46:09,485 INFO [train.py:715] (6/8) Epoch 6, batch 21100, loss[loss=0.1512, simple_loss=0.2324, pruned_loss=0.03501, over 4899.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2196, pruned_loss=0.0388, over 971940.72 frames.], batch size: 17, lr: 3.29e-04 +2022-05-05 14:46:48,884 INFO [train.py:715] (6/8) Epoch 6, batch 21150, loss[loss=0.1469, simple_loss=0.2137, pruned_loss=0.04003, over 4846.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03825, over 971771.48 frames.], batch size: 30, lr: 3.29e-04 +2022-05-05 14:47:27,345 INFO [train.py:715] (6/8) Epoch 6, batch 21200, loss[loss=0.1529, simple_loss=0.2248, pruned_loss=0.04048, over 4956.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2198, pruned_loss=0.03847, over 971216.16 frames.], batch size: 24, lr: 3.29e-04 +2022-05-05 14:48:06,353 INFO [train.py:715] (6/8) Epoch 6, batch 21250, loss[loss=0.1169, simple_loss=0.1885, pruned_loss=0.02266, over 4908.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.03828, over 972068.87 frames.], batch size: 18, lr: 3.29e-04 +2022-05-05 14:48:45,979 INFO [train.py:715] (6/8) Epoch 6, batch 21300, loss[loss=0.206, simple_loss=0.2675, pruned_loss=0.07222, over 4814.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2195, pruned_loss=0.03852, over 972146.79 frames.], batch size: 21, lr: 3.29e-04 +2022-05-05 14:49:24,955 INFO [train.py:715] (6/8) Epoch 6, batch 21350, loss[loss=0.1485, simple_loss=0.2194, pruned_loss=0.03882, over 4859.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2192, pruned_loss=0.0385, over 972440.32 frames.], batch size: 20, lr: 3.29e-04 +2022-05-05 14:50:03,788 INFO [train.py:715] (6/8) Epoch 6, batch 21400, loss[loss=0.2033, simple_loss=0.2717, pruned_loss=0.06749, over 4974.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2188, pruned_loss=0.0384, over 972790.99 frames.], batch size: 15, lr: 3.29e-04 +2022-05-05 14:50:42,549 INFO [train.py:715] (6/8) Epoch 6, batch 21450, loss[loss=0.1481, simple_loss=0.223, pruned_loss=0.03667, over 4904.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2182, pruned_loss=0.03817, over 972454.34 frames.], batch size: 17, lr: 3.29e-04 +2022-05-05 14:51:21,821 INFO [train.py:715] (6/8) Epoch 6, batch 21500, loss[loss=0.118, simple_loss=0.1874, pruned_loss=0.02429, over 4767.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2179, pruned_loss=0.03761, over 972675.16 frames.], batch size: 17, lr: 3.29e-04 +2022-05-05 14:52:00,287 INFO [train.py:715] (6/8) Epoch 6, batch 21550, loss[loss=0.1507, simple_loss=0.2104, pruned_loss=0.04549, over 4787.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2184, pruned_loss=0.03792, over 971699.22 frames.], batch size: 17, lr: 3.29e-04 +2022-05-05 14:52:39,314 INFO [train.py:715] (6/8) Epoch 6, batch 21600, loss[loss=0.1573, simple_loss=0.2214, pruned_loss=0.04663, over 4737.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2184, pruned_loss=0.03788, over 971598.72 frames.], batch size: 16, lr: 3.29e-04 +2022-05-05 14:53:18,464 INFO [train.py:715] (6/8) Epoch 6, batch 21650, loss[loss=0.1418, simple_loss=0.2156, pruned_loss=0.03399, over 4781.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03865, over 972146.75 frames.], batch size: 17, lr: 3.29e-04 +2022-05-05 14:53:57,744 
INFO [train.py:715] (6/8) Epoch 6, batch 21700, loss[loss=0.1201, simple_loss=0.188, pruned_loss=0.02609, over 4847.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2197, pruned_loss=0.03878, over 972125.34 frames.], batch size: 20, lr: 3.29e-04 +2022-05-05 14:54:36,455 INFO [train.py:715] (6/8) Epoch 6, batch 21750, loss[loss=0.1333, simple_loss=0.2056, pruned_loss=0.03051, over 4900.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2198, pruned_loss=0.03835, over 973205.29 frames.], batch size: 19, lr: 3.29e-04 +2022-05-05 14:55:15,313 INFO [train.py:715] (6/8) Epoch 6, batch 21800, loss[loss=0.1514, simple_loss=0.2241, pruned_loss=0.0393, over 4808.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2202, pruned_loss=0.03844, over 973085.35 frames.], batch size: 21, lr: 3.29e-04 +2022-05-05 14:55:54,107 INFO [train.py:715] (6/8) Epoch 6, batch 21850, loss[loss=0.1487, simple_loss=0.2194, pruned_loss=0.03902, over 4819.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2195, pruned_loss=0.03859, over 972846.74 frames.], batch size: 25, lr: 3.29e-04 +2022-05-05 14:56:32,647 INFO [train.py:715] (6/8) Epoch 6, batch 21900, loss[loss=0.156, simple_loss=0.2267, pruned_loss=0.04267, over 4858.00 frames.], tot_loss[loss=0.149, simple_loss=0.2204, pruned_loss=0.03881, over 973524.42 frames.], batch size: 20, lr: 3.29e-04 +2022-05-05 14:57:11,520 INFO [train.py:715] (6/8) Epoch 6, batch 21950, loss[loss=0.1243, simple_loss=0.1924, pruned_loss=0.02809, over 4989.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.03826, over 974177.52 frames.], batch size: 14, lr: 3.29e-04 +2022-05-05 14:57:50,235 INFO [train.py:715] (6/8) Epoch 6, batch 22000, loss[loss=0.1656, simple_loss=0.246, pruned_loss=0.04258, over 4735.00 frames.], tot_loss[loss=0.1477, simple_loss=0.219, pruned_loss=0.03818, over 974098.97 frames.], batch size: 16, lr: 3.29e-04 +2022-05-05 14:58:29,939 INFO [train.py:715] (6/8) Epoch 6, batch 22050, loss[loss=0.1461, simple_loss=0.2132, pruned_loss=0.03946, over 4895.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2185, pruned_loss=0.03807, over 973540.48 frames.], batch size: 19, lr: 3.29e-04 +2022-05-05 14:59:08,264 INFO [train.py:715] (6/8) Epoch 6, batch 22100, loss[loss=0.1302, simple_loss=0.2097, pruned_loss=0.02538, over 4989.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2184, pruned_loss=0.03829, over 973504.39 frames.], batch size: 28, lr: 3.29e-04 +2022-05-05 14:59:47,063 INFO [train.py:715] (6/8) Epoch 6, batch 22150, loss[loss=0.1415, simple_loss=0.2127, pruned_loss=0.03513, over 4976.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2179, pruned_loss=0.03773, over 972961.53 frames.], batch size: 25, lr: 3.29e-04 +2022-05-05 15:00:26,256 INFO [train.py:715] (6/8) Epoch 6, batch 22200, loss[loss=0.1568, simple_loss=0.2197, pruned_loss=0.04695, over 4979.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2184, pruned_loss=0.03772, over 972894.79 frames.], batch size: 39, lr: 3.29e-04 +2022-05-05 15:01:04,919 INFO [train.py:715] (6/8) Epoch 6, batch 22250, loss[loss=0.1768, simple_loss=0.237, pruned_loss=0.05829, over 4847.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2196, pruned_loss=0.03852, over 973101.51 frames.], batch size: 32, lr: 3.29e-04 +2022-05-05 15:01:43,599 INFO [train.py:715] (6/8) Epoch 6, batch 22300, loss[loss=0.137, simple_loss=0.2079, pruned_loss=0.03301, over 4958.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2199, pruned_loss=0.03845, over 972574.44 frames.], batch size: 21, lr: 3.29e-04 +2022-05-05 15:02:22,658 INFO [train.py:715] (6/8) 
Epoch 6, batch 22350, loss[loss=0.153, simple_loss=0.2118, pruned_loss=0.04708, over 4972.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2196, pruned_loss=0.03888, over 973040.42 frames.], batch size: 35, lr: 3.29e-04 +2022-05-05 15:03:02,003 INFO [train.py:715] (6/8) Epoch 6, batch 22400, loss[loss=0.1704, simple_loss=0.2299, pruned_loss=0.05542, over 4956.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2196, pruned_loss=0.03895, over 972364.90 frames.], batch size: 35, lr: 3.29e-04 +2022-05-05 15:03:40,492 INFO [train.py:715] (6/8) Epoch 6, batch 22450, loss[loss=0.1416, simple_loss=0.2095, pruned_loss=0.03685, over 4915.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2185, pruned_loss=0.0385, over 971858.09 frames.], batch size: 29, lr: 3.28e-04 +2022-05-05 15:04:19,440 INFO [train.py:715] (6/8) Epoch 6, batch 22500, loss[loss=0.1958, simple_loss=0.2611, pruned_loss=0.06525, over 4851.00 frames.], tot_loss[loss=0.148, simple_loss=0.2188, pruned_loss=0.03858, over 972071.17 frames.], batch size: 34, lr: 3.28e-04 +2022-05-05 15:04:58,759 INFO [train.py:715] (6/8) Epoch 6, batch 22550, loss[loss=0.1273, simple_loss=0.1891, pruned_loss=0.03269, over 4834.00 frames.], tot_loss[loss=0.147, simple_loss=0.2178, pruned_loss=0.03812, over 970954.01 frames.], batch size: 12, lr: 3.28e-04 +2022-05-05 15:05:37,171 INFO [train.py:715] (6/8) Epoch 6, batch 22600, loss[loss=0.1357, simple_loss=0.2002, pruned_loss=0.03557, over 4860.00 frames.], tot_loss[loss=0.148, simple_loss=0.219, pruned_loss=0.03848, over 971759.74 frames.], batch size: 15, lr: 3.28e-04 +2022-05-05 15:06:16,007 INFO [train.py:715] (6/8) Epoch 6, batch 22650, loss[loss=0.1393, simple_loss=0.223, pruned_loss=0.02777, over 4953.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2193, pruned_loss=0.03813, over 972543.81 frames.], batch size: 29, lr: 3.28e-04 +2022-05-05 15:06:54,606 INFO [train.py:715] (6/8) Epoch 6, batch 22700, loss[loss=0.1532, simple_loss=0.2092, pruned_loss=0.04859, over 4771.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2195, pruned_loss=0.03847, over 973007.63 frames.], batch size: 14, lr: 3.28e-04 +2022-05-05 15:07:33,407 INFO [train.py:715] (6/8) Epoch 6, batch 22750, loss[loss=0.1644, simple_loss=0.2367, pruned_loss=0.04604, over 4780.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2212, pruned_loss=0.03927, over 973072.49 frames.], batch size: 17, lr: 3.28e-04 +2022-05-05 15:08:11,865 INFO [train.py:715] (6/8) Epoch 6, batch 22800, loss[loss=0.1498, simple_loss=0.2192, pruned_loss=0.04025, over 4776.00 frames.], tot_loss[loss=0.15, simple_loss=0.2212, pruned_loss=0.03943, over 972985.62 frames.], batch size: 17, lr: 3.28e-04 +2022-05-05 15:08:50,372 INFO [train.py:715] (6/8) Epoch 6, batch 22850, loss[loss=0.141, simple_loss=0.2153, pruned_loss=0.03332, over 4796.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2214, pruned_loss=0.03938, over 973211.15 frames.], batch size: 24, lr: 3.28e-04 +2022-05-05 15:09:29,029 INFO [train.py:715] (6/8) Epoch 6, batch 22900, loss[loss=0.1412, simple_loss=0.2208, pruned_loss=0.03075, over 4812.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2219, pruned_loss=0.0398, over 972826.20 frames.], batch size: 24, lr: 3.28e-04 +2022-05-05 15:10:08,160 INFO [train.py:715] (6/8) Epoch 6, batch 22950, loss[loss=0.1309, simple_loss=0.2004, pruned_loss=0.03071, over 4893.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2213, pruned_loss=0.03972, over 972643.25 frames.], batch size: 17, lr: 3.28e-04 +2022-05-05 15:10:46,572 INFO [train.py:715] (6/8) Epoch 6, batch 23000, 
loss[loss=0.1355, simple_loss=0.2081, pruned_loss=0.0314, over 4837.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2204, pruned_loss=0.03918, over 972359.30 frames.], batch size: 15, lr: 3.28e-04 +2022-05-05 15:11:25,821 INFO [train.py:715] (6/8) Epoch 6, batch 23050, loss[loss=0.1334, simple_loss=0.202, pruned_loss=0.03245, over 4802.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2198, pruned_loss=0.03857, over 972516.57 frames.], batch size: 14, lr: 3.28e-04 +2022-05-05 15:12:05,301 INFO [train.py:715] (6/8) Epoch 6, batch 23100, loss[loss=0.1582, simple_loss=0.2185, pruned_loss=0.04891, over 4982.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2191, pruned_loss=0.03795, over 972779.46 frames.], batch size: 14, lr: 3.28e-04 +2022-05-05 15:12:46,121 INFO [train.py:715] (6/8) Epoch 6, batch 23150, loss[loss=0.1515, simple_loss=0.2186, pruned_loss=0.04221, over 4858.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2193, pruned_loss=0.03826, over 972789.63 frames.], batch size: 20, lr: 3.28e-04 +2022-05-05 15:13:25,467 INFO [train.py:715] (6/8) Epoch 6, batch 23200, loss[loss=0.1553, simple_loss=0.2131, pruned_loss=0.04878, over 4664.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2187, pruned_loss=0.03779, over 972030.93 frames.], batch size: 14, lr: 3.28e-04 +2022-05-05 15:14:04,866 INFO [train.py:715] (6/8) Epoch 6, batch 23250, loss[loss=0.154, simple_loss=0.2275, pruned_loss=0.04029, over 4944.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2191, pruned_loss=0.038, over 971742.19 frames.], batch size: 21, lr: 3.28e-04 +2022-05-05 15:14:43,526 INFO [train.py:715] (6/8) Epoch 6, batch 23300, loss[loss=0.1466, simple_loss=0.2102, pruned_loss=0.04152, over 4887.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.03824, over 971950.92 frames.], batch size: 22, lr: 3.28e-04 +2022-05-05 15:15:21,519 INFO [train.py:715] (6/8) Epoch 6, batch 23350, loss[loss=0.1514, simple_loss=0.2111, pruned_loss=0.04581, over 4785.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2203, pruned_loss=0.03859, over 972469.76 frames.], batch size: 18, lr: 3.28e-04 +2022-05-05 15:16:00,567 INFO [train.py:715] (6/8) Epoch 6, batch 23400, loss[loss=0.1152, simple_loss=0.1895, pruned_loss=0.02046, over 4890.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03822, over 972477.01 frames.], batch size: 32, lr: 3.28e-04 +2022-05-05 15:16:40,147 INFO [train.py:715] (6/8) Epoch 6, batch 23450, loss[loss=0.1295, simple_loss=0.1971, pruned_loss=0.03089, over 4788.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2185, pruned_loss=0.03756, over 972360.83 frames.], batch size: 21, lr: 3.28e-04 +2022-05-05 15:17:19,122 INFO [train.py:715] (6/8) Epoch 6, batch 23500, loss[loss=0.1376, simple_loss=0.2127, pruned_loss=0.03121, over 4794.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2186, pruned_loss=0.03778, over 971781.09 frames.], batch size: 21, lr: 3.28e-04 +2022-05-05 15:17:58,302 INFO [train.py:715] (6/8) Epoch 6, batch 23550, loss[loss=0.1776, simple_loss=0.2483, pruned_loss=0.05345, over 4789.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2189, pruned_loss=0.03836, over 971622.25 frames.], batch size: 17, lr: 3.28e-04 +2022-05-05 15:18:37,515 INFO [train.py:715] (6/8) Epoch 6, batch 23600, loss[loss=0.1627, simple_loss=0.2267, pruned_loss=0.04931, over 4934.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2191, pruned_loss=0.03839, over 971340.68 frames.], batch size: 21, lr: 3.28e-04 +2022-05-05 15:19:16,256 INFO [train.py:715] (6/8) Epoch 6, batch 23650, loss[loss=0.1183, 
simple_loss=0.1858, pruned_loss=0.02536, over 4861.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2198, pruned_loss=0.03887, over 970522.87 frames.], batch size: 20, lr: 3.28e-04 +2022-05-05 15:19:54,392 INFO [train.py:715] (6/8) Epoch 6, batch 23700, loss[loss=0.1849, simple_loss=0.2663, pruned_loss=0.05172, over 4915.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2199, pruned_loss=0.03863, over 971353.31 frames.], batch size: 29, lr: 3.28e-04 +2022-05-05 15:20:33,415 INFO [train.py:715] (6/8) Epoch 6, batch 23750, loss[loss=0.18, simple_loss=0.2481, pruned_loss=0.05595, over 4991.00 frames.], tot_loss[loss=0.1487, simple_loss=0.22, pruned_loss=0.0387, over 971854.05 frames.], batch size: 15, lr: 3.28e-04 +2022-05-05 15:21:12,839 INFO [train.py:715] (6/8) Epoch 6, batch 23800, loss[loss=0.1345, simple_loss=0.1951, pruned_loss=0.03701, over 4967.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2192, pruned_loss=0.03869, over 972331.39 frames.], batch size: 14, lr: 3.28e-04 +2022-05-05 15:21:51,202 INFO [train.py:715] (6/8) Epoch 6, batch 23850, loss[loss=0.141, simple_loss=0.1986, pruned_loss=0.04166, over 4848.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2193, pruned_loss=0.03885, over 973684.43 frames.], batch size: 13, lr: 3.27e-04 +2022-05-05 15:22:29,826 INFO [train.py:715] (6/8) Epoch 6, batch 23900, loss[loss=0.1355, simple_loss=0.2131, pruned_loss=0.02899, over 4923.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2193, pruned_loss=0.03891, over 973474.09 frames.], batch size: 29, lr: 3.27e-04 +2022-05-05 15:23:08,545 INFO [train.py:715] (6/8) Epoch 6, batch 23950, loss[loss=0.1276, simple_loss=0.2015, pruned_loss=0.02682, over 4983.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03866, over 973674.72 frames.], batch size: 25, lr: 3.27e-04 +2022-05-05 15:23:47,221 INFO [train.py:715] (6/8) Epoch 6, batch 24000, loss[loss=0.1502, simple_loss=0.2258, pruned_loss=0.0373, over 4697.00 frames.], tot_loss[loss=0.148, simple_loss=0.2189, pruned_loss=0.03853, over 973285.19 frames.], batch size: 15, lr: 3.27e-04 +2022-05-05 15:23:47,222 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 15:23:58,203 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1089, simple_loss=0.1939, pruned_loss=0.01195, over 914524.00 frames. 
+2022-05-05 15:24:36,966 INFO [train.py:715] (6/8) Epoch 6, batch 24050, loss[loss=0.1669, simple_loss=0.2168, pruned_loss=0.05849, over 4966.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2185, pruned_loss=0.03849, over 973311.03 frames.], batch size: 14, lr: 3.27e-04 +2022-05-05 15:25:15,034 INFO [train.py:715] (6/8) Epoch 6, batch 24100, loss[loss=0.1222, simple_loss=0.1985, pruned_loss=0.02291, over 4755.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2188, pruned_loss=0.03847, over 972643.34 frames.], batch size: 19, lr: 3.27e-04 +2022-05-05 15:25:53,706 INFO [train.py:715] (6/8) Epoch 6, batch 24150, loss[loss=0.128, simple_loss=0.2139, pruned_loss=0.02111, over 4795.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2182, pruned_loss=0.03806, over 973615.49 frames.], batch size: 17, lr: 3.27e-04 +2022-05-05 15:26:32,800 INFO [train.py:715] (6/8) Epoch 6, batch 24200, loss[loss=0.1605, simple_loss=0.2335, pruned_loss=0.04372, over 4779.00 frames.], tot_loss[loss=0.148, simple_loss=0.2189, pruned_loss=0.03851, over 973625.78 frames.], batch size: 17, lr: 3.27e-04 +2022-05-05 15:27:10,720 INFO [train.py:715] (6/8) Epoch 6, batch 24250, loss[loss=0.1535, simple_loss=0.2355, pruned_loss=0.03576, over 4930.00 frames.], tot_loss[loss=0.149, simple_loss=0.2198, pruned_loss=0.03905, over 973163.38 frames.], batch size: 23, lr: 3.27e-04 +2022-05-05 15:27:49,115 INFO [train.py:715] (6/8) Epoch 6, batch 24300, loss[loss=0.1545, simple_loss=0.223, pruned_loss=0.04302, over 4839.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2205, pruned_loss=0.03996, over 972674.73 frames.], batch size: 15, lr: 3.27e-04 +2022-05-05 15:28:28,055 INFO [train.py:715] (6/8) Epoch 6, batch 24350, loss[loss=0.1162, simple_loss=0.1825, pruned_loss=0.02497, over 4821.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2199, pruned_loss=0.0395, over 972777.92 frames.], batch size: 15, lr: 3.27e-04 +2022-05-05 15:29:07,159 INFO [train.py:715] (6/8) Epoch 6, batch 24400, loss[loss=0.1706, simple_loss=0.2267, pruned_loss=0.05724, over 4954.00 frames.], tot_loss[loss=0.149, simple_loss=0.2195, pruned_loss=0.03923, over 973115.27 frames.], batch size: 35, lr: 3.27e-04 +2022-05-05 15:29:45,509 INFO [train.py:715] (6/8) Epoch 6, batch 24450, loss[loss=0.1384, simple_loss=0.2134, pruned_loss=0.03165, over 4752.00 frames.], tot_loss[loss=0.149, simple_loss=0.2199, pruned_loss=0.03907, over 972610.78 frames.], batch size: 14, lr: 3.27e-04 +2022-05-05 15:30:24,119 INFO [train.py:715] (6/8) Epoch 6, batch 24500, loss[loss=0.1403, simple_loss=0.2082, pruned_loss=0.03623, over 4911.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2201, pruned_loss=0.03943, over 973376.49 frames.], batch size: 29, lr: 3.27e-04 +2022-05-05 15:31:03,939 INFO [train.py:715] (6/8) Epoch 6, batch 24550, loss[loss=0.1376, simple_loss=0.2051, pruned_loss=0.03508, over 4820.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2196, pruned_loss=0.03946, over 971838.92 frames.], batch size: 27, lr: 3.27e-04 +2022-05-05 15:31:42,161 INFO [train.py:715] (6/8) Epoch 6, batch 24600, loss[loss=0.1419, simple_loss=0.2032, pruned_loss=0.04034, over 4646.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2189, pruned_loss=0.03883, over 971984.41 frames.], batch size: 13, lr: 3.27e-04 +2022-05-05 15:32:21,362 INFO [train.py:715] (6/8) Epoch 6, batch 24650, loss[loss=0.1717, simple_loss=0.2418, pruned_loss=0.05081, over 4954.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2195, pruned_loss=0.03855, over 972083.94 frames.], batch size: 21, lr: 3.27e-04 +2022-05-05 15:33:00,613 
INFO [train.py:715] (6/8) Epoch 6, batch 24700, loss[loss=0.1433, simple_loss=0.2176, pruned_loss=0.03454, over 4938.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.03827, over 972234.08 frames.], batch size: 29, lr: 3.27e-04 +2022-05-05 15:33:39,471 INFO [train.py:715] (6/8) Epoch 6, batch 24750, loss[loss=0.158, simple_loss=0.2278, pruned_loss=0.04413, over 4921.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2205, pruned_loss=0.03894, over 973252.58 frames.], batch size: 23, lr: 3.27e-04 +2022-05-05 15:34:17,835 INFO [train.py:715] (6/8) Epoch 6, batch 24800, loss[loss=0.1236, simple_loss=0.1963, pruned_loss=0.02547, over 4644.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2199, pruned_loss=0.03867, over 972374.60 frames.], batch size: 13, lr: 3.27e-04 +2022-05-05 15:34:56,837 INFO [train.py:715] (6/8) Epoch 6, batch 24850, loss[loss=0.146, simple_loss=0.219, pruned_loss=0.0365, over 4847.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2191, pruned_loss=0.0386, over 972629.24 frames.], batch size: 13, lr: 3.27e-04 +2022-05-05 15:35:36,648 INFO [train.py:715] (6/8) Epoch 6, batch 24900, loss[loss=0.1099, simple_loss=0.1877, pruned_loss=0.01601, over 4913.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2194, pruned_loss=0.03873, over 972394.29 frames.], batch size: 29, lr: 3.27e-04 +2022-05-05 15:36:14,920 INFO [train.py:715] (6/8) Epoch 6, batch 24950, loss[loss=0.1436, simple_loss=0.2084, pruned_loss=0.03941, over 4957.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2194, pruned_loss=0.03852, over 972341.62 frames.], batch size: 24, lr: 3.27e-04 +2022-05-05 15:36:53,552 INFO [train.py:715] (6/8) Epoch 6, batch 25000, loss[loss=0.1552, simple_loss=0.2265, pruned_loss=0.04202, over 4893.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2193, pruned_loss=0.03841, over 972881.39 frames.], batch size: 22, lr: 3.27e-04 +2022-05-05 15:37:32,635 INFO [train.py:715] (6/8) Epoch 6, batch 25050, loss[loss=0.1461, simple_loss=0.2222, pruned_loss=0.03506, over 4980.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2195, pruned_loss=0.03834, over 972443.79 frames.], batch size: 28, lr: 3.27e-04 +2022-05-05 15:38:11,565 INFO [train.py:715] (6/8) Epoch 6, batch 25100, loss[loss=0.1348, simple_loss=0.2051, pruned_loss=0.03223, over 4879.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2194, pruned_loss=0.03809, over 972112.52 frames.], batch size: 22, lr: 3.27e-04 +2022-05-05 15:38:50,095 INFO [train.py:715] (6/8) Epoch 6, batch 25150, loss[loss=0.1291, simple_loss=0.1978, pruned_loss=0.03016, over 4801.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2187, pruned_loss=0.03783, over 972416.92 frames.], batch size: 14, lr: 3.27e-04 +2022-05-05 15:39:28,931 INFO [train.py:715] (6/8) Epoch 6, batch 25200, loss[loss=0.1368, simple_loss=0.2029, pruned_loss=0.03539, over 4983.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2196, pruned_loss=0.03871, over 972786.04 frames.], batch size: 39, lr: 3.27e-04 +2022-05-05 15:40:07,779 INFO [train.py:715] (6/8) Epoch 6, batch 25250, loss[loss=0.1474, simple_loss=0.2213, pruned_loss=0.03678, over 4916.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2205, pruned_loss=0.03935, over 972626.91 frames.], batch size: 39, lr: 3.26e-04 +2022-05-05 15:40:46,084 INFO [train.py:715] (6/8) Epoch 6, batch 25300, loss[loss=0.1887, simple_loss=0.253, pruned_loss=0.06219, over 4762.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2196, pruned_loss=0.0388, over 972334.69 frames.], batch size: 19, lr: 3.26e-04 +2022-05-05 15:41:24,368 INFO [train.py:715] (6/8) 
Epoch 6, batch 25350, loss[loss=0.1127, simple_loss=0.1904, pruned_loss=0.01753, over 4941.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2191, pruned_loss=0.0384, over 972193.97 frames.], batch size: 21, lr: 3.26e-04 +2022-05-05 15:42:03,176 INFO [train.py:715] (6/8) Epoch 6, batch 25400, loss[loss=0.1818, simple_loss=0.2423, pruned_loss=0.06061, over 4788.00 frames.], tot_loss[loss=0.1477, simple_loss=0.219, pruned_loss=0.03819, over 972330.04 frames.], batch size: 14, lr: 3.26e-04 +2022-05-05 15:42:41,991 INFO [train.py:715] (6/8) Epoch 6, batch 25450, loss[loss=0.1306, simple_loss=0.1948, pruned_loss=0.03318, over 4827.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2188, pruned_loss=0.038, over 971770.69 frames.], batch size: 13, lr: 3.26e-04 +2022-05-05 15:43:20,085 INFO [train.py:715] (6/8) Epoch 6, batch 25500, loss[loss=0.144, simple_loss=0.2141, pruned_loss=0.03693, over 4931.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.0382, over 972095.19 frames.], batch size: 23, lr: 3.26e-04 +2022-05-05 15:43:58,583 INFO [train.py:715] (6/8) Epoch 6, batch 25550, loss[loss=0.1664, simple_loss=0.2405, pruned_loss=0.04613, over 4766.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2205, pruned_loss=0.03859, over 972141.86 frames.], batch size: 18, lr: 3.26e-04 +2022-05-05 15:44:37,704 INFO [train.py:715] (6/8) Epoch 6, batch 25600, loss[loss=0.1551, simple_loss=0.2256, pruned_loss=0.04231, over 4983.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2205, pruned_loss=0.03891, over 972081.59 frames.], batch size: 35, lr: 3.26e-04 +2022-05-05 15:45:15,936 INFO [train.py:715] (6/8) Epoch 6, batch 25650, loss[loss=0.1894, simple_loss=0.2644, pruned_loss=0.05721, over 4843.00 frames.], tot_loss[loss=0.1498, simple_loss=0.221, pruned_loss=0.03931, over 971306.87 frames.], batch size: 15, lr: 3.26e-04 +2022-05-05 15:45:54,739 INFO [train.py:715] (6/8) Epoch 6, batch 25700, loss[loss=0.1486, simple_loss=0.2148, pruned_loss=0.04114, over 4972.00 frames.], tot_loss[loss=0.1488, simple_loss=0.22, pruned_loss=0.03883, over 971594.56 frames.], batch size: 14, lr: 3.26e-04 +2022-05-05 15:46:34,046 INFO [train.py:715] (6/8) Epoch 6, batch 25750, loss[loss=0.1637, simple_loss=0.2376, pruned_loss=0.04496, over 4782.00 frames.], tot_loss[loss=0.149, simple_loss=0.2199, pruned_loss=0.03903, over 970635.96 frames.], batch size: 17, lr: 3.26e-04 +2022-05-05 15:47:12,321 INFO [train.py:715] (6/8) Epoch 6, batch 25800, loss[loss=0.1536, simple_loss=0.2214, pruned_loss=0.04292, over 4936.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2208, pruned_loss=0.03907, over 971204.54 frames.], batch size: 29, lr: 3.26e-04 +2022-05-05 15:47:50,579 INFO [train.py:715] (6/8) Epoch 6, batch 25850, loss[loss=0.1642, simple_loss=0.2402, pruned_loss=0.04409, over 4824.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2195, pruned_loss=0.03813, over 971379.05 frames.], batch size: 27, lr: 3.26e-04 +2022-05-05 15:48:29,221 INFO [train.py:715] (6/8) Epoch 6, batch 25900, loss[loss=0.1363, simple_loss=0.2151, pruned_loss=0.02877, over 4856.00 frames.], tot_loss[loss=0.148, simple_loss=0.2194, pruned_loss=0.03829, over 971088.81 frames.], batch size: 20, lr: 3.26e-04 +2022-05-05 15:49:08,367 INFO [train.py:715] (6/8) Epoch 6, batch 25950, loss[loss=0.1308, simple_loss=0.2033, pruned_loss=0.02916, over 4783.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2193, pruned_loss=0.03808, over 972099.85 frames.], batch size: 18, lr: 3.26e-04 +2022-05-05 15:49:46,044 INFO [train.py:715] (6/8) Epoch 6, batch 26000, 
loss[loss=0.1277, simple_loss=0.2074, pruned_loss=0.024, over 4892.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2191, pruned_loss=0.03855, over 971914.06 frames.], batch size: 19, lr: 3.26e-04 +2022-05-05 15:50:24,229 INFO [train.py:715] (6/8) Epoch 6, batch 26050, loss[loss=0.1544, simple_loss=0.2249, pruned_loss=0.04197, over 4835.00 frames.], tot_loss[loss=0.147, simple_loss=0.2181, pruned_loss=0.03798, over 972039.96 frames.], batch size: 30, lr: 3.26e-04 +2022-05-05 15:51:03,213 INFO [train.py:715] (6/8) Epoch 6, batch 26100, loss[loss=0.1279, simple_loss=0.2029, pruned_loss=0.02647, over 4979.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2189, pruned_loss=0.03848, over 973155.82 frames.], batch size: 24, lr: 3.26e-04 +2022-05-05 15:51:41,623 INFO [train.py:715] (6/8) Epoch 6, batch 26150, loss[loss=0.1584, simple_loss=0.2243, pruned_loss=0.04631, over 4869.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2191, pruned_loss=0.03852, over 972978.29 frames.], batch size: 30, lr: 3.26e-04 +2022-05-05 15:52:20,120 INFO [train.py:715] (6/8) Epoch 6, batch 26200, loss[loss=0.1479, simple_loss=0.2307, pruned_loss=0.03254, over 4941.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2189, pruned_loss=0.03834, over 973060.20 frames.], batch size: 29, lr: 3.26e-04 +2022-05-05 15:52:58,597 INFO [train.py:715] (6/8) Epoch 6, batch 26250, loss[loss=0.1502, simple_loss=0.2188, pruned_loss=0.04082, over 4749.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2189, pruned_loss=0.03833, over 972728.18 frames.], batch size: 19, lr: 3.26e-04 +2022-05-05 15:53:37,251 INFO [train.py:715] (6/8) Epoch 6, batch 26300, loss[loss=0.1279, simple_loss=0.2026, pruned_loss=0.02655, over 4781.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2191, pruned_loss=0.0386, over 972024.55 frames.], batch size: 18, lr: 3.26e-04 +2022-05-05 15:54:15,318 INFO [train.py:715] (6/8) Epoch 6, batch 26350, loss[loss=0.1603, simple_loss=0.2295, pruned_loss=0.04556, over 4786.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2206, pruned_loss=0.03913, over 971408.64 frames.], batch size: 18, lr: 3.26e-04 +2022-05-05 15:54:53,793 INFO [train.py:715] (6/8) Epoch 6, batch 26400, loss[loss=0.154, simple_loss=0.2265, pruned_loss=0.04081, over 4691.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2208, pruned_loss=0.03916, over 971489.55 frames.], batch size: 15, lr: 3.26e-04 +2022-05-05 15:55:33,106 INFO [train.py:715] (6/8) Epoch 6, batch 26450, loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02908, over 4984.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2206, pruned_loss=0.03907, over 971262.48 frames.], batch size: 25, lr: 3.26e-04 +2022-05-05 15:56:11,693 INFO [train.py:715] (6/8) Epoch 6, batch 26500, loss[loss=0.1067, simple_loss=0.175, pruned_loss=0.01917, over 4776.00 frames.], tot_loss[loss=0.15, simple_loss=0.2208, pruned_loss=0.03961, over 970729.65 frames.], batch size: 14, lr: 3.26e-04 +2022-05-05 15:56:50,073 INFO [train.py:715] (6/8) Epoch 6, batch 26550, loss[loss=0.1509, simple_loss=0.2242, pruned_loss=0.03875, over 4649.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2204, pruned_loss=0.03951, over 971649.47 frames.], batch size: 13, lr: 3.26e-04 +2022-05-05 15:57:28,902 INFO [train.py:715] (6/8) Epoch 6, batch 26600, loss[loss=0.1492, simple_loss=0.2201, pruned_loss=0.03916, over 4964.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2205, pruned_loss=0.03931, over 971817.44 frames.], batch size: 15, lr: 3.26e-04 +2022-05-05 15:58:07,542 INFO [train.py:715] (6/8) Epoch 6, batch 26650, loss[loss=0.1871, 
simple_loss=0.2646, pruned_loss=0.05481, over 4958.00 frames.], tot_loss[loss=0.1501, simple_loss=0.221, pruned_loss=0.03963, over 971614.36 frames.], batch size: 35, lr: 3.26e-04 +2022-05-05 15:58:46,268 INFO [train.py:715] (6/8) Epoch 6, batch 26700, loss[loss=0.1428, simple_loss=0.2186, pruned_loss=0.03345, over 4799.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2206, pruned_loss=0.03935, over 971548.40 frames.], batch size: 25, lr: 3.25e-04 +2022-05-05 15:59:24,555 INFO [train.py:715] (6/8) Epoch 6, batch 26750, loss[loss=0.1584, simple_loss=0.2301, pruned_loss=0.04333, over 4918.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2204, pruned_loss=0.03896, over 971195.57 frames.], batch size: 18, lr: 3.25e-04 +2022-05-05 16:00:03,786 INFO [train.py:715] (6/8) Epoch 6, batch 26800, loss[loss=0.1463, simple_loss=0.2166, pruned_loss=0.03797, over 4910.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2198, pruned_loss=0.03886, over 971343.18 frames.], batch size: 19, lr: 3.25e-04 +2022-05-05 16:00:41,899 INFO [train.py:715] (6/8) Epoch 6, batch 26850, loss[loss=0.1708, simple_loss=0.2291, pruned_loss=0.0563, over 4758.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2194, pruned_loss=0.03859, over 971286.59 frames.], batch size: 16, lr: 3.25e-04 +2022-05-05 16:01:20,552 INFO [train.py:715] (6/8) Epoch 6, batch 26900, loss[loss=0.1379, simple_loss=0.2145, pruned_loss=0.03069, over 4862.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2209, pruned_loss=0.03926, over 971018.40 frames.], batch size: 20, lr: 3.25e-04 +2022-05-05 16:01:59,792 INFO [train.py:715] (6/8) Epoch 6, batch 26950, loss[loss=0.1875, simple_loss=0.2623, pruned_loss=0.05639, over 4913.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2206, pruned_loss=0.03925, over 971206.78 frames.], batch size: 39, lr: 3.25e-04 +2022-05-05 16:02:39,039 INFO [train.py:715] (6/8) Epoch 6, batch 27000, loss[loss=0.1714, simple_loss=0.2422, pruned_loss=0.05035, over 4697.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2203, pruned_loss=0.03909, over 970800.51 frames.], batch size: 15, lr: 3.25e-04 +2022-05-05 16:02:39,040 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 16:02:48,795 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1088, simple_loss=0.1938, pruned_loss=0.01188, over 914524.00 frames. 
+2022-05-05 16:03:28,074 INFO [train.py:715] (6/8) Epoch 6, batch 27050, loss[loss=0.1207, simple_loss=0.2005, pruned_loss=0.02041, over 4803.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2204, pruned_loss=0.03904, over 970843.49 frames.], batch size: 21, lr: 3.25e-04 +2022-05-05 16:04:06,806 INFO [train.py:715] (6/8) Epoch 6, batch 27100, loss[loss=0.1519, simple_loss=0.2231, pruned_loss=0.04036, over 4824.00 frames.], tot_loss[loss=0.1501, simple_loss=0.221, pruned_loss=0.03958, over 971043.56 frames.], batch size: 13, lr: 3.25e-04 +2022-05-05 16:04:45,437 INFO [train.py:715] (6/8) Epoch 6, batch 27150, loss[loss=0.1247, simple_loss=0.2064, pruned_loss=0.02147, over 4796.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2207, pruned_loss=0.03901, over 971600.25 frames.], batch size: 14, lr: 3.25e-04 +2022-05-05 16:05:25,173 INFO [train.py:715] (6/8) Epoch 6, batch 27200, loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02888, over 4702.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2196, pruned_loss=0.03851, over 971127.63 frames.], batch size: 15, lr: 3.25e-04 +2022-05-05 16:06:03,412 INFO [train.py:715] (6/8) Epoch 6, batch 27250, loss[loss=0.1566, simple_loss=0.2177, pruned_loss=0.04776, over 4907.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2191, pruned_loss=0.03816, over 970845.33 frames.], batch size: 18, lr: 3.25e-04 +2022-05-05 16:06:43,063 INFO [train.py:715] (6/8) Epoch 6, batch 27300, loss[loss=0.1179, simple_loss=0.201, pruned_loss=0.01741, over 4978.00 frames.], tot_loss[loss=0.1476, simple_loss=0.219, pruned_loss=0.0381, over 971357.41 frames.], batch size: 28, lr: 3.25e-04 +2022-05-05 16:07:22,057 INFO [train.py:715] (6/8) Epoch 6, batch 27350, loss[loss=0.1378, simple_loss=0.2089, pruned_loss=0.03336, over 4847.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2198, pruned_loss=0.03836, over 972822.51 frames.], batch size: 13, lr: 3.25e-04 +2022-05-05 16:08:01,166 INFO [train.py:715] (6/8) Epoch 6, batch 27400, loss[loss=0.1545, simple_loss=0.215, pruned_loss=0.04696, over 4976.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2201, pruned_loss=0.03847, over 973570.92 frames.], batch size: 14, lr: 3.25e-04 +2022-05-05 16:08:39,770 INFO [train.py:715] (6/8) Epoch 6, batch 27450, loss[loss=0.1456, simple_loss=0.2244, pruned_loss=0.0334, over 4918.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2202, pruned_loss=0.03874, over 973053.74 frames.], batch size: 21, lr: 3.25e-04 +2022-05-05 16:09:18,812 INFO [train.py:715] (6/8) Epoch 6, batch 27500, loss[loss=0.1482, simple_loss=0.2252, pruned_loss=0.03557, over 4942.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2199, pruned_loss=0.03869, over 973236.87 frames.], batch size: 24, lr: 3.25e-04 +2022-05-05 16:09:58,188 INFO [train.py:715] (6/8) Epoch 6, batch 27550, loss[loss=0.1386, simple_loss=0.2162, pruned_loss=0.03054, over 4797.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2198, pruned_loss=0.03873, over 973249.54 frames.], batch size: 21, lr: 3.25e-04 +2022-05-05 16:10:36,912 INFO [train.py:715] (6/8) Epoch 6, batch 27600, loss[loss=0.1425, simple_loss=0.2222, pruned_loss=0.03144, over 4922.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03826, over 972450.31 frames.], batch size: 23, lr: 3.25e-04 +2022-05-05 16:11:15,425 INFO [train.py:715] (6/8) Epoch 6, batch 27650, loss[loss=0.1723, simple_loss=0.2388, pruned_loss=0.05294, over 4661.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2193, pruned_loss=0.03844, over 972486.17 frames.], batch size: 13, lr: 3.25e-04 +2022-05-05 16:11:54,438 
INFO [train.py:715] (6/8) Epoch 6, batch 27700, loss[loss=0.1504, simple_loss=0.2212, pruned_loss=0.03978, over 4925.00 frames.], tot_loss[loss=0.1468, simple_loss=0.218, pruned_loss=0.03784, over 971663.60 frames.], batch size: 29, lr: 3.25e-04 +2022-05-05 16:12:32,978 INFO [train.py:715] (6/8) Epoch 6, batch 27750, loss[loss=0.1266, simple_loss=0.2051, pruned_loss=0.02403, over 4780.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2181, pruned_loss=0.03761, over 971461.07 frames.], batch size: 17, lr: 3.25e-04 +2022-05-05 16:13:12,187 INFO [train.py:715] (6/8) Epoch 6, batch 27800, loss[loss=0.1343, simple_loss=0.2194, pruned_loss=0.02459, over 4895.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2185, pruned_loss=0.0376, over 971192.26 frames.], batch size: 22, lr: 3.25e-04 +2022-05-05 16:13:51,230 INFO [train.py:715] (6/8) Epoch 6, batch 27850, loss[loss=0.1348, simple_loss=0.2024, pruned_loss=0.03361, over 4773.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2188, pruned_loss=0.03808, over 971436.01 frames.], batch size: 14, lr: 3.25e-04 +2022-05-05 16:14:30,896 INFO [train.py:715] (6/8) Epoch 6, batch 27900, loss[loss=0.1441, simple_loss=0.2187, pruned_loss=0.03478, over 4757.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2189, pruned_loss=0.0382, over 971459.29 frames.], batch size: 19, lr: 3.25e-04 +2022-05-05 16:15:09,372 INFO [train.py:715] (6/8) Epoch 6, batch 27950, loss[loss=0.1522, simple_loss=0.2303, pruned_loss=0.03704, over 4956.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2187, pruned_loss=0.03797, over 971778.65 frames.], batch size: 29, lr: 3.25e-04 +2022-05-05 16:15:48,253 INFO [train.py:715] (6/8) Epoch 6, batch 28000, loss[loss=0.1332, simple_loss=0.2008, pruned_loss=0.03278, over 4987.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2187, pruned_loss=0.03819, over 972544.95 frames.], batch size: 14, lr: 3.25e-04 +2022-05-05 16:16:27,386 INFO [train.py:715] (6/8) Epoch 6, batch 28050, loss[loss=0.1644, simple_loss=0.2313, pruned_loss=0.04874, over 4987.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2187, pruned_loss=0.03819, over 972425.29 frames.], batch size: 25, lr: 3.25e-04 +2022-05-05 16:17:06,024 INFO [train.py:715] (6/8) Epoch 6, batch 28100, loss[loss=0.1897, simple_loss=0.2578, pruned_loss=0.06075, over 4695.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2192, pruned_loss=0.0385, over 971605.47 frames.], batch size: 15, lr: 3.25e-04 +2022-05-05 16:17:44,943 INFO [train.py:715] (6/8) Epoch 6, batch 28150, loss[loss=0.1591, simple_loss=0.2179, pruned_loss=0.05019, over 4844.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2185, pruned_loss=0.03801, over 971948.61 frames.], batch size: 15, lr: 3.24e-04 +2022-05-05 16:18:24,089 INFO [train.py:715] (6/8) Epoch 6, batch 28200, loss[loss=0.1492, simple_loss=0.2313, pruned_loss=0.03355, over 4919.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2179, pruned_loss=0.03775, over 971278.58 frames.], batch size: 18, lr: 3.24e-04 +2022-05-05 16:19:03,412 INFO [train.py:715] (6/8) Epoch 6, batch 28250, loss[loss=0.1476, simple_loss=0.2201, pruned_loss=0.03756, over 4856.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2187, pruned_loss=0.03772, over 971263.33 frames.], batch size: 12, lr: 3.24e-04 +2022-05-05 16:19:41,792 INFO [train.py:715] (6/8) Epoch 6, batch 28300, loss[loss=0.1703, simple_loss=0.2386, pruned_loss=0.05099, over 4800.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2186, pruned_loss=0.03792, over 971630.14 frames.], batch size: 21, lr: 3.24e-04 +2022-05-05 16:20:20,026 INFO [train.py:715] 
(6/8) Epoch 6, batch 28350, loss[loss=0.1511, simple_loss=0.2247, pruned_loss=0.03875, over 4773.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2188, pruned_loss=0.03787, over 972180.66 frames.], batch size: 18, lr: 3.24e-04 +2022-05-05 16:20:59,873 INFO [train.py:715] (6/8) Epoch 6, batch 28400, loss[loss=0.1422, simple_loss=0.2235, pruned_loss=0.03041, over 4896.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2197, pruned_loss=0.03847, over 971910.45 frames.], batch size: 19, lr: 3.24e-04 +2022-05-05 16:21:38,667 INFO [train.py:715] (6/8) Epoch 6, batch 28450, loss[loss=0.1448, simple_loss=0.2239, pruned_loss=0.03285, over 4930.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2198, pruned_loss=0.0387, over 972193.35 frames.], batch size: 23, lr: 3.24e-04 +2022-05-05 16:22:17,505 INFO [train.py:715] (6/8) Epoch 6, batch 28500, loss[loss=0.178, simple_loss=0.2461, pruned_loss=0.05495, over 4803.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2188, pruned_loss=0.03821, over 971732.88 frames.], batch size: 21, lr: 3.24e-04 +2022-05-05 16:22:56,650 INFO [train.py:715] (6/8) Epoch 6, batch 28550, loss[loss=0.1472, simple_loss=0.2229, pruned_loss=0.03568, over 4797.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2187, pruned_loss=0.03816, over 971683.36 frames.], batch size: 24, lr: 3.24e-04 +2022-05-05 16:23:36,090 INFO [train.py:715] (6/8) Epoch 6, batch 28600, loss[loss=0.1224, simple_loss=0.1885, pruned_loss=0.02813, over 4953.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2188, pruned_loss=0.03824, over 972026.13 frames.], batch size: 35, lr: 3.24e-04 +2022-05-05 16:24:14,189 INFO [train.py:715] (6/8) Epoch 6, batch 28650, loss[loss=0.164, simple_loss=0.2281, pruned_loss=0.04991, over 4964.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2188, pruned_loss=0.03825, over 971194.99 frames.], batch size: 35, lr: 3.24e-04 +2022-05-05 16:24:52,989 INFO [train.py:715] (6/8) Epoch 6, batch 28700, loss[loss=0.1294, simple_loss=0.2021, pruned_loss=0.02836, over 4966.00 frames.], tot_loss[loss=0.147, simple_loss=0.2188, pruned_loss=0.03764, over 972543.51 frames.], batch size: 35, lr: 3.24e-04 +2022-05-05 16:25:32,176 INFO [train.py:715] (6/8) Epoch 6, batch 28750, loss[loss=0.1489, simple_loss=0.2216, pruned_loss=0.03814, over 4974.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2178, pruned_loss=0.03736, over 972147.16 frames.], batch size: 28, lr: 3.24e-04 +2022-05-05 16:26:10,897 INFO [train.py:715] (6/8) Epoch 6, batch 28800, loss[loss=0.1588, simple_loss=0.2422, pruned_loss=0.0377, over 4937.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2188, pruned_loss=0.03784, over 972557.08 frames.], batch size: 23, lr: 3.24e-04 +2022-05-05 16:26:49,769 INFO [train.py:715] (6/8) Epoch 6, batch 28850, loss[loss=0.1523, simple_loss=0.2216, pruned_loss=0.04149, over 4829.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2182, pruned_loss=0.0375, over 972114.76 frames.], batch size: 15, lr: 3.24e-04 +2022-05-05 16:27:28,067 INFO [train.py:715] (6/8) Epoch 6, batch 28900, loss[loss=0.1402, simple_loss=0.2071, pruned_loss=0.03662, over 4987.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2186, pruned_loss=0.03744, over 972412.36 frames.], batch size: 14, lr: 3.24e-04 +2022-05-05 16:28:07,514 INFO [train.py:715] (6/8) Epoch 6, batch 28950, loss[loss=0.1551, simple_loss=0.2197, pruned_loss=0.04526, over 4922.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2192, pruned_loss=0.03773, over 972499.39 frames.], batch size: 18, lr: 3.24e-04 +2022-05-05 16:28:45,750 INFO [train.py:715] (6/8) Epoch 6, batch 
29000, loss[loss=0.1622, simple_loss=0.2405, pruned_loss=0.04193, over 4982.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2194, pruned_loss=0.03806, over 972705.16 frames.], batch size: 39, lr: 3.24e-04 +2022-05-05 16:29:23,907 INFO [train.py:715] (6/8) Epoch 6, batch 29050, loss[loss=0.145, simple_loss=0.2204, pruned_loss=0.03474, over 4900.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2189, pruned_loss=0.03793, over 972578.82 frames.], batch size: 16, lr: 3.24e-04 +2022-05-05 16:30:02,945 INFO [train.py:715] (6/8) Epoch 6, batch 29100, loss[loss=0.1432, simple_loss=0.2202, pruned_loss=0.03312, over 4817.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2182, pruned_loss=0.03739, over 972518.83 frames.], batch size: 27, lr: 3.24e-04 +2022-05-05 16:30:41,837 INFO [train.py:715] (6/8) Epoch 6, batch 29150, loss[loss=0.1595, simple_loss=0.2258, pruned_loss=0.04657, over 4849.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2191, pruned_loss=0.03798, over 972547.94 frames.], batch size: 32, lr: 3.24e-04 +2022-05-05 16:31:20,669 INFO [train.py:715] (6/8) Epoch 6, batch 29200, loss[loss=0.1702, simple_loss=0.2363, pruned_loss=0.0521, over 4839.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2193, pruned_loss=0.03853, over 972908.69 frames.], batch size: 15, lr: 3.24e-04 +2022-05-05 16:31:59,883 INFO [train.py:715] (6/8) Epoch 6, batch 29250, loss[loss=0.1558, simple_loss=0.2162, pruned_loss=0.04771, over 4966.00 frames.], tot_loss[loss=0.1483, simple_loss=0.219, pruned_loss=0.03877, over 972934.93 frames.], batch size: 35, lr: 3.24e-04 +2022-05-05 16:32:39,921 INFO [train.py:715] (6/8) Epoch 6, batch 29300, loss[loss=0.1451, simple_loss=0.2027, pruned_loss=0.04375, over 4876.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2198, pruned_loss=0.03931, over 973290.87 frames.], batch size: 19, lr: 3.24e-04 +2022-05-05 16:33:18,207 INFO [train.py:715] (6/8) Epoch 6, batch 29350, loss[loss=0.1644, simple_loss=0.2385, pruned_loss=0.04518, over 4946.00 frames.], tot_loss[loss=0.148, simple_loss=0.2189, pruned_loss=0.03851, over 973078.55 frames.], batch size: 29, lr: 3.24e-04 +2022-05-05 16:33:57,192 INFO [train.py:715] (6/8) Epoch 6, batch 29400, loss[loss=0.1644, simple_loss=0.2329, pruned_loss=0.04794, over 4981.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2191, pruned_loss=0.03889, over 973863.81 frames.], batch size: 15, lr: 3.24e-04 +2022-05-05 16:34:36,596 INFO [train.py:715] (6/8) Epoch 6, batch 29450, loss[loss=0.1499, simple_loss=0.2149, pruned_loss=0.04245, over 4881.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2192, pruned_loss=0.03894, over 972778.73 frames.], batch size: 16, lr: 3.24e-04 +2022-05-05 16:35:15,802 INFO [train.py:715] (6/8) Epoch 6, batch 29500, loss[loss=0.1099, simple_loss=0.1921, pruned_loss=0.01385, over 4800.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2193, pruned_loss=0.03877, over 971385.60 frames.], batch size: 25, lr: 3.24e-04 +2022-05-05 16:35:53,791 INFO [train.py:715] (6/8) Epoch 6, batch 29550, loss[loss=0.1199, simple_loss=0.1956, pruned_loss=0.02216, over 4817.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2195, pruned_loss=0.03879, over 971225.99 frames.], batch size: 26, lr: 3.24e-04 +2022-05-05 16:36:33,140 INFO [train.py:715] (6/8) Epoch 6, batch 29600, loss[loss=0.1663, simple_loss=0.231, pruned_loss=0.05079, over 4696.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2194, pruned_loss=0.03861, over 971602.00 frames.], batch size: 15, lr: 3.24e-04 +2022-05-05 16:37:12,533 INFO [train.py:715] (6/8) Epoch 6, batch 29650, 
loss[loss=0.1467, simple_loss=0.22, pruned_loss=0.03669, over 4975.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2195, pruned_loss=0.03852, over 972197.89 frames.], batch size: 25, lr: 3.23e-04 +2022-05-05 16:37:51,063 INFO [train.py:715] (6/8) Epoch 6, batch 29700, loss[loss=0.1415, simple_loss=0.2078, pruned_loss=0.03765, over 4855.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2186, pruned_loss=0.03801, over 973054.74 frames.], batch size: 20, lr: 3.23e-04 +2022-05-05 16:38:29,762 INFO [train.py:715] (6/8) Epoch 6, batch 29750, loss[loss=0.1599, simple_loss=0.224, pruned_loss=0.04789, over 4800.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2183, pruned_loss=0.03799, over 973153.08 frames.], batch size: 18, lr: 3.23e-04 +2022-05-05 16:39:08,775 INFO [train.py:715] (6/8) Epoch 6, batch 29800, loss[loss=0.1916, simple_loss=0.2432, pruned_loss=0.07002, over 4962.00 frames.], tot_loss[loss=0.1475, simple_loss=0.219, pruned_loss=0.03804, over 972675.16 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:39:48,203 INFO [train.py:715] (6/8) Epoch 6, batch 29850, loss[loss=0.1204, simple_loss=0.1921, pruned_loss=0.02437, over 4749.00 frames.], tot_loss[loss=0.1466, simple_loss=0.218, pruned_loss=0.03759, over 972262.46 frames.], batch size: 16, lr: 3.23e-04 +2022-05-05 16:40:26,713 INFO [train.py:715] (6/8) Epoch 6, batch 29900, loss[loss=0.1371, simple_loss=0.2151, pruned_loss=0.02958, over 4903.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2183, pruned_loss=0.03751, over 972978.36 frames.], batch size: 19, lr: 3.23e-04 +2022-05-05 16:41:05,701 INFO [train.py:715] (6/8) Epoch 6, batch 29950, loss[loss=0.1634, simple_loss=0.2411, pruned_loss=0.04282, over 4770.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2179, pruned_loss=0.03716, over 972482.93 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:41:45,055 INFO [train.py:715] (6/8) Epoch 6, batch 30000, loss[loss=0.1606, simple_loss=0.2276, pruned_loss=0.04684, over 4972.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2176, pruned_loss=0.03713, over 972241.12 frames.], batch size: 31, lr: 3.23e-04 +2022-05-05 16:41:45,056 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 16:41:54,714 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1088, simple_loss=0.1938, pruned_loss=0.0119, over 914524.00 frames. 
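Every per-batch entry here follows the same single-line layout (timestamp, rank, epoch, batch, per-batch loss[...], running tot_loss[...], batch size, lr), so loss and learning-rate curves can be recovered from the text alone. The sketch below is a minimal parser written against the format visible in this log, assuming one entry per line; the regular expression and field names are illustrative and are not part of the training code.

import re

# Matches per-batch entries such as:
# "... Epoch 6, batch 30000, loss[...], tot_loss[loss=0.1459, ...], batch size: 31, lr: 3.23e-04"
ENTRY = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), .*?"
    r"tot_loss\[loss=(?P<tot_loss>[\d.]+), .*?"
    r"batch size: (?P<batch_size>\d+), lr: (?P<lr>[\d.eE+-]+)"
)

def parse_log(path):
    """Yield one dict per per-batch training entry found in the log file."""
    with open(path) as f:
        for line in f:
            m = ENTRY.search(line)
            if m:
                yield {
                    "epoch": int(m.group("epoch")),
                    "batch": int(m.group("batch")),
                    "tot_loss": float(m.group("tot_loss")),
                    "batch_size": int(m.group("batch_size")),
                    "lr": float(m.group("lr")),
                }

# Example usage (path is a placeholder for wherever this log is stored):
# for row in parse_log("<path-to-this-log>"):
#     if row["batch"] % 1000 == 0:
#         print(row["epoch"], row["batch"], row["tot_loss"], row["lr"])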
+2022-05-05 16:42:34,425 INFO [train.py:715] (6/8) Epoch 6, batch 30050, loss[loss=0.1274, simple_loss=0.1973, pruned_loss=0.02872, over 4792.00 frames.], tot_loss[loss=0.146, simple_loss=0.2177, pruned_loss=0.03717, over 973474.85 frames.], batch size: 12, lr: 3.23e-04 +2022-05-05 16:43:12,814 INFO [train.py:715] (6/8) Epoch 6, batch 30100, loss[loss=0.1075, simple_loss=0.1798, pruned_loss=0.01757, over 4829.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2174, pruned_loss=0.03722, over 973376.91 frames.], batch size: 13, lr: 3.23e-04 +2022-05-05 16:43:51,560 INFO [train.py:715] (6/8) Epoch 6, batch 30150, loss[loss=0.1534, simple_loss=0.2363, pruned_loss=0.03526, over 4801.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2173, pruned_loss=0.03728, over 972854.29 frames.], batch size: 21, lr: 3.23e-04 +2022-05-05 16:44:30,967 INFO [train.py:715] (6/8) Epoch 6, batch 30200, loss[loss=0.1345, simple_loss=0.215, pruned_loss=0.02704, over 4978.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2186, pruned_loss=0.03806, over 973594.59 frames.], batch size: 28, lr: 3.23e-04 +2022-05-05 16:45:10,340 INFO [train.py:715] (6/8) Epoch 6, batch 30250, loss[loss=0.1783, simple_loss=0.2455, pruned_loss=0.05556, over 4986.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2196, pruned_loss=0.03828, over 973603.20 frames.], batch size: 28, lr: 3.23e-04 +2022-05-05 16:45:48,512 INFO [train.py:715] (6/8) Epoch 6, batch 30300, loss[loss=0.1378, simple_loss=0.2102, pruned_loss=0.03272, over 4934.00 frames.], tot_loss[loss=0.148, simple_loss=0.2197, pruned_loss=0.0381, over 973727.93 frames.], batch size: 23, lr: 3.23e-04 +2022-05-05 16:46:27,516 INFO [train.py:715] (6/8) Epoch 6, batch 30350, loss[loss=0.1315, simple_loss=0.1952, pruned_loss=0.03389, over 4776.00 frames.], tot_loss[loss=0.1476, simple_loss=0.219, pruned_loss=0.03808, over 973274.56 frames.], batch size: 18, lr: 3.23e-04 +2022-05-05 16:47:06,586 INFO [train.py:715] (6/8) Epoch 6, batch 30400, loss[loss=0.1332, simple_loss=0.2002, pruned_loss=0.03313, over 4967.00 frames.], tot_loss[loss=0.147, simple_loss=0.2184, pruned_loss=0.03779, over 973265.25 frames.], batch size: 25, lr: 3.23e-04 +2022-05-05 16:47:45,263 INFO [train.py:715] (6/8) Epoch 6, batch 30450, loss[loss=0.14, simple_loss=0.2076, pruned_loss=0.03618, over 4967.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2186, pruned_loss=0.03758, over 973716.17 frames.], batch size: 28, lr: 3.23e-04 +2022-05-05 16:48:23,948 INFO [train.py:715] (6/8) Epoch 6, batch 30500, loss[loss=0.1703, simple_loss=0.2442, pruned_loss=0.04817, over 4954.00 frames.], tot_loss[loss=0.1472, simple_loss=0.219, pruned_loss=0.03768, over 973007.60 frames.], batch size: 21, lr: 3.23e-04 +2022-05-05 16:49:02,695 INFO [train.py:715] (6/8) Epoch 6, batch 30550, loss[loss=0.1394, simple_loss=0.1997, pruned_loss=0.03957, over 4849.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2191, pruned_loss=0.03776, over 972889.52 frames.], batch size: 32, lr: 3.23e-04 +2022-05-05 16:49:41,853 INFO [train.py:715] (6/8) Epoch 6, batch 30600, loss[loss=0.15, simple_loss=0.2097, pruned_loss=0.0452, over 4984.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2192, pruned_loss=0.03774, over 973730.21 frames.], batch size: 35, lr: 3.23e-04 +2022-05-05 16:50:20,374 INFO [train.py:715] (6/8) Epoch 6, batch 30650, loss[loss=0.1475, simple_loss=0.2201, pruned_loss=0.03744, over 4789.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.03827, over 972856.37 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:50:59,232 INFO 
[train.py:715] (6/8) Epoch 6, batch 30700, loss[loss=0.1575, simple_loss=0.2322, pruned_loss=0.04139, over 4976.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2197, pruned_loss=0.03857, over 972754.80 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:51:38,190 INFO [train.py:715] (6/8) Epoch 6, batch 30750, loss[loss=0.1339, simple_loss=0.2128, pruned_loss=0.02753, over 4894.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2198, pruned_loss=0.0385, over 972353.45 frames.], batch size: 19, lr: 3.23e-04 +2022-05-05 16:52:17,034 INFO [train.py:715] (6/8) Epoch 6, batch 30800, loss[loss=0.1237, simple_loss=0.1835, pruned_loss=0.03196, over 4776.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2189, pruned_loss=0.03795, over 972172.43 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:52:55,431 INFO [train.py:715] (6/8) Epoch 6, batch 30850, loss[loss=0.1729, simple_loss=0.235, pruned_loss=0.05546, over 4844.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2194, pruned_loss=0.03877, over 972317.43 frames.], batch size: 32, lr: 3.23e-04 +2022-05-05 16:53:34,165 INFO [train.py:715] (6/8) Epoch 6, batch 30900, loss[loss=0.132, simple_loss=0.2039, pruned_loss=0.03011, over 4959.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2199, pruned_loss=0.0387, over 972530.78 frames.], batch size: 24, lr: 3.23e-04 +2022-05-05 16:54:13,772 INFO [train.py:715] (6/8) Epoch 6, batch 30950, loss[loss=0.1778, simple_loss=0.2506, pruned_loss=0.05245, over 4925.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2202, pruned_loss=0.03877, over 972776.15 frames.], batch size: 29, lr: 3.23e-04 +2022-05-05 16:54:51,910 INFO [train.py:715] (6/8) Epoch 6, batch 31000, loss[loss=0.1543, simple_loss=0.2318, pruned_loss=0.03838, over 4829.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2205, pruned_loss=0.03921, over 972617.78 frames.], batch size: 27, lr: 3.23e-04 +2022-05-05 16:55:30,912 INFO [train.py:715] (6/8) Epoch 6, batch 31050, loss[loss=0.1439, simple_loss=0.22, pruned_loss=0.03393, over 4906.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2205, pruned_loss=0.03889, over 972826.87 frames.], batch size: 19, lr: 3.23e-04 +2022-05-05 16:56:10,157 INFO [train.py:715] (6/8) Epoch 6, batch 31100, loss[loss=0.1487, simple_loss=0.2163, pruned_loss=0.04059, over 4823.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2215, pruned_loss=0.03944, over 972552.77 frames.], batch size: 15, lr: 3.22e-04 +2022-05-05 16:56:51,383 INFO [train.py:715] (6/8) Epoch 6, batch 31150, loss[loss=0.1505, simple_loss=0.2124, pruned_loss=0.04426, over 4801.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2217, pruned_loss=0.03949, over 971783.37 frames.], batch size: 21, lr: 3.22e-04 +2022-05-05 16:57:30,156 INFO [train.py:715] (6/8) Epoch 6, batch 31200, loss[loss=0.1261, simple_loss=0.1954, pruned_loss=0.0284, over 4867.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2206, pruned_loss=0.039, over 971336.97 frames.], batch size: 13, lr: 3.22e-04 +2022-05-05 16:58:09,408 INFO [train.py:715] (6/8) Epoch 6, batch 31250, loss[loss=0.1519, simple_loss=0.2261, pruned_loss=0.03882, over 4927.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2203, pruned_loss=0.03866, over 971163.47 frames.], batch size: 18, lr: 3.22e-04 +2022-05-05 16:58:48,244 INFO [train.py:715] (6/8) Epoch 6, batch 31300, loss[loss=0.1356, simple_loss=0.2064, pruned_loss=0.0324, over 4968.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2189, pruned_loss=0.03803, over 971717.39 frames.], batch size: 14, lr: 3.22e-04 +2022-05-05 16:59:27,126 INFO [train.py:715] (6/8) Epoch 
6, batch 31350, loss[loss=0.1367, simple_loss=0.2001, pruned_loss=0.03664, over 4830.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2201, pruned_loss=0.03857, over 972468.00 frames.], batch size: 13, lr: 3.22e-04 +2022-05-05 17:00:06,354 INFO [train.py:715] (6/8) Epoch 6, batch 31400, loss[loss=0.1646, simple_loss=0.233, pruned_loss=0.04806, over 4694.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2193, pruned_loss=0.03797, over 972062.47 frames.], batch size: 15, lr: 3.22e-04 +2022-05-05 17:00:45,704 INFO [train.py:715] (6/8) Epoch 6, batch 31450, loss[loss=0.1581, simple_loss=0.2366, pruned_loss=0.03982, over 4793.00 frames.], tot_loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03769, over 972632.70 frames.], batch size: 18, lr: 3.22e-04 +2022-05-05 17:01:23,998 INFO [train.py:715] (6/8) Epoch 6, batch 31500, loss[loss=0.1633, simple_loss=0.2155, pruned_loss=0.0556, over 4933.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2182, pruned_loss=0.03758, over 971860.47 frames.], batch size: 18, lr: 3.22e-04 +2022-05-05 17:02:02,412 INFO [train.py:715] (6/8) Epoch 6, batch 31550, loss[loss=0.1539, simple_loss=0.2241, pruned_loss=0.04184, over 4875.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2173, pruned_loss=0.03727, over 971801.96 frames.], batch size: 16, lr: 3.22e-04 +2022-05-05 17:02:41,956 INFO [train.py:715] (6/8) Epoch 6, batch 31600, loss[loss=0.1438, simple_loss=0.2201, pruned_loss=0.03377, over 4776.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2178, pruned_loss=0.03747, over 972493.23 frames.], batch size: 18, lr: 3.22e-04 +2022-05-05 17:03:21,197 INFO [train.py:715] (6/8) Epoch 6, batch 31650, loss[loss=0.1699, simple_loss=0.2447, pruned_loss=0.04754, over 4711.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2178, pruned_loss=0.03771, over 972120.67 frames.], batch size: 15, lr: 3.22e-04 +2022-05-05 17:03:59,730 INFO [train.py:715] (6/8) Epoch 6, batch 31700, loss[loss=0.1407, simple_loss=0.208, pruned_loss=0.03674, over 4856.00 frames.], tot_loss[loss=0.147, simple_loss=0.2181, pruned_loss=0.03791, over 971999.34 frames.], batch size: 30, lr: 3.22e-04 +2022-05-05 17:04:38,252 INFO [train.py:715] (6/8) Epoch 6, batch 31750, loss[loss=0.1282, simple_loss=0.2044, pruned_loss=0.02594, over 4953.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2183, pruned_loss=0.03775, over 972618.14 frames.], batch size: 21, lr: 3.22e-04 +2022-05-05 17:05:17,756 INFO [train.py:715] (6/8) Epoch 6, batch 31800, loss[loss=0.1381, simple_loss=0.2112, pruned_loss=0.0325, over 4751.00 frames.], tot_loss[loss=0.146, simple_loss=0.2177, pruned_loss=0.03717, over 972683.50 frames.], batch size: 12, lr: 3.22e-04 +2022-05-05 17:05:56,237 INFO [train.py:715] (6/8) Epoch 6, batch 31850, loss[loss=0.1501, simple_loss=0.2237, pruned_loss=0.03823, over 4928.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2179, pruned_loss=0.03717, over 971362.36 frames.], batch size: 18, lr: 3.22e-04 +2022-05-05 17:06:34,778 INFO [train.py:715] (6/8) Epoch 6, batch 31900, loss[loss=0.1601, simple_loss=0.2243, pruned_loss=0.04794, over 4832.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2184, pruned_loss=0.03798, over 971421.07 frames.], batch size: 30, lr: 3.22e-04 +2022-05-05 17:07:13,870 INFO [train.py:715] (6/8) Epoch 6, batch 31950, loss[loss=0.1521, simple_loss=0.2372, pruned_loss=0.03354, over 4902.00 frames.], tot_loss[loss=0.147, simple_loss=0.2188, pruned_loss=0.03763, over 970473.99 frames.], batch size: 17, lr: 3.22e-04 +2022-05-05 17:07:52,487 INFO [train.py:715] (6/8) Epoch 6, batch 32000, 
loss[loss=0.1909, simple_loss=0.2567, pruned_loss=0.06256, over 4694.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2195, pruned_loss=0.03799, over 969990.61 frames.], batch size: 15, lr: 3.22e-04 +2022-05-05 17:08:31,940 INFO [train.py:715] (6/8) Epoch 6, batch 32050, loss[loss=0.1755, simple_loss=0.2479, pruned_loss=0.05154, over 4772.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2195, pruned_loss=0.03819, over 970355.22 frames.], batch size: 17, lr: 3.22e-04 +2022-05-05 17:09:11,462 INFO [train.py:715] (6/8) Epoch 6, batch 32100, loss[loss=0.122, simple_loss=0.1923, pruned_loss=0.02587, over 4787.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2191, pruned_loss=0.03797, over 970280.22 frames.], batch size: 18, lr: 3.22e-04 +2022-05-05 17:09:50,452 INFO [train.py:715] (6/8) Epoch 6, batch 32150, loss[loss=0.1352, simple_loss=0.2026, pruned_loss=0.03396, over 4861.00 frames.], tot_loss[loss=0.147, simple_loss=0.2185, pruned_loss=0.03779, over 969987.19 frames.], batch size: 32, lr: 3.22e-04 +2022-05-05 17:10:28,948 INFO [train.py:715] (6/8) Epoch 6, batch 32200, loss[loss=0.1854, simple_loss=0.2588, pruned_loss=0.05605, over 4792.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2186, pruned_loss=0.03793, over 970740.80 frames.], batch size: 14, lr: 3.22e-04 +2022-05-05 17:11:08,023 INFO [train.py:715] (6/8) Epoch 6, batch 32250, loss[loss=0.1331, simple_loss=0.204, pruned_loss=0.03108, over 4889.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2193, pruned_loss=0.03781, over 971205.66 frames.], batch size: 32, lr: 3.22e-04 +2022-05-05 17:11:46,870 INFO [train.py:715] (6/8) Epoch 6, batch 32300, loss[loss=0.1537, simple_loss=0.2453, pruned_loss=0.03102, over 4759.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2195, pruned_loss=0.03791, over 970982.22 frames.], batch size: 14, lr: 3.22e-04 +2022-05-05 17:12:26,140 INFO [train.py:715] (6/8) Epoch 6, batch 32350, loss[loss=0.1706, simple_loss=0.246, pruned_loss=0.04761, over 4849.00 frames.], tot_loss[loss=0.148, simple_loss=0.2198, pruned_loss=0.03811, over 972590.88 frames.], batch size: 20, lr: 3.22e-04 +2022-05-05 17:13:04,502 INFO [train.py:715] (6/8) Epoch 6, batch 32400, loss[loss=0.1437, simple_loss=0.2113, pruned_loss=0.03802, over 4853.00 frames.], tot_loss[loss=0.1481, simple_loss=0.22, pruned_loss=0.03808, over 973019.86 frames.], batch size: 20, lr: 3.22e-04 +2022-05-05 17:13:43,921 INFO [train.py:715] (6/8) Epoch 6, batch 32450, loss[loss=0.1697, simple_loss=0.2462, pruned_loss=0.04655, over 4803.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2206, pruned_loss=0.03849, over 972408.33 frames.], batch size: 24, lr: 3.22e-04 +2022-05-05 17:14:23,268 INFO [train.py:715] (6/8) Epoch 6, batch 32500, loss[loss=0.1863, simple_loss=0.2481, pruned_loss=0.06222, over 4850.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2209, pruned_loss=0.03863, over 972255.84 frames.], batch size: 32, lr: 3.22e-04 +2022-05-05 17:15:01,983 INFO [train.py:715] (6/8) Epoch 6, batch 32550, loss[loss=0.1493, simple_loss=0.22, pruned_loss=0.03933, over 4977.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2206, pruned_loss=0.03843, over 971816.92 frames.], batch size: 14, lr: 3.22e-04 +2022-05-05 17:15:40,776 INFO [train.py:715] (6/8) Epoch 6, batch 32600, loss[loss=0.15, simple_loss=0.2218, pruned_loss=0.03909, over 4974.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2205, pruned_loss=0.03843, over 972837.87 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:16:19,203 INFO [train.py:715] (6/8) Epoch 6, batch 32650, loss[loss=0.1716, 
simple_loss=0.2426, pruned_loss=0.05025, over 4799.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2202, pruned_loss=0.0385, over 972536.61 frames.], batch size: 24, lr: 3.21e-04 +2022-05-05 17:16:57,839 INFO [train.py:715] (6/8) Epoch 6, batch 32700, loss[loss=0.1333, simple_loss=0.2031, pruned_loss=0.03178, over 4910.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2201, pruned_loss=0.0383, over 973414.63 frames.], batch size: 39, lr: 3.21e-04 +2022-05-05 17:17:35,887 INFO [train.py:715] (6/8) Epoch 6, batch 32750, loss[loss=0.1263, simple_loss=0.2071, pruned_loss=0.02275, over 4824.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2195, pruned_loss=0.03834, over 973488.13 frames.], batch size: 27, lr: 3.21e-04 +2022-05-05 17:18:14,604 INFO [train.py:715] (6/8) Epoch 6, batch 32800, loss[loss=0.1393, simple_loss=0.2048, pruned_loss=0.0369, over 4815.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2188, pruned_loss=0.03818, over 973549.93 frames.], batch size: 27, lr: 3.21e-04 +2022-05-05 17:18:53,198 INFO [train.py:715] (6/8) Epoch 6, batch 32850, loss[loss=0.1874, simple_loss=0.2382, pruned_loss=0.06833, over 4983.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2191, pruned_loss=0.03858, over 972585.62 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:19:31,627 INFO [train.py:715] (6/8) Epoch 6, batch 32900, loss[loss=0.1378, simple_loss=0.2143, pruned_loss=0.03067, over 4899.00 frames.], tot_loss[loss=0.148, simple_loss=0.2187, pruned_loss=0.03862, over 972332.25 frames.], batch size: 17, lr: 3.21e-04 +2022-05-05 17:20:09,698 INFO [train.py:715] (6/8) Epoch 6, batch 32950, loss[loss=0.1766, simple_loss=0.2507, pruned_loss=0.05123, over 4982.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2187, pruned_loss=0.03881, over 972743.85 frames.], batch size: 25, lr: 3.21e-04 +2022-05-05 17:20:48,506 INFO [train.py:715] (6/8) Epoch 6, batch 33000, loss[loss=0.1807, simple_loss=0.2441, pruned_loss=0.05868, over 4807.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2199, pruned_loss=0.03942, over 972748.23 frames.], batch size: 26, lr: 3.21e-04 +2022-05-05 17:20:48,507 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 17:20:58,110 INFO [train.py:742] (6/8) Epoch 6, validation: loss=0.1087, simple_loss=0.1938, pruned_loss=0.01183, over 914524.00 frames. 
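The validation passes in this section are essentially flat: loss=0.1088 at batch 27000, 0.1088 at batch 30000, and now 0.1087 at batch 33000, while the running training tot_loss stays around 0.147-0.150. When reading a log like this, a best-so-far tracker over the validation loss is a simple way to judge whether a checkpoint improved; the sketch below is a generic illustration fed with the three values above, not code taken from this recipe.

import math

def update_best(valid_loss, state, min_delta=1e-4):
    """Return True when valid_loss beats the best seen so far by min_delta."""
    if valid_loss < state["best"] - min_delta:
        state["best"] = valid_loss
        state["stale"] = 0
        return True
    state["stale"] += 1
    return False

state = {"best": math.inf, "stale": 0}
for batch, valid_loss in [(27000, 0.1088), (30000, 0.1088), (33000, 0.1087)]:
    if update_best(valid_loss, state):
        print(f"batch {batch}: validation loss {valid_loss} -> new best")
    else:
        print(f"batch {batch}: validation loss {valid_loss} "
              f"({state['stale']} check(s) without improvement)")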
+2022-05-05 17:21:36,675 INFO [train.py:715] (6/8) Epoch 6, batch 33050, loss[loss=0.1652, simple_loss=0.2376, pruned_loss=0.04644, over 4888.00 frames.], tot_loss[loss=0.149, simple_loss=0.2196, pruned_loss=0.0392, over 973462.39 frames.], batch size: 19, lr: 3.21e-04 +2022-05-05 17:22:15,262 INFO [train.py:715] (6/8) Epoch 6, batch 33100, loss[loss=0.159, simple_loss=0.2292, pruned_loss=0.04442, over 4706.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2187, pruned_loss=0.03858, over 973431.45 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:22:53,010 INFO [train.py:715] (6/8) Epoch 6, batch 33150, loss[loss=0.1755, simple_loss=0.2535, pruned_loss=0.0487, over 4983.00 frames.], tot_loss[loss=0.148, simple_loss=0.2191, pruned_loss=0.03842, over 974072.50 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:23:31,898 INFO [train.py:715] (6/8) Epoch 6, batch 33200, loss[loss=0.1632, simple_loss=0.2439, pruned_loss=0.04126, over 4989.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2183, pruned_loss=0.03768, over 974016.49 frames.], batch size: 28, lr: 3.21e-04 +2022-05-05 17:24:10,785 INFO [train.py:715] (6/8) Epoch 6, batch 33250, loss[loss=0.1553, simple_loss=0.2321, pruned_loss=0.03924, over 4871.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2182, pruned_loss=0.03759, over 973606.99 frames.], batch size: 16, lr: 3.21e-04 +2022-05-05 17:24:49,863 INFO [train.py:715] (6/8) Epoch 6, batch 33300, loss[loss=0.1449, simple_loss=0.2267, pruned_loss=0.0315, over 4821.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2201, pruned_loss=0.03881, over 973043.95 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:25:28,470 INFO [train.py:715] (6/8) Epoch 6, batch 33350, loss[loss=0.1313, simple_loss=0.2097, pruned_loss=0.02649, over 4947.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2195, pruned_loss=0.03849, over 972633.39 frames.], batch size: 23, lr: 3.21e-04 +2022-05-05 17:26:07,936 INFO [train.py:715] (6/8) Epoch 6, batch 33400, loss[loss=0.1721, simple_loss=0.2332, pruned_loss=0.05546, over 4952.00 frames.], tot_loss[loss=0.148, simple_loss=0.2198, pruned_loss=0.03814, over 972685.71 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:26:47,017 INFO [train.py:715] (6/8) Epoch 6, batch 33450, loss[loss=0.1697, simple_loss=0.2334, pruned_loss=0.05295, over 4811.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2197, pruned_loss=0.03822, over 972526.44 frames.], batch size: 21, lr: 3.21e-04 +2022-05-05 17:27:25,291 INFO [train.py:715] (6/8) Epoch 6, batch 33500, loss[loss=0.1557, simple_loss=0.2289, pruned_loss=0.04127, over 4934.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.03824, over 972492.10 frames.], batch size: 23, lr: 3.21e-04 +2022-05-05 17:28:04,315 INFO [train.py:715] (6/8) Epoch 6, batch 33550, loss[loss=0.1834, simple_loss=0.246, pruned_loss=0.06039, over 4749.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2197, pruned_loss=0.03829, over 971773.77 frames.], batch size: 16, lr: 3.21e-04 +2022-05-05 17:28:43,724 INFO [train.py:715] (6/8) Epoch 6, batch 33600, loss[loss=0.1367, simple_loss=0.2144, pruned_loss=0.02948, over 4886.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2204, pruned_loss=0.039, over 971926.01 frames.], batch size: 22, lr: 3.21e-04 +2022-05-05 17:29:22,675 INFO [train.py:715] (6/8) Epoch 6, batch 33650, loss[loss=0.1319, simple_loss=0.2144, pruned_loss=0.02471, over 4978.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2199, pruned_loss=0.03851, over 971347.18 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:30:01,275 INFO 
[train.py:715] (6/8) Epoch 6, batch 33700, loss[loss=0.1859, simple_loss=0.2488, pruned_loss=0.0615, over 4935.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2204, pruned_loss=0.03857, over 971214.24 frames.], batch size: 39, lr: 3.21e-04 +2022-05-05 17:30:39,885 INFO [train.py:715] (6/8) Epoch 6, batch 33750, loss[loss=0.1495, simple_loss=0.2279, pruned_loss=0.03557, over 4810.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2202, pruned_loss=0.03854, over 971616.85 frames.], batch size: 13, lr: 3.21e-04 +2022-05-05 17:31:19,224 INFO [train.py:715] (6/8) Epoch 6, batch 33800, loss[loss=0.1238, simple_loss=0.2027, pruned_loss=0.02248, over 4834.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2194, pruned_loss=0.0377, over 971531.37 frames.], batch size: 26, lr: 3.21e-04 +2022-05-05 17:31:58,018 INFO [train.py:715] (6/8) Epoch 6, batch 33850, loss[loss=0.1605, simple_loss=0.2332, pruned_loss=0.04389, over 4757.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2196, pruned_loss=0.03774, over 971819.99 frames.], batch size: 19, lr: 3.21e-04 +2022-05-05 17:32:36,705 INFO [train.py:715] (6/8) Epoch 6, batch 33900, loss[loss=0.127, simple_loss=0.1985, pruned_loss=0.02775, over 4932.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2197, pruned_loss=0.038, over 971458.13 frames.], batch size: 21, lr: 3.21e-04 +2022-05-05 17:33:16,049 INFO [train.py:715] (6/8) Epoch 6, batch 33950, loss[loss=0.1318, simple_loss=0.2113, pruned_loss=0.02614, over 4827.00 frames.], tot_loss[loss=0.1483, simple_loss=0.22, pruned_loss=0.03835, over 972351.35 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:33:55,027 INFO [train.py:715] (6/8) Epoch 6, batch 34000, loss[loss=0.1525, simple_loss=0.2276, pruned_loss=0.03866, over 4747.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2195, pruned_loss=0.03832, over 972432.36 frames.], batch size: 16, lr: 3.21e-04 +2022-05-05 17:34:33,701 INFO [train.py:715] (6/8) Epoch 6, batch 34050, loss[loss=0.1883, simple_loss=0.2475, pruned_loss=0.06449, over 4900.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2191, pruned_loss=0.03832, over 972364.59 frames.], batch size: 18, lr: 3.21e-04 +2022-05-05 17:35:12,974 INFO [train.py:715] (6/8) Epoch 6, batch 34100, loss[loss=0.1748, simple_loss=0.2452, pruned_loss=0.05216, over 4823.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03866, over 972699.15 frames.], batch size: 26, lr: 3.20e-04 +2022-05-05 17:35:51,934 INFO [train.py:715] (6/8) Epoch 6, batch 34150, loss[loss=0.1548, simple_loss=0.2249, pruned_loss=0.04231, over 4930.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2187, pruned_loss=0.03779, over 972802.66 frames.], batch size: 23, lr: 3.20e-04 +2022-05-05 17:36:30,536 INFO [train.py:715] (6/8) Epoch 6, batch 34200, loss[loss=0.1644, simple_loss=0.2435, pruned_loss=0.04268, over 4891.00 frames.], tot_loss[loss=0.147, simple_loss=0.2191, pruned_loss=0.03746, over 973393.06 frames.], batch size: 17, lr: 3.20e-04 +2022-05-05 17:37:09,176 INFO [train.py:715] (6/8) Epoch 6, batch 34250, loss[loss=0.1216, simple_loss=0.1983, pruned_loss=0.02239, over 4815.00 frames.], tot_loss[loss=0.147, simple_loss=0.2189, pruned_loss=0.0376, over 973210.06 frames.], batch size: 25, lr: 3.20e-04 +2022-05-05 17:37:48,389 INFO [train.py:715] (6/8) Epoch 6, batch 34300, loss[loss=0.1487, simple_loss=0.2234, pruned_loss=0.03699, over 4936.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2198, pruned_loss=0.03836, over 973619.19 frames.], batch size: 23, lr: 3.20e-04 +2022-05-05 17:38:26,998 INFO [train.py:715] (6/8) Epoch 
6, batch 34350, loss[loss=0.1529, simple_loss=0.2426, pruned_loss=0.03161, over 4835.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2194, pruned_loss=0.0382, over 974071.23 frames.], batch size: 26, lr: 3.20e-04 +2022-05-05 17:39:05,618 INFO [train.py:715] (6/8) Epoch 6, batch 34400, loss[loss=0.1891, simple_loss=0.2558, pruned_loss=0.06118, over 4837.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2207, pruned_loss=0.03918, over 974287.31 frames.], batch size: 15, lr: 3.20e-04 +2022-05-05 17:39:45,301 INFO [train.py:715] (6/8) Epoch 6, batch 34450, loss[loss=0.197, simple_loss=0.2524, pruned_loss=0.07075, over 4794.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2206, pruned_loss=0.03913, over 972959.47 frames.], batch size: 24, lr: 3.20e-04 +2022-05-05 17:40:24,041 INFO [train.py:715] (6/8) Epoch 6, batch 34500, loss[loss=0.1165, simple_loss=0.1949, pruned_loss=0.01906, over 4978.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2198, pruned_loss=0.03867, over 973947.30 frames.], batch size: 25, lr: 3.20e-04 +2022-05-05 17:41:02,891 INFO [train.py:715] (6/8) Epoch 6, batch 34550, loss[loss=0.1305, simple_loss=0.2169, pruned_loss=0.02209, over 4841.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2201, pruned_loss=0.03839, over 973871.54 frames.], batch size: 15, lr: 3.20e-04 +2022-05-05 17:41:41,800 INFO [train.py:715] (6/8) Epoch 6, batch 34600, loss[loss=0.157, simple_loss=0.2285, pruned_loss=0.04278, over 4894.00 frames.], tot_loss[loss=0.1483, simple_loss=0.22, pruned_loss=0.03832, over 973371.57 frames.], batch size: 19, lr: 3.20e-04 +2022-05-05 17:42:20,616 INFO [train.py:715] (6/8) Epoch 6, batch 34650, loss[loss=0.125, simple_loss=0.2021, pruned_loss=0.02395, over 4973.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2204, pruned_loss=0.03887, over 973577.08 frames.], batch size: 24, lr: 3.20e-04 +2022-05-05 17:42:59,317 INFO [train.py:715] (6/8) Epoch 6, batch 34700, loss[loss=0.1132, simple_loss=0.1858, pruned_loss=0.02034, over 4812.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2188, pruned_loss=0.03808, over 972671.05 frames.], batch size: 27, lr: 3.20e-04 +2022-05-05 17:43:37,140 INFO [train.py:715] (6/8) Epoch 6, batch 34750, loss[loss=0.1292, simple_loss=0.1988, pruned_loss=0.02984, over 4742.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2187, pruned_loss=0.03794, over 971807.01 frames.], batch size: 16, lr: 3.20e-04 +2022-05-05 17:44:13,984 INFO [train.py:715] (6/8) Epoch 6, batch 34800, loss[loss=0.1109, simple_loss=0.1805, pruned_loss=0.02067, over 4824.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2171, pruned_loss=0.03721, over 971559.62 frames.], batch size: 12, lr: 3.20e-04 +2022-05-05 17:45:04,006 INFO [train.py:715] (6/8) Epoch 7, batch 0, loss[loss=0.1442, simple_loss=0.2109, pruned_loss=0.03869, over 4778.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2109, pruned_loss=0.03869, over 4778.00 frames.], batch size: 14, lr: 3.03e-04 +2022-05-05 17:45:42,575 INFO [train.py:715] (6/8) Epoch 7, batch 50, loss[loss=0.1303, simple_loss=0.2154, pruned_loss=0.02267, over 4895.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2179, pruned_loss=0.03637, over 219373.50 frames.], batch size: 17, lr: 3.03e-04 +2022-05-05 17:46:21,359 INFO [train.py:715] (6/8) Epoch 7, batch 100, loss[loss=0.1441, simple_loss=0.2086, pruned_loss=0.03985, over 4810.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2193, pruned_loss=0.03785, over 386420.02 frames.], batch size: 25, lr: 3.03e-04 +2022-05-05 17:47:00,259 INFO [train.py:715] (6/8) Epoch 7, batch 150, loss[loss=0.1268, 
simple_loss=0.2121, pruned_loss=0.02069, over 4982.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2192, pruned_loss=0.03764, over 516661.86 frames.], batch size: 25, lr: 3.03e-04 +2022-05-05 17:47:39,939 INFO [train.py:715] (6/8) Epoch 7, batch 200, loss[loss=0.1189, simple_loss=0.1916, pruned_loss=0.02309, over 4879.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2183, pruned_loss=0.03714, over 617994.29 frames.], batch size: 22, lr: 3.03e-04 +2022-05-05 17:48:18,731 INFO [train.py:715] (6/8) Epoch 7, batch 250, loss[loss=0.1593, simple_loss=0.2273, pruned_loss=0.04565, over 4890.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2188, pruned_loss=0.03765, over 696878.82 frames.], batch size: 16, lr: 3.03e-04 +2022-05-05 17:48:58,165 INFO [train.py:715] (6/8) Epoch 7, batch 300, loss[loss=0.169, simple_loss=0.2426, pruned_loss=0.04767, over 4879.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2189, pruned_loss=0.03779, over 758044.57 frames.], batch size: 22, lr: 3.02e-04 +2022-05-05 17:49:36,843 INFO [train.py:715] (6/8) Epoch 7, batch 350, loss[loss=0.1285, simple_loss=0.1979, pruned_loss=0.02958, over 4960.00 frames.], tot_loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03764, over 805942.84 frames.], batch size: 24, lr: 3.02e-04 +2022-05-05 17:50:16,224 INFO [train.py:715] (6/8) Epoch 7, batch 400, loss[loss=0.1811, simple_loss=0.25, pruned_loss=0.05615, over 4814.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2191, pruned_loss=0.03797, over 842442.52 frames.], batch size: 15, lr: 3.02e-04 +2022-05-05 17:50:54,885 INFO [train.py:715] (6/8) Epoch 7, batch 450, loss[loss=0.1642, simple_loss=0.2302, pruned_loss=0.04911, over 4765.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2183, pruned_loss=0.03778, over 871275.98 frames.], batch size: 17, lr: 3.02e-04 +2022-05-05 17:51:33,737 INFO [train.py:715] (6/8) Epoch 7, batch 500, loss[loss=0.1463, simple_loss=0.2104, pruned_loss=0.04115, over 4989.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2195, pruned_loss=0.03851, over 893970.83 frames.], batch size: 14, lr: 3.02e-04 +2022-05-05 17:52:12,471 INFO [train.py:715] (6/8) Epoch 7, batch 550, loss[loss=0.1758, simple_loss=0.2371, pruned_loss=0.05722, over 4918.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2194, pruned_loss=0.03892, over 912620.98 frames.], batch size: 18, lr: 3.02e-04 +2022-05-05 17:52:51,635 INFO [train.py:715] (6/8) Epoch 7, batch 600, loss[loss=0.1667, simple_loss=0.2331, pruned_loss=0.05012, over 4962.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2196, pruned_loss=0.0393, over 925987.09 frames.], batch size: 15, lr: 3.02e-04 +2022-05-05 17:53:29,945 INFO [train.py:715] (6/8) Epoch 7, batch 650, loss[loss=0.1462, simple_loss=0.2109, pruned_loss=0.04076, over 4796.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2193, pruned_loss=0.0389, over 935399.56 frames.], batch size: 14, lr: 3.02e-04 +2022-05-05 17:54:08,327 INFO [train.py:715] (6/8) Epoch 7, batch 700, loss[loss=0.1351, simple_loss=0.2051, pruned_loss=0.03258, over 4981.00 frames.], tot_loss[loss=0.149, simple_loss=0.2199, pruned_loss=0.03907, over 943892.70 frames.], batch size: 24, lr: 3.02e-04 +2022-05-05 17:54:47,593 INFO [train.py:715] (6/8) Epoch 7, batch 750, loss[loss=0.1524, simple_loss=0.2187, pruned_loss=0.04302, over 4985.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2201, pruned_loss=0.03929, over 950328.21 frames.], batch size: 35, lr: 3.02e-04 +2022-05-05 17:55:26,297 INFO [train.py:715] (6/8) Epoch 7, batch 800, loss[loss=0.1604, simple_loss=0.2231, pruned_loss=0.04884, over 
4844.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2187, pruned_loss=0.03816, over 955595.14 frames.], batch size: 15, lr: 3.02e-04 +2022-05-05 17:56:04,982 INFO [train.py:715] (6/8) Epoch 7, batch 850, loss[loss=0.1222, simple_loss=0.193, pruned_loss=0.02569, over 4870.00 frames.], tot_loss[loss=0.147, simple_loss=0.2184, pruned_loss=0.03781, over 959565.51 frames.], batch size: 22, lr: 3.02e-04 +2022-05-05 17:56:44,240 INFO [train.py:715] (6/8) Epoch 7, batch 900, loss[loss=0.146, simple_loss=0.212, pruned_loss=0.04002, over 4985.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2177, pruned_loss=0.03758, over 962461.12 frames.], batch size: 25, lr: 3.02e-04 +2022-05-05 17:57:23,220 INFO [train.py:715] (6/8) Epoch 7, batch 950, loss[loss=0.1895, simple_loss=0.2452, pruned_loss=0.06686, over 4876.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2175, pruned_loss=0.03764, over 964703.05 frames.], batch size: 32, lr: 3.02e-04 +2022-05-05 17:58:01,723 INFO [train.py:715] (6/8) Epoch 7, batch 1000, loss[loss=0.1564, simple_loss=0.2269, pruned_loss=0.04296, over 4893.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2175, pruned_loss=0.03731, over 966460.96 frames.], batch size: 22, lr: 3.02e-04 +2022-05-05 17:58:40,407 INFO [train.py:715] (6/8) Epoch 7, batch 1050, loss[loss=0.1424, simple_loss=0.2243, pruned_loss=0.03029, over 4758.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2174, pruned_loss=0.03747, over 967443.76 frames.], batch size: 16, lr: 3.02e-04 +2022-05-05 17:59:19,623 INFO [train.py:715] (6/8) Epoch 7, batch 1100, loss[loss=0.1313, simple_loss=0.2028, pruned_loss=0.02992, over 4827.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2167, pruned_loss=0.03727, over 968449.12 frames.], batch size: 30, lr: 3.02e-04 +2022-05-05 17:59:57,783 INFO [train.py:715] (6/8) Epoch 7, batch 1150, loss[loss=0.1514, simple_loss=0.2306, pruned_loss=0.03609, over 4982.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2172, pruned_loss=0.03681, over 969767.92 frames.], batch size: 28, lr: 3.02e-04 +2022-05-05 18:00:36,966 INFO [train.py:715] (6/8) Epoch 7, batch 1200, loss[loss=0.1536, simple_loss=0.2167, pruned_loss=0.04523, over 4906.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2174, pruned_loss=0.0365, over 970431.91 frames.], batch size: 19, lr: 3.02e-04 +2022-05-05 18:01:16,051 INFO [train.py:715] (6/8) Epoch 7, batch 1250, loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03244, over 4739.00 frames.], tot_loss[loss=0.146, simple_loss=0.218, pruned_loss=0.03699, over 970586.83 frames.], batch size: 16, lr: 3.02e-04 +2022-05-05 18:01:55,174 INFO [train.py:715] (6/8) Epoch 7, batch 1300, loss[loss=0.1649, simple_loss=0.232, pruned_loss=0.04887, over 4907.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2188, pruned_loss=0.03728, over 970585.56 frames.], batch size: 17, lr: 3.02e-04 +2022-05-05 18:02:33,765 INFO [train.py:715] (6/8) Epoch 7, batch 1350, loss[loss=0.1911, simple_loss=0.2547, pruned_loss=0.06374, over 4933.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2196, pruned_loss=0.03806, over 971790.03 frames.], batch size: 29, lr: 3.02e-04 +2022-05-05 18:03:12,551 INFO [train.py:715] (6/8) Epoch 7, batch 1400, loss[loss=0.1568, simple_loss=0.2261, pruned_loss=0.04375, over 4951.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2189, pruned_loss=0.03766, over 971709.45 frames.], batch size: 21, lr: 3.02e-04 +2022-05-05 18:03:51,642 INFO [train.py:715] (6/8) Epoch 7, batch 1450, loss[loss=0.1886, simple_loss=0.2496, pruned_loss=0.06387, over 4774.00 frames.], tot_loss[loss=0.1483, 
simple_loss=0.2198, pruned_loss=0.03841, over 972125.47 frames.], batch size: 18, lr: 3.02e-04 +2022-05-05 18:04:29,771 INFO [train.py:715] (6/8) Epoch 7, batch 1500, loss[loss=0.1601, simple_loss=0.2231, pruned_loss=0.04857, over 4929.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2196, pruned_loss=0.03831, over 972823.49 frames.], batch size: 18, lr: 3.02e-04 +2022-05-05 18:05:08,981 INFO [train.py:715] (6/8) Epoch 7, batch 1550, loss[loss=0.1697, simple_loss=0.2426, pruned_loss=0.04837, over 4784.00 frames.], tot_loss[loss=0.1476, simple_loss=0.219, pruned_loss=0.03817, over 972227.20 frames.], batch size: 17, lr: 3.02e-04 +2022-05-05 18:05:47,789 INFO [train.py:715] (6/8) Epoch 7, batch 1600, loss[loss=0.1201, simple_loss=0.1848, pruned_loss=0.02768, over 4798.00 frames.], tot_loss[loss=0.147, simple_loss=0.2185, pruned_loss=0.03773, over 972888.33 frames.], batch size: 14, lr: 3.02e-04 +2022-05-05 18:06:26,681 INFO [train.py:715] (6/8) Epoch 7, batch 1650, loss[loss=0.116, simple_loss=0.1941, pruned_loss=0.01893, over 4984.00 frames.], tot_loss[loss=0.1465, simple_loss=0.218, pruned_loss=0.03748, over 972451.49 frames.], batch size: 28, lr: 3.02e-04 +2022-05-05 18:07:05,256 INFO [train.py:715] (6/8) Epoch 7, batch 1700, loss[loss=0.1606, simple_loss=0.2341, pruned_loss=0.04355, over 4835.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2187, pruned_loss=0.03784, over 973287.33 frames.], batch size: 30, lr: 3.02e-04 +2022-05-05 18:07:44,160 INFO [train.py:715] (6/8) Epoch 7, batch 1750, loss[loss=0.1287, simple_loss=0.2005, pruned_loss=0.02848, over 4895.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2177, pruned_loss=0.03734, over 973148.37 frames.], batch size: 19, lr: 3.02e-04 +2022-05-05 18:08:24,139 INFO [train.py:715] (6/8) Epoch 7, batch 1800, loss[loss=0.1512, simple_loss=0.2182, pruned_loss=0.04211, over 4973.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2177, pruned_loss=0.03744, over 972404.36 frames.], batch size: 24, lr: 3.02e-04 +2022-05-05 18:09:03,071 INFO [train.py:715] (6/8) Epoch 7, batch 1850, loss[loss=0.1356, simple_loss=0.2132, pruned_loss=0.02899, over 4826.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2182, pruned_loss=0.03781, over 972210.65 frames.], batch size: 30, lr: 3.02e-04 +2022-05-05 18:09:41,924 INFO [train.py:715] (6/8) Epoch 7, batch 1900, loss[loss=0.1441, simple_loss=0.2186, pruned_loss=0.03482, over 4925.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2182, pruned_loss=0.03801, over 972261.21 frames.], batch size: 18, lr: 3.01e-04 +2022-05-05 18:10:20,114 INFO [train.py:715] (6/8) Epoch 7, batch 1950, loss[loss=0.1695, simple_loss=0.2311, pruned_loss=0.05397, over 4798.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2185, pruned_loss=0.03837, over 972581.10 frames.], batch size: 17, lr: 3.01e-04 +2022-05-05 18:10:59,290 INFO [train.py:715] (6/8) Epoch 7, batch 2000, loss[loss=0.1606, simple_loss=0.2295, pruned_loss=0.04587, over 4876.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2195, pruned_loss=0.0383, over 973517.22 frames.], batch size: 22, lr: 3.01e-04 +2022-05-05 18:11:37,489 INFO [train.py:715] (6/8) Epoch 7, batch 2050, loss[loss=0.1599, simple_loss=0.246, pruned_loss=0.03693, over 4830.00 frames.], tot_loss[loss=0.1475, simple_loss=0.219, pruned_loss=0.03796, over 974036.72 frames.], batch size: 15, lr: 3.01e-04 +2022-05-05 18:12:16,139 INFO [train.py:715] (6/8) Epoch 7, batch 2100, loss[loss=0.1874, simple_loss=0.2514, pruned_loss=0.06172, over 4996.00 frames.], tot_loss[loss=0.147, simple_loss=0.2185, 
pruned_loss=0.03774, over 973467.82 frames.], batch size: 16, lr: 3.01e-04 +2022-05-05 18:12:54,593 INFO [train.py:715] (6/8) Epoch 7, batch 2150, loss[loss=0.1573, simple_loss=0.2292, pruned_loss=0.04268, over 4922.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2191, pruned_loss=0.0379, over 972831.89 frames.], batch size: 18, lr: 3.01e-04 +2022-05-05 18:13:32,800 INFO [train.py:715] (6/8) Epoch 7, batch 2200, loss[loss=0.1487, simple_loss=0.215, pruned_loss=0.04116, over 4701.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2189, pruned_loss=0.0379, over 972347.58 frames.], batch size: 15, lr: 3.01e-04 +2022-05-05 18:14:11,046 INFO [train.py:715] (6/8) Epoch 7, batch 2250, loss[loss=0.1529, simple_loss=0.2294, pruned_loss=0.03821, over 4986.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2189, pruned_loss=0.0379, over 972083.64 frames.], batch size: 25, lr: 3.01e-04 +2022-05-05 18:14:50,046 INFO [train.py:715] (6/8) Epoch 7, batch 2300, loss[loss=0.1217, simple_loss=0.1944, pruned_loss=0.02449, over 4841.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2188, pruned_loss=0.03775, over 972544.27 frames.], batch size: 27, lr: 3.01e-04 +2022-05-05 18:15:29,527 INFO [train.py:715] (6/8) Epoch 7, batch 2350, loss[loss=0.1533, simple_loss=0.2151, pruned_loss=0.04575, over 4973.00 frames.], tot_loss[loss=0.1472, simple_loss=0.219, pruned_loss=0.03776, over 972845.75 frames.], batch size: 24, lr: 3.01e-04 +2022-05-05 18:16:08,312 INFO [train.py:715] (6/8) Epoch 7, batch 2400, loss[loss=0.1214, simple_loss=0.2035, pruned_loss=0.01964, over 4830.00 frames.], tot_loss[loss=0.147, simple_loss=0.2185, pruned_loss=0.0378, over 972771.92 frames.], batch size: 26, lr: 3.01e-04 +2022-05-05 18:16:46,789 INFO [train.py:715] (6/8) Epoch 7, batch 2450, loss[loss=0.1178, simple_loss=0.1929, pruned_loss=0.02134, over 4922.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2188, pruned_loss=0.03803, over 972339.51 frames.], batch size: 19, lr: 3.01e-04 +2022-05-05 18:17:25,558 INFO [train.py:715] (6/8) Epoch 7, batch 2500, loss[loss=0.1328, simple_loss=0.2092, pruned_loss=0.02823, over 4980.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2187, pruned_loss=0.03799, over 972715.65 frames.], batch size: 15, lr: 3.01e-04 +2022-05-05 18:18:03,860 INFO [train.py:715] (6/8) Epoch 7, batch 2550, loss[loss=0.1548, simple_loss=0.2271, pruned_loss=0.04129, over 4981.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2187, pruned_loss=0.03791, over 973239.55 frames.], batch size: 25, lr: 3.01e-04 +2022-05-05 18:18:42,383 INFO [train.py:715] (6/8) Epoch 7, batch 2600, loss[loss=0.1566, simple_loss=0.2254, pruned_loss=0.04387, over 4982.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2192, pruned_loss=0.03829, over 973522.72 frames.], batch size: 28, lr: 3.01e-04 +2022-05-05 18:19:21,120 INFO [train.py:715] (6/8) Epoch 7, batch 2650, loss[loss=0.1224, simple_loss=0.1894, pruned_loss=0.02775, over 4960.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03859, over 973454.66 frames.], batch size: 15, lr: 3.01e-04 +2022-05-05 18:19:59,710 INFO [train.py:715] (6/8) Epoch 7, batch 2700, loss[loss=0.1299, simple_loss=0.2111, pruned_loss=0.02433, over 4759.00 frames.], tot_loss[loss=0.148, simple_loss=0.2194, pruned_loss=0.03826, over 973891.61 frames.], batch size: 12, lr: 3.01e-04 +2022-05-05 18:20:37,585 INFO [train.py:715] (6/8) Epoch 7, batch 2750, loss[loss=0.157, simple_loss=0.2245, pruned_loss=0.04478, over 4974.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2204, pruned_loss=0.03852, over 972863.26 
frames.], batch size: 31, lr: 3.01e-04 +2022-05-05 18:21:16,374 INFO [train.py:715] (6/8) Epoch 7, batch 2800, loss[loss=0.1431, simple_loss=0.2157, pruned_loss=0.03526, over 4810.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2204, pruned_loss=0.03864, over 972650.18 frames.], batch size: 26, lr: 3.01e-04 +2022-05-05 18:21:55,733 INFO [train.py:715] (6/8) Epoch 7, batch 2850, loss[loss=0.111, simple_loss=0.1864, pruned_loss=0.01778, over 4805.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2193, pruned_loss=0.03816, over 972186.37 frames.], batch size: 12, lr: 3.01e-04 +2022-05-05 18:22:35,327 INFO [train.py:715] (6/8) Epoch 7, batch 2900, loss[loss=0.1313, simple_loss=0.2017, pruned_loss=0.03046, over 4859.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2193, pruned_loss=0.03854, over 972019.06 frames.], batch size: 20, lr: 3.01e-04 +2022-05-05 18:23:14,209 INFO [train.py:715] (6/8) Epoch 7, batch 2950, loss[loss=0.1529, simple_loss=0.22, pruned_loss=0.04284, over 4915.00 frames.], tot_loss[loss=0.147, simple_loss=0.218, pruned_loss=0.03801, over 971813.88 frames.], batch size: 17, lr: 3.01e-04 +2022-05-05 18:23:53,378 INFO [train.py:715] (6/8) Epoch 7, batch 3000, loss[loss=0.1209, simple_loss=0.2014, pruned_loss=0.02017, over 4935.00 frames.], tot_loss[loss=0.147, simple_loss=0.2182, pruned_loss=0.03787, over 972140.11 frames.], batch size: 23, lr: 3.01e-04 +2022-05-05 18:23:53,379 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 18:24:04,767 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.1084, simple_loss=0.1933, pruned_loss=0.01171, over 914524.00 frames. +2022-05-05 18:24:44,251 INFO [train.py:715] (6/8) Epoch 7, batch 3050, loss[loss=0.1509, simple_loss=0.2339, pruned_loss=0.03397, over 4888.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2183, pruned_loss=0.03745, over 972502.92 frames.], batch size: 19, lr: 3.01e-04 +2022-05-05 18:25:23,055 INFO [train.py:715] (6/8) Epoch 7, batch 3100, loss[loss=0.174, simple_loss=0.2429, pruned_loss=0.05253, over 4781.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2183, pruned_loss=0.03767, over 972149.65 frames.], batch size: 17, lr: 3.01e-04 +2022-05-05 18:26:01,760 INFO [train.py:715] (6/8) Epoch 7, batch 3150, loss[loss=0.1477, simple_loss=0.217, pruned_loss=0.03915, over 4852.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2186, pruned_loss=0.03765, over 972452.53 frames.], batch size: 32, lr: 3.01e-04 +2022-05-05 18:26:39,662 INFO [train.py:715] (6/8) Epoch 7, batch 3200, loss[loss=0.133, simple_loss=0.211, pruned_loss=0.0275, over 4813.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2182, pruned_loss=0.03745, over 973156.99 frames.], batch size: 25, lr: 3.01e-04 +2022-05-05 18:27:17,882 INFO [train.py:715] (6/8) Epoch 7, batch 3250, loss[loss=0.1522, simple_loss=0.2237, pruned_loss=0.04031, over 4811.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2188, pruned_loss=0.03793, over 972124.28 frames.], batch size: 21, lr: 3.01e-04 +2022-05-05 18:27:56,436 INFO [train.py:715] (6/8) Epoch 7, batch 3300, loss[loss=0.1425, simple_loss=0.2075, pruned_loss=0.03875, over 4768.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2177, pruned_loss=0.03758, over 971350.99 frames.], batch size: 12, lr: 3.01e-04 +2022-05-05 18:28:35,034 INFO [train.py:715] (6/8) Epoch 7, batch 3350, loss[loss=0.1371, simple_loss=0.2132, pruned_loss=0.03053, over 4777.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2179, pruned_loss=0.03737, over 972086.52 frames.], batch size: 18, lr: 3.01e-04 +2022-05-05 18:29:13,823 INFO [train.py:715] 
(6/8) Epoch 7, batch 3400, loss[loss=0.1561, simple_loss=0.2361, pruned_loss=0.03809, over 4752.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2181, pruned_loss=0.03707, over 972175.55 frames.], batch size: 16, lr: 3.01e-04 +2022-05-05 18:29:52,251 INFO [train.py:715] (6/8) Epoch 7, batch 3450, loss[loss=0.1257, simple_loss=0.1998, pruned_loss=0.0258, over 4787.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2177, pruned_loss=0.03695, over 972265.52 frames.], batch size: 18, lr: 3.01e-04 +2022-05-05 18:30:31,305 INFO [train.py:715] (6/8) Epoch 7, batch 3500, loss[loss=0.1367, simple_loss=0.2068, pruned_loss=0.03334, over 4849.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2178, pruned_loss=0.03738, over 972407.13 frames.], batch size: 32, lr: 3.01e-04 +2022-05-05 18:31:09,923 INFO [train.py:715] (6/8) Epoch 7, batch 3550, loss[loss=0.1313, simple_loss=0.2035, pruned_loss=0.02953, over 4825.00 frames.], tot_loss[loss=0.146, simple_loss=0.2178, pruned_loss=0.03714, over 972611.37 frames.], batch size: 25, lr: 3.00e-04 +2022-05-05 18:31:48,696 INFO [train.py:715] (6/8) Epoch 7, batch 3600, loss[loss=0.1369, simple_loss=0.2021, pruned_loss=0.03581, over 4694.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2174, pruned_loss=0.03672, over 972835.86 frames.], batch size: 15, lr: 3.00e-04 +2022-05-05 18:32:27,425 INFO [train.py:715] (6/8) Epoch 7, batch 3650, loss[loss=0.1458, simple_loss=0.2081, pruned_loss=0.04178, over 4793.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03703, over 972504.06 frames.], batch size: 24, lr: 3.00e-04 +2022-05-05 18:33:06,461 INFO [train.py:715] (6/8) Epoch 7, batch 3700, loss[loss=0.1339, simple_loss=0.2069, pruned_loss=0.03045, over 4883.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2182, pruned_loss=0.03709, over 972549.97 frames.], batch size: 22, lr: 3.00e-04 +2022-05-05 18:33:45,258 INFO [train.py:715] (6/8) Epoch 7, batch 3750, loss[loss=0.14, simple_loss=0.2127, pruned_loss=0.03359, over 4883.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2185, pruned_loss=0.0374, over 972973.28 frames.], batch size: 16, lr: 3.00e-04 +2022-05-05 18:34:23,490 INFO [train.py:715] (6/8) Epoch 7, batch 3800, loss[loss=0.1442, simple_loss=0.2126, pruned_loss=0.03785, over 4781.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2183, pruned_loss=0.0375, over 972471.42 frames.], batch size: 14, lr: 3.00e-04 +2022-05-05 18:35:01,656 INFO [train.py:715] (6/8) Epoch 7, batch 3850, loss[loss=0.1422, simple_loss=0.2192, pruned_loss=0.03254, over 4751.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2179, pruned_loss=0.03722, over 972496.20 frames.], batch size: 19, lr: 3.00e-04 +2022-05-05 18:35:39,927 INFO [train.py:715] (6/8) Epoch 7, batch 3900, loss[loss=0.1214, simple_loss=0.2048, pruned_loss=0.01904, over 4837.00 frames.], tot_loss[loss=0.146, simple_loss=0.2176, pruned_loss=0.03719, over 972449.82 frames.], batch size: 26, lr: 3.00e-04 +2022-05-05 18:36:18,412 INFO [train.py:715] (6/8) Epoch 7, batch 3950, loss[loss=0.16, simple_loss=0.2359, pruned_loss=0.04208, over 4930.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2171, pruned_loss=0.037, over 972717.32 frames.], batch size: 29, lr: 3.00e-04 +2022-05-05 18:36:57,038 INFO [train.py:715] (6/8) Epoch 7, batch 4000, loss[loss=0.1467, simple_loss=0.2136, pruned_loss=0.03988, over 4917.00 frames.], tot_loss[loss=0.146, simple_loss=0.2173, pruned_loss=0.03736, over 972715.69 frames.], batch size: 18, lr: 3.00e-04 +2022-05-05 18:37:35,132 INFO [train.py:715] (6/8) Epoch 7, batch 4050, 
loss[loss=0.1764, simple_loss=0.2443, pruned_loss=0.05429, over 4831.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2185, pruned_loss=0.03838, over 972482.84 frames.], batch size: 15, lr: 3.00e-04 +2022-05-05 18:38:14,042 INFO [train.py:715] (6/8) Epoch 7, batch 4100, loss[loss=0.1387, simple_loss=0.2199, pruned_loss=0.02879, over 4771.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2183, pruned_loss=0.03795, over 972049.07 frames.], batch size: 17, lr: 3.00e-04 +2022-05-05 18:38:52,562 INFO [train.py:715] (6/8) Epoch 7, batch 4150, loss[loss=0.1217, simple_loss=0.1941, pruned_loss=0.02463, over 4768.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2196, pruned_loss=0.03869, over 972997.23 frames.], batch size: 17, lr: 3.00e-04 +2022-05-05 18:39:31,260 INFO [train.py:715] (6/8) Epoch 7, batch 4200, loss[loss=0.1389, simple_loss=0.2128, pruned_loss=0.03249, over 4984.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2197, pruned_loss=0.03857, over 972723.83 frames.], batch size: 24, lr: 3.00e-04 +2022-05-05 18:40:09,113 INFO [train.py:715] (6/8) Epoch 7, batch 4250, loss[loss=0.1609, simple_loss=0.2397, pruned_loss=0.04106, over 4916.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2196, pruned_loss=0.03858, over 973087.23 frames.], batch size: 17, lr: 3.00e-04 +2022-05-05 18:40:47,962 INFO [train.py:715] (6/8) Epoch 7, batch 4300, loss[loss=0.1678, simple_loss=0.2365, pruned_loss=0.04957, over 4944.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2193, pruned_loss=0.03862, over 972469.48 frames.], batch size: 21, lr: 3.00e-04 +2022-05-05 18:41:28,766 INFO [train.py:715] (6/8) Epoch 7, batch 4350, loss[loss=0.1583, simple_loss=0.2238, pruned_loss=0.04636, over 4769.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2184, pruned_loss=0.03789, over 972740.50 frames.], batch size: 14, lr: 3.00e-04 +2022-05-05 18:42:07,269 INFO [train.py:715] (6/8) Epoch 7, batch 4400, loss[loss=0.1342, simple_loss=0.1962, pruned_loss=0.03608, over 4819.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2184, pruned_loss=0.03792, over 972784.79 frames.], batch size: 12, lr: 3.00e-04 +2022-05-05 18:42:46,326 INFO [train.py:715] (6/8) Epoch 7, batch 4450, loss[loss=0.1722, simple_loss=0.2365, pruned_loss=0.05394, over 4969.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2195, pruned_loss=0.03842, over 972641.23 frames.], batch size: 15, lr: 3.00e-04 +2022-05-05 18:43:25,201 INFO [train.py:715] (6/8) Epoch 7, batch 4500, loss[loss=0.1337, simple_loss=0.2096, pruned_loss=0.02888, over 4894.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2181, pruned_loss=0.03768, over 971952.45 frames.], batch size: 22, lr: 3.00e-04 +2022-05-05 18:44:03,955 INFO [train.py:715] (6/8) Epoch 7, batch 4550, loss[loss=0.1721, simple_loss=0.2493, pruned_loss=0.04744, over 4875.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2188, pruned_loss=0.03768, over 972468.05 frames.], batch size: 20, lr: 3.00e-04 +2022-05-05 18:44:42,555 INFO [train.py:715] (6/8) Epoch 7, batch 4600, loss[loss=0.1654, simple_loss=0.2358, pruned_loss=0.04749, over 4876.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2185, pruned_loss=0.03732, over 971606.95 frames.], batch size: 16, lr: 3.00e-04 +2022-05-05 18:45:21,318 INFO [train.py:715] (6/8) Epoch 7, batch 4650, loss[loss=0.1436, simple_loss=0.2182, pruned_loss=0.03447, over 4778.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2181, pruned_loss=0.03721, over 971646.33 frames.], batch size: 18, lr: 3.00e-04 +2022-05-05 18:45:59,784 INFO [train.py:715] (6/8) Epoch 7, batch 4700, loss[loss=0.1539, 
simple_loss=0.2314, pruned_loss=0.03816, over 4770.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2174, pruned_loss=0.0369, over 971575.49 frames.], batch size: 18, lr: 3.00e-04 +2022-05-05 18:46:37,972 INFO [train.py:715] (6/8) Epoch 7, batch 4750, loss[loss=0.1266, simple_loss=0.2038, pruned_loss=0.0247, over 4912.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2174, pruned_loss=0.03704, over 971621.80 frames.], batch size: 29, lr: 3.00e-04 +2022-05-05 18:47:17,156 INFO [train.py:715] (6/8) Epoch 7, batch 4800, loss[loss=0.1244, simple_loss=0.2071, pruned_loss=0.02082, over 4985.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2168, pruned_loss=0.03643, over 972965.29 frames.], batch size: 24, lr: 3.00e-04 +2022-05-05 18:47:55,561 INFO [train.py:715] (6/8) Epoch 7, batch 4850, loss[loss=0.1405, simple_loss=0.2115, pruned_loss=0.0347, over 4808.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2162, pruned_loss=0.03632, over 972923.73 frames.], batch size: 21, lr: 3.00e-04 +2022-05-05 18:48:34,304 INFO [train.py:715] (6/8) Epoch 7, batch 4900, loss[loss=0.1697, simple_loss=0.2382, pruned_loss=0.05058, over 4933.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2167, pruned_loss=0.03687, over 971953.12 frames.], batch size: 23, lr: 3.00e-04 +2022-05-05 18:49:12,733 INFO [train.py:715] (6/8) Epoch 7, batch 4950, loss[loss=0.1385, simple_loss=0.2045, pruned_loss=0.0362, over 4747.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2159, pruned_loss=0.0364, over 971368.21 frames.], batch size: 16, lr: 3.00e-04 +2022-05-05 18:49:51,780 INFO [train.py:715] (6/8) Epoch 7, batch 5000, loss[loss=0.1456, simple_loss=0.2277, pruned_loss=0.0318, over 4954.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2173, pruned_loss=0.03706, over 971609.40 frames.], batch size: 24, lr: 3.00e-04 +2022-05-05 18:50:30,774 INFO [train.py:715] (6/8) Epoch 7, batch 5050, loss[loss=0.1577, simple_loss=0.2263, pruned_loss=0.04461, over 4945.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2178, pruned_loss=0.03702, over 971712.51 frames.], batch size: 21, lr: 3.00e-04 +2022-05-05 18:51:09,362 INFO [train.py:715] (6/8) Epoch 7, batch 5100, loss[loss=0.1334, simple_loss=0.2166, pruned_loss=0.02511, over 4777.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2178, pruned_loss=0.03701, over 971549.88 frames.], batch size: 18, lr: 3.00e-04 +2022-05-05 18:51:48,428 INFO [train.py:715] (6/8) Epoch 7, batch 5150, loss[loss=0.1627, simple_loss=0.2316, pruned_loss=0.04686, over 4985.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2175, pruned_loss=0.03704, over 972584.99 frames.], batch size: 26, lr: 3.00e-04 +2022-05-05 18:52:27,136 INFO [train.py:715] (6/8) Epoch 7, batch 5200, loss[loss=0.1137, simple_loss=0.1773, pruned_loss=0.02502, over 4895.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.03692, over 972261.15 frames.], batch size: 17, lr: 2.99e-04 +2022-05-05 18:53:06,161 INFO [train.py:715] (6/8) Epoch 7, batch 5250, loss[loss=0.1564, simple_loss=0.234, pruned_loss=0.03937, over 4760.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2181, pruned_loss=0.03702, over 972487.04 frames.], batch size: 19, lr: 2.99e-04 +2022-05-05 18:53:44,793 INFO [train.py:715] (6/8) Epoch 7, batch 5300, loss[loss=0.1916, simple_loss=0.2544, pruned_loss=0.06442, over 4968.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2191, pruned_loss=0.03801, over 971995.01 frames.], batch size: 35, lr: 2.99e-04 +2022-05-05 18:54:24,159 INFO [train.py:715] (6/8) Epoch 7, batch 5350, loss[loss=0.09782, simple_loss=0.1636, 
pruned_loss=0.01604, over 4985.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2181, pruned_loss=0.03782, over 972927.72 frames.], batch size: 14, lr: 2.99e-04 +2022-05-05 18:55:02,368 INFO [train.py:715] (6/8) Epoch 7, batch 5400, loss[loss=0.1911, simple_loss=0.2656, pruned_loss=0.05832, over 4919.00 frames.], tot_loss[loss=0.1465, simple_loss=0.218, pruned_loss=0.03756, over 972762.99 frames.], batch size: 23, lr: 2.99e-04 +2022-05-05 18:55:41,207 INFO [train.py:715] (6/8) Epoch 7, batch 5450, loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03155, over 4768.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2185, pruned_loss=0.03746, over 973704.99 frames.], batch size: 18, lr: 2.99e-04 +2022-05-05 18:56:20,340 INFO [train.py:715] (6/8) Epoch 7, batch 5500, loss[loss=0.1337, simple_loss=0.2014, pruned_loss=0.03299, over 4892.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2181, pruned_loss=0.03748, over 973775.81 frames.], batch size: 19, lr: 2.99e-04 +2022-05-05 18:56:59,123 INFO [train.py:715] (6/8) Epoch 7, batch 5550, loss[loss=0.194, simple_loss=0.2441, pruned_loss=0.07192, over 4794.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2183, pruned_loss=0.03779, over 973968.36 frames.], batch size: 13, lr: 2.99e-04 +2022-05-05 18:57:38,239 INFO [train.py:715] (6/8) Epoch 7, batch 5600, loss[loss=0.123, simple_loss=0.195, pruned_loss=0.02548, over 4691.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2184, pruned_loss=0.03767, over 972808.02 frames.], batch size: 12, lr: 2.99e-04 +2022-05-05 18:58:17,274 INFO [train.py:715] (6/8) Epoch 7, batch 5650, loss[loss=0.1626, simple_loss=0.2443, pruned_loss=0.04051, over 4969.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2183, pruned_loss=0.03744, over 973384.94 frames.], batch size: 24, lr: 2.99e-04 +2022-05-05 18:58:56,368 INFO [train.py:715] (6/8) Epoch 7, batch 5700, loss[loss=0.2033, simple_loss=0.261, pruned_loss=0.07278, over 4771.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2187, pruned_loss=0.03783, over 973479.94 frames.], batch size: 18, lr: 2.99e-04 +2022-05-05 18:59:34,741 INFO [train.py:715] (6/8) Epoch 7, batch 5750, loss[loss=0.1364, simple_loss=0.2142, pruned_loss=0.02933, over 4795.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2182, pruned_loss=0.03773, over 973591.09 frames.], batch size: 24, lr: 2.99e-04 +2022-05-05 19:00:12,899 INFO [train.py:715] (6/8) Epoch 7, batch 5800, loss[loss=0.1466, simple_loss=0.2229, pruned_loss=0.0352, over 4750.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2178, pruned_loss=0.03726, over 972158.57 frames.], batch size: 16, lr: 2.99e-04 +2022-05-05 19:00:52,629 INFO [train.py:715] (6/8) Epoch 7, batch 5850, loss[loss=0.1583, simple_loss=0.2245, pruned_loss=0.04601, over 4984.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2182, pruned_loss=0.03731, over 972035.10 frames.], batch size: 31, lr: 2.99e-04 +2022-05-05 19:01:30,924 INFO [train.py:715] (6/8) Epoch 7, batch 5900, loss[loss=0.1446, simple_loss=0.2209, pruned_loss=0.03418, over 4880.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2187, pruned_loss=0.03747, over 972669.94 frames.], batch size: 22, lr: 2.99e-04 +2022-05-05 19:02:09,959 INFO [train.py:715] (6/8) Epoch 7, batch 5950, loss[loss=0.1701, simple_loss=0.2516, pruned_loss=0.04429, over 4832.00 frames.], tot_loss[loss=0.147, simple_loss=0.219, pruned_loss=0.03747, over 973077.14 frames.], batch size: 15, lr: 2.99e-04 +2022-05-05 19:02:48,383 INFO [train.py:715] (6/8) Epoch 7, batch 6000, loss[loss=0.1795, simple_loss=0.2369, pruned_loss=0.061, over 4934.00 
frames.], tot_loss[loss=0.1464, simple_loss=0.2184, pruned_loss=0.0372, over 972769.14 frames.], batch size: 21, lr: 2.99e-04 +2022-05-05 19:02:48,383 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 19:02:58,047 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.1085, simple_loss=0.1933, pruned_loss=0.0119, over 914524.00 frames. +2022-05-05 19:03:36,916 INFO [train.py:715] (6/8) Epoch 7, batch 6050, loss[loss=0.171, simple_loss=0.2347, pruned_loss=0.0537, over 4828.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2193, pruned_loss=0.03773, over 972449.32 frames.], batch size: 13, lr: 2.99e-04 +2022-05-05 19:04:16,084 INFO [train.py:715] (6/8) Epoch 7, batch 6100, loss[loss=0.1563, simple_loss=0.2245, pruned_loss=0.04411, over 4982.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2192, pruned_loss=0.03768, over 973016.22 frames.], batch size: 14, lr: 2.99e-04 +2022-05-05 19:04:55,379 INFO [train.py:715] (6/8) Epoch 7, batch 6150, loss[loss=0.1389, simple_loss=0.2114, pruned_loss=0.03316, over 4981.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2188, pruned_loss=0.03773, over 973499.24 frames.], batch size: 35, lr: 2.99e-04 +2022-05-05 19:05:33,829 INFO [train.py:715] (6/8) Epoch 7, batch 6200, loss[loss=0.1537, simple_loss=0.2113, pruned_loss=0.04806, over 4845.00 frames.], tot_loss[loss=0.1469, simple_loss=0.219, pruned_loss=0.03738, over 973296.24 frames.], batch size: 32, lr: 2.99e-04 +2022-05-05 19:06:13,680 INFO [train.py:715] (6/8) Epoch 7, batch 6250, loss[loss=0.1566, simple_loss=0.2409, pruned_loss=0.03609, over 4946.00 frames.], tot_loss[loss=0.147, simple_loss=0.2192, pruned_loss=0.03745, over 972960.02 frames.], batch size: 21, lr: 2.99e-04 +2022-05-05 19:06:52,571 INFO [train.py:715] (6/8) Epoch 7, batch 6300, loss[loss=0.1335, simple_loss=0.2053, pruned_loss=0.03089, over 4854.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2178, pruned_loss=0.03706, over 972829.99 frames.], batch size: 13, lr: 2.99e-04 +2022-05-05 19:07:30,973 INFO [train.py:715] (6/8) Epoch 7, batch 6350, loss[loss=0.1208, simple_loss=0.197, pruned_loss=0.02233, over 4858.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03707, over 973091.62 frames.], batch size: 20, lr: 2.99e-04 +2022-05-05 19:08:10,034 INFO [train.py:715] (6/8) Epoch 7, batch 6400, loss[loss=0.135, simple_loss=0.2136, pruned_loss=0.02824, over 4760.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2187, pruned_loss=0.03743, over 972423.69 frames.], batch size: 19, lr: 2.99e-04 +2022-05-05 19:08:49,046 INFO [train.py:715] (6/8) Epoch 7, batch 6450, loss[loss=0.17, simple_loss=0.239, pruned_loss=0.0505, over 4931.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2191, pruned_loss=0.03758, over 972943.42 frames.], batch size: 29, lr: 2.99e-04 +2022-05-05 19:09:27,600 INFO [train.py:715] (6/8) Epoch 7, batch 6500, loss[loss=0.138, simple_loss=0.2037, pruned_loss=0.03612, over 4828.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2192, pruned_loss=0.038, over 973107.70 frames.], batch size: 26, lr: 2.99e-04 +2022-05-05 19:10:06,573 INFO [train.py:715] (6/8) Epoch 7, batch 6550, loss[loss=0.1511, simple_loss=0.2083, pruned_loss=0.04694, over 4828.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2196, pruned_loss=0.03824, over 972654.71 frames.], batch size: 30, lr: 2.99e-04 +2022-05-05 19:10:46,394 INFO [train.py:715] (6/8) Epoch 7, batch 6600, loss[loss=0.1651, simple_loss=0.2264, pruned_loss=0.05185, over 4965.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03824, over 972290.34 
frames.], batch size: 35, lr: 2.99e-04 +2022-05-05 19:11:25,244 INFO [train.py:715] (6/8) Epoch 7, batch 6650, loss[loss=0.1601, simple_loss=0.2259, pruned_loss=0.04715, over 4819.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2186, pruned_loss=0.03792, over 972707.12 frames.], batch size: 25, lr: 2.99e-04 +2022-05-05 19:12:04,478 INFO [train.py:715] (6/8) Epoch 7, batch 6700, loss[loss=0.1787, simple_loss=0.2435, pruned_loss=0.05692, over 4933.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2188, pruned_loss=0.03796, over 972696.81 frames.], batch size: 23, lr: 2.99e-04 +2022-05-05 19:12:43,222 INFO [train.py:715] (6/8) Epoch 7, batch 6750, loss[loss=0.1305, simple_loss=0.2023, pruned_loss=0.02934, over 4941.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2188, pruned_loss=0.03784, over 972824.58 frames.], batch size: 21, lr: 2.99e-04 +2022-05-05 19:13:22,216 INFO [train.py:715] (6/8) Epoch 7, batch 6800, loss[loss=0.1451, simple_loss=0.2196, pruned_loss=0.03526, over 4839.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2183, pruned_loss=0.03774, over 972892.02 frames.], batch size: 15, lr: 2.99e-04 +2022-05-05 19:14:00,579 INFO [train.py:715] (6/8) Epoch 7, batch 6850, loss[loss=0.1384, simple_loss=0.2166, pruned_loss=0.03004, over 4852.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2185, pruned_loss=0.03786, over 972968.93 frames.], batch size: 20, lr: 2.99e-04 +2022-05-05 19:14:39,178 INFO [train.py:715] (6/8) Epoch 7, batch 6900, loss[loss=0.1678, simple_loss=0.2312, pruned_loss=0.05221, over 4848.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2193, pruned_loss=0.03795, over 973074.88 frames.], batch size: 32, lr: 2.98e-04 +2022-05-05 19:15:18,693 INFO [train.py:715] (6/8) Epoch 7, batch 6950, loss[loss=0.1434, simple_loss=0.2098, pruned_loss=0.03845, over 4913.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2188, pruned_loss=0.03785, over 973214.30 frames.], batch size: 18, lr: 2.98e-04 +2022-05-05 19:15:56,859 INFO [train.py:715] (6/8) Epoch 7, batch 7000, loss[loss=0.1582, simple_loss=0.2324, pruned_loss=0.04204, over 4784.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2187, pruned_loss=0.0381, over 972989.55 frames.], batch size: 18, lr: 2.98e-04 +2022-05-05 19:16:35,554 INFO [train.py:715] (6/8) Epoch 7, batch 7050, loss[loss=0.1412, simple_loss=0.2195, pruned_loss=0.03145, over 4835.00 frames.], tot_loss[loss=0.147, simple_loss=0.2185, pruned_loss=0.03781, over 972771.04 frames.], batch size: 15, lr: 2.98e-04 +2022-05-05 19:17:14,122 INFO [train.py:715] (6/8) Epoch 7, batch 7100, loss[loss=0.1182, simple_loss=0.1945, pruned_loss=0.021, over 4942.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2182, pruned_loss=0.03731, over 972725.17 frames.], batch size: 21, lr: 2.98e-04 +2022-05-05 19:17:52,400 INFO [train.py:715] (6/8) Epoch 7, batch 7150, loss[loss=0.1517, simple_loss=0.224, pruned_loss=0.03969, over 4915.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2184, pruned_loss=0.03742, over 973364.43 frames.], batch size: 17, lr: 2.98e-04 +2022-05-05 19:18:31,021 INFO [train.py:715] (6/8) Epoch 7, batch 7200, loss[loss=0.1181, simple_loss=0.1903, pruned_loss=0.02292, over 4844.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2182, pruned_loss=0.03707, over 973451.83 frames.], batch size: 32, lr: 2.98e-04 +2022-05-05 19:19:10,026 INFO [train.py:715] (6/8) Epoch 7, batch 7250, loss[loss=0.1687, simple_loss=0.2314, pruned_loss=0.05301, over 4886.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2192, pruned_loss=0.03806, over 974381.01 frames.], batch size: 17, lr: 
2.98e-04 +2022-05-05 19:19:49,674 INFO [train.py:715] (6/8) Epoch 7, batch 7300, loss[loss=0.1184, simple_loss=0.1805, pruned_loss=0.02818, over 4808.00 frames.], tot_loss[loss=0.1474, simple_loss=0.219, pruned_loss=0.0379, over 973445.58 frames.], batch size: 13, lr: 2.98e-04 +2022-05-05 19:20:28,207 INFO [train.py:715] (6/8) Epoch 7, batch 7350, loss[loss=0.1425, simple_loss=0.2115, pruned_loss=0.03679, over 4911.00 frames.], tot_loss[loss=0.1468, simple_loss=0.218, pruned_loss=0.03778, over 973491.10 frames.], batch size: 17, lr: 2.98e-04 +2022-05-05 19:21:06,663 INFO [train.py:715] (6/8) Epoch 7, batch 7400, loss[loss=0.1467, simple_loss=0.2146, pruned_loss=0.03939, over 4910.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2181, pruned_loss=0.03782, over 974311.50 frames.], batch size: 19, lr: 2.98e-04 +2022-05-05 19:21:45,792 INFO [train.py:715] (6/8) Epoch 7, batch 7450, loss[loss=0.1354, simple_loss=0.2071, pruned_loss=0.03191, over 4958.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2181, pruned_loss=0.03769, over 973316.57 frames.], batch size: 24, lr: 2.98e-04 +2022-05-05 19:22:24,000 INFO [train.py:715] (6/8) Epoch 7, batch 7500, loss[loss=0.1467, simple_loss=0.2178, pruned_loss=0.03784, over 4821.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2182, pruned_loss=0.03757, over 972299.35 frames.], batch size: 13, lr: 2.98e-04 +2022-05-05 19:23:02,794 INFO [train.py:715] (6/8) Epoch 7, batch 7550, loss[loss=0.1673, simple_loss=0.246, pruned_loss=0.0443, over 4966.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2185, pruned_loss=0.03764, over 971567.96 frames.], batch size: 35, lr: 2.98e-04 +2022-05-05 19:23:41,663 INFO [train.py:715] (6/8) Epoch 7, batch 7600, loss[loss=0.1397, simple_loss=0.2132, pruned_loss=0.0331, over 4903.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2184, pruned_loss=0.03728, over 972293.40 frames.], batch size: 18, lr: 2.98e-04 +2022-05-05 19:24:20,769 INFO [train.py:715] (6/8) Epoch 7, batch 7650, loss[loss=0.1404, simple_loss=0.2072, pruned_loss=0.03678, over 4979.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2184, pruned_loss=0.0375, over 971225.02 frames.], batch size: 28, lr: 2.98e-04 +2022-05-05 19:24:59,082 INFO [train.py:715] (6/8) Epoch 7, batch 7700, loss[loss=0.1263, simple_loss=0.2101, pruned_loss=0.02123, over 4870.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2183, pruned_loss=0.03735, over 972421.71 frames.], batch size: 16, lr: 2.98e-04 +2022-05-05 19:25:38,046 INFO [train.py:715] (6/8) Epoch 7, batch 7750, loss[loss=0.182, simple_loss=0.2488, pruned_loss=0.05755, over 4888.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2189, pruned_loss=0.03785, over 971740.95 frames.], batch size: 38, lr: 2.98e-04 +2022-05-05 19:26:17,066 INFO [train.py:715] (6/8) Epoch 7, batch 7800, loss[loss=0.1708, simple_loss=0.2361, pruned_loss=0.05278, over 4976.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2181, pruned_loss=0.03741, over 972630.84 frames.], batch size: 35, lr: 2.98e-04 +2022-05-05 19:26:55,230 INFO [train.py:715] (6/8) Epoch 7, batch 7850, loss[loss=0.1798, simple_loss=0.2426, pruned_loss=0.05846, over 4945.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2181, pruned_loss=0.03707, over 971794.33 frames.], batch size: 21, lr: 2.98e-04 +2022-05-05 19:27:34,425 INFO [train.py:715] (6/8) Epoch 7, batch 7900, loss[loss=0.1436, simple_loss=0.2192, pruned_loss=0.03398, over 4945.00 frames.], tot_loss[loss=0.146, simple_loss=0.218, pruned_loss=0.03702, over 971692.63 frames.], batch size: 21, lr: 2.98e-04 +2022-05-05 19:28:13,173 INFO 
[train.py:715] (6/8) Epoch 7, batch 7950, loss[loss=0.148, simple_loss=0.2206, pruned_loss=0.03771, over 4952.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2177, pruned_loss=0.03663, over 971711.46 frames.], batch size: 21, lr: 2.98e-04 +2022-05-05 19:28:52,648 INFO [train.py:715] (6/8) Epoch 7, batch 8000, loss[loss=0.1411, simple_loss=0.2164, pruned_loss=0.03288, over 4934.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2179, pruned_loss=0.03685, over 971758.95 frames.], batch size: 21, lr: 2.98e-04 +2022-05-05 19:29:30,739 INFO [train.py:715] (6/8) Epoch 7, batch 8050, loss[loss=0.1498, simple_loss=0.2161, pruned_loss=0.04175, over 4840.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03699, over 971152.84 frames.], batch size: 15, lr: 2.98e-04 +2022-05-05 19:30:09,296 INFO [train.py:715] (6/8) Epoch 7, batch 8100, loss[loss=0.1468, simple_loss=0.2178, pruned_loss=0.03787, over 4766.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03706, over 971237.39 frames.], batch size: 16, lr: 2.98e-04 +2022-05-05 19:30:48,381 INFO [train.py:715] (6/8) Epoch 7, batch 8150, loss[loss=0.1703, simple_loss=0.2385, pruned_loss=0.05102, over 4918.00 frames.], tot_loss[loss=0.146, simple_loss=0.2176, pruned_loss=0.03718, over 971191.68 frames.], batch size: 23, lr: 2.98e-04 +2022-05-05 19:31:26,679 INFO [train.py:715] (6/8) Epoch 7, batch 8200, loss[loss=0.1563, simple_loss=0.2295, pruned_loss=0.04152, over 4973.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2174, pruned_loss=0.03706, over 971311.98 frames.], batch size: 15, lr: 2.98e-04 +2022-05-05 19:32:05,128 INFO [train.py:715] (6/8) Epoch 7, batch 8250, loss[loss=0.1569, simple_loss=0.2207, pruned_loss=0.04655, over 4861.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2177, pruned_loss=0.03733, over 971583.28 frames.], batch size: 16, lr: 2.98e-04 +2022-05-05 19:32:43,779 INFO [train.py:715] (6/8) Epoch 7, batch 8300, loss[loss=0.124, simple_loss=0.1954, pruned_loss=0.02637, over 4973.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2178, pruned_loss=0.03691, over 972172.49 frames.], batch size: 35, lr: 2.98e-04 +2022-05-05 19:33:22,689 INFO [train.py:715] (6/8) Epoch 7, batch 8350, loss[loss=0.1305, simple_loss=0.2073, pruned_loss=0.02684, over 4919.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2173, pruned_loss=0.03686, over 971143.90 frames.], batch size: 18, lr: 2.98e-04 +2022-05-05 19:34:00,644 INFO [train.py:715] (6/8) Epoch 7, batch 8400, loss[loss=0.1218, simple_loss=0.1958, pruned_loss=0.02388, over 4705.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2177, pruned_loss=0.03693, over 971158.44 frames.], batch size: 15, lr: 2.98e-04 +2022-05-05 19:34:39,718 INFO [train.py:715] (6/8) Epoch 7, batch 8450, loss[loss=0.2001, simple_loss=0.2712, pruned_loss=0.06456, over 4927.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2191, pruned_loss=0.03761, over 971473.53 frames.], batch size: 18, lr: 2.98e-04 +2022-05-05 19:35:18,878 INFO [train.py:715] (6/8) Epoch 7, batch 8500, loss[loss=0.1668, simple_loss=0.2367, pruned_loss=0.04841, over 4918.00 frames.], tot_loss[loss=0.1474, simple_loss=0.219, pruned_loss=0.03793, over 972178.15 frames.], batch size: 18, lr: 2.98e-04 +2022-05-05 19:35:58,055 INFO [train.py:715] (6/8) Epoch 7, batch 8550, loss[loss=0.1576, simple_loss=0.2286, pruned_loss=0.04329, over 4967.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2188, pruned_loss=0.03785, over 972142.67 frames.], batch size: 35, lr: 2.97e-04 +2022-05-05 19:36:36,295 INFO [train.py:715] (6/8) Epoch 7, batch 
8600, loss[loss=0.1158, simple_loss=0.1855, pruned_loss=0.02307, over 4967.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2179, pruned_loss=0.03733, over 972550.14 frames.], batch size: 14, lr: 2.97e-04 +2022-05-05 19:37:14,961 INFO [train.py:715] (6/8) Epoch 7, batch 8650, loss[loss=0.1539, simple_loss=0.2262, pruned_loss=0.0408, over 4849.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.0369, over 971769.85 frames.], batch size: 20, lr: 2.97e-04 +2022-05-05 19:37:54,309 INFO [train.py:715] (6/8) Epoch 7, batch 8700, loss[loss=0.1295, simple_loss=0.1957, pruned_loss=0.03168, over 4928.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2181, pruned_loss=0.03745, over 972337.56 frames.], batch size: 18, lr: 2.97e-04 +2022-05-05 19:38:32,520 INFO [train.py:715] (6/8) Epoch 7, batch 8750, loss[loss=0.1389, simple_loss=0.2102, pruned_loss=0.03383, over 4981.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2171, pruned_loss=0.03732, over 972624.33 frames.], batch size: 25, lr: 2.97e-04 +2022-05-05 19:39:11,385 INFO [train.py:715] (6/8) Epoch 7, batch 8800, loss[loss=0.1413, simple_loss=0.2136, pruned_loss=0.03449, over 4749.00 frames.], tot_loss[loss=0.1469, simple_loss=0.218, pruned_loss=0.0379, over 972338.46 frames.], batch size: 19, lr: 2.97e-04 +2022-05-05 19:39:50,336 INFO [train.py:715] (6/8) Epoch 7, batch 8850, loss[loss=0.1568, simple_loss=0.2243, pruned_loss=0.04462, over 4886.00 frames.], tot_loss[loss=0.147, simple_loss=0.218, pruned_loss=0.03795, over 970960.75 frames.], batch size: 22, lr: 2.97e-04 +2022-05-05 19:40:30,010 INFO [train.py:715] (6/8) Epoch 7, batch 8900, loss[loss=0.1588, simple_loss=0.2207, pruned_loss=0.04839, over 4972.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2174, pruned_loss=0.03755, over 970871.26 frames.], batch size: 14, lr: 2.97e-04 +2022-05-05 19:41:08,238 INFO [train.py:715] (6/8) Epoch 7, batch 8950, loss[loss=0.1455, simple_loss=0.2255, pruned_loss=0.03276, over 4858.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2181, pruned_loss=0.03764, over 970429.52 frames.], batch size: 20, lr: 2.97e-04 +2022-05-05 19:41:46,836 INFO [train.py:715] (6/8) Epoch 7, batch 9000, loss[loss=0.1296, simple_loss=0.1991, pruned_loss=0.0301, over 4842.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2179, pruned_loss=0.03787, over 970723.25 frames.], batch size: 15, lr: 2.97e-04 +2022-05-05 19:41:46,836 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 19:41:56,559 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.1085, simple_loss=0.1932, pruned_loss=0.01192, over 914524.00 frames. 
+2022-05-05 19:42:35,336 INFO [train.py:715] (6/8) Epoch 7, batch 9050, loss[loss=0.1401, simple_loss=0.2115, pruned_loss=0.03438, over 4832.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2184, pruned_loss=0.03794, over 971404.78 frames.], batch size: 30, lr: 2.97e-04 +2022-05-05 19:43:15,394 INFO [train.py:715] (6/8) Epoch 7, batch 9100, loss[loss=0.1568, simple_loss=0.2365, pruned_loss=0.03854, over 4948.00 frames.], tot_loss[loss=0.1474, simple_loss=0.219, pruned_loss=0.03791, over 972803.63 frames.], batch size: 21, lr: 2.97e-04 +2022-05-05 19:43:54,073 INFO [train.py:715] (6/8) Epoch 7, batch 9150, loss[loss=0.1762, simple_loss=0.2516, pruned_loss=0.05036, over 4749.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2187, pruned_loss=0.03785, over 972845.61 frames.], batch size: 19, lr: 2.97e-04 +2022-05-05 19:44:32,871 INFO [train.py:715] (6/8) Epoch 7, batch 9200, loss[loss=0.1198, simple_loss=0.1994, pruned_loss=0.02014, over 4758.00 frames.], tot_loss[loss=0.146, simple_loss=0.2174, pruned_loss=0.03732, over 971937.53 frames.], batch size: 19, lr: 2.97e-04 +2022-05-05 19:45:12,202 INFO [train.py:715] (6/8) Epoch 7, batch 9250, loss[loss=0.1412, simple_loss=0.2171, pruned_loss=0.03265, over 4817.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2178, pruned_loss=0.03729, over 972498.58 frames.], batch size: 25, lr: 2.97e-04 +2022-05-05 19:45:51,291 INFO [train.py:715] (6/8) Epoch 7, batch 9300, loss[loss=0.1743, simple_loss=0.2422, pruned_loss=0.05313, over 4971.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2194, pruned_loss=0.03802, over 972103.07 frames.], batch size: 39, lr: 2.97e-04 +2022-05-05 19:46:30,345 INFO [train.py:715] (6/8) Epoch 7, batch 9350, loss[loss=0.1483, simple_loss=0.2183, pruned_loss=0.03916, over 4866.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2197, pruned_loss=0.03824, over 972914.75 frames.], batch size: 20, lr: 2.97e-04 +2022-05-05 19:47:08,480 INFO [train.py:715] (6/8) Epoch 7, batch 9400, loss[loss=0.1194, simple_loss=0.194, pruned_loss=0.02241, over 4904.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2192, pruned_loss=0.03803, over 973214.09 frames.], batch size: 18, lr: 2.97e-04 +2022-05-05 19:47:48,272 INFO [train.py:715] (6/8) Epoch 7, batch 9450, loss[loss=0.1655, simple_loss=0.2347, pruned_loss=0.04811, over 4804.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2187, pruned_loss=0.03785, over 972015.72 frames.], batch size: 14, lr: 2.97e-04 +2022-05-05 19:48:27,275 INFO [train.py:715] (6/8) Epoch 7, batch 9500, loss[loss=0.1264, simple_loss=0.2058, pruned_loss=0.02349, over 4807.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03731, over 971722.45 frames.], batch size: 25, lr: 2.97e-04 +2022-05-05 19:49:05,879 INFO [train.py:715] (6/8) Epoch 7, batch 9550, loss[loss=0.1682, simple_loss=0.2204, pruned_loss=0.05796, over 4968.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2179, pruned_loss=0.03742, over 972246.71 frames.], batch size: 15, lr: 2.97e-04 +2022-05-05 19:49:44,835 INFO [train.py:715] (6/8) Epoch 7, batch 9600, loss[loss=0.1452, simple_loss=0.2195, pruned_loss=0.03542, over 4980.00 frames.], tot_loss[loss=0.1456, simple_loss=0.217, pruned_loss=0.03714, over 972867.36 frames.], batch size: 25, lr: 2.97e-04 +2022-05-05 19:50:23,438 INFO [train.py:715] (6/8) Epoch 7, batch 9650, loss[loss=0.1495, simple_loss=0.2236, pruned_loss=0.03775, over 4768.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2175, pruned_loss=0.03736, over 972495.07 frames.], batch size: 18, lr: 2.97e-04 +2022-05-05 19:51:02,958 INFO 
[train.py:715] (6/8) Epoch 7, batch 9700, loss[loss=0.147, simple_loss=0.2108, pruned_loss=0.04159, over 4837.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.03659, over 972673.30 frames.], batch size: 13, lr: 2.97e-04 +2022-05-05 19:51:41,571 INFO [train.py:715] (6/8) Epoch 7, batch 9750, loss[loss=0.165, simple_loss=0.2303, pruned_loss=0.04983, over 4938.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2177, pruned_loss=0.03728, over 972650.78 frames.], batch size: 21, lr: 2.97e-04 +2022-05-05 19:52:20,962 INFO [train.py:715] (6/8) Epoch 7, batch 9800, loss[loss=0.1695, simple_loss=0.2346, pruned_loss=0.05222, over 4841.00 frames.], tot_loss[loss=0.146, simple_loss=0.2173, pruned_loss=0.0374, over 972413.43 frames.], batch size: 32, lr: 2.97e-04 +2022-05-05 19:52:59,041 INFO [train.py:715] (6/8) Epoch 7, batch 9850, loss[loss=0.1529, simple_loss=0.2246, pruned_loss=0.04062, over 4777.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2179, pruned_loss=0.03776, over 971572.55 frames.], batch size: 14, lr: 2.97e-04 +2022-05-05 19:53:37,272 INFO [train.py:715] (6/8) Epoch 7, batch 9900, loss[loss=0.1531, simple_loss=0.2187, pruned_loss=0.04372, over 4834.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2176, pruned_loss=0.03755, over 972329.06 frames.], batch size: 13, lr: 2.97e-04 +2022-05-05 19:54:16,173 INFO [train.py:715] (6/8) Epoch 7, batch 9950, loss[loss=0.1274, simple_loss=0.2061, pruned_loss=0.02435, over 4773.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2176, pruned_loss=0.03701, over 972357.15 frames.], batch size: 17, lr: 2.97e-04 +2022-05-05 19:54:55,288 INFO [train.py:715] (6/8) Epoch 7, batch 10000, loss[loss=0.135, simple_loss=0.2037, pruned_loss=0.03311, over 4855.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2179, pruned_loss=0.03709, over 972280.37 frames.], batch size: 32, lr: 2.97e-04 +2022-05-05 19:55:33,943 INFO [train.py:715] (6/8) Epoch 7, batch 10050, loss[loss=0.1558, simple_loss=0.2335, pruned_loss=0.03905, over 4972.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2174, pruned_loss=0.03697, over 972164.83 frames.], batch size: 28, lr: 2.97e-04 +2022-05-05 19:56:12,504 INFO [train.py:715] (6/8) Epoch 7, batch 10100, loss[loss=0.1304, simple_loss=0.207, pruned_loss=0.02695, over 4896.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2179, pruned_loss=0.0372, over 972001.69 frames.], batch size: 22, lr: 2.97e-04 +2022-05-05 19:56:51,796 INFO [train.py:715] (6/8) Epoch 7, batch 10150, loss[loss=0.144, simple_loss=0.218, pruned_loss=0.03505, over 4896.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2184, pruned_loss=0.03715, over 972675.05 frames.], batch size: 19, lr: 2.97e-04 +2022-05-05 19:57:30,414 INFO [train.py:715] (6/8) Epoch 7, batch 10200, loss[loss=0.1363, simple_loss=0.2146, pruned_loss=0.02903, over 4815.00 frames.], tot_loss[loss=0.1458, simple_loss=0.218, pruned_loss=0.03679, over 972125.71 frames.], batch size: 27, lr: 2.97e-04 +2022-05-05 19:58:09,078 INFO [train.py:715] (6/8) Epoch 7, batch 10250, loss[loss=0.1361, simple_loss=0.2143, pruned_loss=0.02891, over 4858.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2181, pruned_loss=0.03677, over 973397.44 frames.], batch size: 20, lr: 2.96e-04 +2022-05-05 19:58:48,255 INFO [train.py:715] (6/8) Epoch 7, batch 10300, loss[loss=0.1377, simple_loss=0.2141, pruned_loss=0.03063, over 4950.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2184, pruned_loss=0.03708, over 973659.84 frames.], batch size: 21, lr: 2.96e-04 +2022-05-05 19:59:26,901 INFO [train.py:715] (6/8) Epoch 7, 
batch 10350, loss[loss=0.1378, simple_loss=0.204, pruned_loss=0.03583, over 4974.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2177, pruned_loss=0.03721, over 974160.63 frames.], batch size: 15, lr: 2.96e-04 +2022-05-05 20:00:05,912 INFO [train.py:715] (6/8) Epoch 7, batch 10400, loss[loss=0.1452, simple_loss=0.2204, pruned_loss=0.03499, over 4886.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2181, pruned_loss=0.03721, over 973317.49 frames.], batch size: 22, lr: 2.96e-04 +2022-05-05 20:00:44,696 INFO [train.py:715] (6/8) Epoch 7, batch 10450, loss[loss=0.1239, simple_loss=0.1862, pruned_loss=0.03083, over 4809.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.03685, over 973789.59 frames.], batch size: 15, lr: 2.96e-04 +2022-05-05 20:01:24,297 INFO [train.py:715] (6/8) Epoch 7, batch 10500, loss[loss=0.1411, simple_loss=0.2021, pruned_loss=0.04007, over 4830.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2173, pruned_loss=0.03693, over 973369.26 frames.], batch size: 30, lr: 2.96e-04 +2022-05-05 20:02:03,023 INFO [train.py:715] (6/8) Epoch 7, batch 10550, loss[loss=0.137, simple_loss=0.2054, pruned_loss=0.03435, over 4846.00 frames.], tot_loss[loss=0.1464, simple_loss=0.218, pruned_loss=0.03741, over 973415.41 frames.], batch size: 13, lr: 2.96e-04 +2022-05-05 20:02:41,165 INFO [train.py:715] (6/8) Epoch 7, batch 10600, loss[loss=0.1558, simple_loss=0.2298, pruned_loss=0.04089, over 4928.00 frames.], tot_loss[loss=0.1463, simple_loss=0.218, pruned_loss=0.03732, over 973678.66 frames.], batch size: 29, lr: 2.96e-04 +2022-05-05 20:03:20,355 INFO [train.py:715] (6/8) Epoch 7, batch 10650, loss[loss=0.1192, simple_loss=0.1927, pruned_loss=0.02284, over 4829.00 frames.], tot_loss[loss=0.146, simple_loss=0.2178, pruned_loss=0.03714, over 973472.74 frames.], batch size: 13, lr: 2.96e-04 +2022-05-05 20:03:59,390 INFO [train.py:715] (6/8) Epoch 7, batch 10700, loss[loss=0.157, simple_loss=0.2279, pruned_loss=0.04301, over 4981.00 frames.], tot_loss[loss=0.1467, simple_loss=0.218, pruned_loss=0.03769, over 973464.25 frames.], batch size: 15, lr: 2.96e-04 +2022-05-05 20:04:38,883 INFO [train.py:715] (6/8) Epoch 7, batch 10750, loss[loss=0.1687, simple_loss=0.2336, pruned_loss=0.05188, over 4912.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2167, pruned_loss=0.03728, over 973756.32 frames.], batch size: 19, lr: 2.96e-04 +2022-05-05 20:05:17,666 INFO [train.py:715] (6/8) Epoch 7, batch 10800, loss[loss=0.1296, simple_loss=0.2014, pruned_loss=0.02895, over 4865.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2173, pruned_loss=0.03724, over 973282.63 frames.], batch size: 30, lr: 2.96e-04 +2022-05-05 20:05:57,417 INFO [train.py:715] (6/8) Epoch 7, batch 10850, loss[loss=0.1379, simple_loss=0.2192, pruned_loss=0.02826, over 4820.00 frames.], tot_loss[loss=0.1456, simple_loss=0.217, pruned_loss=0.03705, over 971994.93 frames.], batch size: 26, lr: 2.96e-04 +2022-05-05 20:06:35,666 INFO [train.py:715] (6/8) Epoch 7, batch 10900, loss[loss=0.1673, simple_loss=0.2278, pruned_loss=0.05343, over 4943.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2174, pruned_loss=0.03723, over 972249.71 frames.], batch size: 39, lr: 2.96e-04 +2022-05-05 20:07:14,749 INFO [train.py:715] (6/8) Epoch 7, batch 10950, loss[loss=0.1557, simple_loss=0.228, pruned_loss=0.04166, over 4772.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2165, pruned_loss=0.03645, over 972650.88 frames.], batch size: 18, lr: 2.96e-04 +2022-05-05 20:07:53,907 INFO [train.py:715] (6/8) Epoch 7, batch 11000, 
loss[loss=0.1483, simple_loss=0.2212, pruned_loss=0.03772, over 4702.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2174, pruned_loss=0.03711, over 971584.69 frames.], batch size: 15, lr: 2.96e-04 +2022-05-05 20:08:32,747 INFO [train.py:715] (6/8) Epoch 7, batch 11050, loss[loss=0.1721, simple_loss=0.2383, pruned_loss=0.05297, over 4861.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2173, pruned_loss=0.03714, over 972222.16 frames.], batch size: 20, lr: 2.96e-04 +2022-05-05 20:09:11,470 INFO [train.py:715] (6/8) Epoch 7, batch 11100, loss[loss=0.1561, simple_loss=0.2394, pruned_loss=0.03636, over 4927.00 frames.], tot_loss[loss=0.1453, simple_loss=0.217, pruned_loss=0.03677, over 972806.26 frames.], batch size: 29, lr: 2.96e-04 +2022-05-05 20:09:50,083 INFO [train.py:715] (6/8) Epoch 7, batch 11150, loss[loss=0.1438, simple_loss=0.2259, pruned_loss=0.03091, over 4813.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03658, over 973221.27 frames.], batch size: 21, lr: 2.96e-04 +2022-05-05 20:10:29,709 INFO [train.py:715] (6/8) Epoch 7, batch 11200, loss[loss=0.1368, simple_loss=0.2174, pruned_loss=0.02803, over 4804.00 frames.], tot_loss[loss=0.146, simple_loss=0.2181, pruned_loss=0.03696, over 972370.11 frames.], batch size: 21, lr: 2.96e-04 +2022-05-05 20:11:08,076 INFO [train.py:715] (6/8) Epoch 7, batch 11250, loss[loss=0.127, simple_loss=0.1964, pruned_loss=0.02885, over 4805.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2181, pruned_loss=0.0369, over 972451.89 frames.], batch size: 25, lr: 2.96e-04 +2022-05-05 20:11:46,232 INFO [train.py:715] (6/8) Epoch 7, batch 11300, loss[loss=0.1359, simple_loss=0.2153, pruned_loss=0.02822, over 4752.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2185, pruned_loss=0.03747, over 972598.98 frames.], batch size: 16, lr: 2.96e-04 +2022-05-05 20:12:25,979 INFO [train.py:715] (6/8) Epoch 7, batch 11350, loss[loss=0.1436, simple_loss=0.2214, pruned_loss=0.0329, over 4793.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2175, pruned_loss=0.03719, over 971708.02 frames.], batch size: 18, lr: 2.96e-04 +2022-05-05 20:13:04,518 INFO [train.py:715] (6/8) Epoch 7, batch 11400, loss[loss=0.1377, simple_loss=0.2094, pruned_loss=0.03297, over 4828.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2163, pruned_loss=0.03659, over 971683.58 frames.], batch size: 13, lr: 2.96e-04 +2022-05-05 20:13:43,555 INFO [train.py:715] (6/8) Epoch 7, batch 11450, loss[loss=0.1524, simple_loss=0.223, pruned_loss=0.04086, over 4754.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2178, pruned_loss=0.03724, over 971968.00 frames.], batch size: 16, lr: 2.96e-04 +2022-05-05 20:14:22,142 INFO [train.py:715] (6/8) Epoch 7, batch 11500, loss[loss=0.1405, simple_loss=0.2153, pruned_loss=0.03282, over 4764.00 frames.], tot_loss[loss=0.1463, simple_loss=0.218, pruned_loss=0.0373, over 971963.76 frames.], batch size: 19, lr: 2.96e-04 +2022-05-05 20:15:01,730 INFO [train.py:715] (6/8) Epoch 7, batch 11550, loss[loss=0.1427, simple_loss=0.2112, pruned_loss=0.0371, over 4894.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2183, pruned_loss=0.03779, over 972989.23 frames.], batch size: 22, lr: 2.96e-04 +2022-05-05 20:15:39,999 INFO [train.py:715] (6/8) Epoch 7, batch 11600, loss[loss=0.1423, simple_loss=0.2228, pruned_loss=0.03094, over 4987.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2175, pruned_loss=0.03733, over 973228.37 frames.], batch size: 28, lr: 2.96e-04 +2022-05-05 20:16:18,809 INFO [train.py:715] (6/8) Epoch 7, batch 11650, loss[loss=0.1451, 
simple_loss=0.2179, pruned_loss=0.03614, over 4800.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2182, pruned_loss=0.03784, over 973123.14 frames.], batch size: 17, lr: 2.96e-04 +2022-05-05 20:16:58,203 INFO [train.py:715] (6/8) Epoch 7, batch 11700, loss[loss=0.1348, simple_loss=0.2109, pruned_loss=0.02936, over 4784.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2183, pruned_loss=0.03764, over 972470.19 frames.], batch size: 17, lr: 2.96e-04 +2022-05-05 20:17:36,280 INFO [train.py:715] (6/8) Epoch 7, batch 11750, loss[loss=0.1567, simple_loss=0.2182, pruned_loss=0.04763, over 4928.00 frames.], tot_loss[loss=0.147, simple_loss=0.2183, pruned_loss=0.03789, over 972785.68 frames.], batch size: 18, lr: 2.96e-04 +2022-05-05 20:18:15,076 INFO [train.py:715] (6/8) Epoch 7, batch 11800, loss[loss=0.1458, simple_loss=0.2107, pruned_loss=0.04049, over 4855.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2174, pruned_loss=0.03744, over 973088.78 frames.], batch size: 20, lr: 2.96e-04 +2022-05-05 20:18:54,268 INFO [train.py:715] (6/8) Epoch 7, batch 11850, loss[loss=0.1453, simple_loss=0.2192, pruned_loss=0.03574, over 4847.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2176, pruned_loss=0.03751, over 972603.37 frames.], batch size: 32, lr: 2.96e-04 +2022-05-05 20:19:32,625 INFO [train.py:715] (6/8) Epoch 7, batch 11900, loss[loss=0.1284, simple_loss=0.2083, pruned_loss=0.0243, over 4918.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2178, pruned_loss=0.03748, over 973337.72 frames.], batch size: 29, lr: 2.96e-04 +2022-05-05 20:20:11,921 INFO [train.py:715] (6/8) Epoch 7, batch 11950, loss[loss=0.1296, simple_loss=0.1998, pruned_loss=0.02965, over 4818.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2179, pruned_loss=0.03744, over 971018.51 frames.], batch size: 12, lr: 2.96e-04 +2022-05-05 20:20:50,615 INFO [train.py:715] (6/8) Epoch 7, batch 12000, loss[loss=0.1359, simple_loss=0.2164, pruned_loss=0.02771, over 4767.00 frames.], tot_loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03767, over 972384.27 frames.], batch size: 19, lr: 2.95e-04 +2022-05-05 20:20:50,616 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 20:21:00,228 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.108, simple_loss=0.193, pruned_loss=0.01154, over 914524.00 frames. 
+2022-05-05 20:21:38,893 INFO [train.py:715] (6/8) Epoch 7, batch 12050, loss[loss=0.1836, simple_loss=0.2637, pruned_loss=0.05178, over 4768.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2186, pruned_loss=0.03792, over 972224.50 frames.], batch size: 14, lr: 2.95e-04 +2022-05-05 20:22:18,263 INFO [train.py:715] (6/8) Epoch 7, batch 12100, loss[loss=0.1636, simple_loss=0.2322, pruned_loss=0.0475, over 4858.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2196, pruned_loss=0.03805, over 973441.40 frames.], batch size: 32, lr: 2.95e-04 +2022-05-05 20:22:56,856 INFO [train.py:715] (6/8) Epoch 7, batch 12150, loss[loss=0.1411, simple_loss=0.2153, pruned_loss=0.03343, over 4751.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2202, pruned_loss=0.03842, over 972284.16 frames.], batch size: 19, lr: 2.95e-04 +2022-05-05 20:23:35,618 INFO [train.py:715] (6/8) Epoch 7, batch 12200, loss[loss=0.1366, simple_loss=0.2135, pruned_loss=0.0299, over 4839.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2192, pruned_loss=0.03778, over 972692.88 frames.], batch size: 30, lr: 2.95e-04 +2022-05-05 20:24:14,744 INFO [train.py:715] (6/8) Epoch 7, batch 12250, loss[loss=0.1531, simple_loss=0.2208, pruned_loss=0.04265, over 4817.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2181, pruned_loss=0.03722, over 973156.22 frames.], batch size: 15, lr: 2.95e-04 +2022-05-05 20:24:53,360 INFO [train.py:715] (6/8) Epoch 7, batch 12300, loss[loss=0.1095, simple_loss=0.1782, pruned_loss=0.02038, over 4816.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2184, pruned_loss=0.0376, over 972597.19 frames.], batch size: 13, lr: 2.95e-04 +2022-05-05 20:25:35,089 INFO [train.py:715] (6/8) Epoch 7, batch 12350, loss[loss=0.1674, simple_loss=0.2394, pruned_loss=0.0477, over 4855.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2184, pruned_loss=0.03752, over 973176.17 frames.], batch size: 30, lr: 2.95e-04 +2022-05-05 20:26:13,785 INFO [train.py:715] (6/8) Epoch 7, batch 12400, loss[loss=0.1502, simple_loss=0.2199, pruned_loss=0.04028, over 4942.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2176, pruned_loss=0.03687, over 973673.52 frames.], batch size: 39, lr: 2.95e-04 +2022-05-05 20:26:53,002 INFO [train.py:715] (6/8) Epoch 7, batch 12450, loss[loss=0.1523, simple_loss=0.22, pruned_loss=0.04233, over 4795.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2179, pruned_loss=0.03675, over 973482.55 frames.], batch size: 18, lr: 2.95e-04 +2022-05-05 20:27:31,402 INFO [train.py:715] (6/8) Epoch 7, batch 12500, loss[loss=0.2128, simple_loss=0.2662, pruned_loss=0.07972, over 4782.00 frames.], tot_loss[loss=0.146, simple_loss=0.2176, pruned_loss=0.03726, over 972688.14 frames.], batch size: 17, lr: 2.95e-04 +2022-05-05 20:28:10,097 INFO [train.py:715] (6/8) Epoch 7, batch 12550, loss[loss=0.1234, simple_loss=0.1974, pruned_loss=0.02474, over 4902.00 frames.], tot_loss[loss=0.147, simple_loss=0.2181, pruned_loss=0.03797, over 971783.78 frames.], batch size: 29, lr: 2.95e-04 +2022-05-05 20:28:49,194 INFO [train.py:715] (6/8) Epoch 7, batch 12600, loss[loss=0.1548, simple_loss=0.2268, pruned_loss=0.04136, over 4815.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2186, pruned_loss=0.03807, over 971129.91 frames.], batch size: 27, lr: 2.95e-04 +2022-05-05 20:29:27,377 INFO [train.py:715] (6/8) Epoch 7, batch 12650, loss[loss=0.1334, simple_loss=0.1964, pruned_loss=0.0352, over 4980.00 frames.], tot_loss[loss=0.1486, simple_loss=0.22, pruned_loss=0.03856, over 971204.58 frames.], batch size: 25, lr: 2.95e-04 +2022-05-05 20:30:06,576 INFO 
[train.py:715] (6/8) Epoch 7, batch 12700, loss[loss=0.1419, simple_loss=0.2221, pruned_loss=0.03084, over 4842.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2194, pruned_loss=0.03821, over 970980.27 frames.], batch size: 25, lr: 2.95e-04 +2022-05-05 20:30:44,740 INFO [train.py:715] (6/8) Epoch 7, batch 12750, loss[loss=0.1445, simple_loss=0.2109, pruned_loss=0.03909, over 4802.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2194, pruned_loss=0.03842, over 972137.17 frames.], batch size: 13, lr: 2.95e-04 +2022-05-05 20:31:23,968 INFO [train.py:715] (6/8) Epoch 7, batch 12800, loss[loss=0.1474, simple_loss=0.2222, pruned_loss=0.03627, over 4909.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2178, pruned_loss=0.03769, over 972176.26 frames.], batch size: 19, lr: 2.95e-04 +2022-05-05 20:32:02,917 INFO [train.py:715] (6/8) Epoch 7, batch 12850, loss[loss=0.1333, simple_loss=0.2152, pruned_loss=0.02569, over 4907.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2173, pruned_loss=0.03705, over 971810.82 frames.], batch size: 39, lr: 2.95e-04 +2022-05-05 20:32:41,509 INFO [train.py:715] (6/8) Epoch 7, batch 12900, loss[loss=0.1346, simple_loss=0.2023, pruned_loss=0.03347, over 4821.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2169, pruned_loss=0.03701, over 971318.72 frames.], batch size: 26, lr: 2.95e-04 +2022-05-05 20:33:20,984 INFO [train.py:715] (6/8) Epoch 7, batch 12950, loss[loss=0.1586, simple_loss=0.2281, pruned_loss=0.04453, over 4848.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2168, pruned_loss=0.0365, over 971331.42 frames.], batch size: 20, lr: 2.95e-04 +2022-05-05 20:33:59,927 INFO [train.py:715] (6/8) Epoch 7, batch 13000, loss[loss=0.1459, simple_loss=0.2167, pruned_loss=0.03753, over 4806.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2177, pruned_loss=0.03693, over 971404.32 frames.], batch size: 21, lr: 2.95e-04 +2022-05-05 20:34:38,877 INFO [train.py:715] (6/8) Epoch 7, batch 13050, loss[loss=0.1464, simple_loss=0.2212, pruned_loss=0.03579, over 4755.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2175, pruned_loss=0.03701, over 971859.28 frames.], batch size: 16, lr: 2.95e-04 +2022-05-05 20:35:17,656 INFO [train.py:715] (6/8) Epoch 7, batch 13100, loss[loss=0.1605, simple_loss=0.2269, pruned_loss=0.047, over 4994.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2175, pruned_loss=0.03704, over 972225.51 frames.], batch size: 14, lr: 2.95e-04 +2022-05-05 20:35:57,327 INFO [train.py:715] (6/8) Epoch 7, batch 13150, loss[loss=0.1773, simple_loss=0.2465, pruned_loss=0.05402, over 4787.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2187, pruned_loss=0.03729, over 971902.57 frames.], batch size: 17, lr: 2.95e-04 +2022-05-05 20:36:35,853 INFO [train.py:715] (6/8) Epoch 7, batch 13200, loss[loss=0.1565, simple_loss=0.231, pruned_loss=0.04104, over 4736.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2183, pruned_loss=0.03691, over 971489.36 frames.], batch size: 16, lr: 2.95e-04 +2022-05-05 20:37:15,484 INFO [train.py:715] (6/8) Epoch 7, batch 13250, loss[loss=0.1402, simple_loss=0.2073, pruned_loss=0.03657, over 4921.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2187, pruned_loss=0.03728, over 972302.48 frames.], batch size: 23, lr: 2.95e-04 +2022-05-05 20:37:54,871 INFO [train.py:715] (6/8) Epoch 7, batch 13300, loss[loss=0.1307, simple_loss=0.2061, pruned_loss=0.02761, over 4758.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2182, pruned_loss=0.03716, over 971917.45 frames.], batch size: 19, lr: 2.95e-04 +2022-05-05 20:38:33,798 INFO [train.py:715] (6/8) 
Epoch 7, batch 13350, loss[loss=0.1509, simple_loss=0.2287, pruned_loss=0.03651, over 4701.00 frames.], tot_loss[loss=0.146, simple_loss=0.2182, pruned_loss=0.03688, over 971776.72 frames.], batch size: 15, lr: 2.95e-04 +2022-05-05 20:39:12,813 INFO [train.py:715] (6/8) Epoch 7, batch 13400, loss[loss=0.1407, simple_loss=0.2157, pruned_loss=0.03291, over 4750.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2182, pruned_loss=0.03699, over 972023.97 frames.], batch size: 16, lr: 2.95e-04 +2022-05-05 20:39:51,471 INFO [train.py:715] (6/8) Epoch 7, batch 13450, loss[loss=0.1616, simple_loss=0.2281, pruned_loss=0.04757, over 4741.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2192, pruned_loss=0.03719, over 971974.99 frames.], batch size: 16, lr: 2.95e-04 +2022-05-05 20:40:30,903 INFO [train.py:715] (6/8) Epoch 7, batch 13500, loss[loss=0.1255, simple_loss=0.2017, pruned_loss=0.02467, over 4811.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2189, pruned_loss=0.03693, over 972240.57 frames.], batch size: 25, lr: 2.95e-04 +2022-05-05 20:41:09,549 INFO [train.py:715] (6/8) Epoch 7, batch 13550, loss[loss=0.1427, simple_loss=0.2164, pruned_loss=0.03456, over 4757.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2189, pruned_loss=0.03704, over 971495.65 frames.], batch size: 12, lr: 2.95e-04 +2022-05-05 20:41:48,024 INFO [train.py:715] (6/8) Epoch 7, batch 13600, loss[loss=0.1176, simple_loss=0.1873, pruned_loss=0.02393, over 4987.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2184, pruned_loss=0.03677, over 970694.27 frames.], batch size: 14, lr: 2.95e-04 +2022-05-05 20:42:26,942 INFO [train.py:715] (6/8) Epoch 7, batch 13650, loss[loss=0.1513, simple_loss=0.2305, pruned_loss=0.03606, over 4884.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2181, pruned_loss=0.03704, over 970463.25 frames.], batch size: 22, lr: 2.95e-04 +2022-05-05 20:43:05,964 INFO [train.py:715] (6/8) Epoch 7, batch 13700, loss[loss=0.1483, simple_loss=0.2111, pruned_loss=0.0428, over 4816.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2176, pruned_loss=0.03687, over 971544.81 frames.], batch size: 26, lr: 2.95e-04 +2022-05-05 20:43:44,943 INFO [train.py:715] (6/8) Epoch 7, batch 13750, loss[loss=0.1764, simple_loss=0.234, pruned_loss=0.05941, over 4847.00 frames.], tot_loss[loss=0.1462, simple_loss=0.218, pruned_loss=0.03721, over 971635.94 frames.], batch size: 30, lr: 2.94e-04 +2022-05-05 20:44:23,923 INFO [train.py:715] (6/8) Epoch 7, batch 13800, loss[loss=0.1291, simple_loss=0.2011, pruned_loss=0.02856, over 4831.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2167, pruned_loss=0.03646, over 971651.30 frames.], batch size: 27, lr: 2.94e-04 +2022-05-05 20:45:03,234 INFO [train.py:715] (6/8) Epoch 7, batch 13850, loss[loss=0.1361, simple_loss=0.22, pruned_loss=0.02605, over 4782.00 frames.], tot_loss[loss=0.145, simple_loss=0.2171, pruned_loss=0.03645, over 971779.76 frames.], batch size: 18, lr: 2.94e-04 +2022-05-05 20:45:41,496 INFO [train.py:715] (6/8) Epoch 7, batch 13900, loss[loss=0.1533, simple_loss=0.221, pruned_loss=0.04286, over 4904.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2178, pruned_loss=0.03738, over 971506.36 frames.], batch size: 19, lr: 2.94e-04 +2022-05-05 20:46:20,519 INFO [train.py:715] (6/8) Epoch 7, batch 13950, loss[loss=0.1334, simple_loss=0.2069, pruned_loss=0.02995, over 4856.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2181, pruned_loss=0.03745, over 971873.31 frames.], batch size: 20, lr: 2.94e-04 +2022-05-05 20:46:59,563 INFO [train.py:715] (6/8) Epoch 7, batch 14000, 
loss[loss=0.129, simple_loss=0.2105, pruned_loss=0.02373, over 4782.00 frames.], tot_loss[loss=0.147, simple_loss=0.2186, pruned_loss=0.03766, over 972178.84 frames.], batch size: 18, lr: 2.94e-04 +2022-05-05 20:47:38,923 INFO [train.py:715] (6/8) Epoch 7, batch 14050, loss[loss=0.1511, simple_loss=0.2291, pruned_loss=0.03651, over 4826.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2179, pruned_loss=0.03744, over 972056.36 frames.], batch size: 15, lr: 2.94e-04 +2022-05-05 20:48:18,052 INFO [train.py:715] (6/8) Epoch 7, batch 14100, loss[loss=0.1755, simple_loss=0.2401, pruned_loss=0.05544, over 4806.00 frames.], tot_loss[loss=0.147, simple_loss=0.2184, pruned_loss=0.03782, over 971790.66 frames.], batch size: 14, lr: 2.94e-04 +2022-05-05 20:48:56,862 INFO [train.py:715] (6/8) Epoch 7, batch 14150, loss[loss=0.149, simple_loss=0.2158, pruned_loss=0.04105, over 4694.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2186, pruned_loss=0.03782, over 970307.96 frames.], batch size: 15, lr: 2.94e-04 +2022-05-05 20:49:36,151 INFO [train.py:715] (6/8) Epoch 7, batch 14200, loss[loss=0.1173, simple_loss=0.1921, pruned_loss=0.02131, over 4797.00 frames.], tot_loss[loss=0.147, simple_loss=0.2181, pruned_loss=0.03793, over 970948.30 frames.], batch size: 25, lr: 2.94e-04 +2022-05-05 20:50:14,436 INFO [train.py:715] (6/8) Epoch 7, batch 14250, loss[loss=0.1271, simple_loss=0.2047, pruned_loss=0.02475, over 4908.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2186, pruned_loss=0.03823, over 972097.64 frames.], batch size: 17, lr: 2.94e-04 +2022-05-05 20:50:53,730 INFO [train.py:715] (6/8) Epoch 7, batch 14300, loss[loss=0.1401, simple_loss=0.2137, pruned_loss=0.03324, over 4809.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2177, pruned_loss=0.03745, over 971776.15 frames.], batch size: 25, lr: 2.94e-04 +2022-05-05 20:51:33,015 INFO [train.py:715] (6/8) Epoch 7, batch 14350, loss[loss=0.1315, simple_loss=0.1998, pruned_loss=0.0316, over 4827.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2173, pruned_loss=0.03721, over 971644.62 frames.], batch size: 30, lr: 2.94e-04 +2022-05-05 20:52:12,027 INFO [train.py:715] (6/8) Epoch 7, batch 14400, loss[loss=0.1316, simple_loss=0.2121, pruned_loss=0.02555, over 4803.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2184, pruned_loss=0.0375, over 971973.83 frames.], batch size: 14, lr: 2.94e-04 +2022-05-05 20:52:50,741 INFO [train.py:715] (6/8) Epoch 7, batch 14450, loss[loss=0.1409, simple_loss=0.2235, pruned_loss=0.02909, over 4830.00 frames.], tot_loss[loss=0.147, simple_loss=0.2189, pruned_loss=0.03758, over 971666.73 frames.], batch size: 30, lr: 2.94e-04 +2022-05-05 20:53:29,523 INFO [train.py:715] (6/8) Epoch 7, batch 14500, loss[loss=0.1773, simple_loss=0.2442, pruned_loss=0.05523, over 4955.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2194, pruned_loss=0.03794, over 971413.96 frames.], batch size: 35, lr: 2.94e-04 +2022-05-05 20:54:09,102 INFO [train.py:715] (6/8) Epoch 7, batch 14550, loss[loss=0.1343, simple_loss=0.2165, pruned_loss=0.02605, over 4960.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2187, pruned_loss=0.03751, over 971600.95 frames.], batch size: 15, lr: 2.94e-04 +2022-05-05 20:54:47,909 INFO [train.py:715] (6/8) Epoch 7, batch 14600, loss[loss=0.1612, simple_loss=0.2237, pruned_loss=0.04937, over 4744.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2185, pruned_loss=0.03707, over 971989.29 frames.], batch size: 12, lr: 2.94e-04 +2022-05-05 20:55:26,848 INFO [train.py:715] (6/8) Epoch 7, batch 14650, loss[loss=0.1555, 
simple_loss=0.22, pruned_loss=0.04548, over 4774.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2186, pruned_loss=0.0372, over 971228.80 frames.], batch size: 12, lr: 2.94e-04 +2022-05-05 20:56:05,813 INFO [train.py:715] (6/8) Epoch 7, batch 14700, loss[loss=0.1722, simple_loss=0.2513, pruned_loss=0.04653, over 4904.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2181, pruned_loss=0.03738, over 971166.37 frames.], batch size: 38, lr: 2.94e-04 +2022-05-05 20:56:44,942 INFO [train.py:715] (6/8) Epoch 7, batch 14750, loss[loss=0.1173, simple_loss=0.1869, pruned_loss=0.02388, over 4631.00 frames.], tot_loss[loss=0.147, simple_loss=0.2185, pruned_loss=0.03779, over 970281.04 frames.], batch size: 13, lr: 2.94e-04 +2022-05-05 20:57:23,495 INFO [train.py:715] (6/8) Epoch 7, batch 14800, loss[loss=0.1387, simple_loss=0.2069, pruned_loss=0.03521, over 4777.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2183, pruned_loss=0.03738, over 971414.12 frames.], batch size: 18, lr: 2.94e-04 +2022-05-05 20:58:03,001 INFO [train.py:715] (6/8) Epoch 7, batch 14850, loss[loss=0.1396, simple_loss=0.2219, pruned_loss=0.02867, over 4821.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2183, pruned_loss=0.03735, over 971653.85 frames.], batch size: 26, lr: 2.94e-04 +2022-05-05 20:58:41,954 INFO [train.py:715] (6/8) Epoch 7, batch 14900, loss[loss=0.1626, simple_loss=0.2417, pruned_loss=0.04179, over 4912.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2193, pruned_loss=0.03789, over 971223.61 frames.], batch size: 23, lr: 2.94e-04 +2022-05-05 20:59:20,317 INFO [train.py:715] (6/8) Epoch 7, batch 14950, loss[loss=0.169, simple_loss=0.234, pruned_loss=0.05197, over 4864.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2187, pruned_loss=0.038, over 972056.16 frames.], batch size: 32, lr: 2.94e-04 +2022-05-05 20:59:59,927 INFO [train.py:715] (6/8) Epoch 7, batch 15000, loss[loss=0.2054, simple_loss=0.2606, pruned_loss=0.0751, over 4963.00 frames.], tot_loss[loss=0.148, simple_loss=0.2192, pruned_loss=0.03839, over 972412.45 frames.], batch size: 15, lr: 2.94e-04 +2022-05-05 20:59:59,928 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 21:00:14,354 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.1083, simple_loss=0.1931, pruned_loss=0.01175, over 914524.00 frames. 
+2022-05-05 21:00:53,497 INFO [train.py:715] (6/8) Epoch 7, batch 15050, loss[loss=0.1223, simple_loss=0.1921, pruned_loss=0.02629, over 4887.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2181, pruned_loss=0.03773, over 972592.08 frames.], batch size: 19, lr: 2.94e-04 +2022-05-05 21:01:32,731 INFO [train.py:715] (6/8) Epoch 7, batch 15100, loss[loss=0.1447, simple_loss=0.2221, pruned_loss=0.03363, over 4774.00 frames.], tot_loss[loss=0.147, simple_loss=0.2178, pruned_loss=0.03811, over 971692.27 frames.], batch size: 18, lr: 2.94e-04 +2022-05-05 21:02:11,969 INFO [train.py:715] (6/8) Epoch 7, batch 15150, loss[loss=0.1918, simple_loss=0.2447, pruned_loss=0.06948, over 4973.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2191, pruned_loss=0.03858, over 972393.23 frames.], batch size: 39, lr: 2.94e-04 +2022-05-05 21:02:50,725 INFO [train.py:715] (6/8) Epoch 7, batch 15200, loss[loss=0.1542, simple_loss=0.226, pruned_loss=0.04114, over 4827.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2195, pruned_loss=0.03886, over 971846.07 frames.], batch size: 26, lr: 2.94e-04 +2022-05-05 21:03:30,198 INFO [train.py:715] (6/8) Epoch 7, batch 15250, loss[loss=0.175, simple_loss=0.2405, pruned_loss=0.05472, over 4786.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2192, pruned_loss=0.03892, over 972248.82 frames.], batch size: 17, lr: 2.94e-04 +2022-05-05 21:04:09,390 INFO [train.py:715] (6/8) Epoch 7, batch 15300, loss[loss=0.168, simple_loss=0.234, pruned_loss=0.05097, over 4692.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2188, pruned_loss=0.03864, over 971416.75 frames.], batch size: 15, lr: 2.94e-04 +2022-05-05 21:04:48,397 INFO [train.py:715] (6/8) Epoch 7, batch 15350, loss[loss=0.1572, simple_loss=0.2276, pruned_loss=0.04335, over 4767.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2188, pruned_loss=0.0384, over 970866.70 frames.], batch size: 16, lr: 2.94e-04 +2022-05-05 21:05:27,506 INFO [train.py:715] (6/8) Epoch 7, batch 15400, loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02887, over 4825.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2186, pruned_loss=0.03795, over 971463.61 frames.], batch size: 27, lr: 2.94e-04 +2022-05-05 21:06:05,999 INFO [train.py:715] (6/8) Epoch 7, batch 15450, loss[loss=0.166, simple_loss=0.2418, pruned_loss=0.04513, over 4907.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2188, pruned_loss=0.03805, over 971533.73 frames.], batch size: 23, lr: 2.94e-04 +2022-05-05 21:06:45,045 INFO [train.py:715] (6/8) Epoch 7, batch 15500, loss[loss=0.1677, simple_loss=0.2423, pruned_loss=0.04657, over 4780.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2189, pruned_loss=0.03813, over 971524.58 frames.], batch size: 17, lr: 2.93e-04 +2022-05-05 21:07:23,166 INFO [train.py:715] (6/8) Epoch 7, batch 15550, loss[loss=0.1913, simple_loss=0.2579, pruned_loss=0.06236, over 4984.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2182, pruned_loss=0.03766, over 972352.08 frames.], batch size: 28, lr: 2.93e-04 +2022-05-05 21:08:02,568 INFO [train.py:715] (6/8) Epoch 7, batch 15600, loss[loss=0.1657, simple_loss=0.2341, pruned_loss=0.04869, over 4845.00 frames.], tot_loss[loss=0.1468, simple_loss=0.218, pruned_loss=0.03785, over 971853.25 frames.], batch size: 32, lr: 2.93e-04 +2022-05-05 21:08:42,086 INFO [train.py:715] (6/8) Epoch 7, batch 15650, loss[loss=0.1442, simple_loss=0.2137, pruned_loss=0.03736, over 4973.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2166, pruned_loss=0.03698, over 971927.92 frames.], batch size: 35, lr: 2.93e-04 +2022-05-05 21:09:20,364 
INFO [train.py:715] (6/8) Epoch 7, batch 15700, loss[loss=0.1466, simple_loss=0.2266, pruned_loss=0.03327, over 4980.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2175, pruned_loss=0.03759, over 972434.51 frames.], batch size: 28, lr: 2.93e-04 +2022-05-05 21:09:59,353 INFO [train.py:715] (6/8) Epoch 7, batch 15750, loss[loss=0.1595, simple_loss=0.2291, pruned_loss=0.045, over 4980.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2179, pruned_loss=0.03768, over 972022.06 frames.], batch size: 15, lr: 2.93e-04 +2022-05-05 21:10:39,021 INFO [train.py:715] (6/8) Epoch 7, batch 15800, loss[loss=0.1214, simple_loss=0.1948, pruned_loss=0.02399, over 4823.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2176, pruned_loss=0.03727, over 972009.96 frames.], batch size: 12, lr: 2.93e-04 +2022-05-05 21:11:18,133 INFO [train.py:715] (6/8) Epoch 7, batch 15850, loss[loss=0.1626, simple_loss=0.231, pruned_loss=0.04709, over 4995.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2176, pruned_loss=0.03743, over 972004.91 frames.], batch size: 16, lr: 2.93e-04 +2022-05-05 21:11:57,176 INFO [train.py:715] (6/8) Epoch 7, batch 15900, loss[loss=0.1483, simple_loss=0.2244, pruned_loss=0.03612, over 4941.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2182, pruned_loss=0.03749, over 971881.85 frames.], batch size: 21, lr: 2.93e-04 +2022-05-05 21:12:36,479 INFO [train.py:715] (6/8) Epoch 7, batch 15950, loss[loss=0.1422, simple_loss=0.2186, pruned_loss=0.03294, over 4757.00 frames.], tot_loss[loss=0.1466, simple_loss=0.218, pruned_loss=0.03759, over 972038.11 frames.], batch size: 16, lr: 2.93e-04 +2022-05-05 21:13:15,929 INFO [train.py:715] (6/8) Epoch 7, batch 16000, loss[loss=0.146, simple_loss=0.2163, pruned_loss=0.03782, over 4803.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2181, pruned_loss=0.03748, over 971148.17 frames.], batch size: 25, lr: 2.93e-04 +2022-05-05 21:13:54,028 INFO [train.py:715] (6/8) Epoch 7, batch 16050, loss[loss=0.127, simple_loss=0.2048, pruned_loss=0.02462, over 4932.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2178, pruned_loss=0.0374, over 970779.99 frames.], batch size: 23, lr: 2.93e-04 +2022-05-05 21:14:33,356 INFO [train.py:715] (6/8) Epoch 7, batch 16100, loss[loss=0.1433, simple_loss=0.2051, pruned_loss=0.0407, over 4960.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2177, pruned_loss=0.03723, over 971233.04 frames.], batch size: 15, lr: 2.93e-04 +2022-05-05 21:15:12,281 INFO [train.py:715] (6/8) Epoch 7, batch 16150, loss[loss=0.1614, simple_loss=0.2176, pruned_loss=0.05256, over 4839.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2181, pruned_loss=0.03734, over 971286.28 frames.], batch size: 30, lr: 2.93e-04 +2022-05-05 21:15:50,930 INFO [train.py:715] (6/8) Epoch 7, batch 16200, loss[loss=0.1543, simple_loss=0.2285, pruned_loss=0.04008, over 4926.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2186, pruned_loss=0.03716, over 972978.35 frames.], batch size: 23, lr: 2.93e-04 +2022-05-05 21:16:30,079 INFO [train.py:715] (6/8) Epoch 7, batch 16250, loss[loss=0.1215, simple_loss=0.2014, pruned_loss=0.0208, over 4837.00 frames.], tot_loss[loss=0.1471, simple_loss=0.219, pruned_loss=0.03756, over 972744.85 frames.], batch size: 15, lr: 2.93e-04 +2022-05-05 21:17:08,725 INFO [train.py:715] (6/8) Epoch 7, batch 16300, loss[loss=0.1529, simple_loss=0.2282, pruned_loss=0.03877, over 4882.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2189, pruned_loss=0.03759, over 972595.59 frames.], batch size: 19, lr: 2.93e-04 +2022-05-05 21:17:48,273 INFO [train.py:715] (6/8) 
Epoch 7, batch 16350, loss[loss=0.1477, simple_loss=0.2165, pruned_loss=0.03947, over 4747.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2187, pruned_loss=0.03723, over 972844.25 frames.], batch size: 19, lr: 2.93e-04 +2022-05-05 21:18:26,609 INFO [train.py:715] (6/8) Epoch 7, batch 16400, loss[loss=0.1458, simple_loss=0.2227, pruned_loss=0.03449, over 4912.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2195, pruned_loss=0.03738, over 973081.11 frames.], batch size: 17, lr: 2.93e-04 +2022-05-05 21:19:05,501 INFO [train.py:715] (6/8) Epoch 7, batch 16450, loss[loss=0.1283, simple_loss=0.1999, pruned_loss=0.02834, over 4640.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2192, pruned_loss=0.03744, over 972806.28 frames.], batch size: 13, lr: 2.93e-04 +2022-05-05 21:19:44,555 INFO [train.py:715] (6/8) Epoch 7, batch 16500, loss[loss=0.1523, simple_loss=0.216, pruned_loss=0.04429, over 4899.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2182, pruned_loss=0.03714, over 972794.43 frames.], batch size: 32, lr: 2.93e-04 +2022-05-05 21:20:22,828 INFO [train.py:715] (6/8) Epoch 7, batch 16550, loss[loss=0.1276, simple_loss=0.2018, pruned_loss=0.02676, over 4934.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2185, pruned_loss=0.03728, over 972329.15 frames.], batch size: 23, lr: 2.93e-04 +2022-05-05 21:21:02,225 INFO [train.py:715] (6/8) Epoch 7, batch 16600, loss[loss=0.1581, simple_loss=0.2206, pruned_loss=0.04774, over 4966.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2181, pruned_loss=0.0372, over 973233.68 frames.], batch size: 35, lr: 2.93e-04 +2022-05-05 21:21:41,397 INFO [train.py:715] (6/8) Epoch 7, batch 16650, loss[loss=0.1309, simple_loss=0.2102, pruned_loss=0.02581, over 4759.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2185, pruned_loss=0.0376, over 973699.31 frames.], batch size: 16, lr: 2.93e-04 +2022-05-05 21:22:20,542 INFO [train.py:715] (6/8) Epoch 7, batch 16700, loss[loss=0.128, simple_loss=0.1967, pruned_loss=0.02969, over 4972.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2189, pruned_loss=0.03784, over 972878.41 frames.], batch size: 14, lr: 2.93e-04 +2022-05-05 21:22:59,811 INFO [train.py:715] (6/8) Epoch 7, batch 16750, loss[loss=0.1457, simple_loss=0.2141, pruned_loss=0.03861, over 4863.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2189, pruned_loss=0.03791, over 973102.87 frames.], batch size: 16, lr: 2.93e-04 +2022-05-05 21:23:38,668 INFO [train.py:715] (6/8) Epoch 7, batch 16800, loss[loss=0.1375, simple_loss=0.2012, pruned_loss=0.03686, over 4784.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2178, pruned_loss=0.0375, over 972384.07 frames.], batch size: 14, lr: 2.93e-04 +2022-05-05 21:24:17,714 INFO [train.py:715] (6/8) Epoch 7, batch 16850, loss[loss=0.1468, simple_loss=0.224, pruned_loss=0.03482, over 4769.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2181, pruned_loss=0.03717, over 972560.49 frames.], batch size: 19, lr: 2.93e-04 +2022-05-05 21:24:56,999 INFO [train.py:715] (6/8) Epoch 7, batch 16900, loss[loss=0.1658, simple_loss=0.2358, pruned_loss=0.04788, over 4802.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2181, pruned_loss=0.03748, over 973121.31 frames.], batch size: 26, lr: 2.93e-04 +2022-05-05 21:25:36,248 INFO [train.py:715] (6/8) Epoch 7, batch 16950, loss[loss=0.1429, simple_loss=0.2152, pruned_loss=0.03533, over 4815.00 frames.], tot_loss[loss=0.1462, simple_loss=0.218, pruned_loss=0.03719, over 973288.68 frames.], batch size: 25, lr: 2.93e-04 +2022-05-05 21:26:14,897 INFO [train.py:715] (6/8) Epoch 7, batch 17000, 
loss[loss=0.1591, simple_loss=0.2268, pruned_loss=0.04568, over 4839.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2183, pruned_loss=0.03763, over 973562.90 frames.], batch size: 15, lr: 2.93e-04 +2022-05-05 21:26:54,053 INFO [train.py:715] (6/8) Epoch 7, batch 17050, loss[loss=0.1237, simple_loss=0.2001, pruned_loss=0.02368, over 4811.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2179, pruned_loss=0.03734, over 973794.52 frames.], batch size: 25, lr: 2.93e-04 +2022-05-05 21:27:32,507 INFO [train.py:715] (6/8) Epoch 7, batch 17100, loss[loss=0.1629, simple_loss=0.2177, pruned_loss=0.05408, over 4906.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2184, pruned_loss=0.03776, over 973238.16 frames.], batch size: 19, lr: 2.93e-04 +2022-05-05 21:28:11,645 INFO [train.py:715] (6/8) Epoch 7, batch 17150, loss[loss=0.1538, simple_loss=0.229, pruned_loss=0.03934, over 4880.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2183, pruned_loss=0.03802, over 972811.76 frames.], batch size: 22, lr: 2.93e-04 +2022-05-05 21:28:50,898 INFO [train.py:715] (6/8) Epoch 7, batch 17200, loss[loss=0.1547, simple_loss=0.219, pruned_loss=0.04526, over 4762.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2174, pruned_loss=0.03741, over 972593.08 frames.], batch size: 16, lr: 2.93e-04 +2022-05-05 21:29:29,221 INFO [train.py:715] (6/8) Epoch 7, batch 17250, loss[loss=0.1344, simple_loss=0.2035, pruned_loss=0.03259, over 4852.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2175, pruned_loss=0.03693, over 972352.94 frames.], batch size: 12, lr: 2.92e-04 +2022-05-05 21:30:08,294 INFO [train.py:715] (6/8) Epoch 7, batch 17300, loss[loss=0.1463, simple_loss=0.2196, pruned_loss=0.03654, over 4836.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2177, pruned_loss=0.03653, over 972731.73 frames.], batch size: 15, lr: 2.92e-04 +2022-05-05 21:30:46,575 INFO [train.py:715] (6/8) Epoch 7, batch 17350, loss[loss=0.1247, simple_loss=0.1951, pruned_loss=0.02713, over 4803.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2193, pruned_loss=0.03796, over 972718.05 frames.], batch size: 25, lr: 2.92e-04 +2022-05-05 21:31:25,651 INFO [train.py:715] (6/8) Epoch 7, batch 17400, loss[loss=0.1253, simple_loss=0.1952, pruned_loss=0.02772, over 4933.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2195, pruned_loss=0.03802, over 973501.62 frames.], batch size: 21, lr: 2.92e-04 +2022-05-05 21:32:04,440 INFO [train.py:715] (6/8) Epoch 7, batch 17450, loss[loss=0.155, simple_loss=0.2209, pruned_loss=0.04457, over 4946.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2191, pruned_loss=0.0379, over 972906.70 frames.], batch size: 39, lr: 2.92e-04 +2022-05-05 21:32:43,222 INFO [train.py:715] (6/8) Epoch 7, batch 17500, loss[loss=0.1431, simple_loss=0.2111, pruned_loss=0.03757, over 4854.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2192, pruned_loss=0.0378, over 971838.31 frames.], batch size: 22, lr: 2.92e-04 +2022-05-05 21:33:22,414 INFO [train.py:715] (6/8) Epoch 7, batch 17550, loss[loss=0.1595, simple_loss=0.2171, pruned_loss=0.05092, over 4976.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2192, pruned_loss=0.0375, over 972860.29 frames.], batch size: 35, lr: 2.92e-04 +2022-05-05 21:34:00,736 INFO [train.py:715] (6/8) Epoch 7, batch 17600, loss[loss=0.1826, simple_loss=0.2546, pruned_loss=0.05526, over 4960.00 frames.], tot_loss[loss=0.146, simple_loss=0.2182, pruned_loss=0.03689, over 972196.17 frames.], batch size: 14, lr: 2.92e-04 +2022-05-05 21:34:39,810 INFO [train.py:715] (6/8) Epoch 7, batch 17650, loss[loss=0.1178, 
simple_loss=0.1917, pruned_loss=0.02197, over 4887.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2173, pruned_loss=0.03645, over 972719.83 frames.], batch size: 19, lr: 2.92e-04 +2022-05-05 21:35:19,109 INFO [train.py:715] (6/8) Epoch 7, batch 17700, loss[loss=0.1585, simple_loss=0.2276, pruned_loss=0.04463, over 4977.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2171, pruned_loss=0.03654, over 972845.66 frames.], batch size: 35, lr: 2.92e-04 +2022-05-05 21:35:58,207 INFO [train.py:715] (6/8) Epoch 7, batch 17750, loss[loss=0.1907, simple_loss=0.2462, pruned_loss=0.06761, over 4866.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2174, pruned_loss=0.03693, over 972290.53 frames.], batch size: 34, lr: 2.92e-04 +2022-05-05 21:36:37,515 INFO [train.py:715] (6/8) Epoch 7, batch 17800, loss[loss=0.1313, simple_loss=0.2024, pruned_loss=0.03011, over 4808.00 frames.], tot_loss[loss=0.1454, simple_loss=0.217, pruned_loss=0.03686, over 971584.57 frames.], batch size: 26, lr: 2.92e-04 +2022-05-05 21:37:16,002 INFO [train.py:715] (6/8) Epoch 7, batch 17850, loss[loss=0.1493, simple_loss=0.231, pruned_loss=0.03378, over 4973.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2171, pruned_loss=0.03693, over 972829.64 frames.], batch size: 15, lr: 2.92e-04 +2022-05-05 21:37:55,611 INFO [train.py:715] (6/8) Epoch 7, batch 17900, loss[loss=0.1757, simple_loss=0.2451, pruned_loss=0.05316, over 4949.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2171, pruned_loss=0.03663, over 973238.87 frames.], batch size: 35, lr: 2.92e-04 +2022-05-05 21:38:34,074 INFO [train.py:715] (6/8) Epoch 7, batch 17950, loss[loss=0.131, simple_loss=0.1986, pruned_loss=0.03168, over 4877.00 frames.], tot_loss[loss=0.1441, simple_loss=0.216, pruned_loss=0.03612, over 973415.48 frames.], batch size: 20, lr: 2.92e-04 +2022-05-05 21:39:13,126 INFO [train.py:715] (6/8) Epoch 7, batch 18000, loss[loss=0.1528, simple_loss=0.2278, pruned_loss=0.03884, over 4884.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2175, pruned_loss=0.03671, over 973625.60 frames.], batch size: 16, lr: 2.92e-04 +2022-05-05 21:39:13,127 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 21:39:22,793 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.1081, simple_loss=0.193, pruned_loss=0.01158, over 914524.00 frames. 
+2022-05-05 21:40:01,808 INFO [train.py:715] (6/8) Epoch 7, batch 18050, loss[loss=0.1225, simple_loss=0.198, pruned_loss=0.02354, over 4969.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2174, pruned_loss=0.03674, over 974251.17 frames.], batch size: 24, lr: 2.92e-04 +2022-05-05 21:40:41,010 INFO [train.py:715] (6/8) Epoch 7, batch 18100, loss[loss=0.1326, simple_loss=0.206, pruned_loss=0.02963, over 4800.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2175, pruned_loss=0.03679, over 973964.61 frames.], batch size: 17, lr: 2.92e-04 +2022-05-05 21:41:19,568 INFO [train.py:715] (6/8) Epoch 7, batch 18150, loss[loss=0.1464, simple_loss=0.2283, pruned_loss=0.03227, over 4892.00 frames.], tot_loss[loss=0.1464, simple_loss=0.218, pruned_loss=0.03741, over 974180.17 frames.], batch size: 19, lr: 2.92e-04 +2022-05-05 21:41:57,881 INFO [train.py:715] (6/8) Epoch 7, batch 18200, loss[loss=0.1318, simple_loss=0.2074, pruned_loss=0.02815, over 4919.00 frames.], tot_loss[loss=0.146, simple_loss=0.2175, pruned_loss=0.03728, over 974223.20 frames.], batch size: 23, lr: 2.92e-04 +2022-05-05 21:42:36,255 INFO [train.py:715] (6/8) Epoch 7, batch 18250, loss[loss=0.1474, simple_loss=0.2196, pruned_loss=0.03766, over 4973.00 frames.], tot_loss[loss=0.1457, simple_loss=0.217, pruned_loss=0.03719, over 973662.47 frames.], batch size: 15, lr: 2.92e-04 +2022-05-05 21:43:15,544 INFO [train.py:715] (6/8) Epoch 7, batch 18300, loss[loss=0.1371, simple_loss=0.2103, pruned_loss=0.03193, over 4983.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2168, pruned_loss=0.03684, over 973191.73 frames.], batch size: 14, lr: 2.92e-04 +2022-05-05 21:43:53,557 INFO [train.py:715] (6/8) Epoch 7, batch 18350, loss[loss=0.1767, simple_loss=0.2396, pruned_loss=0.05691, over 4882.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2174, pruned_loss=0.03703, over 973352.93 frames.], batch size: 22, lr: 2.92e-04 +2022-05-05 21:44:31,934 INFO [train.py:715] (6/8) Epoch 7, batch 18400, loss[loss=0.1406, simple_loss=0.2068, pruned_loss=0.03726, over 4911.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03723, over 972967.90 frames.], batch size: 23, lr: 2.92e-04 +2022-05-05 21:45:11,806 INFO [train.py:715] (6/8) Epoch 7, batch 18450, loss[loss=0.1433, simple_loss=0.206, pruned_loss=0.04035, over 4956.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2184, pruned_loss=0.03747, over 973327.23 frames.], batch size: 35, lr: 2.92e-04 +2022-05-05 21:45:50,716 INFO [train.py:715] (6/8) Epoch 7, batch 18500, loss[loss=0.1261, simple_loss=0.1965, pruned_loss=0.02785, over 4979.00 frames.], tot_loss[loss=0.147, simple_loss=0.219, pruned_loss=0.03745, over 973289.37 frames.], batch size: 14, lr: 2.92e-04 +2022-05-05 21:46:29,379 INFO [train.py:715] (6/8) Epoch 7, batch 18550, loss[loss=0.1425, simple_loss=0.2142, pruned_loss=0.03538, over 4800.00 frames.], tot_loss[loss=0.148, simple_loss=0.2199, pruned_loss=0.03799, over 973116.95 frames.], batch size: 25, lr: 2.92e-04 +2022-05-05 21:47:08,452 INFO [train.py:715] (6/8) Epoch 7, batch 18600, loss[loss=0.1625, simple_loss=0.237, pruned_loss=0.04398, over 4945.00 frames.], tot_loss[loss=0.1482, simple_loss=0.22, pruned_loss=0.03822, over 972996.56 frames.], batch size: 21, lr: 2.92e-04 +2022-05-05 21:47:47,274 INFO [train.py:715] (6/8) Epoch 7, batch 18650, loss[loss=0.1489, simple_loss=0.2257, pruned_loss=0.03602, over 4941.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2195, pruned_loss=0.03786, over 972533.03 frames.], batch size: 23, lr: 2.92e-04 +2022-05-05 21:48:25,126 INFO 
[train.py:715] (6/8) Epoch 7, batch 18700, loss[loss=0.1375, simple_loss=0.2151, pruned_loss=0.02999, over 4895.00 frames.], tot_loss[loss=0.147, simple_loss=0.2194, pruned_loss=0.03728, over 972187.04 frames.], batch size: 17, lr: 2.92e-04 +2022-05-05 21:49:03,390 INFO [train.py:715] (6/8) Epoch 7, batch 18750, loss[loss=0.1537, simple_loss=0.2375, pruned_loss=0.03493, over 4946.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2188, pruned_loss=0.03712, over 972356.47 frames.], batch size: 21, lr: 2.92e-04 +2022-05-05 21:49:42,762 INFO [train.py:715] (6/8) Epoch 7, batch 18800, loss[loss=0.1615, simple_loss=0.2245, pruned_loss=0.04926, over 4861.00 frames.], tot_loss[loss=0.146, simple_loss=0.2181, pruned_loss=0.03696, over 972572.00 frames.], batch size: 16, lr: 2.92e-04 +2022-05-05 21:50:21,362 INFO [train.py:715] (6/8) Epoch 7, batch 18850, loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02842, over 4980.00 frames.], tot_loss[loss=0.1461, simple_loss=0.218, pruned_loss=0.03706, over 973352.05 frames.], batch size: 14, lr: 2.92e-04 +2022-05-05 21:50:59,416 INFO [train.py:715] (6/8) Epoch 7, batch 18900, loss[loss=0.122, simple_loss=0.2011, pruned_loss=0.02141, over 4983.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2193, pruned_loss=0.03756, over 974135.55 frames.], batch size: 28, lr: 2.92e-04 +2022-05-05 21:51:36,461 INFO [train.py:715] (6/8) Epoch 7, batch 18950, loss[loss=0.1879, simple_loss=0.251, pruned_loss=0.0624, over 4939.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2195, pruned_loss=0.03752, over 972888.43 frames.], batch size: 35, lr: 2.92e-04 +2022-05-05 21:52:14,913 INFO [train.py:715] (6/8) Epoch 7, batch 19000, loss[loss=0.1353, simple_loss=0.2214, pruned_loss=0.02464, over 4792.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2191, pruned_loss=0.03721, over 971941.34 frames.], batch size: 14, lr: 2.92e-04 +2022-05-05 21:52:52,513 INFO [train.py:715] (6/8) Epoch 7, batch 19050, loss[loss=0.1224, simple_loss=0.2022, pruned_loss=0.02137, over 4852.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2181, pruned_loss=0.03639, over 972143.16 frames.], batch size: 20, lr: 2.91e-04 +2022-05-05 21:53:30,742 INFO [train.py:715] (6/8) Epoch 7, batch 19100, loss[loss=0.1363, simple_loss=0.2172, pruned_loss=0.02772, over 4965.00 frames.], tot_loss[loss=0.145, simple_loss=0.2175, pruned_loss=0.03628, over 971938.31 frames.], batch size: 24, lr: 2.91e-04 +2022-05-05 21:54:09,412 INFO [train.py:715] (6/8) Epoch 7, batch 19150, loss[loss=0.1284, simple_loss=0.2057, pruned_loss=0.0256, over 4822.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2178, pruned_loss=0.03653, over 972750.58 frames.], batch size: 27, lr: 2.91e-04 +2022-05-05 21:54:47,124 INFO [train.py:715] (6/8) Epoch 7, batch 19200, loss[loss=0.1701, simple_loss=0.2349, pruned_loss=0.05258, over 4869.00 frames.], tot_loss[loss=0.145, simple_loss=0.2173, pruned_loss=0.03636, over 972530.87 frames.], batch size: 20, lr: 2.91e-04 +2022-05-05 21:55:24,840 INFO [train.py:715] (6/8) Epoch 7, batch 19250, loss[loss=0.1581, simple_loss=0.2344, pruned_loss=0.04093, over 4902.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2178, pruned_loss=0.03659, over 973413.30 frames.], batch size: 23, lr: 2.91e-04 +2022-05-05 21:56:02,877 INFO [train.py:715] (6/8) Epoch 7, batch 19300, loss[loss=0.1319, simple_loss=0.1969, pruned_loss=0.03349, over 4819.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03649, over 972037.67 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 21:56:41,357 INFO [train.py:715] (6/8) Epoch 
7, batch 19350, loss[loss=0.1631, simple_loss=0.2342, pruned_loss=0.04606, over 4907.00 frames.], tot_loss[loss=0.144, simple_loss=0.2163, pruned_loss=0.03587, over 971860.44 frames.], batch size: 39, lr: 2.91e-04 +2022-05-05 21:57:18,829 INFO [train.py:715] (6/8) Epoch 7, batch 19400, loss[loss=0.1207, simple_loss=0.1946, pruned_loss=0.02337, over 4816.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2167, pruned_loss=0.03594, over 971505.93 frames.], batch size: 26, lr: 2.91e-04 +2022-05-05 21:57:56,262 INFO [train.py:715] (6/8) Epoch 7, batch 19450, loss[loss=0.1422, simple_loss=0.2236, pruned_loss=0.03033, over 4912.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2164, pruned_loss=0.03596, over 972304.52 frames.], batch size: 29, lr: 2.91e-04 +2022-05-05 21:58:34,321 INFO [train.py:715] (6/8) Epoch 7, batch 19500, loss[loss=0.1774, simple_loss=0.2451, pruned_loss=0.05485, over 4931.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2178, pruned_loss=0.0368, over 972222.30 frames.], batch size: 18, lr: 2.91e-04 +2022-05-05 21:59:11,841 INFO [train.py:715] (6/8) Epoch 7, batch 19550, loss[loss=0.1417, simple_loss=0.2096, pruned_loss=0.03687, over 4752.00 frames.], tot_loss[loss=0.1464, simple_loss=0.218, pruned_loss=0.03741, over 972075.08 frames.], batch size: 19, lr: 2.91e-04 +2022-05-05 21:59:49,562 INFO [train.py:715] (6/8) Epoch 7, batch 19600, loss[loss=0.1515, simple_loss=0.2153, pruned_loss=0.04389, over 4685.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2173, pruned_loss=0.03722, over 971629.89 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 22:00:27,123 INFO [train.py:715] (6/8) Epoch 7, batch 19650, loss[loss=0.1295, simple_loss=0.1906, pruned_loss=0.03421, over 4855.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2169, pruned_loss=0.03665, over 971605.99 frames.], batch size: 13, lr: 2.91e-04 +2022-05-05 22:01:05,538 INFO [train.py:715] (6/8) Epoch 7, batch 19700, loss[loss=0.14, simple_loss=0.2126, pruned_loss=0.03373, over 4806.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2169, pruned_loss=0.03681, over 971346.24 frames.], batch size: 21, lr: 2.91e-04 +2022-05-05 22:01:42,747 INFO [train.py:715] (6/8) Epoch 7, batch 19750, loss[loss=0.1626, simple_loss=0.23, pruned_loss=0.04761, over 4823.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2168, pruned_loss=0.037, over 971005.75 frames.], batch size: 25, lr: 2.91e-04 +2022-05-05 22:02:20,221 INFO [train.py:715] (6/8) Epoch 7, batch 19800, loss[loss=0.1564, simple_loss=0.2293, pruned_loss=0.04168, over 4809.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2173, pruned_loss=0.03697, over 971125.11 frames.], batch size: 26, lr: 2.91e-04 +2022-05-05 22:02:58,032 INFO [train.py:715] (6/8) Epoch 7, batch 19850, loss[loss=0.1788, simple_loss=0.2454, pruned_loss=0.05614, over 4832.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2174, pruned_loss=0.03707, over 970938.08 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 22:03:35,857 INFO [train.py:715] (6/8) Epoch 7, batch 19900, loss[loss=0.1559, simple_loss=0.2195, pruned_loss=0.04613, over 4790.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2183, pruned_loss=0.03802, over 969614.58 frames.], batch size: 14, lr: 2.91e-04 +2022-05-05 22:04:12,820 INFO [train.py:715] (6/8) Epoch 7, batch 19950, loss[loss=0.1483, simple_loss=0.2166, pruned_loss=0.04004, over 4844.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2185, pruned_loss=0.03768, over 969568.81 frames.], batch size: 20, lr: 2.91e-04 +2022-05-05 22:04:50,696 INFO [train.py:715] (6/8) Epoch 7, batch 20000, 
loss[loss=0.1571, simple_loss=0.2279, pruned_loss=0.0432, over 4791.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03722, over 970509.97 frames.], batch size: 18, lr: 2.91e-04 +2022-05-05 22:05:28,966 INFO [train.py:715] (6/8) Epoch 7, batch 20050, loss[loss=0.1645, simple_loss=0.2285, pruned_loss=0.05023, over 4705.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2172, pruned_loss=0.03686, over 970951.05 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 22:06:06,296 INFO [train.py:715] (6/8) Epoch 7, batch 20100, loss[loss=0.1361, simple_loss=0.2052, pruned_loss=0.03344, over 4935.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2178, pruned_loss=0.03729, over 972008.32 frames.], batch size: 23, lr: 2.91e-04 +2022-05-05 22:06:43,748 INFO [train.py:715] (6/8) Epoch 7, batch 20150, loss[loss=0.1445, simple_loss=0.2148, pruned_loss=0.03708, over 4829.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2174, pruned_loss=0.03677, over 972066.41 frames.], batch size: 27, lr: 2.91e-04 +2022-05-05 22:07:21,914 INFO [train.py:715] (6/8) Epoch 7, batch 20200, loss[loss=0.1498, simple_loss=0.2264, pruned_loss=0.03655, over 4793.00 frames.], tot_loss[loss=0.1454, simple_loss=0.217, pruned_loss=0.03689, over 972443.67 frames.], batch size: 24, lr: 2.91e-04 +2022-05-05 22:08:00,052 INFO [train.py:715] (6/8) Epoch 7, batch 20250, loss[loss=0.1457, simple_loss=0.2172, pruned_loss=0.03713, over 4753.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2165, pruned_loss=0.03662, over 972268.80 frames.], batch size: 19, lr: 2.91e-04 +2022-05-05 22:08:37,464 INFO [train.py:715] (6/8) Epoch 7, batch 20300, loss[loss=0.1399, simple_loss=0.2155, pruned_loss=0.03214, over 4807.00 frames.], tot_loss[loss=0.144, simple_loss=0.2162, pruned_loss=0.0359, over 972395.45 frames.], batch size: 26, lr: 2.91e-04 +2022-05-05 22:09:17,216 INFO [train.py:715] (6/8) Epoch 7, batch 20350, loss[loss=0.1241, simple_loss=0.2115, pruned_loss=0.01836, over 4814.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2169, pruned_loss=0.03662, over 972280.23 frames.], batch size: 27, lr: 2.91e-04 +2022-05-05 22:09:55,131 INFO [train.py:715] (6/8) Epoch 7, batch 20400, loss[loss=0.1428, simple_loss=0.2188, pruned_loss=0.03335, over 4945.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2172, pruned_loss=0.03697, over 971969.46 frames.], batch size: 29, lr: 2.91e-04 +2022-05-05 22:10:33,045 INFO [train.py:715] (6/8) Epoch 7, batch 20450, loss[loss=0.1379, simple_loss=0.2107, pruned_loss=0.03257, over 4826.00 frames.], tot_loss[loss=0.1455, simple_loss=0.217, pruned_loss=0.03702, over 972507.76 frames.], batch size: 26, lr: 2.91e-04 +2022-05-05 22:11:10,608 INFO [train.py:715] (6/8) Epoch 7, batch 20500, loss[loss=0.1268, simple_loss=0.1977, pruned_loss=0.02799, over 4744.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2189, pruned_loss=0.0376, over 972250.54 frames.], batch size: 16, lr: 2.91e-04 +2022-05-05 22:11:48,692 INFO [train.py:715] (6/8) Epoch 7, batch 20550, loss[loss=0.1273, simple_loss=0.201, pruned_loss=0.02677, over 4777.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2188, pruned_loss=0.03746, over 972239.12 frames.], batch size: 18, lr: 2.91e-04 +2022-05-05 22:12:26,842 INFO [train.py:715] (6/8) Epoch 7, batch 20600, loss[loss=0.1403, simple_loss=0.221, pruned_loss=0.0298, over 4915.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2189, pruned_loss=0.03719, over 971600.11 frames.], batch size: 39, lr: 2.91e-04 +2022-05-05 22:13:04,070 INFO [train.py:715] (6/8) Epoch 7, batch 20650, loss[loss=0.1138, 
simple_loss=0.1714, pruned_loss=0.0281, over 4857.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2179, pruned_loss=0.03694, over 972170.90 frames.], batch size: 32, lr: 2.91e-04 +2022-05-05 22:13:41,770 INFO [train.py:715] (6/8) Epoch 7, batch 20700, loss[loss=0.1431, simple_loss=0.222, pruned_loss=0.03217, over 4935.00 frames.], tot_loss[loss=0.1451, simple_loss=0.217, pruned_loss=0.0366, over 972033.28 frames.], batch size: 23, lr: 2.91e-04 +2022-05-05 22:14:19,741 INFO [train.py:715] (6/8) Epoch 7, batch 20750, loss[loss=0.1426, simple_loss=0.203, pruned_loss=0.04108, over 4774.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2177, pruned_loss=0.0366, over 972622.61 frames.], batch size: 17, lr: 2.91e-04 +2022-05-05 22:14:57,388 INFO [train.py:715] (6/8) Epoch 7, batch 20800, loss[loss=0.1368, simple_loss=0.2047, pruned_loss=0.03442, over 4756.00 frames.], tot_loss[loss=0.1449, simple_loss=0.217, pruned_loss=0.03641, over 972616.40 frames.], batch size: 14, lr: 2.91e-04 +2022-05-05 22:15:34,691 INFO [train.py:715] (6/8) Epoch 7, batch 20850, loss[loss=0.14, simple_loss=0.2101, pruned_loss=0.03496, over 4964.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2157, pruned_loss=0.03595, over 973092.62 frames.], batch size: 28, lr: 2.90e-04 +2022-05-05 22:16:13,017 INFO [train.py:715] (6/8) Epoch 7, batch 20900, loss[loss=0.1085, simple_loss=0.1777, pruned_loss=0.01961, over 4751.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2162, pruned_loss=0.03579, over 972978.99 frames.], batch size: 12, lr: 2.90e-04 +2022-05-05 22:16:50,926 INFO [train.py:715] (6/8) Epoch 7, batch 20950, loss[loss=0.1361, simple_loss=0.2163, pruned_loss=0.02792, over 4792.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2163, pruned_loss=0.03569, over 972794.69 frames.], batch size: 24, lr: 2.90e-04 +2022-05-05 22:17:29,183 INFO [train.py:715] (6/8) Epoch 7, batch 21000, loss[loss=0.1162, simple_loss=0.1873, pruned_loss=0.02259, over 4759.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2169, pruned_loss=0.03642, over 973600.08 frames.], batch size: 19, lr: 2.90e-04 +2022-05-05 22:17:29,184 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 22:17:39,072 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.1082, simple_loss=0.193, pruned_loss=0.01169, over 914524.00 frames. 
+2022-05-05 22:18:17,066 INFO [train.py:715] (6/8) Epoch 7, batch 21050, loss[loss=0.1405, simple_loss=0.2177, pruned_loss=0.03166, over 4855.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03658, over 973514.68 frames.], batch size: 32, lr: 2.90e-04 +2022-05-05 22:18:54,964 INFO [train.py:715] (6/8) Epoch 7, batch 21100, loss[loss=0.1445, simple_loss=0.2216, pruned_loss=0.03371, over 4986.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2179, pruned_loss=0.03688, over 973116.21 frames.], batch size: 15, lr: 2.90e-04 +2022-05-05 22:19:32,990 INFO [train.py:715] (6/8) Epoch 7, batch 21150, loss[loss=0.1211, simple_loss=0.1905, pruned_loss=0.02583, over 4707.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2181, pruned_loss=0.0369, over 972517.05 frames.], batch size: 15, lr: 2.90e-04 +2022-05-05 22:20:10,777 INFO [train.py:715] (6/8) Epoch 7, batch 21200, loss[loss=0.1564, simple_loss=0.2135, pruned_loss=0.04968, over 4741.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03725, over 971764.03 frames.], batch size: 16, lr: 2.90e-04 +2022-05-05 22:20:49,001 INFO [train.py:715] (6/8) Epoch 7, batch 21250, loss[loss=0.1239, simple_loss=0.2059, pruned_loss=0.02094, over 4651.00 frames.], tot_loss[loss=0.147, simple_loss=0.2192, pruned_loss=0.03745, over 972368.67 frames.], batch size: 13, lr: 2.90e-04 +2022-05-05 22:21:27,130 INFO [train.py:715] (6/8) Epoch 7, batch 21300, loss[loss=0.1501, simple_loss=0.2357, pruned_loss=0.03224, over 4854.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2184, pruned_loss=0.03694, over 972546.10 frames.], batch size: 20, lr: 2.90e-04 +2022-05-05 22:22:04,500 INFO [train.py:715] (6/8) Epoch 7, batch 21350, loss[loss=0.1548, simple_loss=0.2143, pruned_loss=0.04765, over 4770.00 frames.], tot_loss[loss=0.1458, simple_loss=0.218, pruned_loss=0.03675, over 971724.28 frames.], batch size: 14, lr: 2.90e-04 +2022-05-05 22:22:42,287 INFO [train.py:715] (6/8) Epoch 7, batch 21400, loss[loss=0.1536, simple_loss=0.2323, pruned_loss=0.03749, over 4940.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2176, pruned_loss=0.03658, over 971860.30 frames.], batch size: 23, lr: 2.90e-04 +2022-05-05 22:23:20,547 INFO [train.py:715] (6/8) Epoch 7, batch 21450, loss[loss=0.1236, simple_loss=0.2094, pruned_loss=0.01894, over 4636.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2168, pruned_loss=0.03664, over 970865.31 frames.], batch size: 13, lr: 2.90e-04 +2022-05-05 22:23:58,722 INFO [train.py:715] (6/8) Epoch 7, batch 21500, loss[loss=0.134, simple_loss=0.2087, pruned_loss=0.02969, over 4957.00 frames.], tot_loss[loss=0.145, simple_loss=0.2169, pruned_loss=0.03653, over 970757.12 frames.], batch size: 24, lr: 2.90e-04 +2022-05-05 22:24:36,575 INFO [train.py:715] (6/8) Epoch 7, batch 21550, loss[loss=0.1933, simple_loss=0.2568, pruned_loss=0.06495, over 4837.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2177, pruned_loss=0.03735, over 970993.54 frames.], batch size: 15, lr: 2.90e-04 +2022-05-05 22:25:14,832 INFO [train.py:715] (6/8) Epoch 7, batch 21600, loss[loss=0.1375, simple_loss=0.2148, pruned_loss=0.03009, over 4956.00 frames.], tot_loss[loss=0.1465, simple_loss=0.218, pruned_loss=0.0375, over 971787.83 frames.], batch size: 24, lr: 2.90e-04 +2022-05-05 22:25:53,302 INFO [train.py:715] (6/8) Epoch 7, batch 21650, loss[loss=0.1361, simple_loss=0.2057, pruned_loss=0.03322, over 4935.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2185, pruned_loss=0.0374, over 971883.20 frames.], batch size: 29, lr: 2.90e-04 +2022-05-05 22:26:30,667 
INFO [train.py:715] (6/8) Epoch 7, batch 21700, loss[loss=0.1394, simple_loss=0.2033, pruned_loss=0.03778, over 4761.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2183, pruned_loss=0.03751, over 972618.42 frames.], batch size: 14, lr: 2.90e-04 +2022-05-05 22:27:08,758 INFO [train.py:715] (6/8) Epoch 7, batch 21750, loss[loss=0.1429, simple_loss=0.2239, pruned_loss=0.03099, over 4703.00 frames.], tot_loss[loss=0.1463, simple_loss=0.218, pruned_loss=0.0373, over 972260.50 frames.], batch size: 15, lr: 2.90e-04 +2022-05-05 22:27:46,872 INFO [train.py:715] (6/8) Epoch 7, batch 21800, loss[loss=0.1246, simple_loss=0.198, pruned_loss=0.02562, over 4974.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2176, pruned_loss=0.03727, over 972472.90 frames.], batch size: 24, lr: 2.90e-04 +2022-05-05 22:28:24,961 INFO [train.py:715] (6/8) Epoch 7, batch 21850, loss[loss=0.1263, simple_loss=0.192, pruned_loss=0.03034, over 4825.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2173, pruned_loss=0.03696, over 972775.68 frames.], batch size: 13, lr: 2.90e-04 +2022-05-05 22:29:02,873 INFO [train.py:715] (6/8) Epoch 7, batch 21900, loss[loss=0.153, simple_loss=0.2188, pruned_loss=0.04357, over 4934.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2181, pruned_loss=0.03746, over 973280.33 frames.], batch size: 29, lr: 2.90e-04 +2022-05-05 22:29:40,816 INFO [train.py:715] (6/8) Epoch 7, batch 21950, loss[loss=0.1097, simple_loss=0.1872, pruned_loss=0.0161, over 4828.00 frames.], tot_loss[loss=0.146, simple_loss=0.2179, pruned_loss=0.03709, over 973426.15 frames.], batch size: 13, lr: 2.90e-04 +2022-05-05 22:30:19,542 INFO [train.py:715] (6/8) Epoch 7, batch 22000, loss[loss=0.1133, simple_loss=0.1892, pruned_loss=0.01866, over 4774.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2177, pruned_loss=0.03692, over 973006.09 frames.], batch size: 19, lr: 2.90e-04 +2022-05-05 22:30:57,078 INFO [train.py:715] (6/8) Epoch 7, batch 22050, loss[loss=0.1364, simple_loss=0.2114, pruned_loss=0.03074, over 4797.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2179, pruned_loss=0.03722, over 972288.90 frames.], batch size: 21, lr: 2.90e-04 +2022-05-05 22:31:35,218 INFO [train.py:715] (6/8) Epoch 7, batch 22100, loss[loss=0.147, simple_loss=0.2149, pruned_loss=0.03953, over 4779.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2173, pruned_loss=0.03706, over 972409.00 frames.], batch size: 17, lr: 2.90e-04 +2022-05-05 22:32:13,474 INFO [train.py:715] (6/8) Epoch 7, batch 22150, loss[loss=0.1397, simple_loss=0.2023, pruned_loss=0.03852, over 4985.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2175, pruned_loss=0.03746, over 972145.75 frames.], batch size: 25, lr: 2.90e-04 +2022-05-05 22:32:51,984 INFO [train.py:715] (6/8) Epoch 7, batch 22200, loss[loss=0.1732, simple_loss=0.2616, pruned_loss=0.04241, over 4921.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2181, pruned_loss=0.03756, over 972605.73 frames.], batch size: 18, lr: 2.90e-04 +2022-05-05 22:33:29,481 INFO [train.py:715] (6/8) Epoch 7, batch 22250, loss[loss=0.1682, simple_loss=0.2393, pruned_loss=0.04854, over 4855.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2173, pruned_loss=0.03731, over 971209.75 frames.], batch size: 32, lr: 2.90e-04 +2022-05-05 22:34:07,237 INFO [train.py:715] (6/8) Epoch 7, batch 22300, loss[loss=0.1272, simple_loss=0.2027, pruned_loss=0.02585, over 4975.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2168, pruned_loss=0.03675, over 971742.27 frames.], batch size: 25, lr: 2.90e-04 +2022-05-05 22:34:45,534 INFO [train.py:715] (6/8) 
Epoch 7, batch 22350, loss[loss=0.1408, simple_loss=0.2122, pruned_loss=0.03469, over 4783.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2172, pruned_loss=0.03689, over 971660.32 frames.], batch size: 17, lr: 2.90e-04 +2022-05-05 22:35:22,813 INFO [train.py:715] (6/8) Epoch 7, batch 22400, loss[loss=0.1098, simple_loss=0.1689, pruned_loss=0.02534, over 4746.00 frames.], tot_loss[loss=0.145, simple_loss=0.2169, pruned_loss=0.03654, over 971892.61 frames.], batch size: 12, lr: 2.90e-04 +2022-05-05 22:36:00,504 INFO [train.py:715] (6/8) Epoch 7, batch 22450, loss[loss=0.1489, simple_loss=0.2198, pruned_loss=0.03903, over 4817.00 frames.], tot_loss[loss=0.1451, simple_loss=0.217, pruned_loss=0.03658, over 971385.07 frames.], batch size: 21, lr: 2.90e-04 +2022-05-05 22:36:38,649 INFO [train.py:715] (6/8) Epoch 7, batch 22500, loss[loss=0.1247, simple_loss=0.1953, pruned_loss=0.02707, over 4914.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2162, pruned_loss=0.03609, over 971021.85 frames.], batch size: 29, lr: 2.90e-04 +2022-05-05 22:37:16,690 INFO [train.py:715] (6/8) Epoch 7, batch 22550, loss[loss=0.1328, simple_loss=0.211, pruned_loss=0.0273, over 4923.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2158, pruned_loss=0.03603, over 970880.48 frames.], batch size: 18, lr: 2.90e-04 +2022-05-05 22:37:54,356 INFO [train.py:715] (6/8) Epoch 7, batch 22600, loss[loss=0.1338, simple_loss=0.2005, pruned_loss=0.03359, over 4820.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2158, pruned_loss=0.03598, over 971378.27 frames.], batch size: 13, lr: 2.90e-04 +2022-05-05 22:38:32,389 INFO [train.py:715] (6/8) Epoch 7, batch 22650, loss[loss=0.1436, simple_loss=0.2034, pruned_loss=0.04191, over 4783.00 frames.], tot_loss[loss=0.144, simple_loss=0.2165, pruned_loss=0.03573, over 971120.13 frames.], batch size: 18, lr: 2.90e-04 +2022-05-05 22:39:10,751 INFO [train.py:715] (6/8) Epoch 7, batch 22700, loss[loss=0.153, simple_loss=0.2235, pruned_loss=0.04128, over 4957.00 frames.], tot_loss[loss=0.145, simple_loss=0.2172, pruned_loss=0.0364, over 971329.43 frames.], batch size: 15, lr: 2.89e-04 +2022-05-05 22:39:48,096 INFO [train.py:715] (6/8) Epoch 7, batch 22750, loss[loss=0.1327, simple_loss=0.1988, pruned_loss=0.03327, over 4794.00 frames.], tot_loss[loss=0.145, simple_loss=0.2174, pruned_loss=0.03629, over 971547.29 frames.], batch size: 12, lr: 2.89e-04 +2022-05-05 22:40:25,727 INFO [train.py:715] (6/8) Epoch 7, batch 22800, loss[loss=0.1241, simple_loss=0.1926, pruned_loss=0.02778, over 4792.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2174, pruned_loss=0.03606, over 971884.56 frames.], batch size: 17, lr: 2.89e-04 +2022-05-05 22:41:03,918 INFO [train.py:715] (6/8) Epoch 7, batch 22850, loss[loss=0.1591, simple_loss=0.2339, pruned_loss=0.04213, over 4791.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2168, pruned_loss=0.03581, over 972240.61 frames.], batch size: 24, lr: 2.89e-04 +2022-05-05 22:41:41,491 INFO [train.py:715] (6/8) Epoch 7, batch 22900, loss[loss=0.1253, simple_loss=0.1955, pruned_loss=0.02756, over 4809.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2176, pruned_loss=0.03659, over 971275.78 frames.], batch size: 12, lr: 2.89e-04 +2022-05-05 22:42:19,139 INFO [train.py:715] (6/8) Epoch 7, batch 22950, loss[loss=0.1706, simple_loss=0.2393, pruned_loss=0.05098, over 4969.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2174, pruned_loss=0.03635, over 971719.43 frames.], batch size: 24, lr: 2.89e-04 +2022-05-05 22:42:57,044 INFO [train.py:715] (6/8) Epoch 7, batch 23000, 
loss[loss=0.1639, simple_loss=0.219, pruned_loss=0.05444, over 4891.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2174, pruned_loss=0.03653, over 971579.92 frames.], batch size: 16, lr: 2.89e-04 +2022-05-05 22:43:35,192 INFO [train.py:715] (6/8) Epoch 7, batch 23050, loss[loss=0.1808, simple_loss=0.2518, pruned_loss=0.05487, over 4700.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2167, pruned_loss=0.03656, over 971911.95 frames.], batch size: 15, lr: 2.89e-04 +2022-05-05 22:44:12,639 INFO [train.py:715] (6/8) Epoch 7, batch 23100, loss[loss=0.1229, simple_loss=0.196, pruned_loss=0.02491, over 4962.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2162, pruned_loss=0.03603, over 971736.24 frames.], batch size: 15, lr: 2.89e-04 +2022-05-05 22:44:49,935 INFO [train.py:715] (6/8) Epoch 7, batch 23150, loss[loss=0.1475, simple_loss=0.2159, pruned_loss=0.03956, over 4733.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2167, pruned_loss=0.03644, over 972222.42 frames.], batch size: 16, lr: 2.89e-04 +2022-05-05 22:45:28,254 INFO [train.py:715] (6/8) Epoch 7, batch 23200, loss[loss=0.1697, simple_loss=0.2479, pruned_loss=0.04573, over 4808.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2173, pruned_loss=0.03704, over 971912.82 frames.], batch size: 25, lr: 2.89e-04 +2022-05-05 22:46:06,319 INFO [train.py:715] (6/8) Epoch 7, batch 23250, loss[loss=0.1401, simple_loss=0.21, pruned_loss=0.03507, over 4919.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2172, pruned_loss=0.03701, over 972305.76 frames.], batch size: 19, lr: 2.89e-04 +2022-05-05 22:46:43,801 INFO [train.py:715] (6/8) Epoch 7, batch 23300, loss[loss=0.1509, simple_loss=0.2186, pruned_loss=0.04154, over 4963.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2166, pruned_loss=0.03649, over 972472.68 frames.], batch size: 21, lr: 2.89e-04 +2022-05-05 22:47:22,579 INFO [train.py:715] (6/8) Epoch 7, batch 23350, loss[loss=0.1545, simple_loss=0.2197, pruned_loss=0.04467, over 4840.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2165, pruned_loss=0.03661, over 972937.11 frames.], batch size: 34, lr: 2.89e-04 +2022-05-05 22:48:01,691 INFO [train.py:715] (6/8) Epoch 7, batch 23400, loss[loss=0.1332, simple_loss=0.2139, pruned_loss=0.02622, over 4744.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2161, pruned_loss=0.03651, over 971879.64 frames.], batch size: 16, lr: 2.89e-04 +2022-05-05 22:48:40,124 INFO [train.py:715] (6/8) Epoch 7, batch 23450, loss[loss=0.1516, simple_loss=0.2253, pruned_loss=0.03896, over 4894.00 frames.], tot_loss[loss=0.144, simple_loss=0.2155, pruned_loss=0.03621, over 972086.55 frames.], batch size: 22, lr: 2.89e-04 +2022-05-05 22:49:18,248 INFO [train.py:715] (6/8) Epoch 7, batch 23500, loss[loss=0.1316, simple_loss=0.209, pruned_loss=0.02705, over 4883.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2152, pruned_loss=0.0362, over 972584.26 frames.], batch size: 22, lr: 2.89e-04 +2022-05-05 22:49:56,229 INFO [train.py:715] (6/8) Epoch 7, batch 23550, loss[loss=0.1529, simple_loss=0.2273, pruned_loss=0.03926, over 4886.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2148, pruned_loss=0.03611, over 971818.80 frames.], batch size: 16, lr: 2.89e-04 +2022-05-05 22:50:34,438 INFO [train.py:715] (6/8) Epoch 7, batch 23600, loss[loss=0.1262, simple_loss=0.1973, pruned_loss=0.02755, over 4824.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2151, pruned_loss=0.03619, over 970956.06 frames.], batch size: 15, lr: 2.89e-04 +2022-05-05 22:51:11,416 INFO [train.py:715] (6/8) Epoch 7, batch 23650, loss[loss=0.1411, 
simple_loss=0.2145, pruned_loss=0.03389, over 4881.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2156, pruned_loss=0.03637, over 971981.97 frames.], batch size: 16, lr: 2.89e-04 +2022-05-05 22:51:49,263 INFO [train.py:715] (6/8) Epoch 7, batch 23700, loss[loss=0.1209, simple_loss=0.1891, pruned_loss=0.02641, over 4759.00 frames.], tot_loss[loss=0.1444, simple_loss=0.216, pruned_loss=0.03641, over 972291.25 frames.], batch size: 19, lr: 2.89e-04 +2022-05-05 22:52:27,396 INFO [train.py:715] (6/8) Epoch 7, batch 23750, loss[loss=0.1828, simple_loss=0.2473, pruned_loss=0.05912, over 4858.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.0362, over 973180.21 frames.], batch size: 32, lr: 2.89e-04 +2022-05-05 22:53:04,577 INFO [train.py:715] (6/8) Epoch 7, batch 23800, loss[loss=0.1481, simple_loss=0.2161, pruned_loss=0.04006, over 4827.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2164, pruned_loss=0.0364, over 972790.97 frames.], batch size: 25, lr: 2.89e-04 +2022-05-05 22:53:42,352 INFO [train.py:715] (6/8) Epoch 7, batch 23850, loss[loss=0.1664, simple_loss=0.2305, pruned_loss=0.05118, over 4765.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2171, pruned_loss=0.03649, over 973116.04 frames.], batch size: 18, lr: 2.89e-04 +2022-05-05 22:54:21,019 INFO [train.py:715] (6/8) Epoch 7, batch 23900, loss[loss=0.1226, simple_loss=0.1965, pruned_loss=0.02436, over 4765.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03656, over 973289.71 frames.], batch size: 14, lr: 2.89e-04 +2022-05-05 22:54:59,165 INFO [train.py:715] (6/8) Epoch 7, batch 23950, loss[loss=0.1266, simple_loss=0.2044, pruned_loss=0.02442, over 4785.00 frames.], tot_loss[loss=0.1447, simple_loss=0.217, pruned_loss=0.03622, over 973482.66 frames.], batch size: 18, lr: 2.89e-04 +2022-05-05 22:55:36,638 INFO [train.py:715] (6/8) Epoch 7, batch 24000, loss[loss=0.105, simple_loss=0.1763, pruned_loss=0.01681, over 4770.00 frames.], tot_loss[loss=0.144, simple_loss=0.2162, pruned_loss=0.03589, over 972902.67 frames.], batch size: 12, lr: 2.89e-04 +2022-05-05 22:55:36,638 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 22:55:46,187 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.108, simple_loss=0.1929, pruned_loss=0.01156, over 914524.00 frames. 
+2022-05-05 22:56:23,729 INFO [train.py:715] (6/8) Epoch 7, batch 24050, loss[loss=0.1678, simple_loss=0.2304, pruned_loss=0.05265, over 4971.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2165, pruned_loss=0.03611, over 972972.57 frames.], batch size: 15, lr: 2.89e-04 +2022-05-05 22:57:02,033 INFO [train.py:715] (6/8) Epoch 7, batch 24100, loss[loss=0.1708, simple_loss=0.2379, pruned_loss=0.05191, over 4958.00 frames.], tot_loss[loss=0.145, simple_loss=0.2168, pruned_loss=0.03656, over 973134.21 frames.], batch size: 35, lr: 2.89e-04 +2022-05-05 22:57:40,437 INFO [train.py:715] (6/8) Epoch 7, batch 24150, loss[loss=0.151, simple_loss=0.2234, pruned_loss=0.03929, over 4876.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2171, pruned_loss=0.03673, over 972077.16 frames.], batch size: 30, lr: 2.89e-04 +2022-05-05 22:58:18,172 INFO [train.py:715] (6/8) Epoch 7, batch 24200, loss[loss=0.1444, simple_loss=0.2259, pruned_loss=0.0314, over 4980.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2176, pruned_loss=0.0369, over 972392.85 frames.], batch size: 25, lr: 2.89e-04 +2022-05-05 22:58:55,939 INFO [train.py:715] (6/8) Epoch 7, batch 24250, loss[loss=0.1445, simple_loss=0.2197, pruned_loss=0.03462, over 4801.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2184, pruned_loss=0.03767, over 972041.97 frames.], batch size: 24, lr: 2.89e-04 +2022-05-05 22:59:34,583 INFO [train.py:715] (6/8) Epoch 7, batch 24300, loss[loss=0.1419, simple_loss=0.2088, pruned_loss=0.03753, over 4804.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2187, pruned_loss=0.03751, over 971602.59 frames.], batch size: 13, lr: 2.89e-04 +2022-05-05 23:00:12,423 INFO [train.py:715] (6/8) Epoch 7, batch 24350, loss[loss=0.1675, simple_loss=0.2402, pruned_loss=0.04738, over 4785.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2177, pruned_loss=0.03722, over 972595.40 frames.], batch size: 17, lr: 2.89e-04 +2022-05-05 23:00:50,090 INFO [train.py:715] (6/8) Epoch 7, batch 24400, loss[loss=0.1443, simple_loss=0.2096, pruned_loss=0.03953, over 4920.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2173, pruned_loss=0.0371, over 973232.83 frames.], batch size: 18, lr: 2.89e-04 +2022-05-05 23:01:28,244 INFO [train.py:715] (6/8) Epoch 7, batch 24450, loss[loss=0.1682, simple_loss=0.2301, pruned_loss=0.05316, over 4637.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2175, pruned_loss=0.03706, over 973017.62 frames.], batch size: 13, lr: 2.89e-04 +2022-05-05 23:02:06,216 INFO [train.py:715] (6/8) Epoch 7, batch 24500, loss[loss=0.1522, simple_loss=0.2165, pruned_loss=0.0439, over 4692.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2168, pruned_loss=0.03706, over 971866.68 frames.], batch size: 15, lr: 2.89e-04 +2022-05-05 23:02:43,831 INFO [train.py:715] (6/8) Epoch 7, batch 24550, loss[loss=0.1623, simple_loss=0.2379, pruned_loss=0.04334, over 4902.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2169, pruned_loss=0.0369, over 972622.83 frames.], batch size: 18, lr: 2.88e-04 +2022-05-05 23:03:22,003 INFO [train.py:715] (6/8) Epoch 7, batch 24600, loss[loss=0.1752, simple_loss=0.2461, pruned_loss=0.05215, over 4938.00 frames.], tot_loss[loss=0.145, simple_loss=0.217, pruned_loss=0.03651, over 972841.75 frames.], batch size: 39, lr: 2.88e-04 +2022-05-05 23:04:01,134 INFO [train.py:715] (6/8) Epoch 7, batch 24650, loss[loss=0.1508, simple_loss=0.2257, pruned_loss=0.03792, over 4986.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2172, pruned_loss=0.03652, over 972560.44 frames.], batch size: 25, lr: 2.88e-04 +2022-05-05 23:04:39,591 
INFO [train.py:715] (6/8) Epoch 7, batch 24700, loss[loss=0.1519, simple_loss=0.2279, pruned_loss=0.03799, over 4657.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2178, pruned_loss=0.03692, over 972605.91 frames.], batch size: 13, lr: 2.88e-04 +2022-05-05 23:05:17,712 INFO [train.py:715] (6/8) Epoch 7, batch 24750, loss[loss=0.148, simple_loss=0.2226, pruned_loss=0.03665, over 4969.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.03656, over 972436.32 frames.], batch size: 15, lr: 2.88e-04 +2022-05-05 23:05:56,158 INFO [train.py:715] (6/8) Epoch 7, batch 24800, loss[loss=0.1146, simple_loss=0.1763, pruned_loss=0.02647, over 4788.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2172, pruned_loss=0.03682, over 971786.52 frames.], batch size: 12, lr: 2.88e-04 +2022-05-05 23:06:35,231 INFO [train.py:715] (6/8) Epoch 7, batch 24850, loss[loss=0.1622, simple_loss=0.2222, pruned_loss=0.05108, over 4906.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2182, pruned_loss=0.03746, over 972910.97 frames.], batch size: 17, lr: 2.88e-04 +2022-05-05 23:07:13,829 INFO [train.py:715] (6/8) Epoch 7, batch 24900, loss[loss=0.1615, simple_loss=0.2403, pruned_loss=0.04134, over 4937.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2179, pruned_loss=0.03721, over 972558.25 frames.], batch size: 29, lr: 2.88e-04 +2022-05-05 23:07:53,095 INFO [train.py:715] (6/8) Epoch 7, batch 24950, loss[loss=0.1274, simple_loss=0.1993, pruned_loss=0.02771, over 4859.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2177, pruned_loss=0.03741, over 973095.13 frames.], batch size: 20, lr: 2.88e-04 +2022-05-05 23:08:32,940 INFO [train.py:715] (6/8) Epoch 7, batch 25000, loss[loss=0.1473, simple_loss=0.2227, pruned_loss=0.03596, over 4809.00 frames.], tot_loss[loss=0.1464, simple_loss=0.218, pruned_loss=0.03744, over 972636.02 frames.], batch size: 26, lr: 2.88e-04 +2022-05-05 23:09:12,211 INFO [train.py:715] (6/8) Epoch 7, batch 25050, loss[loss=0.1496, simple_loss=0.2199, pruned_loss=0.03962, over 4750.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2179, pruned_loss=0.03732, over 972670.42 frames.], batch size: 12, lr: 2.88e-04 +2022-05-05 23:09:51,233 INFO [train.py:715] (6/8) Epoch 7, batch 25100, loss[loss=0.1237, simple_loss=0.1955, pruned_loss=0.02591, over 4798.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03706, over 972401.87 frames.], batch size: 14, lr: 2.88e-04 +2022-05-05 23:10:31,401 INFO [train.py:715] (6/8) Epoch 7, batch 25150, loss[loss=0.1548, simple_loss=0.2283, pruned_loss=0.04065, over 4913.00 frames.], tot_loss[loss=0.145, simple_loss=0.2169, pruned_loss=0.03656, over 972270.47 frames.], batch size: 23, lr: 2.88e-04 +2022-05-05 23:11:11,712 INFO [train.py:715] (6/8) Epoch 7, batch 25200, loss[loss=0.1476, simple_loss=0.229, pruned_loss=0.03307, over 4780.00 frames.], tot_loss[loss=0.145, simple_loss=0.2167, pruned_loss=0.0367, over 971542.14 frames.], batch size: 18, lr: 2.88e-04 +2022-05-05 23:11:51,364 INFO [train.py:715] (6/8) Epoch 7, batch 25250, loss[loss=0.1347, simple_loss=0.2145, pruned_loss=0.02745, over 4788.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2178, pruned_loss=0.03672, over 971782.83 frames.], batch size: 17, lr: 2.88e-04 +2022-05-05 23:12:31,935 INFO [train.py:715] (6/8) Epoch 7, batch 25300, loss[loss=0.1666, simple_loss=0.232, pruned_loss=0.05065, over 4742.00 frames.], tot_loss[loss=0.1451, simple_loss=0.217, pruned_loss=0.03661, over 971746.01 frames.], batch size: 19, lr: 2.88e-04 +2022-05-05 23:13:13,666 INFO [train.py:715] (6/8) 
Epoch 7, batch 25350, loss[loss=0.1523, simple_loss=0.2174, pruned_loss=0.04366, over 4788.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2161, pruned_loss=0.03643, over 971011.41 frames.], batch size: 18, lr: 2.88e-04 +2022-05-05 23:13:55,232 INFO [train.py:715] (6/8) Epoch 7, batch 25400, loss[loss=0.138, simple_loss=0.2133, pruned_loss=0.03138, over 4906.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2176, pruned_loss=0.03694, over 971251.22 frames.], batch size: 19, lr: 2.88e-04 +2022-05-05 23:14:36,167 INFO [train.py:715] (6/8) Epoch 7, batch 25450, loss[loss=0.1418, simple_loss=0.2165, pruned_loss=0.03358, over 4984.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2175, pruned_loss=0.037, over 971048.50 frames.], batch size: 25, lr: 2.88e-04 +2022-05-05 23:15:18,369 INFO [train.py:715] (6/8) Epoch 7, batch 25500, loss[loss=0.1251, simple_loss=0.1863, pruned_loss=0.03198, over 4799.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2176, pruned_loss=0.03736, over 970825.04 frames.], batch size: 14, lr: 2.88e-04 +2022-05-05 23:16:00,249 INFO [train.py:715] (6/8) Epoch 7, batch 25550, loss[loss=0.1485, simple_loss=0.2251, pruned_loss=0.0359, over 4991.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2169, pruned_loss=0.03703, over 970853.00 frames.], batch size: 25, lr: 2.88e-04 +2022-05-05 23:16:41,020 INFO [train.py:715] (6/8) Epoch 7, batch 25600, loss[loss=0.1595, simple_loss=0.2301, pruned_loss=0.0445, over 4712.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2188, pruned_loss=0.03735, over 971153.67 frames.], batch size: 15, lr: 2.88e-04 +2022-05-05 23:17:22,271 INFO [train.py:715] (6/8) Epoch 7, batch 25650, loss[loss=0.1211, simple_loss=0.2027, pruned_loss=0.01973, over 4846.00 frames.], tot_loss[loss=0.1458, simple_loss=0.218, pruned_loss=0.03683, over 971477.57 frames.], batch size: 13, lr: 2.88e-04 +2022-05-05 23:18:03,671 INFO [train.py:715] (6/8) Epoch 7, batch 25700, loss[loss=0.1482, simple_loss=0.2331, pruned_loss=0.03169, over 4884.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2174, pruned_loss=0.03654, over 971905.81 frames.], batch size: 22, lr: 2.88e-04 +2022-05-05 23:18:45,505 INFO [train.py:715] (6/8) Epoch 7, batch 25750, loss[loss=0.1511, simple_loss=0.223, pruned_loss=0.03962, over 4845.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03638, over 972252.76 frames.], batch size: 30, lr: 2.88e-04 +2022-05-05 23:19:26,139 INFO [train.py:715] (6/8) Epoch 7, batch 25800, loss[loss=0.1471, simple_loss=0.2242, pruned_loss=0.03498, over 4873.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03639, over 972615.29 frames.], batch size: 16, lr: 2.88e-04 +2022-05-05 23:20:08,461 INFO [train.py:715] (6/8) Epoch 7, batch 25850, loss[loss=0.1367, simple_loss=0.2147, pruned_loss=0.02932, over 4866.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2167, pruned_loss=0.0362, over 972310.40 frames.], batch size: 20, lr: 2.88e-04 +2022-05-05 23:20:50,391 INFO [train.py:715] (6/8) Epoch 7, batch 25900, loss[loss=0.1386, simple_loss=0.2118, pruned_loss=0.03268, over 4905.00 frames.], tot_loss[loss=0.144, simple_loss=0.2162, pruned_loss=0.03592, over 972818.22 frames.], batch size: 19, lr: 2.88e-04 +2022-05-05 23:21:31,305 INFO [train.py:715] (6/8) Epoch 7, batch 25950, loss[loss=0.1469, simple_loss=0.209, pruned_loss=0.04235, over 4751.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2171, pruned_loss=0.03691, over 972651.08 frames.], batch size: 12, lr: 2.88e-04 +2022-05-05 23:22:12,742 INFO [train.py:715] (6/8) Epoch 7, batch 26000, 
loss[loss=0.1302, simple_loss=0.2039, pruned_loss=0.0283, over 4972.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2165, pruned_loss=0.03619, over 972614.93 frames.], batch size: 25, lr: 2.88e-04 +2022-05-05 23:22:54,190 INFO [train.py:715] (6/8) Epoch 7, batch 26050, loss[loss=0.1638, simple_loss=0.2265, pruned_loss=0.05061, over 4778.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2156, pruned_loss=0.03568, over 972263.46 frames.], batch size: 18, lr: 2.88e-04 +2022-05-05 23:23:36,133 INFO [train.py:715] (6/8) Epoch 7, batch 26100, loss[loss=0.1529, simple_loss=0.2332, pruned_loss=0.03628, over 4856.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2148, pruned_loss=0.03532, over 972155.09 frames.], batch size: 32, lr: 2.88e-04 +2022-05-05 23:24:16,485 INFO [train.py:715] (6/8) Epoch 7, batch 26150, loss[loss=0.1402, simple_loss=0.2236, pruned_loss=0.02838, over 4883.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2147, pruned_loss=0.03523, over 971980.63 frames.], batch size: 19, lr: 2.88e-04 +2022-05-05 23:24:57,992 INFO [train.py:715] (6/8) Epoch 7, batch 26200, loss[loss=0.1577, simple_loss=0.2349, pruned_loss=0.04026, over 4911.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2149, pruned_loss=0.03494, over 972080.27 frames.], batch size: 18, lr: 2.88e-04 +2022-05-05 23:25:39,233 INFO [train.py:715] (6/8) Epoch 7, batch 26250, loss[loss=0.1561, simple_loss=0.2263, pruned_loss=0.04295, over 4846.00 frames.], tot_loss[loss=0.1429, simple_loss=0.215, pruned_loss=0.03537, over 971768.29 frames.], batch size: 15, lr: 2.88e-04 +2022-05-05 23:26:19,600 INFO [train.py:715] (6/8) Epoch 7, batch 26300, loss[loss=0.1593, simple_loss=0.2294, pruned_loss=0.04459, over 4879.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2163, pruned_loss=0.03601, over 972200.47 frames.], batch size: 16, lr: 2.88e-04 +2022-05-05 23:26:59,776 INFO [train.py:715] (6/8) Epoch 7, batch 26350, loss[loss=0.144, simple_loss=0.2116, pruned_loss=0.03821, over 4945.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03633, over 973295.05 frames.], batch size: 23, lr: 2.88e-04 +2022-05-05 23:27:40,227 INFO [train.py:715] (6/8) Epoch 7, batch 26400, loss[loss=0.1416, simple_loss=0.2178, pruned_loss=0.03274, over 4926.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2171, pruned_loss=0.03637, over 974205.78 frames.], batch size: 23, lr: 2.87e-04 +2022-05-05 23:28:20,881 INFO [train.py:715] (6/8) Epoch 7, batch 26450, loss[loss=0.1287, simple_loss=0.2062, pruned_loss=0.02559, over 4763.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2169, pruned_loss=0.03648, over 973389.90 frames.], batch size: 19, lr: 2.87e-04 +2022-05-05 23:29:00,624 INFO [train.py:715] (6/8) Epoch 7, batch 26500, loss[loss=0.1293, simple_loss=0.1979, pruned_loss=0.03033, over 4804.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2171, pruned_loss=0.03675, over 972486.49 frames.], batch size: 21, lr: 2.87e-04 +2022-05-05 23:29:40,312 INFO [train.py:715] (6/8) Epoch 7, batch 26550, loss[loss=0.1362, simple_loss=0.2173, pruned_loss=0.02752, over 4753.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2176, pruned_loss=0.03657, over 972657.75 frames.], batch size: 16, lr: 2.87e-04 +2022-05-05 23:30:20,785 INFO [train.py:715] (6/8) Epoch 7, batch 26600, loss[loss=0.1451, simple_loss=0.2243, pruned_loss=0.033, over 4988.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2174, pruned_loss=0.03614, over 973543.97 frames.], batch size: 25, lr: 2.87e-04 +2022-05-05 23:31:00,458 INFO [train.py:715] (6/8) Epoch 7, batch 26650, loss[loss=0.1234, 
simple_loss=0.1923, pruned_loss=0.02725, over 4796.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2178, pruned_loss=0.03624, over 972972.12 frames.], batch size: 12, lr: 2.87e-04 +2022-05-05 23:31:40,550 INFO [train.py:715] (6/8) Epoch 7, batch 26700, loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.03011, over 4932.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2169, pruned_loss=0.03594, over 973063.68 frames.], batch size: 29, lr: 2.87e-04 +2022-05-05 23:32:21,229 INFO [train.py:715] (6/8) Epoch 7, batch 26750, loss[loss=0.1787, simple_loss=0.2457, pruned_loss=0.05584, over 4774.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2176, pruned_loss=0.0366, over 972704.41 frames.], batch size: 17, lr: 2.87e-04 +2022-05-05 23:33:01,189 INFO [train.py:715] (6/8) Epoch 7, batch 26800, loss[loss=0.1444, simple_loss=0.2221, pruned_loss=0.03337, over 4940.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2178, pruned_loss=0.03678, over 972226.59 frames.], batch size: 21, lr: 2.87e-04 +2022-05-05 23:33:40,952 INFO [train.py:715] (6/8) Epoch 7, batch 26850, loss[loss=0.1305, simple_loss=0.2044, pruned_loss=0.02833, over 4985.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.0373, over 972873.83 frames.], batch size: 25, lr: 2.87e-04 +2022-05-05 23:34:21,592 INFO [train.py:715] (6/8) Epoch 7, batch 26900, loss[loss=0.1425, simple_loss=0.2142, pruned_loss=0.03534, over 4966.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2181, pruned_loss=0.03752, over 973132.92 frames.], batch size: 39, lr: 2.87e-04 +2022-05-05 23:35:02,624 INFO [train.py:715] (6/8) Epoch 7, batch 26950, loss[loss=0.1279, simple_loss=0.2122, pruned_loss=0.02181, over 4799.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2173, pruned_loss=0.03675, over 972897.14 frames.], batch size: 21, lr: 2.87e-04 +2022-05-05 23:35:42,957 INFO [train.py:715] (6/8) Epoch 7, batch 27000, loss[loss=0.1317, simple_loss=0.2041, pruned_loss=0.0296, over 4937.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2171, pruned_loss=0.0365, over 973588.36 frames.], batch size: 21, lr: 2.87e-04 +2022-05-05 23:35:42,958 INFO [train.py:733] (6/8) Computing validation loss +2022-05-05 23:35:52,667 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.108, simple_loss=0.1928, pruned_loss=0.01156, over 914524.00 frames. 
+2022-05-05 23:36:33,215 INFO [train.py:715] (6/8) Epoch 7, batch 27050, loss[loss=0.1697, simple_loss=0.2323, pruned_loss=0.05353, over 4814.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2168, pruned_loss=0.03677, over 972804.36 frames.], batch size: 25, lr: 2.87e-04 +2022-05-05 23:37:14,391 INFO [train.py:715] (6/8) Epoch 7, batch 27100, loss[loss=0.1306, simple_loss=0.2169, pruned_loss=0.02222, over 4932.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2162, pruned_loss=0.03652, over 972390.82 frames.], batch size: 29, lr: 2.87e-04 +2022-05-05 23:37:56,261 INFO [train.py:715] (6/8) Epoch 7, batch 27150, loss[loss=0.1837, simple_loss=0.2528, pruned_loss=0.05728, over 4779.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2171, pruned_loss=0.03629, over 972274.11 frames.], batch size: 17, lr: 2.87e-04 +2022-05-05 23:38:37,518 INFO [train.py:715] (6/8) Epoch 7, batch 27200, loss[loss=0.1508, simple_loss=0.2229, pruned_loss=0.03939, over 4944.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2169, pruned_loss=0.03643, over 972033.21 frames.], batch size: 29, lr: 2.87e-04 +2022-05-05 23:39:18,969 INFO [train.py:715] (6/8) Epoch 7, batch 27250, loss[loss=0.1657, simple_loss=0.2355, pruned_loss=0.04798, over 4831.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2176, pruned_loss=0.03649, over 972865.31 frames.], batch size: 15, lr: 2.87e-04 +2022-05-05 23:40:00,823 INFO [train.py:715] (6/8) Epoch 7, batch 27300, loss[loss=0.1732, simple_loss=0.2508, pruned_loss=0.0478, over 4930.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2174, pruned_loss=0.03667, over 972603.05 frames.], batch size: 23, lr: 2.87e-04 +2022-05-05 23:40:41,770 INFO [train.py:715] (6/8) Epoch 7, batch 27350, loss[loss=0.1393, simple_loss=0.219, pruned_loss=0.02976, over 4880.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2173, pruned_loss=0.03673, over 973339.39 frames.], batch size: 22, lr: 2.87e-04 +2022-05-05 23:41:23,060 INFO [train.py:715] (6/8) Epoch 7, batch 27400, loss[loss=0.1414, simple_loss=0.2163, pruned_loss=0.03329, over 4792.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2176, pruned_loss=0.03662, over 973149.57 frames.], batch size: 21, lr: 2.87e-04 +2022-05-05 23:42:04,089 INFO [train.py:715] (6/8) Epoch 7, batch 27450, loss[loss=0.1217, simple_loss=0.1954, pruned_loss=0.02397, over 4895.00 frames.], tot_loss[loss=0.1461, simple_loss=0.218, pruned_loss=0.03708, over 972738.66 frames.], batch size: 19, lr: 2.87e-04 +2022-05-05 23:42:45,310 INFO [train.py:715] (6/8) Epoch 7, batch 27500, loss[loss=0.1674, simple_loss=0.2269, pruned_loss=0.05393, over 4889.00 frames.], tot_loss[loss=0.1457, simple_loss=0.218, pruned_loss=0.03667, over 972984.51 frames.], batch size: 32, lr: 2.87e-04 +2022-05-05 23:43:25,880 INFO [train.py:715] (6/8) Epoch 7, batch 27550, loss[loss=0.1346, simple_loss=0.1981, pruned_loss=0.03551, over 4873.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2193, pruned_loss=0.03777, over 972481.43 frames.], batch size: 16, lr: 2.87e-04 +2022-05-05 23:44:06,403 INFO [train.py:715] (6/8) Epoch 7, batch 27600, loss[loss=0.1398, simple_loss=0.1911, pruned_loss=0.04429, over 4953.00 frames.], tot_loss[loss=0.1464, simple_loss=0.218, pruned_loss=0.0374, over 972477.22 frames.], batch size: 21, lr: 2.87e-04 +2022-05-05 23:44:47,791 INFO [train.py:715] (6/8) Epoch 7, batch 27650, loss[loss=0.1457, simple_loss=0.2205, pruned_loss=0.03545, over 4803.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2182, pruned_loss=0.03771, over 973573.03 frames.], batch size: 21, lr: 2.87e-04 +2022-05-05 23:45:28,511 
INFO [train.py:715] (6/8) Epoch 7, batch 27700, loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03171, over 4925.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2183, pruned_loss=0.03777, over 972891.13 frames.], batch size: 29, lr: 2.87e-04 +2022-05-05 23:46:09,251 INFO [train.py:715] (6/8) Epoch 7, batch 27750, loss[loss=0.1529, simple_loss=0.2209, pruned_loss=0.04241, over 4773.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2174, pruned_loss=0.03734, over 972872.79 frames.], batch size: 14, lr: 2.87e-04 +2022-05-05 23:46:50,127 INFO [train.py:715] (6/8) Epoch 7, batch 27800, loss[loss=0.1244, simple_loss=0.2006, pruned_loss=0.02408, over 4743.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2174, pruned_loss=0.03696, over 971952.43 frames.], batch size: 16, lr: 2.87e-04 +2022-05-05 23:47:31,348 INFO [train.py:715] (6/8) Epoch 7, batch 27850, loss[loss=0.09973, simple_loss=0.177, pruned_loss=0.01124, over 4917.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2183, pruned_loss=0.03757, over 972347.93 frames.], batch size: 23, lr: 2.87e-04 +2022-05-05 23:48:11,407 INFO [train.py:715] (6/8) Epoch 7, batch 27900, loss[loss=0.1343, simple_loss=0.2133, pruned_loss=0.02768, over 4820.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2184, pruned_loss=0.03758, over 971673.20 frames.], batch size: 25, lr: 2.87e-04 +2022-05-05 23:48:52,369 INFO [train.py:715] (6/8) Epoch 7, batch 27950, loss[loss=0.1483, simple_loss=0.2117, pruned_loss=0.04248, over 4898.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2181, pruned_loss=0.03724, over 972222.75 frames.], batch size: 19, lr: 2.87e-04 +2022-05-05 23:49:33,560 INFO [train.py:715] (6/8) Epoch 7, batch 28000, loss[loss=0.116, simple_loss=0.1863, pruned_loss=0.02281, over 4797.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2171, pruned_loss=0.03688, over 972296.48 frames.], batch size: 14, lr: 2.87e-04 +2022-05-05 23:50:14,248 INFO [train.py:715] (6/8) Epoch 7, batch 28050, loss[loss=0.1391, simple_loss=0.2142, pruned_loss=0.03197, over 4820.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2171, pruned_loss=0.03667, over 972720.99 frames.], batch size: 25, lr: 2.87e-04 +2022-05-05 23:50:54,411 INFO [train.py:715] (6/8) Epoch 7, batch 28100, loss[loss=0.1283, simple_loss=0.2051, pruned_loss=0.02572, over 4782.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2173, pruned_loss=0.03682, over 973043.40 frames.], batch size: 18, lr: 2.87e-04 +2022-05-05 23:51:35,210 INFO [train.py:715] (6/8) Epoch 7, batch 28150, loss[loss=0.1527, simple_loss=0.2346, pruned_loss=0.03543, over 4932.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2186, pruned_loss=0.03723, over 973063.05 frames.], batch size: 23, lr: 2.87e-04 +2022-05-05 23:52:16,648 INFO [train.py:715] (6/8) Epoch 7, batch 28200, loss[loss=0.1318, simple_loss=0.2081, pruned_loss=0.02773, over 4879.00 frames.], tot_loss[loss=0.146, simple_loss=0.2182, pruned_loss=0.03691, over 972772.74 frames.], batch size: 16, lr: 2.87e-04 +2022-05-05 23:52:56,869 INFO [train.py:715] (6/8) Epoch 7, batch 28250, loss[loss=0.1547, simple_loss=0.224, pruned_loss=0.04271, over 4853.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2183, pruned_loss=0.03704, over 971757.22 frames.], batch size: 13, lr: 2.87e-04 +2022-05-05 23:53:38,377 INFO [train.py:715] (6/8) Epoch 7, batch 28300, loss[loss=0.1292, simple_loss=0.2051, pruned_loss=0.02659, over 4927.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2182, pruned_loss=0.03716, over 972345.07 frames.], batch size: 18, lr: 2.86e-04 +2022-05-05 23:54:21,488 INFO [train.py:715] 
(6/8) Epoch 7, batch 28350, loss[loss=0.1655, simple_loss=0.2405, pruned_loss=0.04532, over 4969.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2174, pruned_loss=0.03682, over 971852.21 frames.], batch size: 39, lr: 2.86e-04 +2022-05-05 23:55:01,300 INFO [train.py:715] (6/8) Epoch 7, batch 28400, loss[loss=0.1253, simple_loss=0.2073, pruned_loss=0.02167, over 4834.00 frames.], tot_loss[loss=0.146, simple_loss=0.2178, pruned_loss=0.03708, over 971235.50 frames.], batch size: 26, lr: 2.86e-04 +2022-05-05 23:55:40,832 INFO [train.py:715] (6/8) Epoch 7, batch 28450, loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03185, over 4836.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2181, pruned_loss=0.03718, over 971683.83 frames.], batch size: 15, lr: 2.86e-04 +2022-05-05 23:56:20,938 INFO [train.py:715] (6/8) Epoch 7, batch 28500, loss[loss=0.1941, simple_loss=0.2541, pruned_loss=0.06708, over 4934.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2174, pruned_loss=0.03705, over 972760.44 frames.], batch size: 39, lr: 2.86e-04 +2022-05-05 23:57:01,429 INFO [train.py:715] (6/8) Epoch 7, batch 28550, loss[loss=0.1676, simple_loss=0.2412, pruned_loss=0.04693, over 4809.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2169, pruned_loss=0.0369, over 973789.72 frames.], batch size: 25, lr: 2.86e-04 +2022-05-05 23:57:41,422 INFO [train.py:715] (6/8) Epoch 7, batch 28600, loss[loss=0.1514, simple_loss=0.2109, pruned_loss=0.04593, over 4890.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2175, pruned_loss=0.03715, over 973972.61 frames.], batch size: 32, lr: 2.86e-04 +2022-05-05 23:58:21,642 INFO [train.py:715] (6/8) Epoch 7, batch 28650, loss[loss=0.1325, simple_loss=0.2002, pruned_loss=0.03234, over 4865.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2181, pruned_loss=0.03719, over 972952.63 frames.], batch size: 20, lr: 2.86e-04 +2022-05-05 23:59:03,078 INFO [train.py:715] (6/8) Epoch 7, batch 28700, loss[loss=0.1869, simple_loss=0.2492, pruned_loss=0.06231, over 4889.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2181, pruned_loss=0.03737, over 973116.86 frames.], batch size: 38, lr: 2.86e-04 +2022-05-05 23:59:43,960 INFO [train.py:715] (6/8) Epoch 7, batch 28750, loss[loss=0.1572, simple_loss=0.2196, pruned_loss=0.04735, over 4957.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2172, pruned_loss=0.03686, over 972840.39 frames.], batch size: 23, lr: 2.86e-04 +2022-05-06 00:00:24,193 INFO [train.py:715] (6/8) Epoch 7, batch 28800, loss[loss=0.1407, simple_loss=0.2287, pruned_loss=0.02636, over 4741.00 frames.], tot_loss[loss=0.1461, simple_loss=0.218, pruned_loss=0.03717, over 972621.90 frames.], batch size: 16, lr: 2.86e-04 +2022-05-06 00:01:04,808 INFO [train.py:715] (6/8) Epoch 7, batch 28850, loss[loss=0.1435, simple_loss=0.2116, pruned_loss=0.03772, over 4768.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2184, pruned_loss=0.03727, over 972525.21 frames.], batch size: 17, lr: 2.86e-04 +2022-05-06 00:01:45,178 INFO [train.py:715] (6/8) Epoch 7, batch 28900, loss[loss=0.1818, simple_loss=0.2504, pruned_loss=0.05663, over 4907.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2185, pruned_loss=0.0375, over 972302.75 frames.], batch size: 19, lr: 2.86e-04 +2022-05-06 00:02:24,701 INFO [train.py:715] (6/8) Epoch 7, batch 28950, loss[loss=0.1328, simple_loss=0.2057, pruned_loss=0.02997, over 4938.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2178, pruned_loss=0.03687, over 971776.07 frames.], batch size: 29, lr: 2.86e-04 +2022-05-06 00:03:04,254 INFO [train.py:715] (6/8) Epoch 7, batch 
29000, loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02898, over 4775.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2172, pruned_loss=0.03654, over 971266.09 frames.], batch size: 17, lr: 2.86e-04 +2022-05-06 00:03:44,918 INFO [train.py:715] (6/8) Epoch 7, batch 29050, loss[loss=0.129, simple_loss=0.2036, pruned_loss=0.02716, over 4846.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2171, pruned_loss=0.03671, over 971396.44 frames.], batch size: 30, lr: 2.86e-04 +2022-05-06 00:04:24,483 INFO [train.py:715] (6/8) Epoch 7, batch 29100, loss[loss=0.1514, simple_loss=0.2155, pruned_loss=0.04368, over 4968.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2171, pruned_loss=0.03673, over 971932.51 frames.], batch size: 15, lr: 2.86e-04 +2022-05-06 00:05:04,256 INFO [train.py:715] (6/8) Epoch 7, batch 29150, loss[loss=0.1558, simple_loss=0.2164, pruned_loss=0.04764, over 4685.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2171, pruned_loss=0.03664, over 971429.96 frames.], batch size: 15, lr: 2.86e-04 +2022-05-06 00:05:44,151 INFO [train.py:715] (6/8) Epoch 7, batch 29200, loss[loss=0.1143, simple_loss=0.1938, pruned_loss=0.01738, over 4793.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2167, pruned_loss=0.0365, over 971524.51 frames.], batch size: 12, lr: 2.86e-04 +2022-05-06 00:06:24,441 INFO [train.py:715] (6/8) Epoch 7, batch 29250, loss[loss=0.1194, simple_loss=0.1956, pruned_loss=0.02162, over 4799.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2174, pruned_loss=0.03703, over 971339.20 frames.], batch size: 21, lr: 2.86e-04 +2022-05-06 00:07:04,332 INFO [train.py:715] (6/8) Epoch 7, batch 29300, loss[loss=0.1224, simple_loss=0.2072, pruned_loss=0.01882, over 4828.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2173, pruned_loss=0.03726, over 970983.71 frames.], batch size: 26, lr: 2.86e-04 +2022-05-06 00:07:44,021 INFO [train.py:715] (6/8) Epoch 7, batch 29350, loss[loss=0.1355, simple_loss=0.2082, pruned_loss=0.03146, over 4940.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2175, pruned_loss=0.03719, over 970927.96 frames.], batch size: 40, lr: 2.86e-04 +2022-05-06 00:08:24,291 INFO [train.py:715] (6/8) Epoch 7, batch 29400, loss[loss=0.156, simple_loss=0.223, pruned_loss=0.04446, over 4859.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2178, pruned_loss=0.03733, over 970813.01 frames.], batch size: 20, lr: 2.86e-04 +2022-05-06 00:09:03,568 INFO [train.py:715] (6/8) Epoch 7, batch 29450, loss[loss=0.1581, simple_loss=0.2278, pruned_loss=0.0442, over 4900.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2179, pruned_loss=0.03725, over 971884.96 frames.], batch size: 17, lr: 2.86e-04 +2022-05-06 00:09:43,850 INFO [train.py:715] (6/8) Epoch 7, batch 29500, loss[loss=0.1387, simple_loss=0.2155, pruned_loss=0.03092, over 4885.00 frames.], tot_loss[loss=0.146, simple_loss=0.2181, pruned_loss=0.037, over 972400.05 frames.], batch size: 16, lr: 2.86e-04 +2022-05-06 00:10:23,576 INFO [train.py:715] (6/8) Epoch 7, batch 29550, loss[loss=0.1526, simple_loss=0.2179, pruned_loss=0.04361, over 4881.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.0369, over 971892.71 frames.], batch size: 20, lr: 2.86e-04 +2022-05-06 00:11:03,254 INFO [train.py:715] (6/8) Epoch 7, batch 29600, loss[loss=0.142, simple_loss=0.2084, pruned_loss=0.03777, over 4969.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2166, pruned_loss=0.03633, over 972876.55 frames.], batch size: 28, lr: 2.86e-04 +2022-05-06 00:11:43,211 INFO [train.py:715] (6/8) Epoch 7, batch 29650, loss[loss=0.1382, 
simple_loss=0.2087, pruned_loss=0.03381, over 4951.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2168, pruned_loss=0.03638, over 972844.98 frames.], batch size: 35, lr: 2.86e-04 +2022-05-06 00:12:23,008 INFO [train.py:715] (6/8) Epoch 7, batch 29700, loss[loss=0.1403, simple_loss=0.214, pruned_loss=0.03336, over 4988.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03619, over 973493.12 frames.], batch size: 26, lr: 2.86e-04 +2022-05-06 00:13:02,667 INFO [train.py:715] (6/8) Epoch 7, batch 29750, loss[loss=0.1235, simple_loss=0.1994, pruned_loss=0.02384, over 4970.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2172, pruned_loss=0.03628, over 973862.91 frames.], batch size: 14, lr: 2.86e-04 +2022-05-06 00:13:42,299 INFO [train.py:715] (6/8) Epoch 7, batch 29800, loss[loss=0.1261, simple_loss=0.1938, pruned_loss=0.02921, over 4814.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2167, pruned_loss=0.03642, over 973221.25 frames.], batch size: 13, lr: 2.86e-04 +2022-05-06 00:14:22,418 INFO [train.py:715] (6/8) Epoch 7, batch 29850, loss[loss=0.1245, simple_loss=0.2019, pruned_loss=0.02356, over 4861.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2161, pruned_loss=0.03625, over 972628.14 frames.], batch size: 30, lr: 2.86e-04 +2022-05-06 00:15:02,285 INFO [train.py:715] (6/8) Epoch 7, batch 29900, loss[loss=0.1405, simple_loss=0.2099, pruned_loss=0.03551, over 4955.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2161, pruned_loss=0.03619, over 972428.34 frames.], batch size: 35, lr: 2.86e-04 +2022-05-06 00:15:41,866 INFO [train.py:715] (6/8) Epoch 7, batch 29950, loss[loss=0.181, simple_loss=0.2437, pruned_loss=0.05913, over 4879.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2163, pruned_loss=0.03615, over 972470.54 frames.], batch size: 16, lr: 2.86e-04 +2022-05-06 00:16:21,228 INFO [train.py:715] (6/8) Epoch 7, batch 30000, loss[loss=0.1331, simple_loss=0.2123, pruned_loss=0.02688, over 4923.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03589, over 972587.17 frames.], batch size: 29, lr: 2.86e-04 +2022-05-06 00:16:21,230 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 00:16:41,747 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.1081, simple_loss=0.1929, pruned_loss=0.01164, over 914524.00 frames. 
+2022-05-06 00:17:21,555 INFO [train.py:715] (6/8) Epoch 7, batch 30050, loss[loss=0.175, simple_loss=0.2491, pruned_loss=0.05045, over 4695.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2163, pruned_loss=0.03601, over 972475.17 frames.], batch size: 15, lr: 2.86e-04 +2022-05-06 00:18:00,838 INFO [train.py:715] (6/8) Epoch 7, batch 30100, loss[loss=0.121, simple_loss=0.1894, pruned_loss=0.02628, over 4958.00 frames.], tot_loss[loss=0.144, simple_loss=0.2166, pruned_loss=0.0357, over 972085.25 frames.], batch size: 14, lr: 2.86e-04 +2022-05-06 00:18:40,791 INFO [train.py:715] (6/8) Epoch 7, batch 30150, loss[loss=0.143, simple_loss=0.2233, pruned_loss=0.03132, over 4823.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2164, pruned_loss=0.03534, over 972096.75 frames.], batch size: 26, lr: 2.86e-04 +2022-05-06 00:19:20,434 INFO [train.py:715] (6/8) Epoch 7, batch 30200, loss[loss=0.1234, simple_loss=0.1944, pruned_loss=0.02618, over 4829.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2158, pruned_loss=0.03526, over 971967.73 frames.], batch size: 13, lr: 2.85e-04 +2022-05-06 00:20:00,693 INFO [train.py:715] (6/8) Epoch 7, batch 30250, loss[loss=0.1512, simple_loss=0.2245, pruned_loss=0.0389, over 4754.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2165, pruned_loss=0.03569, over 972469.28 frames.], batch size: 16, lr: 2.85e-04 +2022-05-06 00:20:39,869 INFO [train.py:715] (6/8) Epoch 7, batch 30300, loss[loss=0.1328, simple_loss=0.2115, pruned_loss=0.02704, over 4862.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2177, pruned_loss=0.03694, over 972812.99 frames.], batch size: 13, lr: 2.85e-04 +2022-05-06 00:21:19,492 INFO [train.py:715] (6/8) Epoch 7, batch 30350, loss[loss=0.1218, simple_loss=0.1902, pruned_loss=0.02672, over 4845.00 frames.], tot_loss[loss=0.145, simple_loss=0.2173, pruned_loss=0.03638, over 971936.72 frames.], batch size: 13, lr: 2.85e-04 +2022-05-06 00:21:58,990 INFO [train.py:715] (6/8) Epoch 7, batch 30400, loss[loss=0.1291, simple_loss=0.2035, pruned_loss=0.02731, over 4934.00 frames.], tot_loss[loss=0.144, simple_loss=0.2164, pruned_loss=0.03586, over 972363.26 frames.], batch size: 21, lr: 2.85e-04 +2022-05-06 00:22:38,982 INFO [train.py:715] (6/8) Epoch 7, batch 30450, loss[loss=0.1249, simple_loss=0.2095, pruned_loss=0.02013, over 4765.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2163, pruned_loss=0.03547, over 972952.80 frames.], batch size: 14, lr: 2.85e-04 +2022-05-06 00:23:18,899 INFO [train.py:715] (6/8) Epoch 7, batch 30500, loss[loss=0.2112, simple_loss=0.2761, pruned_loss=0.07311, over 4844.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2165, pruned_loss=0.03547, over 972833.75 frames.], batch size: 15, lr: 2.85e-04 +2022-05-06 00:23:58,829 INFO [train.py:715] (6/8) Epoch 7, batch 30550, loss[loss=0.1548, simple_loss=0.224, pruned_loss=0.04286, over 4796.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03594, over 972545.83 frames.], batch size: 21, lr: 2.85e-04 +2022-05-06 00:24:38,531 INFO [train.py:715] (6/8) Epoch 7, batch 30600, loss[loss=0.165, simple_loss=0.2433, pruned_loss=0.04339, over 4983.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2173, pruned_loss=0.03628, over 974051.57 frames.], batch size: 15, lr: 2.85e-04 +2022-05-06 00:25:18,168 INFO [train.py:715] (6/8) Epoch 7, batch 30650, loss[loss=0.1518, simple_loss=0.2194, pruned_loss=0.04211, over 4905.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2169, pruned_loss=0.03613, over 973627.28 frames.], batch size: 17, lr: 2.85e-04 +2022-05-06 00:25:57,790 
INFO [train.py:715] (6/8) Epoch 7, batch 30700, loss[loss=0.1521, simple_loss=0.2189, pruned_loss=0.04262, over 4962.00 frames.], tot_loss[loss=0.1441, simple_loss=0.216, pruned_loss=0.03608, over 973317.66 frames.], batch size: 35, lr: 2.85e-04 +2022-05-06 00:26:36,843 INFO [train.py:715] (6/8) Epoch 7, batch 30750, loss[loss=0.156, simple_loss=0.2325, pruned_loss=0.03969, over 4778.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2158, pruned_loss=0.03626, over 973238.23 frames.], batch size: 18, lr: 2.85e-04 +2022-05-06 00:27:15,906 INFO [train.py:715] (6/8) Epoch 7, batch 30800, loss[loss=0.1324, simple_loss=0.2043, pruned_loss=0.0303, over 4786.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2159, pruned_loss=0.03598, over 973304.30 frames.], batch size: 18, lr: 2.85e-04 +2022-05-06 00:27:55,688 INFO [train.py:715] (6/8) Epoch 7, batch 30850, loss[loss=0.1474, simple_loss=0.2114, pruned_loss=0.0417, over 4892.00 frames.], tot_loss[loss=0.144, simple_loss=0.2159, pruned_loss=0.03608, over 972540.07 frames.], batch size: 22, lr: 2.85e-04 +2022-05-06 00:28:35,194 INFO [train.py:715] (6/8) Epoch 7, batch 30900, loss[loss=0.1126, simple_loss=0.1773, pruned_loss=0.02391, over 4951.00 frames.], tot_loss[loss=0.1441, simple_loss=0.216, pruned_loss=0.03611, over 972885.12 frames.], batch size: 29, lr: 2.85e-04 +2022-05-06 00:29:15,592 INFO [train.py:715] (6/8) Epoch 7, batch 30950, loss[loss=0.1437, simple_loss=0.2096, pruned_loss=0.03892, over 4967.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2168, pruned_loss=0.03669, over 973152.88 frames.], batch size: 15, lr: 2.85e-04 +2022-05-06 00:29:54,984 INFO [train.py:715] (6/8) Epoch 7, batch 31000, loss[loss=0.16, simple_loss=0.2284, pruned_loss=0.0458, over 4863.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2165, pruned_loss=0.0363, over 973288.28 frames.], batch size: 38, lr: 2.85e-04 +2022-05-06 00:30:34,541 INFO [train.py:715] (6/8) Epoch 7, batch 31050, loss[loss=0.1595, simple_loss=0.2236, pruned_loss=0.04776, over 4892.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2172, pruned_loss=0.03633, over 973193.01 frames.], batch size: 16, lr: 2.85e-04 +2022-05-06 00:31:14,375 INFO [train.py:715] (6/8) Epoch 7, batch 31100, loss[loss=0.1612, simple_loss=0.2121, pruned_loss=0.05516, over 4822.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2174, pruned_loss=0.03635, over 972819.36 frames.], batch size: 12, lr: 2.85e-04 +2022-05-06 00:31:54,496 INFO [train.py:715] (6/8) Epoch 7, batch 31150, loss[loss=0.1327, simple_loss=0.2078, pruned_loss=0.02879, over 4811.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2169, pruned_loss=0.03622, over 972655.35 frames.], batch size: 24, lr: 2.85e-04 +2022-05-06 00:32:33,848 INFO [train.py:715] (6/8) Epoch 7, batch 31200, loss[loss=0.1887, simple_loss=0.2559, pruned_loss=0.06072, over 4885.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2179, pruned_loss=0.03659, over 972651.02 frames.], batch size: 39, lr: 2.85e-04 +2022-05-06 00:33:13,817 INFO [train.py:715] (6/8) Epoch 7, batch 31250, loss[loss=0.1511, simple_loss=0.2299, pruned_loss=0.03614, over 4755.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2185, pruned_loss=0.03695, over 972284.45 frames.], batch size: 19, lr: 2.85e-04 +2022-05-06 00:33:54,544 INFO [train.py:715] (6/8) Epoch 7, batch 31300, loss[loss=0.1696, simple_loss=0.2267, pruned_loss=0.0563, over 4867.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2173, pruned_loss=0.03652, over 972299.05 frames.], batch size: 32, lr: 2.85e-04 +2022-05-06 00:34:34,122 INFO [train.py:715] (6/8) 
Epoch 7, batch 31350, loss[loss=0.1178, simple_loss=0.1961, pruned_loss=0.01975, over 4963.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2169, pruned_loss=0.03618, over 972659.74 frames.], batch size: 14, lr: 2.85e-04 +2022-05-06 00:35:14,070 INFO [train.py:715] (6/8) Epoch 7, batch 31400, loss[loss=0.1499, simple_loss=0.2092, pruned_loss=0.04528, over 4798.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2169, pruned_loss=0.03641, over 972329.93 frames.], batch size: 12, lr: 2.85e-04 +2022-05-06 00:35:53,412 INFO [train.py:715] (6/8) Epoch 7, batch 31450, loss[loss=0.1453, simple_loss=0.2284, pruned_loss=0.03106, over 4756.00 frames.], tot_loss[loss=0.145, simple_loss=0.2172, pruned_loss=0.0364, over 973033.08 frames.], batch size: 19, lr: 2.85e-04 +2022-05-06 00:36:33,191 INFO [train.py:715] (6/8) Epoch 7, batch 31500, loss[loss=0.1397, simple_loss=0.2137, pruned_loss=0.03282, over 4779.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2165, pruned_loss=0.03641, over 972901.84 frames.], batch size: 17, lr: 2.85e-04 +2022-05-06 00:37:12,319 INFO [train.py:715] (6/8) Epoch 7, batch 31550, loss[loss=0.169, simple_loss=0.2441, pruned_loss=0.04693, over 4836.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2165, pruned_loss=0.03622, over 973824.70 frames.], batch size: 20, lr: 2.85e-04 +2022-05-06 00:37:52,278 INFO [train.py:715] (6/8) Epoch 7, batch 31600, loss[loss=0.1581, simple_loss=0.2253, pruned_loss=0.04543, over 4889.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2167, pruned_loss=0.03643, over 973186.20 frames.], batch size: 19, lr: 2.85e-04 +2022-05-06 00:38:32,107 INFO [train.py:715] (6/8) Epoch 7, batch 31650, loss[loss=0.1579, simple_loss=0.2414, pruned_loss=0.03725, over 4815.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2174, pruned_loss=0.03712, over 973718.09 frames.], batch size: 25, lr: 2.85e-04 +2022-05-06 00:39:11,529 INFO [train.py:715] (6/8) Epoch 7, batch 31700, loss[loss=0.1343, simple_loss=0.2049, pruned_loss=0.03189, over 4936.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2176, pruned_loss=0.03711, over 973452.05 frames.], batch size: 23, lr: 2.85e-04 +2022-05-06 00:39:51,223 INFO [train.py:715] (6/8) Epoch 7, batch 31750, loss[loss=0.1482, simple_loss=0.2156, pruned_loss=0.0404, over 4811.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2182, pruned_loss=0.037, over 972659.84 frames.], batch size: 21, lr: 2.85e-04 +2022-05-06 00:40:30,495 INFO [train.py:715] (6/8) Epoch 7, batch 31800, loss[loss=0.1544, simple_loss=0.222, pruned_loss=0.04339, over 4902.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2179, pruned_loss=0.03675, over 972431.27 frames.], batch size: 19, lr: 2.85e-04 +2022-05-06 00:41:09,608 INFO [train.py:715] (6/8) Epoch 7, batch 31850, loss[loss=0.1736, simple_loss=0.2419, pruned_loss=0.05261, over 4870.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2184, pruned_loss=0.03699, over 972660.83 frames.], batch size: 30, lr: 2.85e-04 +2022-05-06 00:41:49,865 INFO [train.py:715] (6/8) Epoch 7, batch 31900, loss[loss=0.1202, simple_loss=0.1997, pruned_loss=0.0204, over 4820.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2173, pruned_loss=0.03674, over 972894.38 frames.], batch size: 26, lr: 2.85e-04 +2022-05-06 00:42:30,604 INFO [train.py:715] (6/8) Epoch 7, batch 31950, loss[loss=0.1542, simple_loss=0.2188, pruned_loss=0.04483, over 4772.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2174, pruned_loss=0.03671, over 972571.96 frames.], batch size: 17, lr: 2.85e-04 +2022-05-06 00:43:11,065 INFO [train.py:715] (6/8) Epoch 7, batch 32000, 
loss[loss=0.1438, simple_loss=0.2162, pruned_loss=0.03569, over 4804.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2181, pruned_loss=0.03652, over 973629.53 frames.], batch size: 25, lr: 2.85e-04 +2022-05-06 00:43:50,734 INFO [train.py:715] (6/8) Epoch 7, batch 32050, loss[loss=0.174, simple_loss=0.2446, pruned_loss=0.05174, over 4774.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2192, pruned_loss=0.03726, over 973161.40 frames.], batch size: 14, lr: 2.85e-04 +2022-05-06 00:44:30,667 INFO [train.py:715] (6/8) Epoch 7, batch 32100, loss[loss=0.1436, simple_loss=0.2074, pruned_loss=0.03993, over 4786.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2188, pruned_loss=0.03687, over 972866.16 frames.], batch size: 18, lr: 2.85e-04 +2022-05-06 00:45:10,481 INFO [train.py:715] (6/8) Epoch 7, batch 32150, loss[loss=0.1435, simple_loss=0.2131, pruned_loss=0.03689, over 4731.00 frames.], tot_loss[loss=0.1457, simple_loss=0.218, pruned_loss=0.03671, over 972056.27 frames.], batch size: 12, lr: 2.84e-04 +2022-05-06 00:45:50,034 INFO [train.py:715] (6/8) Epoch 7, batch 32200, loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03401, over 4886.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2181, pruned_loss=0.03684, over 972233.96 frames.], batch size: 22, lr: 2.84e-04 +2022-05-06 00:46:29,886 INFO [train.py:715] (6/8) Epoch 7, batch 32250, loss[loss=0.1393, simple_loss=0.2318, pruned_loss=0.02337, over 4815.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2179, pruned_loss=0.03671, over 971020.46 frames.], batch size: 26, lr: 2.84e-04 +2022-05-06 00:47:09,678 INFO [train.py:715] (6/8) Epoch 7, batch 32300, loss[loss=0.122, simple_loss=0.1975, pruned_loss=0.02327, over 4876.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2174, pruned_loss=0.03665, over 971120.15 frames.], batch size: 22, lr: 2.84e-04 +2022-05-06 00:47:50,017 INFO [train.py:715] (6/8) Epoch 7, batch 32350, loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03428, over 4794.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03625, over 971522.93 frames.], batch size: 14, lr: 2.84e-04 +2022-05-06 00:48:29,376 INFO [train.py:715] (6/8) Epoch 7, batch 32400, loss[loss=0.1176, simple_loss=0.1889, pruned_loss=0.02312, over 4645.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2161, pruned_loss=0.0361, over 970765.13 frames.], batch size: 13, lr: 2.84e-04 +2022-05-06 00:49:09,265 INFO [train.py:715] (6/8) Epoch 7, batch 32450, loss[loss=0.1724, simple_loss=0.2454, pruned_loss=0.04966, over 4799.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2169, pruned_loss=0.03644, over 971204.32 frames.], batch size: 21, lr: 2.84e-04 +2022-05-06 00:49:48,740 INFO [train.py:715] (6/8) Epoch 7, batch 32500, loss[loss=0.1496, simple_loss=0.2245, pruned_loss=0.03734, over 4923.00 frames.], tot_loss[loss=0.145, simple_loss=0.2167, pruned_loss=0.03664, over 971791.36 frames.], batch size: 39, lr: 2.84e-04 +2022-05-06 00:50:28,304 INFO [train.py:715] (6/8) Epoch 7, batch 32550, loss[loss=0.1246, simple_loss=0.1883, pruned_loss=0.03043, over 4746.00 frames.], tot_loss[loss=0.144, simple_loss=0.2155, pruned_loss=0.03629, over 972088.29 frames.], batch size: 19, lr: 2.84e-04 +2022-05-06 00:51:08,056 INFO [train.py:715] (6/8) Epoch 7, batch 32600, loss[loss=0.173, simple_loss=0.2341, pruned_loss=0.056, over 4932.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2157, pruned_loss=0.0365, over 972659.78 frames.], batch size: 23, lr: 2.84e-04 +2022-05-06 00:51:47,570 INFO [train.py:715] (6/8) Epoch 7, batch 32650, loss[loss=0.1687, 
simple_loss=0.2308, pruned_loss=0.05333, over 4783.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2164, pruned_loss=0.03687, over 972526.33 frames.], batch size: 18, lr: 2.84e-04 +2022-05-06 00:52:27,385 INFO [train.py:715] (6/8) Epoch 7, batch 32700, loss[loss=0.1465, simple_loss=0.2219, pruned_loss=0.03556, over 4803.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2168, pruned_loss=0.03655, over 972483.47 frames.], batch size: 24, lr: 2.84e-04 +2022-05-06 00:53:06,819 INFO [train.py:715] (6/8) Epoch 7, batch 32750, loss[loss=0.1671, simple_loss=0.2329, pruned_loss=0.05062, over 4821.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03641, over 972267.86 frames.], batch size: 15, lr: 2.84e-04 +2022-05-06 00:53:47,309 INFO [train.py:715] (6/8) Epoch 7, batch 32800, loss[loss=0.1451, simple_loss=0.2167, pruned_loss=0.03671, over 4849.00 frames.], tot_loss[loss=0.145, simple_loss=0.2166, pruned_loss=0.03671, over 972800.61 frames.], batch size: 32, lr: 2.84e-04 +2022-05-06 00:54:27,994 INFO [train.py:715] (6/8) Epoch 7, batch 32850, loss[loss=0.1649, simple_loss=0.2378, pruned_loss=0.04607, over 4815.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2166, pruned_loss=0.03644, over 972941.66 frames.], batch size: 25, lr: 2.84e-04 +2022-05-06 00:55:08,135 INFO [train.py:715] (6/8) Epoch 7, batch 32900, loss[loss=0.1537, simple_loss=0.2395, pruned_loss=0.03396, over 4937.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2168, pruned_loss=0.03675, over 973299.96 frames.], batch size: 23, lr: 2.84e-04 +2022-05-06 00:55:48,474 INFO [train.py:715] (6/8) Epoch 7, batch 32950, loss[loss=0.1351, simple_loss=0.2137, pruned_loss=0.02829, over 4801.00 frames.], tot_loss[loss=0.145, simple_loss=0.2166, pruned_loss=0.03668, over 973035.98 frames.], batch size: 24, lr: 2.84e-04 +2022-05-06 00:56:28,434 INFO [train.py:715] (6/8) Epoch 7, batch 33000, loss[loss=0.1659, simple_loss=0.2325, pruned_loss=0.04964, over 4805.00 frames.], tot_loss[loss=0.1447, simple_loss=0.216, pruned_loss=0.03672, over 972555.94 frames.], batch size: 14, lr: 2.84e-04 +2022-05-06 00:56:28,435 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 00:56:38,007 INFO [train.py:742] (6/8) Epoch 7, validation: loss=0.108, simple_loss=0.1927, pruned_loss=0.01164, over 914524.00 frames. 
+2022-05-06 00:57:17,528 INFO [train.py:715] (6/8) Epoch 7, batch 33050, loss[loss=0.1696, simple_loss=0.2442, pruned_loss=0.04753, over 4784.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2165, pruned_loss=0.03681, over 972863.43 frames.], batch size: 17, lr: 2.84e-04 +2022-05-06 00:57:57,506 INFO [train.py:715] (6/8) Epoch 7, batch 33100, loss[loss=0.1647, simple_loss=0.2412, pruned_loss=0.04415, over 4886.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2168, pruned_loss=0.03697, over 973140.60 frames.], batch size: 16, lr: 2.84e-04 +2022-05-06 00:58:36,958 INFO [train.py:715] (6/8) Epoch 7, batch 33150, loss[loss=0.1393, simple_loss=0.2152, pruned_loss=0.03167, over 4876.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.03624, over 973715.35 frames.], batch size: 16, lr: 2.84e-04 +2022-05-06 00:59:16,728 INFO [train.py:715] (6/8) Epoch 7, batch 33200, loss[loss=0.1285, simple_loss=0.21, pruned_loss=0.02346, over 4976.00 frames.], tot_loss[loss=0.144, simple_loss=0.2159, pruned_loss=0.03606, over 974045.38 frames.], batch size: 24, lr: 2.84e-04 +2022-05-06 00:59:56,299 INFO [train.py:715] (6/8) Epoch 7, batch 33250, loss[loss=0.1234, simple_loss=0.2039, pruned_loss=0.02146, over 4965.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2169, pruned_loss=0.03633, over 973876.05 frames.], batch size: 24, lr: 2.84e-04 +2022-05-06 01:00:35,762 INFO [train.py:715] (6/8) Epoch 7, batch 33300, loss[loss=0.1598, simple_loss=0.2264, pruned_loss=0.04666, over 4982.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2181, pruned_loss=0.03715, over 974095.59 frames.], batch size: 31, lr: 2.84e-04 +2022-05-06 01:01:15,280 INFO [train.py:715] (6/8) Epoch 7, batch 33350, loss[loss=0.1333, simple_loss=0.2008, pruned_loss=0.03291, over 4797.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2174, pruned_loss=0.03664, over 973728.42 frames.], batch size: 21, lr: 2.84e-04 +2022-05-06 01:01:55,594 INFO [train.py:715] (6/8) Epoch 7, batch 33400, loss[loss=0.1636, simple_loss=0.232, pruned_loss=0.04767, over 4698.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2183, pruned_loss=0.03708, over 972945.37 frames.], batch size: 15, lr: 2.84e-04 +2022-05-06 01:02:35,690 INFO [train.py:715] (6/8) Epoch 7, batch 33450, loss[loss=0.1059, simple_loss=0.1773, pruned_loss=0.01725, over 4984.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2185, pruned_loss=0.0371, over 973018.78 frames.], batch size: 25, lr: 2.84e-04 +2022-05-06 01:03:16,275 INFO [train.py:715] (6/8) Epoch 7, batch 33500, loss[loss=0.1557, simple_loss=0.229, pruned_loss=0.04125, over 4921.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2188, pruned_loss=0.0372, over 973332.79 frames.], batch size: 23, lr: 2.84e-04 +2022-05-06 01:03:56,852 INFO [train.py:715] (6/8) Epoch 7, batch 33550, loss[loss=0.1121, simple_loss=0.1908, pruned_loss=0.01675, over 4942.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03726, over 973572.45 frames.], batch size: 29, lr: 2.84e-04 +2022-05-06 01:04:37,458 INFO [train.py:715] (6/8) Epoch 7, batch 33600, loss[loss=0.149, simple_loss=0.2348, pruned_loss=0.03165, over 4695.00 frames.], tot_loss[loss=0.146, simple_loss=0.2179, pruned_loss=0.03701, over 973237.28 frames.], batch size: 15, lr: 2.84e-04 +2022-05-06 01:05:17,956 INFO [train.py:715] (6/8) Epoch 7, batch 33650, loss[loss=0.1447, simple_loss=0.206, pruned_loss=0.04165, over 4910.00 frames.], tot_loss[loss=0.1451, simple_loss=0.217, pruned_loss=0.03656, over 972210.21 frames.], batch size: 19, lr: 2.84e-04 +2022-05-06 01:05:57,818 INFO 
[train.py:715] (6/8) Epoch 7, batch 33700, loss[loss=0.1238, simple_loss=0.2039, pruned_loss=0.02191, over 4985.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2166, pruned_loss=0.03679, over 972129.14 frames.], batch size: 31, lr: 2.84e-04 +2022-05-06 01:06:37,964 INFO [train.py:715] (6/8) Epoch 7, batch 33750, loss[loss=0.1617, simple_loss=0.2243, pruned_loss=0.04959, over 4979.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2163, pruned_loss=0.03622, over 972761.64 frames.], batch size: 14, lr: 2.84e-04 +2022-05-06 01:07:17,448 INFO [train.py:715] (6/8) Epoch 7, batch 33800, loss[loss=0.1557, simple_loss=0.2291, pruned_loss=0.04119, over 4929.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2172, pruned_loss=0.03677, over 971573.95 frames.], batch size: 23, lr: 2.84e-04 +2022-05-06 01:07:58,048 INFO [train.py:715] (6/8) Epoch 7, batch 33850, loss[loss=0.1277, simple_loss=0.202, pruned_loss=0.02666, over 4779.00 frames.], tot_loss[loss=0.145, simple_loss=0.2172, pruned_loss=0.03643, over 971281.11 frames.], batch size: 18, lr: 2.84e-04 +2022-05-06 01:08:37,725 INFO [train.py:715] (6/8) Epoch 7, batch 33900, loss[loss=0.1346, simple_loss=0.2045, pruned_loss=0.0324, over 4918.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2167, pruned_loss=0.03611, over 971441.34 frames.], batch size: 17, lr: 2.84e-04 +2022-05-06 01:09:17,828 INFO [train.py:715] (6/8) Epoch 7, batch 33950, loss[loss=0.1282, simple_loss=0.2035, pruned_loss=0.02646, over 4971.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2178, pruned_loss=0.03718, over 970679.84 frames.], batch size: 15, lr: 2.84e-04 +2022-05-06 01:09:57,287 INFO [train.py:715] (6/8) Epoch 7, batch 34000, loss[loss=0.1284, simple_loss=0.2029, pruned_loss=0.02692, over 4829.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2187, pruned_loss=0.03774, over 971397.78 frames.], batch size: 26, lr: 2.84e-04 +2022-05-06 01:10:37,480 INFO [train.py:715] (6/8) Epoch 7, batch 34050, loss[loss=0.128, simple_loss=0.2011, pruned_loss=0.02746, over 4888.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2181, pruned_loss=0.03735, over 970926.24 frames.], batch size: 22, lr: 2.84e-04 +2022-05-06 01:11:17,480 INFO [train.py:715] (6/8) Epoch 7, batch 34100, loss[loss=0.1649, simple_loss=0.2349, pruned_loss=0.04743, over 4804.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2175, pruned_loss=0.03699, over 971380.15 frames.], batch size: 21, lr: 2.83e-04 +2022-05-06 01:11:56,987 INFO [train.py:715] (6/8) Epoch 7, batch 34150, loss[loss=0.1131, simple_loss=0.1904, pruned_loss=0.01788, over 4903.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.03658, over 971118.75 frames.], batch size: 17, lr: 2.83e-04 +2022-05-06 01:12:37,407 INFO [train.py:715] (6/8) Epoch 7, batch 34200, loss[loss=0.1391, simple_loss=0.2098, pruned_loss=0.0342, over 4992.00 frames.], tot_loss[loss=0.144, simple_loss=0.2161, pruned_loss=0.036, over 970822.48 frames.], batch size: 14, lr: 2.83e-04 +2022-05-06 01:13:17,641 INFO [train.py:715] (6/8) Epoch 7, batch 34250, loss[loss=0.1378, simple_loss=0.2151, pruned_loss=0.03025, over 4917.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2162, pruned_loss=0.03597, over 971158.30 frames.], batch size: 23, lr: 2.83e-04 +2022-05-06 01:13:58,304 INFO [train.py:715] (6/8) Epoch 7, batch 34300, loss[loss=0.1154, simple_loss=0.191, pruned_loss=0.01985, over 4977.00 frames.], tot_loss[loss=0.1438, simple_loss=0.216, pruned_loss=0.0358, over 971715.21 frames.], batch size: 15, lr: 2.83e-04 +2022-05-06 01:14:38,117 INFO [train.py:715] (6/8) Epoch 7, 
batch 34350, loss[loss=0.1207, simple_loss=0.1969, pruned_loss=0.02221, over 4927.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2155, pruned_loss=0.03576, over 971548.21 frames.], batch size: 23, lr: 2.83e-04 +2022-05-06 01:15:18,249 INFO [train.py:715] (6/8) Epoch 7, batch 34400, loss[loss=0.1536, simple_loss=0.221, pruned_loss=0.04315, over 4979.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2166, pruned_loss=0.03584, over 972653.66 frames.], batch size: 25, lr: 2.83e-04 +2022-05-06 01:15:58,924 INFO [train.py:715] (6/8) Epoch 7, batch 34450, loss[loss=0.1607, simple_loss=0.2281, pruned_loss=0.04668, over 4813.00 frames.], tot_loss[loss=0.1445, simple_loss=0.217, pruned_loss=0.03602, over 971937.04 frames.], batch size: 13, lr: 2.83e-04 +2022-05-06 01:16:38,147 INFO [train.py:715] (6/8) Epoch 7, batch 34500, loss[loss=0.1798, simple_loss=0.2556, pruned_loss=0.05207, over 4935.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2184, pruned_loss=0.03693, over 970961.21 frames.], batch size: 29, lr: 2.83e-04 +2022-05-06 01:17:18,214 INFO [train.py:715] (6/8) Epoch 7, batch 34550, loss[loss=0.1509, simple_loss=0.2224, pruned_loss=0.03964, over 4929.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2178, pruned_loss=0.03676, over 971758.39 frames.], batch size: 18, lr: 2.83e-04 +2022-05-06 01:17:58,852 INFO [train.py:715] (6/8) Epoch 7, batch 34600, loss[loss=0.1485, simple_loss=0.2233, pruned_loss=0.03681, over 4921.00 frames.], tot_loss[loss=0.146, simple_loss=0.2178, pruned_loss=0.03714, over 971348.13 frames.], batch size: 17, lr: 2.83e-04 +2022-05-06 01:18:38,818 INFO [train.py:715] (6/8) Epoch 7, batch 34650, loss[loss=0.1481, simple_loss=0.2256, pruned_loss=0.0353, over 4830.00 frames.], tot_loss[loss=0.146, simple_loss=0.2177, pruned_loss=0.03712, over 971393.27 frames.], batch size: 15, lr: 2.83e-04 +2022-05-06 01:19:19,031 INFO [train.py:715] (6/8) Epoch 7, batch 34700, loss[loss=0.1443, simple_loss=0.2187, pruned_loss=0.035, over 4793.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2174, pruned_loss=0.03698, over 971619.68 frames.], batch size: 14, lr: 2.83e-04 +2022-05-06 01:19:57,507 INFO [train.py:715] (6/8) Epoch 7, batch 34750, loss[loss=0.1342, simple_loss=0.2051, pruned_loss=0.03165, over 4836.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2171, pruned_loss=0.0367, over 971493.56 frames.], batch size: 30, lr: 2.83e-04 +2022-05-06 01:20:35,945 INFO [train.py:715] (6/8) Epoch 7, batch 34800, loss[loss=0.1241, simple_loss=0.1849, pruned_loss=0.03165, over 4781.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2157, pruned_loss=0.03643, over 970287.00 frames.], batch size: 12, lr: 2.83e-04 +2022-05-06 01:21:27,015 INFO [train.py:715] (6/8) Epoch 8, batch 0, loss[loss=0.1667, simple_loss=0.2501, pruned_loss=0.0416, over 4955.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2501, pruned_loss=0.0416, over 4955.00 frames.], batch size: 35, lr: 2.69e-04 +2022-05-06 01:22:06,302 INFO [train.py:715] (6/8) Epoch 8, batch 50, loss[loss=0.1335, simple_loss=0.2107, pruned_loss=0.0281, over 4970.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2177, pruned_loss=0.03768, over 219626.35 frames.], batch size: 15, lr: 2.69e-04 +2022-05-06 01:22:47,072 INFO [train.py:715] (6/8) Epoch 8, batch 100, loss[loss=0.1425, simple_loss=0.2154, pruned_loss=0.03483, over 4978.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03585, over 386217.92 frames.], batch size: 24, lr: 2.69e-04 +2022-05-06 01:23:26,804 INFO [train.py:715] (6/8) Epoch 8, batch 150, loss[loss=0.1417, 
simple_loss=0.2088, pruned_loss=0.03727, over 4878.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.03547, over 516263.12 frames.], batch size: 22, lr: 2.69e-04 +2022-05-06 01:24:07,308 INFO [train.py:715] (6/8) Epoch 8, batch 200, loss[loss=0.1006, simple_loss=0.1641, pruned_loss=0.01856, over 4810.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2163, pruned_loss=0.03621, over 617612.27 frames.], batch size: 12, lr: 2.69e-04 +2022-05-06 01:24:47,117 INFO [train.py:715] (6/8) Epoch 8, batch 250, loss[loss=0.1314, simple_loss=0.2084, pruned_loss=0.02721, over 4941.00 frames.], tot_loss[loss=0.1442, simple_loss=0.216, pruned_loss=0.03618, over 696051.88 frames.], batch size: 23, lr: 2.69e-04 +2022-05-06 01:25:27,377 INFO [train.py:715] (6/8) Epoch 8, batch 300, loss[loss=0.1792, simple_loss=0.2429, pruned_loss=0.05777, over 4985.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.03656, over 757117.54 frames.], batch size: 39, lr: 2.69e-04 +2022-05-06 01:26:07,155 INFO [train.py:715] (6/8) Epoch 8, batch 350, loss[loss=0.1706, simple_loss=0.2352, pruned_loss=0.05301, over 4715.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2174, pruned_loss=0.03647, over 805129.70 frames.], batch size: 15, lr: 2.69e-04 +2022-05-06 01:26:46,038 INFO [train.py:715] (6/8) Epoch 8, batch 400, loss[loss=0.1584, simple_loss=0.2372, pruned_loss=0.03981, over 4912.00 frames.], tot_loss[loss=0.1461, simple_loss=0.218, pruned_loss=0.03705, over 842528.12 frames.], batch size: 18, lr: 2.69e-04 +2022-05-06 01:27:26,636 INFO [train.py:715] (6/8) Epoch 8, batch 450, loss[loss=0.1568, simple_loss=0.2328, pruned_loss=0.04039, over 4974.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2183, pruned_loss=0.03698, over 871438.04 frames.], batch size: 25, lr: 2.69e-04 +2022-05-06 01:28:06,609 INFO [train.py:715] (6/8) Epoch 8, batch 500, loss[loss=0.1294, simple_loss=0.1916, pruned_loss=0.0336, over 4919.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2182, pruned_loss=0.03679, over 894767.11 frames.], batch size: 18, lr: 2.69e-04 +2022-05-06 01:28:47,253 INFO [train.py:715] (6/8) Epoch 8, batch 550, loss[loss=0.1613, simple_loss=0.2139, pruned_loss=0.05433, over 4793.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03651, over 912620.54 frames.], batch size: 12, lr: 2.69e-04 +2022-05-06 01:29:26,937 INFO [train.py:715] (6/8) Epoch 8, batch 600, loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.02856, over 4897.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2167, pruned_loss=0.03617, over 925940.67 frames.], batch size: 19, lr: 2.69e-04 +2022-05-06 01:30:07,132 INFO [train.py:715] (6/8) Epoch 8, batch 650, loss[loss=0.147, simple_loss=0.2186, pruned_loss=0.03771, over 4861.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2167, pruned_loss=0.03657, over 936724.61 frames.], batch size: 20, lr: 2.68e-04 +2022-05-06 01:30:47,389 INFO [train.py:715] (6/8) Epoch 8, batch 700, loss[loss=0.1274, simple_loss=0.2059, pruned_loss=0.02439, over 4815.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2164, pruned_loss=0.03661, over 944216.22 frames.], batch size: 25, lr: 2.68e-04 +2022-05-06 01:31:27,087 INFO [train.py:715] (6/8) Epoch 8, batch 750, loss[loss=0.1406, simple_loss=0.2115, pruned_loss=0.0349, over 4802.00 frames.], tot_loss[loss=0.145, simple_loss=0.2166, pruned_loss=0.03664, over 950870.36 frames.], batch size: 21, lr: 2.68e-04 +2022-05-06 01:32:07,147 INFO [train.py:715] (6/8) Epoch 8, batch 800, loss[loss=0.143, simple_loss=0.2113, pruned_loss=0.03733, over 
4937.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2162, pruned_loss=0.03672, over 955654.68 frames.], batch size: 18, lr: 2.68e-04 +2022-05-06 01:32:47,137 INFO [train.py:715] (6/8) Epoch 8, batch 850, loss[loss=0.1474, simple_loss=0.2141, pruned_loss=0.04039, over 4746.00 frames.], tot_loss[loss=0.144, simple_loss=0.2158, pruned_loss=0.03607, over 958999.92 frames.], batch size: 16, lr: 2.68e-04 +2022-05-06 01:33:28,550 INFO [train.py:715] (6/8) Epoch 8, batch 900, loss[loss=0.1558, simple_loss=0.2217, pruned_loss=0.04495, over 4897.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2158, pruned_loss=0.03605, over 961621.66 frames.], batch size: 17, lr: 2.68e-04 +2022-05-06 01:34:08,657 INFO [train.py:715] (6/8) Epoch 8, batch 950, loss[loss=0.11, simple_loss=0.1771, pruned_loss=0.02141, over 4806.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2164, pruned_loss=0.03641, over 964281.33 frames.], batch size: 12, lr: 2.68e-04 +2022-05-06 01:34:49,700 INFO [train.py:715] (6/8) Epoch 8, batch 1000, loss[loss=0.1199, simple_loss=0.1816, pruned_loss=0.02908, over 4895.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2164, pruned_loss=0.03625, over 966099.40 frames.], batch size: 22, lr: 2.68e-04 +2022-05-06 01:35:30,789 INFO [train.py:715] (6/8) Epoch 8, batch 1050, loss[loss=0.1475, simple_loss=0.2194, pruned_loss=0.03784, over 4803.00 frames.], tot_loss[loss=0.145, simple_loss=0.2166, pruned_loss=0.03671, over 967785.46 frames.], batch size: 24, lr: 2.68e-04 +2022-05-06 01:36:11,908 INFO [train.py:715] (6/8) Epoch 8, batch 1100, loss[loss=0.1749, simple_loss=0.2394, pruned_loss=0.05519, over 4916.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2169, pruned_loss=0.03715, over 968339.96 frames.], batch size: 39, lr: 2.68e-04 +2022-05-06 01:36:52,406 INFO [train.py:715] (6/8) Epoch 8, batch 1150, loss[loss=0.1405, simple_loss=0.205, pruned_loss=0.03798, over 4973.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2165, pruned_loss=0.03687, over 969181.81 frames.], batch size: 15, lr: 2.68e-04 +2022-05-06 01:37:33,439 INFO [train.py:715] (6/8) Epoch 8, batch 1200, loss[loss=0.1609, simple_loss=0.2278, pruned_loss=0.04699, over 4779.00 frames.], tot_loss[loss=0.1446, simple_loss=0.216, pruned_loss=0.03657, over 969222.36 frames.], batch size: 17, lr: 2.68e-04 +2022-05-06 01:38:14,755 INFO [train.py:715] (6/8) Epoch 8, batch 1250, loss[loss=0.1234, simple_loss=0.1951, pruned_loss=0.02588, over 4821.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2153, pruned_loss=0.03606, over 969408.69 frames.], batch size: 27, lr: 2.68e-04 +2022-05-06 01:38:55,097 INFO [train.py:715] (6/8) Epoch 8, batch 1300, loss[loss=0.1307, simple_loss=0.1919, pruned_loss=0.03472, over 4848.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2148, pruned_loss=0.03595, over 969792.32 frames.], batch size: 13, lr: 2.68e-04 +2022-05-06 01:39:36,450 INFO [train.py:715] (6/8) Epoch 8, batch 1350, loss[loss=0.1388, simple_loss=0.2105, pruned_loss=0.0335, over 4764.00 frames.], tot_loss[loss=0.1435, simple_loss=0.215, pruned_loss=0.03605, over 970186.94 frames.], batch size: 19, lr: 2.68e-04 +2022-05-06 01:40:17,099 INFO [train.py:715] (6/8) Epoch 8, batch 1400, loss[loss=0.1396, simple_loss=0.1996, pruned_loss=0.03976, over 4970.00 frames.], tot_loss[loss=0.1426, simple_loss=0.214, pruned_loss=0.03557, over 970836.05 frames.], batch size: 28, lr: 2.68e-04 +2022-05-06 01:40:57,934 INFO [train.py:715] (6/8) Epoch 8, batch 1450, loss[loss=0.1319, simple_loss=0.2151, pruned_loss=0.02434, over 4894.00 frames.], tot_loss[loss=0.1433, 
simple_loss=0.2152, pruned_loss=0.03574, over 970587.31 frames.], batch size: 22, lr: 2.68e-04 +2022-05-06 01:41:37,785 INFO [train.py:715] (6/8) Epoch 8, batch 1500, loss[loss=0.1786, simple_loss=0.2348, pruned_loss=0.06125, over 4785.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2158, pruned_loss=0.03572, over 970966.37 frames.], batch size: 14, lr: 2.68e-04 +2022-05-06 01:42:20,412 INFO [train.py:715] (6/8) Epoch 8, batch 1550, loss[loss=0.1395, simple_loss=0.2169, pruned_loss=0.03104, over 4783.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2166, pruned_loss=0.0358, over 971306.95 frames.], batch size: 17, lr: 2.68e-04 +2022-05-06 01:43:00,534 INFO [train.py:715] (6/8) Epoch 8, batch 1600, loss[loss=0.1256, simple_loss=0.2003, pruned_loss=0.02549, over 4755.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2171, pruned_loss=0.03594, over 971264.94 frames.], batch size: 19, lr: 2.68e-04 +2022-05-06 01:43:39,976 INFO [train.py:715] (6/8) Epoch 8, batch 1650, loss[loss=0.1246, simple_loss=0.1882, pruned_loss=0.0305, over 4802.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2171, pruned_loss=0.0362, over 971231.97 frames.], batch size: 12, lr: 2.68e-04 +2022-05-06 01:44:20,197 INFO [train.py:715] (6/8) Epoch 8, batch 1700, loss[loss=0.1436, simple_loss=0.2149, pruned_loss=0.03615, over 4984.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2178, pruned_loss=0.03675, over 973067.89 frames.], batch size: 24, lr: 2.68e-04 +2022-05-06 01:44:59,609 INFO [train.py:715] (6/8) Epoch 8, batch 1750, loss[loss=0.1292, simple_loss=0.1983, pruned_loss=0.03009, over 4893.00 frames.], tot_loss[loss=0.1461, simple_loss=0.218, pruned_loss=0.03703, over 972020.70 frames.], batch size: 19, lr: 2.68e-04 +2022-05-06 01:45:39,058 INFO [train.py:715] (6/8) Epoch 8, batch 1800, loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02979, over 4900.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2172, pruned_loss=0.03698, over 972134.10 frames.], batch size: 22, lr: 2.68e-04 +2022-05-06 01:46:18,115 INFO [train.py:715] (6/8) Epoch 8, batch 1850, loss[loss=0.1853, simple_loss=0.2364, pruned_loss=0.06714, over 4988.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2169, pruned_loss=0.03684, over 972406.40 frames.], batch size: 14, lr: 2.68e-04 +2022-05-06 01:46:57,517 INFO [train.py:715] (6/8) Epoch 8, batch 1900, loss[loss=0.1173, simple_loss=0.1958, pruned_loss=0.0194, over 4914.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2168, pruned_loss=0.03651, over 972900.00 frames.], batch size: 23, lr: 2.68e-04 +2022-05-06 01:47:37,010 INFO [train.py:715] (6/8) Epoch 8, batch 1950, loss[loss=0.1479, simple_loss=0.222, pruned_loss=0.03688, over 4867.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2164, pruned_loss=0.03626, over 972977.00 frames.], batch size: 32, lr: 2.68e-04 +2022-05-06 01:48:16,132 INFO [train.py:715] (6/8) Epoch 8, batch 2000, loss[loss=0.138, simple_loss=0.2195, pruned_loss=0.02826, over 4919.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2166, pruned_loss=0.03625, over 973741.89 frames.], batch size: 29, lr: 2.68e-04 +2022-05-06 01:48:56,144 INFO [train.py:715] (6/8) Epoch 8, batch 2050, loss[loss=0.1769, simple_loss=0.2365, pruned_loss=0.05871, over 4854.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2163, pruned_loss=0.03609, over 973413.74 frames.], batch size: 38, lr: 2.68e-04 +2022-05-06 01:49:35,102 INFO [train.py:715] (6/8) Epoch 8, batch 2100, loss[loss=0.1325, simple_loss=0.209, pruned_loss=0.02801, over 4798.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2158, 
pruned_loss=0.03593, over 973702.08 frames.], batch size: 24, lr: 2.68e-04 +2022-05-06 01:50:14,051 INFO [train.py:715] (6/8) Epoch 8, batch 2150, loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02802, over 4904.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03587, over 972362.91 frames.], batch size: 17, lr: 2.68e-04 +2022-05-06 01:50:53,035 INFO [train.py:715] (6/8) Epoch 8, batch 2200, loss[loss=0.1158, simple_loss=0.1859, pruned_loss=0.0229, over 4972.00 frames.], tot_loss[loss=0.144, simple_loss=0.2163, pruned_loss=0.03587, over 971960.35 frames.], batch size: 14, lr: 2.68e-04 +2022-05-06 01:51:32,662 INFO [train.py:715] (6/8) Epoch 8, batch 2250, loss[loss=0.1297, simple_loss=0.2016, pruned_loss=0.02889, over 4789.00 frames.], tot_loss[loss=0.1444, simple_loss=0.217, pruned_loss=0.03587, over 972240.25 frames.], batch size: 14, lr: 2.68e-04 +2022-05-06 01:52:12,077 INFO [train.py:715] (6/8) Epoch 8, batch 2300, loss[loss=0.1592, simple_loss=0.2255, pruned_loss=0.04645, over 4868.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2165, pruned_loss=0.03591, over 972020.78 frames.], batch size: 32, lr: 2.68e-04 +2022-05-06 01:52:50,785 INFO [train.py:715] (6/8) Epoch 8, batch 2350, loss[loss=0.1226, simple_loss=0.1876, pruned_loss=0.02881, over 4788.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03596, over 972036.15 frames.], batch size: 18, lr: 2.68e-04 +2022-05-06 01:53:30,837 INFO [train.py:715] (6/8) Epoch 8, batch 2400, loss[loss=0.16, simple_loss=0.2291, pruned_loss=0.04543, over 4914.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03636, over 971262.04 frames.], batch size: 19, lr: 2.68e-04 +2022-05-06 01:54:10,339 INFO [train.py:715] (6/8) Epoch 8, batch 2450, loss[loss=0.1425, simple_loss=0.2056, pruned_loss=0.03973, over 4991.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2172, pruned_loss=0.03656, over 971071.23 frames.], batch size: 14, lr: 2.68e-04 +2022-05-06 01:54:49,895 INFO [train.py:715] (6/8) Epoch 8, batch 2500, loss[loss=0.1555, simple_loss=0.2377, pruned_loss=0.03664, over 4742.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2183, pruned_loss=0.03691, over 971888.87 frames.], batch size: 16, lr: 2.68e-04 +2022-05-06 01:55:28,675 INFO [train.py:715] (6/8) Epoch 8, batch 2550, loss[loss=0.135, simple_loss=0.2206, pruned_loss=0.02475, over 4938.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2178, pruned_loss=0.03675, over 972666.08 frames.], batch size: 23, lr: 2.68e-04 +2022-05-06 01:56:08,300 INFO [train.py:715] (6/8) Epoch 8, batch 2600, loss[loss=0.1506, simple_loss=0.2326, pruned_loss=0.03426, over 4873.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2186, pruned_loss=0.03731, over 972836.27 frames.], batch size: 22, lr: 2.68e-04 +2022-05-06 01:56:47,552 INFO [train.py:715] (6/8) Epoch 8, batch 2650, loss[loss=0.1465, simple_loss=0.2233, pruned_loss=0.03481, over 4835.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2184, pruned_loss=0.03748, over 973102.30 frames.], batch size: 15, lr: 2.68e-04 +2022-05-06 01:57:27,037 INFO [train.py:715] (6/8) Epoch 8, batch 2700, loss[loss=0.1279, simple_loss=0.205, pruned_loss=0.02537, over 4905.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2184, pruned_loss=0.0374, over 972710.51 frames.], batch size: 19, lr: 2.68e-04 +2022-05-06 01:58:06,373 INFO [train.py:715] (6/8) Epoch 8, batch 2750, loss[loss=0.1529, simple_loss=0.2249, pruned_loss=0.04048, over 4974.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03705, over 972577.55 
frames.], batch size: 21, lr: 2.67e-04 +2022-05-06 01:58:45,750 INFO [train.py:715] (6/8) Epoch 8, batch 2800, loss[loss=0.169, simple_loss=0.2377, pruned_loss=0.05009, over 4697.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2181, pruned_loss=0.03705, over 972958.20 frames.], batch size: 15, lr: 2.67e-04 +2022-05-06 01:59:25,001 INFO [train.py:715] (6/8) Epoch 8, batch 2850, loss[loss=0.1472, simple_loss=0.2208, pruned_loss=0.03683, over 4903.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2173, pruned_loss=0.03674, over 973047.85 frames.], batch size: 17, lr: 2.67e-04 +2022-05-06 02:00:03,842 INFO [train.py:715] (6/8) Epoch 8, batch 2900, loss[loss=0.1271, simple_loss=0.1991, pruned_loss=0.02754, over 4970.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2164, pruned_loss=0.0366, over 972307.02 frames.], batch size: 15, lr: 2.67e-04 +2022-05-06 02:00:43,808 INFO [train.py:715] (6/8) Epoch 8, batch 2950, loss[loss=0.1669, simple_loss=0.2382, pruned_loss=0.04777, over 4961.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2162, pruned_loss=0.03646, over 971204.10 frames.], batch size: 24, lr: 2.67e-04 +2022-05-06 02:01:22,468 INFO [train.py:715] (6/8) Epoch 8, batch 3000, loss[loss=0.1453, simple_loss=0.2142, pruned_loss=0.0382, over 4869.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2162, pruned_loss=0.03611, over 971297.18 frames.], batch size: 22, lr: 2.67e-04 +2022-05-06 02:01:22,468 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 02:01:32,130 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1076, simple_loss=0.1923, pruned_loss=0.0115, over 914524.00 frames. +2022-05-06 02:02:11,365 INFO [train.py:715] (6/8) Epoch 8, batch 3050, loss[loss=0.1479, simple_loss=0.2187, pruned_loss=0.0386, over 4816.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2166, pruned_loss=0.0363, over 972551.09 frames.], batch size: 21, lr: 2.67e-04 +2022-05-06 02:02:50,368 INFO [train.py:715] (6/8) Epoch 8, batch 3100, loss[loss=0.1724, simple_loss=0.2348, pruned_loss=0.05506, over 4833.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2169, pruned_loss=0.0362, over 972304.96 frames.], batch size: 15, lr: 2.67e-04 +2022-05-06 02:03:29,324 INFO [train.py:715] (6/8) Epoch 8, batch 3150, loss[loss=0.146, simple_loss=0.227, pruned_loss=0.03253, over 4835.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2179, pruned_loss=0.03669, over 972973.67 frames.], batch size: 15, lr: 2.67e-04 +2022-05-06 02:04:09,016 INFO [train.py:715] (6/8) Epoch 8, batch 3200, loss[loss=0.14, simple_loss=0.2147, pruned_loss=0.03264, over 4821.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2191, pruned_loss=0.03668, over 973296.15 frames.], batch size: 26, lr: 2.67e-04 +2022-05-06 02:04:48,448 INFO [train.py:715] (6/8) Epoch 8, batch 3250, loss[loss=0.1239, simple_loss=0.2035, pruned_loss=0.02216, over 4923.00 frames.], tot_loss[loss=0.146, simple_loss=0.2187, pruned_loss=0.03668, over 972483.35 frames.], batch size: 29, lr: 2.67e-04 +2022-05-06 02:05:28,482 INFO [train.py:715] (6/8) Epoch 8, batch 3300, loss[loss=0.1494, simple_loss=0.2232, pruned_loss=0.03781, over 4831.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2186, pruned_loss=0.03705, over 971852.23 frames.], batch size: 30, lr: 2.67e-04 +2022-05-06 02:06:08,841 INFO [train.py:715] (6/8) Epoch 8, batch 3350, loss[loss=0.1611, simple_loss=0.2326, pruned_loss=0.04479, over 4902.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2175, pruned_loss=0.0365, over 972893.36 frames.], batch size: 17, lr: 2.67e-04 +2022-05-06 02:06:49,936 INFO [train.py:715] (6/8) 
Epoch 8, batch 3400, loss[loss=0.1398, simple_loss=0.2132, pruned_loss=0.0332, over 4831.00 frames.], tot_loss[loss=0.1459, simple_loss=0.218, pruned_loss=0.03695, over 972903.14 frames.], batch size: 13, lr: 2.67e-04 +2022-05-06 02:07:30,808 INFO [train.py:715] (6/8) Epoch 8, batch 3450, loss[loss=0.1366, simple_loss=0.2027, pruned_loss=0.03527, over 4792.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2173, pruned_loss=0.03653, over 972376.42 frames.], batch size: 14, lr: 2.67e-04 +2022-05-06 02:08:11,011 INFO [train.py:715] (6/8) Epoch 8, batch 3500, loss[loss=0.1496, simple_loss=0.2184, pruned_loss=0.04037, over 4969.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2181, pruned_loss=0.03718, over 971394.30 frames.], batch size: 35, lr: 2.67e-04 +2022-05-06 02:08:52,349 INFO [train.py:715] (6/8) Epoch 8, batch 3550, loss[loss=0.1112, simple_loss=0.1913, pruned_loss=0.01552, over 4897.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2184, pruned_loss=0.03723, over 971831.57 frames.], batch size: 19, lr: 2.67e-04 +2022-05-06 02:09:33,204 INFO [train.py:715] (6/8) Epoch 8, batch 3600, loss[loss=0.1432, simple_loss=0.2094, pruned_loss=0.0385, over 4766.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2168, pruned_loss=0.03667, over 972208.04 frames.], batch size: 19, lr: 2.67e-04 +2022-05-06 02:10:13,457 INFO [train.py:715] (6/8) Epoch 8, batch 3650, loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02911, over 4857.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2168, pruned_loss=0.03652, over 971516.75 frames.], batch size: 20, lr: 2.67e-04 +2022-05-06 02:10:53,936 INFO [train.py:715] (6/8) Epoch 8, batch 3700, loss[loss=0.1618, simple_loss=0.2333, pruned_loss=0.04519, over 4928.00 frames.], tot_loss[loss=0.1447, simple_loss=0.217, pruned_loss=0.03619, over 971217.10 frames.], batch size: 23, lr: 2.67e-04 +2022-05-06 02:11:34,281 INFO [train.py:715] (6/8) Epoch 8, batch 3750, loss[loss=0.1399, simple_loss=0.2253, pruned_loss=0.02722, over 4885.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2168, pruned_loss=0.03616, over 971642.71 frames.], batch size: 39, lr: 2.67e-04 +2022-05-06 02:12:13,643 INFO [train.py:715] (6/8) Epoch 8, batch 3800, loss[loss=0.1489, simple_loss=0.2159, pruned_loss=0.04097, over 4932.00 frames.], tot_loss[loss=0.1446, simple_loss=0.217, pruned_loss=0.0361, over 971336.52 frames.], batch size: 29, lr: 2.67e-04 +2022-05-06 02:12:54,033 INFO [train.py:715] (6/8) Epoch 8, batch 3850, loss[loss=0.1264, simple_loss=0.1981, pruned_loss=0.02732, over 4817.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2167, pruned_loss=0.03624, over 971546.98 frames.], batch size: 12, lr: 2.67e-04 +2022-05-06 02:13:34,222 INFO [train.py:715] (6/8) Epoch 8, batch 3900, loss[loss=0.1292, simple_loss=0.1984, pruned_loss=0.02995, over 4989.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2163, pruned_loss=0.03592, over 971708.77 frames.], batch size: 28, lr: 2.67e-04 +2022-05-06 02:14:14,994 INFO [train.py:715] (6/8) Epoch 8, batch 3950, loss[loss=0.1495, simple_loss=0.2221, pruned_loss=0.03841, over 4778.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.03544, over 972489.05 frames.], batch size: 18, lr: 2.67e-04 +2022-05-06 02:14:54,904 INFO [train.py:715] (6/8) Epoch 8, batch 4000, loss[loss=0.1087, simple_loss=0.1805, pruned_loss=0.01846, over 4966.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.03541, over 972467.55 frames.], batch size: 14, lr: 2.67e-04 +2022-05-06 02:15:35,362 INFO [train.py:715] (6/8) Epoch 8, batch 4050, 
loss[loss=0.1412, simple_loss=0.2209, pruned_loss=0.0307, over 4874.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2155, pruned_loss=0.03575, over 972434.60 frames.], batch size: 20, lr: 2.67e-04 +2022-05-06 02:16:16,175 INFO [train.py:715] (6/8) Epoch 8, batch 4100, loss[loss=0.1051, simple_loss=0.176, pruned_loss=0.01714, over 4773.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2146, pruned_loss=0.03534, over 971593.75 frames.], batch size: 12, lr: 2.67e-04 +2022-05-06 02:16:55,925 INFO [train.py:715] (6/8) Epoch 8, batch 4150, loss[loss=0.1727, simple_loss=0.2405, pruned_loss=0.05244, over 4848.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2145, pruned_loss=0.03554, over 970810.92 frames.], batch size: 32, lr: 2.67e-04 +2022-05-06 02:17:35,661 INFO [train.py:715] (6/8) Epoch 8, batch 4200, loss[loss=0.1632, simple_loss=0.2265, pruned_loss=0.04998, over 4868.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2143, pruned_loss=0.0355, over 971373.97 frames.], batch size: 20, lr: 2.67e-04 +2022-05-06 02:18:15,238 INFO [train.py:715] (6/8) Epoch 8, batch 4250, loss[loss=0.1544, simple_loss=0.2237, pruned_loss=0.04255, over 4692.00 frames.], tot_loss[loss=0.1436, simple_loss=0.215, pruned_loss=0.03607, over 970942.69 frames.], batch size: 15, lr: 2.67e-04 +2022-05-06 02:18:54,990 INFO [train.py:715] (6/8) Epoch 8, batch 4300, loss[loss=0.1377, simple_loss=0.2176, pruned_loss=0.0289, over 4959.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2157, pruned_loss=0.03678, over 970857.54 frames.], batch size: 24, lr: 2.67e-04 +2022-05-06 02:19:34,154 INFO [train.py:715] (6/8) Epoch 8, batch 4350, loss[loss=0.1294, simple_loss=0.2071, pruned_loss=0.02583, over 4978.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.03606, over 971848.57 frames.], batch size: 15, lr: 2.67e-04 +2022-05-06 02:20:13,547 INFO [train.py:715] (6/8) Epoch 8, batch 4400, loss[loss=0.1667, simple_loss=0.2337, pruned_loss=0.04985, over 4770.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2174, pruned_loss=0.03669, over 972587.63 frames.], batch size: 14, lr: 2.67e-04 +2022-05-06 02:20:53,466 INFO [train.py:715] (6/8) Epoch 8, batch 4450, loss[loss=0.1643, simple_loss=0.239, pruned_loss=0.04479, over 4801.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2179, pruned_loss=0.03661, over 972393.06 frames.], batch size: 14, lr: 2.67e-04 +2022-05-06 02:21:33,242 INFO [train.py:715] (6/8) Epoch 8, batch 4500, loss[loss=0.1175, simple_loss=0.1898, pruned_loss=0.02255, over 4763.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2178, pruned_loss=0.03637, over 971995.07 frames.], batch size: 12, lr: 2.67e-04 +2022-05-06 02:22:12,210 INFO [train.py:715] (6/8) Epoch 8, batch 4550, loss[loss=0.1185, simple_loss=0.1953, pruned_loss=0.02083, over 4767.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2173, pruned_loss=0.03639, over 971789.88 frames.], batch size: 18, lr: 2.67e-04 +2022-05-06 02:22:52,192 INFO [train.py:715] (6/8) Epoch 8, batch 4600, loss[loss=0.1278, simple_loss=0.197, pruned_loss=0.02924, over 4987.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2176, pruned_loss=0.03639, over 971305.77 frames.], batch size: 14, lr: 2.67e-04 +2022-05-06 02:23:31,723 INFO [train.py:715] (6/8) Epoch 8, batch 4650, loss[loss=0.1881, simple_loss=0.2488, pruned_loss=0.06364, over 4818.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2178, pruned_loss=0.03645, over 971878.27 frames.], batch size: 27, lr: 2.67e-04 +2022-05-06 02:24:11,304 INFO [train.py:715] (6/8) Epoch 8, batch 4700, loss[loss=0.1413, simple_loss=0.2146, 
pruned_loss=0.03393, over 4740.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2173, pruned_loss=0.03597, over 971730.15 frames.], batch size: 16, lr: 2.67e-04 +2022-05-06 02:24:50,832 INFO [train.py:715] (6/8) Epoch 8, batch 4750, loss[loss=0.1271, simple_loss=0.1944, pruned_loss=0.02991, over 4873.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2165, pruned_loss=0.03582, over 972022.93 frames.], batch size: 16, lr: 2.67e-04 +2022-05-06 02:25:30,509 INFO [train.py:715] (6/8) Epoch 8, batch 4800, loss[loss=0.1686, simple_loss=0.2361, pruned_loss=0.05053, over 4853.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2165, pruned_loss=0.03623, over 972397.59 frames.], batch size: 30, lr: 2.67e-04 +2022-05-06 02:26:10,393 INFO [train.py:715] (6/8) Epoch 8, batch 4850, loss[loss=0.1352, simple_loss=0.2021, pruned_loss=0.03415, over 4939.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2159, pruned_loss=0.03638, over 972896.23 frames.], batch size: 35, lr: 2.66e-04 +2022-05-06 02:26:49,517 INFO [train.py:715] (6/8) Epoch 8, batch 4900, loss[loss=0.169, simple_loss=0.2491, pruned_loss=0.04439, over 4987.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2166, pruned_loss=0.03628, over 972991.79 frames.], batch size: 25, lr: 2.66e-04 +2022-05-06 02:27:29,280 INFO [train.py:715] (6/8) Epoch 8, batch 4950, loss[loss=0.1388, simple_loss=0.2122, pruned_loss=0.03272, over 4852.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2166, pruned_loss=0.03636, over 973415.95 frames.], batch size: 32, lr: 2.66e-04 +2022-05-06 02:28:08,945 INFO [train.py:715] (6/8) Epoch 8, batch 5000, loss[loss=0.1471, simple_loss=0.2227, pruned_loss=0.03571, over 4906.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2161, pruned_loss=0.03601, over 972481.74 frames.], batch size: 19, lr: 2.66e-04 +2022-05-06 02:28:47,817 INFO [train.py:715] (6/8) Epoch 8, batch 5050, loss[loss=0.1249, simple_loss=0.2058, pruned_loss=0.02194, over 4830.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2157, pruned_loss=0.03586, over 973054.70 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:29:26,965 INFO [train.py:715] (6/8) Epoch 8, batch 5100, loss[loss=0.1691, simple_loss=0.2401, pruned_loss=0.04906, over 4880.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2154, pruned_loss=0.03567, over 973076.68 frames.], batch size: 16, lr: 2.66e-04 +2022-05-06 02:30:06,431 INFO [train.py:715] (6/8) Epoch 8, batch 5150, loss[loss=0.1472, simple_loss=0.2151, pruned_loss=0.03962, over 4801.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2155, pruned_loss=0.03574, over 973183.07 frames.], batch size: 25, lr: 2.66e-04 +2022-05-06 02:30:45,351 INFO [train.py:715] (6/8) Epoch 8, batch 5200, loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02874, over 4953.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2149, pruned_loss=0.03517, over 973683.91 frames.], batch size: 21, lr: 2.66e-04 +2022-05-06 02:31:24,028 INFO [train.py:715] (6/8) Epoch 8, batch 5250, loss[loss=0.1358, simple_loss=0.2025, pruned_loss=0.03451, over 4824.00 frames.], tot_loss[loss=0.143, simple_loss=0.2148, pruned_loss=0.03558, over 973016.57 frames.], batch size: 12, lr: 2.66e-04 +2022-05-06 02:32:04,137 INFO [train.py:715] (6/8) Epoch 8, batch 5300, loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.03265, over 4963.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2148, pruned_loss=0.03516, over 973231.86 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:32:43,760 INFO [train.py:715] (6/8) Epoch 8, batch 5350, loss[loss=0.1268, simple_loss=0.2069, pruned_loss=0.02333, over 4862.00 
frames.], tot_loss[loss=0.1425, simple_loss=0.2147, pruned_loss=0.03516, over 973551.19 frames.], batch size: 20, lr: 2.66e-04 +2022-05-06 02:33:23,713 INFO [train.py:715] (6/8) Epoch 8, batch 5400, loss[loss=0.1314, simple_loss=0.196, pruned_loss=0.03343, over 4979.00 frames.], tot_loss[loss=0.143, simple_loss=0.2151, pruned_loss=0.03543, over 973481.14 frames.], batch size: 28, lr: 2.66e-04 +2022-05-06 02:34:04,202 INFO [train.py:715] (6/8) Epoch 8, batch 5450, loss[loss=0.1389, simple_loss=0.1976, pruned_loss=0.04014, over 4731.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.03549, over 973323.95 frames.], batch size: 16, lr: 2.66e-04 +2022-05-06 02:34:44,697 INFO [train.py:715] (6/8) Epoch 8, batch 5500, loss[loss=0.1573, simple_loss=0.2266, pruned_loss=0.04404, over 4853.00 frames.], tot_loss[loss=0.144, simple_loss=0.2161, pruned_loss=0.03599, over 973093.67 frames.], batch size: 20, lr: 2.66e-04 +2022-05-06 02:35:24,991 INFO [train.py:715] (6/8) Epoch 8, batch 5550, loss[loss=0.1219, simple_loss=0.1902, pruned_loss=0.02683, over 4844.00 frames.], tot_loss[loss=0.1438, simple_loss=0.216, pruned_loss=0.03585, over 972361.88 frames.], batch size: 34, lr: 2.66e-04 +2022-05-06 02:36:04,814 INFO [train.py:715] (6/8) Epoch 8, batch 5600, loss[loss=0.1492, simple_loss=0.2267, pruned_loss=0.03589, over 4874.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03618, over 972563.93 frames.], batch size: 16, lr: 2.66e-04 +2022-05-06 02:36:44,879 INFO [train.py:715] (6/8) Epoch 8, batch 5650, loss[loss=0.1469, simple_loss=0.2223, pruned_loss=0.03579, over 4773.00 frames.], tot_loss[loss=0.144, simple_loss=0.2165, pruned_loss=0.03573, over 972395.98 frames.], batch size: 14, lr: 2.66e-04 +2022-05-06 02:37:24,004 INFO [train.py:715] (6/8) Epoch 8, batch 5700, loss[loss=0.1338, simple_loss=0.2047, pruned_loss=0.03144, over 4891.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2159, pruned_loss=0.03558, over 972516.73 frames.], batch size: 22, lr: 2.66e-04 +2022-05-06 02:38:03,519 INFO [train.py:715] (6/8) Epoch 8, batch 5750, loss[loss=0.1319, simple_loss=0.2039, pruned_loss=0.02999, over 4888.00 frames.], tot_loss[loss=0.1426, simple_loss=0.215, pruned_loss=0.03511, over 972325.33 frames.], batch size: 22, lr: 2.66e-04 +2022-05-06 02:38:42,305 INFO [train.py:715] (6/8) Epoch 8, batch 5800, loss[loss=0.1539, simple_loss=0.2255, pruned_loss=0.04118, over 4907.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2143, pruned_loss=0.03492, over 971691.52 frames.], batch size: 18, lr: 2.66e-04 +2022-05-06 02:39:21,801 INFO [train.py:715] (6/8) Epoch 8, batch 5850, loss[loss=0.1455, simple_loss=0.1984, pruned_loss=0.04637, over 4856.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03507, over 971124.69 frames.], batch size: 20, lr: 2.66e-04 +2022-05-06 02:40:00,588 INFO [train.py:715] (6/8) Epoch 8, batch 5900, loss[loss=0.1487, simple_loss=0.2265, pruned_loss=0.03548, over 4979.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2139, pruned_loss=0.03489, over 971744.80 frames.], batch size: 25, lr: 2.66e-04 +2022-05-06 02:40:40,152 INFO [train.py:715] (6/8) Epoch 8, batch 5950, loss[loss=0.1511, simple_loss=0.2222, pruned_loss=0.03994, over 4904.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2142, pruned_loss=0.03552, over 971728.05 frames.], batch size: 19, lr: 2.66e-04 +2022-05-06 02:41:20,036 INFO [train.py:715] (6/8) Epoch 8, batch 6000, loss[loss=0.1431, simple_loss=0.2073, pruned_loss=0.03946, over 4973.00 frames.], tot_loss[loss=0.1427, 
simple_loss=0.2142, pruned_loss=0.03557, over 971486.48 frames.], batch size: 35, lr: 2.66e-04 +2022-05-06 02:41:20,037 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 02:41:29,608 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1075, simple_loss=0.1921, pruned_loss=0.01146, over 914524.00 frames. +2022-05-06 02:42:09,074 INFO [train.py:715] (6/8) Epoch 8, batch 6050, loss[loss=0.1529, simple_loss=0.2276, pruned_loss=0.03916, over 4797.00 frames.], tot_loss[loss=0.143, simple_loss=0.215, pruned_loss=0.03553, over 971426.77 frames.], batch size: 21, lr: 2.66e-04 +2022-05-06 02:42:48,771 INFO [train.py:715] (6/8) Epoch 8, batch 6100, loss[loss=0.112, simple_loss=0.1861, pruned_loss=0.01896, over 4775.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2149, pruned_loss=0.0354, over 971076.15 frames.], batch size: 12, lr: 2.66e-04 +2022-05-06 02:43:28,440 INFO [train.py:715] (6/8) Epoch 8, batch 6150, loss[loss=0.1495, simple_loss=0.2157, pruned_loss=0.04168, over 4851.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2156, pruned_loss=0.03568, over 972274.46 frames.], batch size: 32, lr: 2.66e-04 +2022-05-06 02:44:09,003 INFO [train.py:715] (6/8) Epoch 8, batch 6200, loss[loss=0.1399, simple_loss=0.2097, pruned_loss=0.03502, over 4741.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.03542, over 972478.93 frames.], batch size: 16, lr: 2.66e-04 +2022-05-06 02:44:49,493 INFO [train.py:715] (6/8) Epoch 8, batch 6250, loss[loss=0.1236, simple_loss=0.2018, pruned_loss=0.02272, over 4839.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2152, pruned_loss=0.03572, over 971944.19 frames.], batch size: 20, lr: 2.66e-04 +2022-05-06 02:45:29,147 INFO [train.py:715] (6/8) Epoch 8, batch 6300, loss[loss=0.1563, simple_loss=0.2161, pruned_loss=0.04829, over 4916.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2146, pruned_loss=0.03533, over 972109.79 frames.], batch size: 18, lr: 2.66e-04 +2022-05-06 02:46:08,063 INFO [train.py:715] (6/8) Epoch 8, batch 6350, loss[loss=0.1515, simple_loss=0.2228, pruned_loss=0.04007, over 4788.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03589, over 972613.54 frames.], batch size: 18, lr: 2.66e-04 +2022-05-06 02:46:47,834 INFO [train.py:715] (6/8) Epoch 8, batch 6400, loss[loss=0.1494, simple_loss=0.224, pruned_loss=0.03742, over 4823.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2146, pruned_loss=0.03522, over 972718.85 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:47:27,068 INFO [train.py:715] (6/8) Epoch 8, batch 6450, loss[loss=0.132, simple_loss=0.2094, pruned_loss=0.02736, over 4978.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2151, pruned_loss=0.03582, over 972525.39 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:48:06,538 INFO [train.py:715] (6/8) Epoch 8, batch 6500, loss[loss=0.1714, simple_loss=0.2444, pruned_loss=0.04923, over 4961.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2151, pruned_loss=0.03602, over 972293.56 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:48:45,642 INFO [train.py:715] (6/8) Epoch 8, batch 6550, loss[loss=0.1251, simple_loss=0.1899, pruned_loss=0.03013, over 4815.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2152, pruned_loss=0.03587, over 972972.89 frames.], batch size: 13, lr: 2.66e-04 +2022-05-06 02:49:25,300 INFO [train.py:715] (6/8) Epoch 8, batch 6600, loss[loss=0.1424, simple_loss=0.2139, pruned_loss=0.03549, over 4985.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2156, pruned_loss=0.03579, over 973237.29 frames.], batch size: 28, 
lr: 2.66e-04 +2022-05-06 02:50:04,624 INFO [train.py:715] (6/8) Epoch 8, batch 6650, loss[loss=0.1548, simple_loss=0.2253, pruned_loss=0.04218, over 4867.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03587, over 972954.86 frames.], batch size: 16, lr: 2.66e-04 +2022-05-06 02:50:43,406 INFO [train.py:715] (6/8) Epoch 8, batch 6700, loss[loss=0.156, simple_loss=0.2203, pruned_loss=0.04588, over 4842.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2165, pruned_loss=0.03661, over 972402.25 frames.], batch size: 12, lr: 2.66e-04 +2022-05-06 02:51:23,633 INFO [train.py:715] (6/8) Epoch 8, batch 6750, loss[loss=0.1204, simple_loss=0.1979, pruned_loss=0.02144, over 4956.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2162, pruned_loss=0.03612, over 973060.93 frames.], batch size: 24, lr: 2.66e-04 +2022-05-06 02:52:03,057 INFO [train.py:715] (6/8) Epoch 8, batch 6800, loss[loss=0.1453, simple_loss=0.217, pruned_loss=0.03673, over 4793.00 frames.], tot_loss[loss=0.145, simple_loss=0.2174, pruned_loss=0.03634, over 972554.76 frames.], batch size: 24, lr: 2.66e-04 +2022-05-06 02:52:42,037 INFO [train.py:715] (6/8) Epoch 8, batch 6850, loss[loss=0.1292, simple_loss=0.2091, pruned_loss=0.02462, over 4824.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2173, pruned_loss=0.03621, over 973735.50 frames.], batch size: 25, lr: 2.66e-04 +2022-05-06 02:53:21,949 INFO [train.py:715] (6/8) Epoch 8, batch 6900, loss[loss=0.1537, simple_loss=0.2199, pruned_loss=0.04376, over 4765.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2172, pruned_loss=0.03611, over 971935.07 frames.], batch size: 18, lr: 2.66e-04 +2022-05-06 02:54:02,359 INFO [train.py:715] (6/8) Epoch 8, batch 6950, loss[loss=0.153, simple_loss=0.218, pruned_loss=0.04397, over 4733.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2174, pruned_loss=0.03646, over 971200.21 frames.], batch size: 16, lr: 2.66e-04 +2022-05-06 02:54:42,175 INFO [train.py:715] (6/8) Epoch 8, batch 7000, loss[loss=0.1317, simple_loss=0.2025, pruned_loss=0.03042, over 4882.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2176, pruned_loss=0.03671, over 971190.34 frames.], batch size: 32, lr: 2.65e-04 +2022-05-06 02:55:21,786 INFO [train.py:715] (6/8) Epoch 8, batch 7050, loss[loss=0.1378, simple_loss=0.1991, pruned_loss=0.03822, over 4880.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03671, over 972305.33 frames.], batch size: 16, lr: 2.65e-04 +2022-05-06 02:56:01,478 INFO [train.py:715] (6/8) Epoch 8, batch 7100, loss[loss=0.1595, simple_loss=0.2282, pruned_loss=0.0454, over 4919.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2173, pruned_loss=0.03661, over 971729.73 frames.], batch size: 39, lr: 2.65e-04 +2022-05-06 02:56:41,147 INFO [train.py:715] (6/8) Epoch 8, batch 7150, loss[loss=0.1182, simple_loss=0.191, pruned_loss=0.02271, over 4970.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03669, over 971622.18 frames.], batch size: 15, lr: 2.65e-04 +2022-05-06 02:57:20,446 INFO [train.py:715] (6/8) Epoch 8, batch 7200, loss[loss=0.1859, simple_loss=0.2684, pruned_loss=0.05169, over 4835.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03719, over 971496.42 frames.], batch size: 15, lr: 2.65e-04 +2022-05-06 02:57:59,449 INFO [train.py:715] (6/8) Epoch 8, batch 7250, loss[loss=0.1306, simple_loss=0.207, pruned_loss=0.02708, over 4968.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2178, pruned_loss=0.03688, over 971620.13 frames.], batch size: 14, lr: 2.65e-04 +2022-05-06 02:58:39,558 
INFO [train.py:715] (6/8) Epoch 8, batch 7300, loss[loss=0.1583, simple_loss=0.2305, pruned_loss=0.04303, over 4962.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2175, pruned_loss=0.03704, over 972375.59 frames.], batch size: 35, lr: 2.65e-04 +2022-05-06 02:59:18,934 INFO [train.py:715] (6/8) Epoch 8, batch 7350, loss[loss=0.1795, simple_loss=0.2416, pruned_loss=0.05866, over 4878.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2168, pruned_loss=0.03642, over 972307.57 frames.], batch size: 16, lr: 2.65e-04 +2022-05-06 02:59:58,526 INFO [train.py:715] (6/8) Epoch 8, batch 7400, loss[loss=0.1395, simple_loss=0.2111, pruned_loss=0.03393, over 4885.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03638, over 972464.30 frames.], batch size: 22, lr: 2.65e-04 +2022-05-06 03:00:38,459 INFO [train.py:715] (6/8) Epoch 8, batch 7450, loss[loss=0.1527, simple_loss=0.2155, pruned_loss=0.04492, over 4989.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2159, pruned_loss=0.03644, over 972361.85 frames.], batch size: 35, lr: 2.65e-04 +2022-05-06 03:01:18,186 INFO [train.py:715] (6/8) Epoch 8, batch 7500, loss[loss=0.1201, simple_loss=0.189, pruned_loss=0.02564, over 4984.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2161, pruned_loss=0.03603, over 972167.98 frames.], batch size: 16, lr: 2.65e-04 +2022-05-06 03:01:57,876 INFO [train.py:715] (6/8) Epoch 8, batch 7550, loss[loss=0.1487, simple_loss=0.2136, pruned_loss=0.04186, over 4853.00 frames.], tot_loss[loss=0.144, simple_loss=0.2159, pruned_loss=0.03611, over 972092.46 frames.], batch size: 20, lr: 2.65e-04 +2022-05-06 03:02:37,823 INFO [train.py:715] (6/8) Epoch 8, batch 7600, loss[loss=0.1752, simple_loss=0.2446, pruned_loss=0.05293, over 4741.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2164, pruned_loss=0.03656, over 971661.00 frames.], batch size: 16, lr: 2.65e-04 +2022-05-06 03:03:17,993 INFO [train.py:715] (6/8) Epoch 8, batch 7650, loss[loss=0.1484, simple_loss=0.2283, pruned_loss=0.03426, over 4757.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2161, pruned_loss=0.03573, over 972057.28 frames.], batch size: 19, lr: 2.65e-04 +2022-05-06 03:03:57,443 INFO [train.py:715] (6/8) Epoch 8, batch 7700, loss[loss=0.1706, simple_loss=0.2504, pruned_loss=0.04541, over 4762.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2162, pruned_loss=0.03561, over 972000.61 frames.], batch size: 14, lr: 2.65e-04 +2022-05-06 03:04:36,609 INFO [train.py:715] (6/8) Epoch 8, batch 7750, loss[loss=0.1584, simple_loss=0.2349, pruned_loss=0.04098, over 4975.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.03501, over 972052.67 frames.], batch size: 25, lr: 2.65e-04 +2022-05-06 03:05:16,819 INFO [train.py:715] (6/8) Epoch 8, batch 7800, loss[loss=0.1745, simple_loss=0.2457, pruned_loss=0.05166, over 4981.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.03532, over 970466.59 frames.], batch size: 35, lr: 2.65e-04 +2022-05-06 03:05:56,863 INFO [train.py:715] (6/8) Epoch 8, batch 7850, loss[loss=0.1133, simple_loss=0.1821, pruned_loss=0.02224, over 4865.00 frames.], tot_loss[loss=0.1424, simple_loss=0.215, pruned_loss=0.03484, over 971306.87 frames.], batch size: 20, lr: 2.65e-04 +2022-05-06 03:06:35,518 INFO [train.py:715] (6/8) Epoch 8, batch 7900, loss[loss=0.1027, simple_loss=0.1807, pruned_loss=0.01238, over 4928.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2159, pruned_loss=0.03555, over 971915.91 frames.], batch size: 29, lr: 2.65e-04 +2022-05-06 03:07:15,009 INFO [train.py:715] (6/8) Epoch 8, 
batch 7950, loss[loss=0.1446, simple_loss=0.2228, pruned_loss=0.03323, over 4940.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2151, pruned_loss=0.03487, over 971988.24 frames.], batch size: 23, lr: 2.65e-04 +2022-05-06 03:07:54,694 INFO [train.py:715] (6/8) Epoch 8, batch 8000, loss[loss=0.1323, simple_loss=0.1949, pruned_loss=0.0349, over 4819.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2152, pruned_loss=0.035, over 972153.52 frames.], batch size: 13, lr: 2.65e-04 +2022-05-06 03:08:33,648 INFO [train.py:715] (6/8) Epoch 8, batch 8050, loss[loss=0.1629, simple_loss=0.233, pruned_loss=0.04639, over 4966.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2146, pruned_loss=0.03492, over 972082.57 frames.], batch size: 35, lr: 2.65e-04 +2022-05-06 03:09:12,022 INFO [train.py:715] (6/8) Epoch 8, batch 8100, loss[loss=0.1104, simple_loss=0.1897, pruned_loss=0.01561, over 4887.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2149, pruned_loss=0.03505, over 972118.81 frames.], batch size: 22, lr: 2.65e-04 +2022-05-06 03:09:51,248 INFO [train.py:715] (6/8) Epoch 8, batch 8150, loss[loss=0.166, simple_loss=0.2422, pruned_loss=0.04493, over 4835.00 frames.], tot_loss[loss=0.1429, simple_loss=0.215, pruned_loss=0.03543, over 971491.65 frames.], batch size: 15, lr: 2.65e-04 +2022-05-06 03:10:31,281 INFO [train.py:715] (6/8) Epoch 8, batch 8200, loss[loss=0.141, simple_loss=0.2073, pruned_loss=0.0373, over 4835.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2156, pruned_loss=0.03581, over 970807.89 frames.], batch size: 30, lr: 2.65e-04 +2022-05-06 03:11:09,919 INFO [train.py:715] (6/8) Epoch 8, batch 8250, loss[loss=0.1365, simple_loss=0.2153, pruned_loss=0.02881, over 4813.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2165, pruned_loss=0.03643, over 970807.83 frames.], batch size: 27, lr: 2.65e-04 +2022-05-06 03:11:48,874 INFO [train.py:715] (6/8) Epoch 8, batch 8300, loss[loss=0.1663, simple_loss=0.2395, pruned_loss=0.04658, over 4914.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2172, pruned_loss=0.03649, over 971029.70 frames.], batch size: 39, lr: 2.65e-04 +2022-05-06 03:12:28,300 INFO [train.py:715] (6/8) Epoch 8, batch 8350, loss[loss=0.1385, simple_loss=0.2119, pruned_loss=0.03261, over 4984.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2171, pruned_loss=0.03659, over 971987.47 frames.], batch size: 28, lr: 2.65e-04 +2022-05-06 03:13:07,314 INFO [train.py:715] (6/8) Epoch 8, batch 8400, loss[loss=0.1327, simple_loss=0.2049, pruned_loss=0.03027, over 4896.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2168, pruned_loss=0.03646, over 972702.50 frames.], batch size: 19, lr: 2.65e-04 +2022-05-06 03:13:45,971 INFO [train.py:715] (6/8) Epoch 8, batch 8450, loss[loss=0.1246, simple_loss=0.1877, pruned_loss=0.03079, over 4850.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.03658, over 973631.51 frames.], batch size: 13, lr: 2.65e-04 +2022-05-06 03:14:25,534 INFO [train.py:715] (6/8) Epoch 8, batch 8500, loss[loss=0.1315, simple_loss=0.2018, pruned_loss=0.03062, over 4888.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2154, pruned_loss=0.03587, over 974039.32 frames.], batch size: 19, lr: 2.65e-04 +2022-05-06 03:15:05,500 INFO [train.py:715] (6/8) Epoch 8, batch 8550, loss[loss=0.1384, simple_loss=0.2163, pruned_loss=0.03025, over 4884.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2155, pruned_loss=0.03582, over 974012.13 frames.], batch size: 16, lr: 2.65e-04 +2022-05-06 03:15:44,165 INFO [train.py:715] (6/8) Epoch 8, batch 8600, loss[loss=0.1323, 
simple_loss=0.2048, pruned_loss=0.02991, over 4794.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2155, pruned_loss=0.0356, over 973763.22 frames.], batch size: 21, lr: 2.65e-04 +2022-05-06 03:16:23,285 INFO [train.py:715] (6/8) Epoch 8, batch 8650, loss[loss=0.138, simple_loss=0.2069, pruned_loss=0.03453, over 4981.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2149, pruned_loss=0.03537, over 973439.48 frames.], batch size: 31, lr: 2.65e-04 +2022-05-06 03:17:02,906 INFO [train.py:715] (6/8) Epoch 8, batch 8700, loss[loss=0.1237, simple_loss=0.2038, pruned_loss=0.0218, over 4863.00 frames.], tot_loss[loss=0.1424, simple_loss=0.215, pruned_loss=0.0349, over 973805.25 frames.], batch size: 38, lr: 2.65e-04 +2022-05-06 03:17:41,703 INFO [train.py:715] (6/8) Epoch 8, batch 8750, loss[loss=0.1712, simple_loss=0.2419, pruned_loss=0.05024, over 4972.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2152, pruned_loss=0.03525, over 974504.54 frames.], batch size: 15, lr: 2.65e-04 +2022-05-06 03:18:20,676 INFO [train.py:715] (6/8) Epoch 8, batch 8800, loss[loss=0.118, simple_loss=0.1972, pruned_loss=0.01938, over 4949.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2146, pruned_loss=0.03496, over 974255.47 frames.], batch size: 24, lr: 2.65e-04 +2022-05-06 03:19:00,220 INFO [train.py:715] (6/8) Epoch 8, batch 8850, loss[loss=0.1497, simple_loss=0.2229, pruned_loss=0.0383, over 4690.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2148, pruned_loss=0.03522, over 972952.98 frames.], batch size: 15, lr: 2.65e-04 +2022-05-06 03:19:39,730 INFO [train.py:715] (6/8) Epoch 8, batch 8900, loss[loss=0.139, simple_loss=0.2094, pruned_loss=0.03433, over 4969.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2154, pruned_loss=0.03544, over 972966.22 frames.], batch size: 15, lr: 2.65e-04 +2022-05-06 03:20:18,230 INFO [train.py:715] (6/8) Epoch 8, batch 8950, loss[loss=0.1323, simple_loss=0.2075, pruned_loss=0.02859, over 4900.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.03533, over 973162.98 frames.], batch size: 22, lr: 2.65e-04 +2022-05-06 03:20:57,339 INFO [train.py:715] (6/8) Epoch 8, batch 9000, loss[loss=0.1298, simple_loss=0.1963, pruned_loss=0.03167, over 4856.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2154, pruned_loss=0.03551, over 973448.12 frames.], batch size: 32, lr: 2.65e-04 +2022-05-06 03:20:57,339 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 03:21:06,881 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1075, simple_loss=0.1922, pruned_loss=0.01144, over 914524.00 frames. 
+2022-05-06 03:21:46,745 INFO [train.py:715] (6/8) Epoch 8, batch 9050, loss[loss=0.1523, simple_loss=0.2143, pruned_loss=0.04513, over 4983.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2149, pruned_loss=0.03524, over 973808.21 frames.], batch size: 31, lr: 2.65e-04 +2022-05-06 03:22:26,224 INFO [train.py:715] (6/8) Epoch 8, batch 9100, loss[loss=0.1443, simple_loss=0.2196, pruned_loss=0.03446, over 4974.00 frames.], tot_loss[loss=0.1439, simple_loss=0.216, pruned_loss=0.03593, over 974546.10 frames.], batch size: 14, lr: 2.65e-04 +2022-05-06 03:23:05,924 INFO [train.py:715] (6/8) Epoch 8, batch 9150, loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.0332, over 4885.00 frames.], tot_loss[loss=0.1434, simple_loss=0.216, pruned_loss=0.0354, over 974892.47 frames.], batch size: 17, lr: 2.64e-04 +2022-05-06 03:23:44,124 INFO [train.py:715] (6/8) Epoch 8, batch 9200, loss[loss=0.157, simple_loss=0.232, pruned_loss=0.04099, over 4662.00 frames.], tot_loss[loss=0.1434, simple_loss=0.216, pruned_loss=0.03539, over 974050.66 frames.], batch size: 13, lr: 2.64e-04 +2022-05-06 03:24:23,671 INFO [train.py:715] (6/8) Epoch 8, batch 9250, loss[loss=0.1463, simple_loss=0.2157, pruned_loss=0.03842, over 4984.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2156, pruned_loss=0.03537, over 974127.07 frames.], batch size: 14, lr: 2.64e-04 +2022-05-06 03:25:03,204 INFO [train.py:715] (6/8) Epoch 8, batch 9300, loss[loss=0.1265, simple_loss=0.1948, pruned_loss=0.02905, over 4841.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2162, pruned_loss=0.03563, over 973735.21 frames.], batch size: 13, lr: 2.64e-04 +2022-05-06 03:25:42,062 INFO [train.py:715] (6/8) Epoch 8, batch 9350, loss[loss=0.1279, simple_loss=0.1943, pruned_loss=0.03077, over 4874.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2155, pruned_loss=0.03562, over 973750.10 frames.], batch size: 22, lr: 2.64e-04 +2022-05-06 03:26:20,919 INFO [train.py:715] (6/8) Epoch 8, batch 9400, loss[loss=0.1137, simple_loss=0.1923, pruned_loss=0.01754, over 4885.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2149, pruned_loss=0.03543, over 972812.37 frames.], batch size: 22, lr: 2.64e-04 +2022-05-06 03:27:00,378 INFO [train.py:715] (6/8) Epoch 8, batch 9450, loss[loss=0.1283, simple_loss=0.2071, pruned_loss=0.02471, over 4734.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03548, over 972947.99 frames.], batch size: 12, lr: 2.64e-04 +2022-05-06 03:27:40,542 INFO [train.py:715] (6/8) Epoch 8, batch 9500, loss[loss=0.1558, simple_loss=0.2373, pruned_loss=0.03716, over 4934.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03528, over 973355.48 frames.], batch size: 21, lr: 2.64e-04 +2022-05-06 03:28:21,702 INFO [train.py:715] (6/8) Epoch 8, batch 9550, loss[loss=0.1401, simple_loss=0.2186, pruned_loss=0.03086, over 4933.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2163, pruned_loss=0.03578, over 973215.92 frames.], batch size: 35, lr: 2.64e-04 +2022-05-06 03:29:01,735 INFO [train.py:715] (6/8) Epoch 8, batch 9600, loss[loss=0.1344, simple_loss=0.2045, pruned_loss=0.03217, over 4963.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2163, pruned_loss=0.03562, over 972339.30 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:29:41,773 INFO [train.py:715] (6/8) Epoch 8, batch 9650, loss[loss=0.1625, simple_loss=0.2291, pruned_loss=0.04792, over 4940.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03598, over 972974.08 frames.], batch size: 35, lr: 2.64e-04 +2022-05-06 03:30:21,099 INFO 
[train.py:715] (6/8) Epoch 8, batch 9700, loss[loss=0.1461, simple_loss=0.1982, pruned_loss=0.04695, over 4727.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2164, pruned_loss=0.0361, over 972749.19 frames.], batch size: 16, lr: 2.64e-04 +2022-05-06 03:30:59,869 INFO [train.py:715] (6/8) Epoch 8, batch 9750, loss[loss=0.1439, simple_loss=0.2157, pruned_loss=0.03605, over 4917.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2156, pruned_loss=0.03573, over 971946.89 frames.], batch size: 18, lr: 2.64e-04 +2022-05-06 03:31:39,485 INFO [train.py:715] (6/8) Epoch 8, batch 9800, loss[loss=0.1429, simple_loss=0.2054, pruned_loss=0.0402, over 4772.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.036, over 971183.03 frames.], batch size: 14, lr: 2.64e-04 +2022-05-06 03:32:18,975 INFO [train.py:715] (6/8) Epoch 8, batch 9850, loss[loss=0.1721, simple_loss=0.2373, pruned_loss=0.05344, over 4836.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2168, pruned_loss=0.03639, over 971206.71 frames.], batch size: 20, lr: 2.64e-04 +2022-05-06 03:32:58,276 INFO [train.py:715] (6/8) Epoch 8, batch 9900, loss[loss=0.1259, simple_loss=0.2063, pruned_loss=0.02276, over 4962.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03665, over 971877.02 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:33:37,623 INFO [train.py:715] (6/8) Epoch 8, batch 9950, loss[loss=0.1281, simple_loss=0.2019, pruned_loss=0.02718, over 4926.00 frames.], tot_loss[loss=0.145, simple_loss=0.217, pruned_loss=0.03648, over 971765.71 frames.], batch size: 23, lr: 2.64e-04 +2022-05-06 03:34:17,534 INFO [train.py:715] (6/8) Epoch 8, batch 10000, loss[loss=0.1238, simple_loss=0.1893, pruned_loss=0.0291, over 4974.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2162, pruned_loss=0.03628, over 971646.64 frames.], batch size: 14, lr: 2.64e-04 +2022-05-06 03:34:56,515 INFO [train.py:715] (6/8) Epoch 8, batch 10050, loss[loss=0.1344, simple_loss=0.207, pruned_loss=0.03086, over 4852.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03619, over 971081.38 frames.], batch size: 30, lr: 2.64e-04 +2022-05-06 03:35:35,063 INFO [train.py:715] (6/8) Epoch 8, batch 10100, loss[loss=0.1512, simple_loss=0.2227, pruned_loss=0.03983, over 4830.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03588, over 971752.98 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:36:15,142 INFO [train.py:715] (6/8) Epoch 8, batch 10150, loss[loss=0.1351, simple_loss=0.2093, pruned_loss=0.03044, over 4914.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2159, pruned_loss=0.0362, over 972484.87 frames.], batch size: 23, lr: 2.64e-04 +2022-05-06 03:36:55,145 INFO [train.py:715] (6/8) Epoch 8, batch 10200, loss[loss=0.1775, simple_loss=0.2385, pruned_loss=0.05824, over 4752.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2156, pruned_loss=0.03575, over 972538.50 frames.], batch size: 16, lr: 2.64e-04 +2022-05-06 03:37:34,646 INFO [train.py:715] (6/8) Epoch 8, batch 10250, loss[loss=0.156, simple_loss=0.2304, pruned_loss=0.04074, over 4932.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03564, over 972614.16 frames.], batch size: 18, lr: 2.64e-04 +2022-05-06 03:38:14,432 INFO [train.py:715] (6/8) Epoch 8, batch 10300, loss[loss=0.1363, simple_loss=0.2125, pruned_loss=0.03006, over 4809.00 frames.], tot_loss[loss=0.144, simple_loss=0.2167, pruned_loss=0.03571, over 972378.08 frames.], batch size: 21, lr: 2.64e-04 +2022-05-06 03:38:53,949 INFO [train.py:715] (6/8) Epoch 8, batch 
10350, loss[loss=0.1597, simple_loss=0.2371, pruned_loss=0.04121, over 4934.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2166, pruned_loss=0.03577, over 972342.50 frames.], batch size: 39, lr: 2.64e-04 +2022-05-06 03:39:32,639 INFO [train.py:715] (6/8) Epoch 8, batch 10400, loss[loss=0.1229, simple_loss=0.198, pruned_loss=0.02391, over 4818.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2159, pruned_loss=0.03543, over 972053.27 frames.], batch size: 27, lr: 2.64e-04 +2022-05-06 03:40:12,242 INFO [train.py:715] (6/8) Epoch 8, batch 10450, loss[loss=0.1287, simple_loss=0.201, pruned_loss=0.02819, over 4845.00 frames.], tot_loss[loss=0.143, simple_loss=0.2155, pruned_loss=0.03521, over 972213.75 frames.], batch size: 30, lr: 2.64e-04 +2022-05-06 03:40:51,307 INFO [train.py:715] (6/8) Epoch 8, batch 10500, loss[loss=0.1543, simple_loss=0.2167, pruned_loss=0.04595, over 4822.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.03544, over 972334.57 frames.], batch size: 26, lr: 2.64e-04 +2022-05-06 03:41:30,156 INFO [train.py:715] (6/8) Epoch 8, batch 10550, loss[loss=0.1443, simple_loss=0.2283, pruned_loss=0.03014, over 4879.00 frames.], tot_loss[loss=0.143, simple_loss=0.2156, pruned_loss=0.03525, over 971705.27 frames.], batch size: 22, lr: 2.64e-04 +2022-05-06 03:42:08,772 INFO [train.py:715] (6/8) Epoch 8, batch 10600, loss[loss=0.1329, simple_loss=0.2055, pruned_loss=0.03017, over 4941.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2154, pruned_loss=0.03474, over 972050.48 frames.], batch size: 24, lr: 2.64e-04 +2022-05-06 03:42:48,075 INFO [train.py:715] (6/8) Epoch 8, batch 10650, loss[loss=0.1612, simple_loss=0.2337, pruned_loss=0.04435, over 4802.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2159, pruned_loss=0.03497, over 972303.51 frames.], batch size: 21, lr: 2.64e-04 +2022-05-06 03:43:27,263 INFO [train.py:715] (6/8) Epoch 8, batch 10700, loss[loss=0.1496, simple_loss=0.2198, pruned_loss=0.03965, over 4925.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.03499, over 972063.03 frames.], batch size: 18, lr: 2.64e-04 +2022-05-06 03:44:06,355 INFO [train.py:715] (6/8) Epoch 8, batch 10750, loss[loss=0.135, simple_loss=0.1971, pruned_loss=0.03648, over 4859.00 frames.], tot_loss[loss=0.1435, simple_loss=0.216, pruned_loss=0.03553, over 972865.94 frames.], batch size: 12, lr: 2.64e-04 +2022-05-06 03:44:46,294 INFO [train.py:715] (6/8) Epoch 8, batch 10800, loss[loss=0.1222, simple_loss=0.1824, pruned_loss=0.03099, over 4833.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03579, over 972572.16 frames.], batch size: 12, lr: 2.64e-04 +2022-05-06 03:45:26,106 INFO [train.py:715] (6/8) Epoch 8, batch 10850, loss[loss=0.1368, simple_loss=0.1987, pruned_loss=0.03745, over 4781.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2143, pruned_loss=0.03522, over 971994.74 frames.], batch size: 17, lr: 2.64e-04 +2022-05-06 03:46:05,371 INFO [train.py:715] (6/8) Epoch 8, batch 10900, loss[loss=0.1842, simple_loss=0.2549, pruned_loss=0.05674, over 4978.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2142, pruned_loss=0.03521, over 972253.08 frames.], batch size: 39, lr: 2.64e-04 +2022-05-06 03:46:44,375 INFO [train.py:715] (6/8) Epoch 8, batch 10950, loss[loss=0.1556, simple_loss=0.2262, pruned_loss=0.0425, over 4961.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2139, pruned_loss=0.03532, over 972649.06 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:47:24,378 INFO [train.py:715] (6/8) Epoch 8, batch 11000, loss[loss=0.1169, 
simple_loss=0.1895, pruned_loss=0.02221, over 4832.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2142, pruned_loss=0.03507, over 972266.82 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:48:03,913 INFO [train.py:715] (6/8) Epoch 8, batch 11050, loss[loss=0.1267, simple_loss=0.1997, pruned_loss=0.02684, over 4796.00 frames.], tot_loss[loss=0.142, simple_loss=0.2146, pruned_loss=0.03475, over 972469.05 frames.], batch size: 21, lr: 2.64e-04 +2022-05-06 03:48:42,672 INFO [train.py:715] (6/8) Epoch 8, batch 11100, loss[loss=0.1875, simple_loss=0.2464, pruned_loss=0.06432, over 4836.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2148, pruned_loss=0.03549, over 973011.88 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:49:22,146 INFO [train.py:715] (6/8) Epoch 8, batch 11150, loss[loss=0.146, simple_loss=0.2161, pruned_loss=0.03792, over 4924.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2144, pruned_loss=0.03519, over 972694.06 frames.], batch size: 29, lr: 2.64e-04 +2022-05-06 03:50:01,943 INFO [train.py:715] (6/8) Epoch 8, batch 11200, loss[loss=0.1163, simple_loss=0.2008, pruned_loss=0.01585, over 4882.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2149, pruned_loss=0.03544, over 972137.47 frames.], batch size: 16, lr: 2.64e-04 +2022-05-06 03:50:40,570 INFO [train.py:715] (6/8) Epoch 8, batch 11250, loss[loss=0.1141, simple_loss=0.1985, pruned_loss=0.01489, over 4929.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2156, pruned_loss=0.03562, over 972621.06 frames.], batch size: 23, lr: 2.64e-04 +2022-05-06 03:51:19,595 INFO [train.py:715] (6/8) Epoch 8, batch 11300, loss[loss=0.1371, simple_loss=0.2053, pruned_loss=0.03447, over 4788.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2154, pruned_loss=0.03571, over 972833.40 frames.], batch size: 17, lr: 2.64e-04 +2022-05-06 03:51:58,926 INFO [train.py:715] (6/8) Epoch 8, batch 11350, loss[loss=0.1853, simple_loss=0.265, pruned_loss=0.05286, over 4781.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2154, pruned_loss=0.0359, over 973453.50 frames.], batch size: 17, lr: 2.63e-04 +2022-05-06 03:52:37,407 INFO [train.py:715] (6/8) Epoch 8, batch 11400, loss[loss=0.1485, simple_loss=0.2209, pruned_loss=0.03805, over 4987.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2165, pruned_loss=0.03627, over 973532.67 frames.], batch size: 14, lr: 2.63e-04 +2022-05-06 03:53:16,049 INFO [train.py:715] (6/8) Epoch 8, batch 11450, loss[loss=0.1371, simple_loss=0.2199, pruned_loss=0.02719, over 4985.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2167, pruned_loss=0.0359, over 972385.19 frames.], batch size: 15, lr: 2.63e-04 +2022-05-06 03:53:55,353 INFO [train.py:715] (6/8) Epoch 8, batch 11500, loss[loss=0.1225, simple_loss=0.1973, pruned_loss=0.02388, over 4948.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2164, pruned_loss=0.03592, over 972501.70 frames.], batch size: 23, lr: 2.63e-04 +2022-05-06 03:54:34,457 INFO [train.py:715] (6/8) Epoch 8, batch 11550, loss[loss=0.1261, simple_loss=0.1884, pruned_loss=0.03189, over 4782.00 frames.], tot_loss[loss=0.144, simple_loss=0.2162, pruned_loss=0.03595, over 972052.28 frames.], batch size: 17, lr: 2.63e-04 +2022-05-06 03:55:13,513 INFO [train.py:715] (6/8) Epoch 8, batch 11600, loss[loss=0.1464, simple_loss=0.2151, pruned_loss=0.03882, over 4874.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03568, over 972684.65 frames.], batch size: 32, lr: 2.63e-04 +2022-05-06 03:55:53,446 INFO [train.py:715] (6/8) Epoch 8, batch 11650, loss[loss=0.1752, simple_loss=0.25, 
pruned_loss=0.05016, over 4949.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03534, over 972309.96 frames.], batch size: 21, lr: 2.63e-04 +2022-05-06 03:56:33,836 INFO [train.py:715] (6/8) Epoch 8, batch 11700, loss[loss=0.1152, simple_loss=0.1815, pruned_loss=0.02448, over 4815.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2161, pruned_loss=0.03576, over 972542.63 frames.], batch size: 12, lr: 2.63e-04 +2022-05-06 03:57:13,266 INFO [train.py:715] (6/8) Epoch 8, batch 11750, loss[loss=0.1446, simple_loss=0.2174, pruned_loss=0.03588, over 4898.00 frames.], tot_loss[loss=0.144, simple_loss=0.2161, pruned_loss=0.03597, over 972462.15 frames.], batch size: 19, lr: 2.63e-04 +2022-05-06 03:57:52,305 INFO [train.py:715] (6/8) Epoch 8, batch 11800, loss[loss=0.1514, simple_loss=0.2207, pruned_loss=0.04104, over 4932.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2171, pruned_loss=0.03686, over 972780.40 frames.], batch size: 21, lr: 2.63e-04 +2022-05-06 03:58:32,049 INFO [train.py:715] (6/8) Epoch 8, batch 11850, loss[loss=0.1334, simple_loss=0.2081, pruned_loss=0.02938, over 4853.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2172, pruned_loss=0.03696, over 972995.77 frames.], batch size: 15, lr: 2.63e-04 +2022-05-06 03:59:11,744 INFO [train.py:715] (6/8) Epoch 8, batch 11900, loss[loss=0.1517, simple_loss=0.2241, pruned_loss=0.03964, over 4916.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2166, pruned_loss=0.0363, over 972762.85 frames.], batch size: 17, lr: 2.63e-04 +2022-05-06 03:59:51,346 INFO [train.py:715] (6/8) Epoch 8, batch 11950, loss[loss=0.1416, simple_loss=0.2183, pruned_loss=0.0324, over 4811.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03623, over 972311.91 frames.], batch size: 21, lr: 2.63e-04 +2022-05-06 04:00:30,528 INFO [train.py:715] (6/8) Epoch 8, batch 12000, loss[loss=0.1074, simple_loss=0.164, pruned_loss=0.02543, over 4727.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2156, pruned_loss=0.03594, over 972720.40 frames.], batch size: 12, lr: 2.63e-04 +2022-05-06 04:00:30,529 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 04:00:40,091 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1076, simple_loss=0.1923, pruned_loss=0.0115, over 914524.00 frames. 
+2022-05-06 04:01:19,841 INFO [train.py:715] (6/8) Epoch 8, batch 12050, loss[loss=0.1472, simple_loss=0.2261, pruned_loss=0.03412, over 4919.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2163, pruned_loss=0.03616, over 972441.01 frames.], batch size: 18, lr: 2.63e-04 +2022-05-06 04:01:59,450 INFO [train.py:715] (6/8) Epoch 8, batch 12100, loss[loss=0.1664, simple_loss=0.2402, pruned_loss=0.0463, over 4931.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2169, pruned_loss=0.03664, over 973716.98 frames.], batch size: 39, lr: 2.63e-04 +2022-05-06 04:02:38,522 INFO [train.py:715] (6/8) Epoch 8, batch 12150, loss[loss=0.1344, simple_loss=0.2119, pruned_loss=0.0284, over 4986.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2162, pruned_loss=0.03633, over 972938.37 frames.], batch size: 28, lr: 2.63e-04 +2022-05-06 04:03:17,593 INFO [train.py:715] (6/8) Epoch 8, batch 12200, loss[loss=0.1363, simple_loss=0.2179, pruned_loss=0.02736, over 4932.00 frames.], tot_loss[loss=0.1454, simple_loss=0.217, pruned_loss=0.03691, over 973109.34 frames.], batch size: 29, lr: 2.63e-04 +2022-05-06 04:03:57,162 INFO [train.py:715] (6/8) Epoch 8, batch 12250, loss[loss=0.1529, simple_loss=0.2255, pruned_loss=0.04019, over 4871.00 frames.], tot_loss[loss=0.145, simple_loss=0.2169, pruned_loss=0.0365, over 972879.03 frames.], batch size: 13, lr: 2.63e-04 +2022-05-06 04:04:36,395 INFO [train.py:715] (6/8) Epoch 8, batch 12300, loss[loss=0.1323, simple_loss=0.2073, pruned_loss=0.0287, over 4774.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2173, pruned_loss=0.03651, over 972393.70 frames.], batch size: 18, lr: 2.63e-04 +2022-05-06 04:05:15,239 INFO [train.py:715] (6/8) Epoch 8, batch 12350, loss[loss=0.1396, simple_loss=0.2161, pruned_loss=0.03153, over 4866.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03653, over 971839.36 frames.], batch size: 16, lr: 2.63e-04 +2022-05-06 04:05:54,661 INFO [train.py:715] (6/8) Epoch 8, batch 12400, loss[loss=0.1437, simple_loss=0.2134, pruned_loss=0.03695, over 4805.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2176, pruned_loss=0.03663, over 971949.79 frames.], batch size: 21, lr: 2.63e-04 +2022-05-06 04:06:34,255 INFO [train.py:715] (6/8) Epoch 8, batch 12450, loss[loss=0.1592, simple_loss=0.2236, pruned_loss=0.04736, over 4882.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.03685, over 971886.71 frames.], batch size: 16, lr: 2.63e-04 +2022-05-06 04:07:13,260 INFO [train.py:715] (6/8) Epoch 8, batch 12500, loss[loss=0.1608, simple_loss=0.2319, pruned_loss=0.04487, over 4745.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2167, pruned_loss=0.03694, over 971906.30 frames.], batch size: 19, lr: 2.63e-04 +2022-05-06 04:07:52,129 INFO [train.py:715] (6/8) Epoch 8, batch 12550, loss[loss=0.1362, simple_loss=0.2143, pruned_loss=0.02907, over 4981.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2167, pruned_loss=0.03676, over 971986.45 frames.], batch size: 15, lr: 2.63e-04 +2022-05-06 04:08:31,833 INFO [train.py:715] (6/8) Epoch 8, batch 12600, loss[loss=0.1291, simple_loss=0.1986, pruned_loss=0.02976, over 4981.00 frames.], tot_loss[loss=0.1454, simple_loss=0.217, pruned_loss=0.03693, over 972108.86 frames.], batch size: 28, lr: 2.63e-04 +2022-05-06 04:09:10,879 INFO [train.py:715] (6/8) Epoch 8, batch 12650, loss[loss=0.1417, simple_loss=0.2137, pruned_loss=0.03491, over 4781.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2158, pruned_loss=0.03647, over 971974.35 frames.], batch size: 18, lr: 2.63e-04 +2022-05-06 04:09:50,736 
INFO [train.py:715] (6/8) Epoch 8, batch 12700, loss[loss=0.1405, simple_loss=0.2088, pruned_loss=0.03604, over 4980.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2153, pruned_loss=0.03562, over 971824.87 frames.], batch size: 31, lr: 2.63e-04 +2022-05-06 04:10:30,127 INFO [train.py:715] (6/8) Epoch 8, batch 12750, loss[loss=0.1827, simple_loss=0.2476, pruned_loss=0.05893, over 4975.00 frames.], tot_loss[loss=0.143, simple_loss=0.2149, pruned_loss=0.03555, over 972423.88 frames.], batch size: 25, lr: 2.63e-04 +2022-05-06 04:11:10,324 INFO [train.py:715] (6/8) Epoch 8, batch 12800, loss[loss=0.1512, simple_loss=0.2304, pruned_loss=0.03603, over 4969.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2151, pruned_loss=0.03559, over 972198.72 frames.], batch size: 35, lr: 2.63e-04 +2022-05-06 04:11:48,983 INFO [train.py:715] (6/8) Epoch 8, batch 12850, loss[loss=0.1427, simple_loss=0.2197, pruned_loss=0.0329, over 4896.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03553, over 972870.59 frames.], batch size: 22, lr: 2.63e-04 +2022-05-06 04:12:28,015 INFO [train.py:715] (6/8) Epoch 8, batch 12900, loss[loss=0.1294, simple_loss=0.2062, pruned_loss=0.02627, over 4817.00 frames.], tot_loss[loss=0.144, simple_loss=0.2158, pruned_loss=0.03608, over 971187.20 frames.], batch size: 26, lr: 2.63e-04 +2022-05-06 04:13:07,527 INFO [train.py:715] (6/8) Epoch 8, batch 12950, loss[loss=0.1262, simple_loss=0.2018, pruned_loss=0.02528, over 4815.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2151, pruned_loss=0.036, over 971126.99 frames.], batch size: 13, lr: 2.63e-04 +2022-05-06 04:13:46,914 INFO [train.py:715] (6/8) Epoch 8, batch 13000, loss[loss=0.1375, simple_loss=0.2101, pruned_loss=0.03247, over 4981.00 frames.], tot_loss[loss=0.144, simple_loss=0.2152, pruned_loss=0.03638, over 972095.65 frames.], batch size: 25, lr: 2.63e-04 +2022-05-06 04:14:26,216 INFO [train.py:715] (6/8) Epoch 8, batch 13050, loss[loss=0.1916, simple_loss=0.2451, pruned_loss=0.06901, over 4769.00 frames.], tot_loss[loss=0.1444, simple_loss=0.216, pruned_loss=0.03641, over 972854.26 frames.], batch size: 14, lr: 2.63e-04 +2022-05-06 04:15:05,642 INFO [train.py:715] (6/8) Epoch 8, batch 13100, loss[loss=0.1298, simple_loss=0.2044, pruned_loss=0.02756, over 4743.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2164, pruned_loss=0.03659, over 972922.71 frames.], batch size: 12, lr: 2.63e-04 +2022-05-06 04:15:45,373 INFO [train.py:715] (6/8) Epoch 8, batch 13150, loss[loss=0.1436, simple_loss=0.2212, pruned_loss=0.03293, over 4756.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2165, pruned_loss=0.03652, over 972784.58 frames.], batch size: 19, lr: 2.63e-04 +2022-05-06 04:16:24,330 INFO [train.py:715] (6/8) Epoch 8, batch 13200, loss[loss=0.1569, simple_loss=0.2275, pruned_loss=0.04314, over 4843.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.03683, over 973447.24 frames.], batch size: 30, lr: 2.63e-04 +2022-05-06 04:17:03,719 INFO [train.py:715] (6/8) Epoch 8, batch 13250, loss[loss=0.1332, simple_loss=0.2102, pruned_loss=0.02804, over 4849.00 frames.], tot_loss[loss=0.145, simple_loss=0.2168, pruned_loss=0.03661, over 973371.72 frames.], batch size: 20, lr: 2.63e-04 +2022-05-06 04:17:43,336 INFO [train.py:715] (6/8) Epoch 8, batch 13300, loss[loss=0.1299, simple_loss=0.2042, pruned_loss=0.02775, over 4784.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2167, pruned_loss=0.03658, over 973166.90 frames.], batch size: 18, lr: 2.63e-04 +2022-05-06 04:18:22,358 INFO [train.py:715] (6/8) 
Epoch 8, batch 13350, loss[loss=0.135, simple_loss=0.2079, pruned_loss=0.03109, over 4861.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.03621, over 973113.00 frames.], batch size: 20, lr: 2.63e-04 +2022-05-06 04:19:01,003 INFO [train.py:715] (6/8) Epoch 8, batch 13400, loss[loss=0.1429, simple_loss=0.2395, pruned_loss=0.02317, over 4854.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2165, pruned_loss=0.03588, over 972572.98 frames.], batch size: 20, lr: 2.63e-04 +2022-05-06 04:19:39,802 INFO [train.py:715] (6/8) Epoch 8, batch 13450, loss[loss=0.1943, simple_loss=0.2639, pruned_loss=0.0623, over 4813.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2168, pruned_loss=0.03613, over 971729.78 frames.], batch size: 15, lr: 2.63e-04 +2022-05-06 04:20:19,855 INFO [train.py:715] (6/8) Epoch 8, batch 13500, loss[loss=0.1381, simple_loss=0.2065, pruned_loss=0.03482, over 4838.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2177, pruned_loss=0.03645, over 972137.34 frames.], batch size: 15, lr: 2.63e-04 +2022-05-06 04:20:58,646 INFO [train.py:715] (6/8) Epoch 8, batch 13550, loss[loss=0.1542, simple_loss=0.2234, pruned_loss=0.04248, over 4964.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2164, pruned_loss=0.03571, over 972605.82 frames.], batch size: 35, lr: 2.62e-04 +2022-05-06 04:21:37,838 INFO [train.py:715] (6/8) Epoch 8, batch 13600, loss[loss=0.1435, simple_loss=0.2194, pruned_loss=0.03384, over 4916.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2158, pruned_loss=0.0352, over 972292.74 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:22:16,978 INFO [train.py:715] (6/8) Epoch 8, batch 13650, loss[loss=0.1296, simple_loss=0.2016, pruned_loss=0.02882, over 4745.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2161, pruned_loss=0.0356, over 972555.52 frames.], batch size: 12, lr: 2.62e-04 +2022-05-06 04:22:56,146 INFO [train.py:715] (6/8) Epoch 8, batch 13700, loss[loss=0.1316, simple_loss=0.2031, pruned_loss=0.03007, over 4893.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03509, over 972488.39 frames.], batch size: 17, lr: 2.62e-04 +2022-05-06 04:23:34,770 INFO [train.py:715] (6/8) Epoch 8, batch 13750, loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03331, over 4813.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2148, pruned_loss=0.03502, over 972645.51 frames.], batch size: 25, lr: 2.62e-04 +2022-05-06 04:24:13,491 INFO [train.py:715] (6/8) Epoch 8, batch 13800, loss[loss=0.1306, simple_loss=0.2077, pruned_loss=0.02678, over 4926.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2157, pruned_loss=0.036, over 972770.09 frames.], batch size: 23, lr: 2.62e-04 +2022-05-06 04:24:52,948 INFO [train.py:715] (6/8) Epoch 8, batch 13850, loss[loss=0.144, simple_loss=0.2176, pruned_loss=0.03521, over 4785.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2146, pruned_loss=0.03547, over 972130.21 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:25:31,241 INFO [train.py:715] (6/8) Epoch 8, batch 13900, loss[loss=0.143, simple_loss=0.2189, pruned_loss=0.03356, over 4853.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2143, pruned_loss=0.03531, over 972153.56 frames.], batch size: 20, lr: 2.62e-04 +2022-05-06 04:26:10,331 INFO [train.py:715] (6/8) Epoch 8, batch 13950, loss[loss=0.1378, simple_loss=0.2163, pruned_loss=0.02967, over 4944.00 frames.], tot_loss[loss=0.1428, simple_loss=0.215, pruned_loss=0.03527, over 971696.58 frames.], batch size: 29, lr: 2.62e-04 +2022-05-06 04:26:49,433 INFO [train.py:715] (6/8) Epoch 8, batch 14000, 
loss[loss=0.1772, simple_loss=0.2345, pruned_loss=0.05999, over 4962.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2156, pruned_loss=0.03562, over 972608.30 frames.], batch size: 24, lr: 2.62e-04 +2022-05-06 04:27:28,485 INFO [train.py:715] (6/8) Epoch 8, batch 14050, loss[loss=0.1505, simple_loss=0.2254, pruned_loss=0.0378, over 4702.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.03599, over 972809.57 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:28:06,678 INFO [train.py:715] (6/8) Epoch 8, batch 14100, loss[loss=0.1428, simple_loss=0.2159, pruned_loss=0.03481, over 4863.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2158, pruned_loss=0.03555, over 972620.80 frames.], batch size: 32, lr: 2.62e-04 +2022-05-06 04:28:45,332 INFO [train.py:715] (6/8) Epoch 8, batch 14150, loss[loss=0.1507, simple_loss=0.221, pruned_loss=0.0402, over 4864.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2152, pruned_loss=0.03559, over 972441.97 frames.], batch size: 32, lr: 2.62e-04 +2022-05-06 04:29:25,591 INFO [train.py:715] (6/8) Epoch 8, batch 14200, loss[loss=0.1452, simple_loss=0.212, pruned_loss=0.03926, over 4768.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2153, pruned_loss=0.03608, over 973085.14 frames.], batch size: 12, lr: 2.62e-04 +2022-05-06 04:30:04,163 INFO [train.py:715] (6/8) Epoch 8, batch 14250, loss[loss=0.1258, simple_loss=0.1981, pruned_loss=0.02677, over 4960.00 frames.], tot_loss[loss=0.144, simple_loss=0.2154, pruned_loss=0.03632, over 972622.61 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:30:44,070 INFO [train.py:715] (6/8) Epoch 8, batch 14300, loss[loss=0.1456, simple_loss=0.22, pruned_loss=0.03559, over 4911.00 frames.], tot_loss[loss=0.1435, simple_loss=0.215, pruned_loss=0.03605, over 972455.95 frames.], batch size: 39, lr: 2.62e-04 +2022-05-06 04:31:23,534 INFO [train.py:715] (6/8) Epoch 8, batch 14350, loss[loss=0.166, simple_loss=0.243, pruned_loss=0.04452, over 4853.00 frames.], tot_loss[loss=0.1437, simple_loss=0.215, pruned_loss=0.03622, over 973273.86 frames.], batch size: 32, lr: 2.62e-04 +2022-05-06 04:32:02,824 INFO [train.py:715] (6/8) Epoch 8, batch 14400, loss[loss=0.12, simple_loss=0.1876, pruned_loss=0.02617, over 4834.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2155, pruned_loss=0.0361, over 972746.67 frames.], batch size: 13, lr: 2.62e-04 +2022-05-06 04:32:41,519 INFO [train.py:715] (6/8) Epoch 8, batch 14450, loss[loss=0.2047, simple_loss=0.2773, pruned_loss=0.06607, over 4770.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2161, pruned_loss=0.03651, over 972270.83 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:33:20,777 INFO [train.py:715] (6/8) Epoch 8, batch 14500, loss[loss=0.1237, simple_loss=0.1974, pruned_loss=0.02503, over 4986.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2152, pruned_loss=0.03608, over 972142.62 frames.], batch size: 28, lr: 2.62e-04 +2022-05-06 04:34:00,258 INFO [train.py:715] (6/8) Epoch 8, batch 14550, loss[loss=0.1342, simple_loss=0.2094, pruned_loss=0.02951, over 4743.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2152, pruned_loss=0.03596, over 971812.30 frames.], batch size: 16, lr: 2.62e-04 +2022-05-06 04:34:38,292 INFO [train.py:715] (6/8) Epoch 8, batch 14600, loss[loss=0.1396, simple_loss=0.2055, pruned_loss=0.03682, over 4840.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2144, pruned_loss=0.0353, over 971720.59 frames.], batch size: 26, lr: 2.62e-04 +2022-05-06 04:35:17,878 INFO [train.py:715] (6/8) Epoch 8, batch 14650, loss[loss=0.1198, 
simple_loss=0.1901, pruned_loss=0.02478, over 4767.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03552, over 971906.41 frames.], batch size: 12, lr: 2.62e-04 +2022-05-06 04:35:57,139 INFO [train.py:715] (6/8) Epoch 8, batch 14700, loss[loss=0.1611, simple_loss=0.2345, pruned_loss=0.04384, over 4929.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2155, pruned_loss=0.03554, over 972234.69 frames.], batch size: 39, lr: 2.62e-04 +2022-05-06 04:36:35,958 INFO [train.py:715] (6/8) Epoch 8, batch 14750, loss[loss=0.1621, simple_loss=0.2251, pruned_loss=0.04951, over 4979.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2156, pruned_loss=0.03543, over 971437.77 frames.], batch size: 25, lr: 2.62e-04 +2022-05-06 04:37:14,355 INFO [train.py:715] (6/8) Epoch 8, batch 14800, loss[loss=0.1287, simple_loss=0.2118, pruned_loss=0.02283, over 4878.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2159, pruned_loss=0.03539, over 971746.00 frames.], batch size: 22, lr: 2.62e-04 +2022-05-06 04:37:54,168 INFO [train.py:715] (6/8) Epoch 8, batch 14850, loss[loss=0.1444, simple_loss=0.2249, pruned_loss=0.03191, over 4933.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.036, over 970925.84 frames.], batch size: 23, lr: 2.62e-04 +2022-05-06 04:38:33,089 INFO [train.py:715] (6/8) Epoch 8, batch 14900, loss[loss=0.1461, simple_loss=0.2198, pruned_loss=0.03617, over 4826.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2168, pruned_loss=0.03617, over 971527.28 frames.], batch size: 26, lr: 2.62e-04 +2022-05-06 04:39:11,872 INFO [train.py:715] (6/8) Epoch 8, batch 14950, loss[loss=0.1319, simple_loss=0.2041, pruned_loss=0.0298, over 4733.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.0356, over 971293.96 frames.], batch size: 16, lr: 2.62e-04 +2022-05-06 04:39:51,071 INFO [train.py:715] (6/8) Epoch 8, batch 15000, loss[loss=0.1284, simple_loss=0.192, pruned_loss=0.03236, over 4797.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2151, pruned_loss=0.03519, over 971170.95 frames.], batch size: 14, lr: 2.62e-04 +2022-05-06 04:39:51,071 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 04:40:00,792 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1076, simple_loss=0.1921, pruned_loss=0.01153, over 914524.00 frames. 
+2022-05-06 04:40:40,557 INFO [train.py:715] (6/8) Epoch 8, batch 15050, loss[loss=0.1475, simple_loss=0.2256, pruned_loss=0.03468, over 4957.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2158, pruned_loss=0.0352, over 971868.08 frames.], batch size: 35, lr: 2.62e-04 +2022-05-06 04:41:19,878 INFO [train.py:715] (6/8) Epoch 8, batch 15100, loss[loss=0.127, simple_loss=0.205, pruned_loss=0.02445, over 4986.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2161, pruned_loss=0.03523, over 971808.46 frames.], batch size: 25, lr: 2.62e-04 +2022-05-06 04:41:59,432 INFO [train.py:715] (6/8) Epoch 8, batch 15150, loss[loss=0.138, simple_loss=0.2099, pruned_loss=0.03308, over 4972.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03528, over 971751.41 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:42:38,854 INFO [train.py:715] (6/8) Epoch 8, batch 15200, loss[loss=0.1466, simple_loss=0.2233, pruned_loss=0.03502, over 4924.00 frames.], tot_loss[loss=0.144, simple_loss=0.2166, pruned_loss=0.03572, over 972784.96 frames.], batch size: 23, lr: 2.62e-04 +2022-05-06 04:43:18,579 INFO [train.py:715] (6/8) Epoch 8, batch 15250, loss[loss=0.1334, simple_loss=0.2045, pruned_loss=0.03114, over 4971.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2158, pruned_loss=0.03556, over 972901.32 frames.], batch size: 24, lr: 2.62e-04 +2022-05-06 04:43:58,548 INFO [train.py:715] (6/8) Epoch 8, batch 15300, loss[loss=0.1336, simple_loss=0.216, pruned_loss=0.02554, over 4930.00 frames.], tot_loss[loss=0.144, simple_loss=0.2164, pruned_loss=0.03581, over 972303.27 frames.], batch size: 29, lr: 2.62e-04 +2022-05-06 04:44:37,105 INFO [train.py:715] (6/8) Epoch 8, batch 15350, loss[loss=0.1306, simple_loss=0.2127, pruned_loss=0.02428, over 4889.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2165, pruned_loss=0.03526, over 971494.34 frames.], batch size: 19, lr: 2.62e-04 +2022-05-06 04:45:16,996 INFO [train.py:715] (6/8) Epoch 8, batch 15400, loss[loss=0.1414, simple_loss=0.2213, pruned_loss=0.03079, over 4833.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2172, pruned_loss=0.03582, over 971385.40 frames.], batch size: 26, lr: 2.62e-04 +2022-05-06 04:45:55,984 INFO [train.py:715] (6/8) Epoch 8, batch 15450, loss[loss=0.1531, simple_loss=0.232, pruned_loss=0.03714, over 4981.00 frames.], tot_loss[loss=0.143, simple_loss=0.2155, pruned_loss=0.03527, over 971301.93 frames.], batch size: 25, lr: 2.62e-04 +2022-05-06 04:46:34,944 INFO [train.py:715] (6/8) Epoch 8, batch 15500, loss[loss=0.1296, simple_loss=0.1997, pruned_loss=0.02975, over 4950.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03567, over 971450.27 frames.], batch size: 21, lr: 2.62e-04 +2022-05-06 04:47:13,677 INFO [train.py:715] (6/8) Epoch 8, batch 15550, loss[loss=0.1176, simple_loss=0.1921, pruned_loss=0.02154, over 4930.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03546, over 971453.68 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:47:52,417 INFO [train.py:715] (6/8) Epoch 8, batch 15600, loss[loss=0.1437, simple_loss=0.2234, pruned_loss=0.03201, over 4931.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03567, over 971855.18 frames.], batch size: 29, lr: 2.62e-04 +2022-05-06 04:48:32,584 INFO [train.py:715] (6/8) Epoch 8, batch 15650, loss[loss=0.1487, simple_loss=0.2266, pruned_loss=0.03538, over 4782.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03621, over 971650.01 frames.], batch size: 17, lr: 2.62e-04 +2022-05-06 04:49:11,090 
INFO [train.py:715] (6/8) Epoch 8, batch 15700, loss[loss=0.1602, simple_loss=0.2366, pruned_loss=0.0419, over 4863.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.03604, over 971263.43 frames.], batch size: 20, lr: 2.62e-04 +2022-05-06 04:49:50,911 INFO [train.py:715] (6/8) Epoch 8, batch 15750, loss[loss=0.1447, simple_loss=0.2157, pruned_loss=0.03688, over 4885.00 frames.], tot_loss[loss=0.1431, simple_loss=0.215, pruned_loss=0.03563, over 971563.23 frames.], batch size: 22, lr: 2.62e-04 +2022-05-06 04:50:30,391 INFO [train.py:715] (6/8) Epoch 8, batch 15800, loss[loss=0.1518, simple_loss=0.228, pruned_loss=0.03783, over 4907.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2155, pruned_loss=0.03571, over 972630.69 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 04:51:09,453 INFO [train.py:715] (6/8) Epoch 8, batch 15850, loss[loss=0.1525, simple_loss=0.2311, pruned_loss=0.03694, over 4868.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2153, pruned_loss=0.03594, over 972914.12 frames.], batch size: 20, lr: 2.61e-04 +2022-05-06 04:51:48,557 INFO [train.py:715] (6/8) Epoch 8, batch 15900, loss[loss=0.1398, simple_loss=0.2107, pruned_loss=0.03445, over 4917.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2155, pruned_loss=0.03582, over 972654.05 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 04:52:27,776 INFO [train.py:715] (6/8) Epoch 8, batch 15950, loss[loss=0.111, simple_loss=0.1805, pruned_loss=0.02073, over 4907.00 frames.], tot_loss[loss=0.1434, simple_loss=0.215, pruned_loss=0.03584, over 972648.98 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 04:53:07,057 INFO [train.py:715] (6/8) Epoch 8, batch 16000, loss[loss=0.1349, simple_loss=0.1969, pruned_loss=0.03646, over 4870.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2142, pruned_loss=0.03531, over 972780.30 frames.], batch size: 32, lr: 2.61e-04 +2022-05-06 04:53:45,659 INFO [train.py:715] (6/8) Epoch 8, batch 16050, loss[loss=0.1963, simple_loss=0.2681, pruned_loss=0.0623, over 4946.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2149, pruned_loss=0.03536, over 973625.89 frames.], batch size: 24, lr: 2.61e-04 +2022-05-06 04:54:25,527 INFO [train.py:715] (6/8) Epoch 8, batch 16100, loss[loss=0.1354, simple_loss=0.2025, pruned_loss=0.03415, over 4769.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2152, pruned_loss=0.03558, over 973333.76 frames.], batch size: 12, lr: 2.61e-04 +2022-05-06 04:55:04,006 INFO [train.py:715] (6/8) Epoch 8, batch 16150, loss[loss=0.1381, simple_loss=0.2096, pruned_loss=0.03329, over 4824.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2152, pruned_loss=0.03516, over 972476.04 frames.], batch size: 15, lr: 2.61e-04 +2022-05-06 04:55:43,547 INFO [train.py:715] (6/8) Epoch 8, batch 16200, loss[loss=0.1483, simple_loss=0.2255, pruned_loss=0.03551, over 4954.00 frames.], tot_loss[loss=0.143, simple_loss=0.2152, pruned_loss=0.03538, over 972689.55 frames.], batch size: 21, lr: 2.61e-04 +2022-05-06 04:56:21,934 INFO [train.py:715] (6/8) Epoch 8, batch 16250, loss[loss=0.1199, simple_loss=0.2007, pruned_loss=0.01957, over 4782.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2154, pruned_loss=0.0354, over 972559.18 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 04:57:01,395 INFO [train.py:715] (6/8) Epoch 8, batch 16300, loss[loss=0.1545, simple_loss=0.2252, pruned_loss=0.04186, over 4781.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2161, pruned_loss=0.03556, over 971647.93 frames.], batch size: 14, lr: 2.61e-04 +2022-05-06 04:57:40,825 INFO [train.py:715] (6/8) 
Epoch 8, batch 16350, loss[loss=0.1258, simple_loss=0.2052, pruned_loss=0.02318, over 4964.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2168, pruned_loss=0.03599, over 971971.36 frames.], batch size: 24, lr: 2.61e-04 +2022-05-06 04:58:19,599 INFO [train.py:715] (6/8) Epoch 8, batch 16400, loss[loss=0.1454, simple_loss=0.2056, pruned_loss=0.04259, over 4857.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2161, pruned_loss=0.03518, over 971960.00 frames.], batch size: 30, lr: 2.61e-04 +2022-05-06 04:58:58,715 INFO [train.py:715] (6/8) Epoch 8, batch 16450, loss[loss=0.1175, simple_loss=0.1798, pruned_loss=0.02757, over 4971.00 frames.], tot_loss[loss=0.1441, simple_loss=0.217, pruned_loss=0.03556, over 972394.40 frames.], batch size: 14, lr: 2.61e-04 +2022-05-06 04:59:37,562 INFO [train.py:715] (6/8) Epoch 8, batch 16500, loss[loss=0.1404, simple_loss=0.2071, pruned_loss=0.03683, over 4758.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2175, pruned_loss=0.03604, over 972820.71 frames.], batch size: 19, lr: 2.61e-04 +2022-05-06 05:00:17,264 INFO [train.py:715] (6/8) Epoch 8, batch 16550, loss[loss=0.1263, simple_loss=0.1979, pruned_loss=0.02733, over 4779.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2169, pruned_loss=0.03577, over 972998.95 frames.], batch size: 12, lr: 2.61e-04 +2022-05-06 05:00:56,284 INFO [train.py:715] (6/8) Epoch 8, batch 16600, loss[loss=0.1603, simple_loss=0.2271, pruned_loss=0.04673, over 4777.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2166, pruned_loss=0.03562, over 973327.84 frames.], batch size: 14, lr: 2.61e-04 +2022-05-06 05:01:35,314 INFO [train.py:715] (6/8) Epoch 8, batch 16650, loss[loss=0.1675, simple_loss=0.2442, pruned_loss=0.0454, over 4870.00 frames.], tot_loss[loss=0.143, simple_loss=0.2158, pruned_loss=0.03513, over 972741.80 frames.], batch size: 32, lr: 2.61e-04 +2022-05-06 05:02:14,557 INFO [train.py:715] (6/8) Epoch 8, batch 16700, loss[loss=0.183, simple_loss=0.2659, pruned_loss=0.05005, over 4930.00 frames.], tot_loss[loss=0.143, simple_loss=0.2156, pruned_loss=0.03518, over 972055.61 frames.], batch size: 29, lr: 2.61e-04 +2022-05-06 05:02:53,480 INFO [train.py:715] (6/8) Epoch 8, batch 16750, loss[loss=0.1622, simple_loss=0.2215, pruned_loss=0.05147, over 4838.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2156, pruned_loss=0.03515, over 972005.28 frames.], batch size: 13, lr: 2.61e-04 +2022-05-06 05:03:33,070 INFO [train.py:715] (6/8) Epoch 8, batch 16800, loss[loss=0.1177, simple_loss=0.1865, pruned_loss=0.02449, over 4952.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.03529, over 972055.52 frames.], batch size: 29, lr: 2.61e-04 +2022-05-06 05:04:12,045 INFO [train.py:715] (6/8) Epoch 8, batch 16850, loss[loss=0.1361, simple_loss=0.2065, pruned_loss=0.03286, over 4951.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2161, pruned_loss=0.0351, over 973009.02 frames.], batch size: 39, lr: 2.61e-04 +2022-05-06 05:04:51,956 INFO [train.py:715] (6/8) Epoch 8, batch 16900, loss[loss=0.1532, simple_loss=0.2292, pruned_loss=0.03862, over 4815.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03511, over 972474.35 frames.], batch size: 15, lr: 2.61e-04 +2022-05-06 05:05:30,453 INFO [train.py:715] (6/8) Epoch 8, batch 16950, loss[loss=0.1456, simple_loss=0.223, pruned_loss=0.03408, over 4766.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2155, pruned_loss=0.03497, over 971658.58 frames.], batch size: 19, lr: 2.61e-04 +2022-05-06 05:06:10,149 INFO [train.py:715] (6/8) Epoch 8, batch 17000, 
loss[loss=0.1319, simple_loss=0.2058, pruned_loss=0.02901, over 4778.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2154, pruned_loss=0.03484, over 972406.26 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 05:06:49,663 INFO [train.py:715] (6/8) Epoch 8, batch 17050, loss[loss=0.1703, simple_loss=0.2432, pruned_loss=0.04872, over 4784.00 frames.], tot_loss[loss=0.1424, simple_loss=0.215, pruned_loss=0.03487, over 972506.63 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 05:07:28,340 INFO [train.py:715] (6/8) Epoch 8, batch 17100, loss[loss=0.1269, simple_loss=0.1974, pruned_loss=0.02817, over 4929.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.03454, over 971610.15 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 05:08:08,034 INFO [train.py:715] (6/8) Epoch 8, batch 17150, loss[loss=0.1332, simple_loss=0.22, pruned_loss=0.02323, over 4755.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2141, pruned_loss=0.03478, over 971491.41 frames.], batch size: 16, lr: 2.61e-04 +2022-05-06 05:08:47,206 INFO [train.py:715] (6/8) Epoch 8, batch 17200, loss[loss=0.1602, simple_loss=0.2319, pruned_loss=0.04431, over 4781.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2144, pruned_loss=0.03461, over 971958.00 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 05:09:26,326 INFO [train.py:715] (6/8) Epoch 8, batch 17250, loss[loss=0.1748, simple_loss=0.2434, pruned_loss=0.05313, over 4848.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2142, pruned_loss=0.03476, over 972367.88 frames.], batch size: 30, lr: 2.61e-04 +2022-05-06 05:10:04,659 INFO [train.py:715] (6/8) Epoch 8, batch 17300, loss[loss=0.1573, simple_loss=0.2349, pruned_loss=0.03985, over 4862.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2147, pruned_loss=0.03519, over 972330.07 frames.], batch size: 20, lr: 2.61e-04 +2022-05-06 05:10:44,498 INFO [train.py:715] (6/8) Epoch 8, batch 17350, loss[loss=0.138, simple_loss=0.2169, pruned_loss=0.02951, over 4958.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2145, pruned_loss=0.03521, over 972721.89 frames.], batch size: 24, lr: 2.61e-04 +2022-05-06 05:11:23,598 INFO [train.py:715] (6/8) Epoch 8, batch 17400, loss[loss=0.1315, simple_loss=0.198, pruned_loss=0.03253, over 4912.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2156, pruned_loss=0.03583, over 972095.86 frames.], batch size: 19, lr: 2.61e-04 +2022-05-06 05:12:02,692 INFO [train.py:715] (6/8) Epoch 8, batch 17450, loss[loss=0.1358, simple_loss=0.2247, pruned_loss=0.02345, over 4987.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2158, pruned_loss=0.03577, over 973013.35 frames.], batch size: 28, lr: 2.61e-04 +2022-05-06 05:12:42,122 INFO [train.py:715] (6/8) Epoch 8, batch 17500, loss[loss=0.1614, simple_loss=0.2382, pruned_loss=0.04233, over 4905.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2154, pruned_loss=0.03517, over 972813.11 frames.], batch size: 19, lr: 2.61e-04 +2022-05-06 05:13:23,165 INFO [train.py:715] (6/8) Epoch 8, batch 17550, loss[loss=0.1588, simple_loss=0.23, pruned_loss=0.04378, over 4795.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2149, pruned_loss=0.03477, over 973105.39 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 05:14:02,976 INFO [train.py:715] (6/8) Epoch 8, batch 17600, loss[loss=0.1136, simple_loss=0.1929, pruned_loss=0.01712, over 4873.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.0346, over 972729.23 frames.], batch size: 16, lr: 2.61e-04 +2022-05-06 05:14:41,720 INFO [train.py:715] (6/8) Epoch 8, batch 17650, loss[loss=0.1536, 
simple_loss=0.2286, pruned_loss=0.03926, over 4775.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2143, pruned_loss=0.03471, over 972153.06 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 05:15:22,840 INFO [train.py:715] (6/8) Epoch 8, batch 17700, loss[loss=0.178, simple_loss=0.2481, pruned_loss=0.05391, over 4920.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2147, pruned_loss=0.03481, over 971588.73 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 05:16:02,816 INFO [train.py:715] (6/8) Epoch 8, batch 17750, loss[loss=0.1125, simple_loss=0.1926, pruned_loss=0.0162, over 4832.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2144, pruned_loss=0.03496, over 971999.99 frames.], batch size: 26, lr: 2.61e-04 +2022-05-06 05:16:43,282 INFO [train.py:715] (6/8) Epoch 8, batch 17800, loss[loss=0.1631, simple_loss=0.2386, pruned_loss=0.04383, over 4902.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2149, pruned_loss=0.03521, over 972254.54 frames.], batch size: 19, lr: 2.61e-04 +2022-05-06 05:17:23,944 INFO [train.py:715] (6/8) Epoch 8, batch 17850, loss[loss=0.1727, simple_loss=0.2382, pruned_loss=0.05362, over 4746.00 frames.], tot_loss[loss=0.1428, simple_loss=0.215, pruned_loss=0.03527, over 971767.65 frames.], batch size: 19, lr: 2.61e-04 +2022-05-06 05:18:04,807 INFO [train.py:715] (6/8) Epoch 8, batch 17900, loss[loss=0.1408, simple_loss=0.2153, pruned_loss=0.03313, over 4986.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.03524, over 972434.45 frames.], batch size: 26, lr: 2.61e-04 +2022-05-06 05:18:46,221 INFO [train.py:715] (6/8) Epoch 8, batch 17950, loss[loss=0.1554, simple_loss=0.2163, pruned_loss=0.04726, over 4692.00 frames.], tot_loss[loss=0.1443, simple_loss=0.216, pruned_loss=0.03625, over 972108.39 frames.], batch size: 15, lr: 2.61e-04 +2022-05-06 05:19:26,628 INFO [train.py:715] (6/8) Epoch 8, batch 18000, loss[loss=0.1412, simple_loss=0.207, pruned_loss=0.03767, over 4936.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2158, pruned_loss=0.03579, over 972178.78 frames.], batch size: 21, lr: 2.61e-04 +2022-05-06 05:19:26,629 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 05:19:36,398 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1073, simple_loss=0.1919, pruned_loss=0.01138, over 914524.00 frames. 
+2022-05-06 05:20:17,015 INFO [train.py:715] (6/8) Epoch 8, batch 18050, loss[loss=0.135, simple_loss=0.2158, pruned_loss=0.02714, over 4781.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.03529, over 972285.07 frames.], batch size: 21, lr: 2.60e-04 +2022-05-06 05:20:59,054 INFO [train.py:715] (6/8) Epoch 8, batch 18100, loss[loss=0.1476, simple_loss=0.2167, pruned_loss=0.03931, over 4781.00 frames.], tot_loss[loss=0.144, simple_loss=0.2162, pruned_loss=0.03592, over 971781.01 frames.], batch size: 19, lr: 2.60e-04 +2022-05-06 05:21:40,107 INFO [train.py:715] (6/8) Epoch 8, batch 18150, loss[loss=0.1542, simple_loss=0.2277, pruned_loss=0.04041, over 4760.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2165, pruned_loss=0.03615, over 971661.26 frames.], batch size: 19, lr: 2.60e-04 +2022-05-06 05:22:21,017 INFO [train.py:715] (6/8) Epoch 8, batch 18200, loss[loss=0.1384, simple_loss=0.2137, pruned_loss=0.03155, over 4929.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2163, pruned_loss=0.03608, over 971935.63 frames.], batch size: 29, lr: 2.60e-04 +2022-05-06 05:23:02,796 INFO [train.py:715] (6/8) Epoch 8, batch 18250, loss[loss=0.1141, simple_loss=0.1966, pruned_loss=0.01574, over 4986.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2164, pruned_loss=0.03609, over 972335.91 frames.], batch size: 28, lr: 2.60e-04 +2022-05-06 05:23:43,828 INFO [train.py:715] (6/8) Epoch 8, batch 18300, loss[loss=0.1493, simple_loss=0.2329, pruned_loss=0.03287, over 4988.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2169, pruned_loss=0.03608, over 972885.54 frames.], batch size: 26, lr: 2.60e-04 +2022-05-06 05:24:25,294 INFO [train.py:715] (6/8) Epoch 8, batch 18350, loss[loss=0.1115, simple_loss=0.1779, pruned_loss=0.02252, over 4826.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2168, pruned_loss=0.03581, over 972594.66 frames.], batch size: 13, lr: 2.60e-04 +2022-05-06 05:25:06,146 INFO [train.py:715] (6/8) Epoch 8, batch 18400, loss[loss=0.1485, simple_loss=0.2131, pruned_loss=0.04199, over 4691.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2162, pruned_loss=0.03541, over 972438.16 frames.], batch size: 15, lr: 2.60e-04 +2022-05-06 05:25:47,841 INFO [train.py:715] (6/8) Epoch 8, batch 18450, loss[loss=0.1517, simple_loss=0.2213, pruned_loss=0.04108, over 4749.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2162, pruned_loss=0.03569, over 972839.74 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:26:28,563 INFO [train.py:715] (6/8) Epoch 8, batch 18500, loss[loss=0.1177, simple_loss=0.1829, pruned_loss=0.02625, over 4906.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2155, pruned_loss=0.03549, over 972494.87 frames.], batch size: 19, lr: 2.60e-04 +2022-05-06 05:27:08,967 INFO [train.py:715] (6/8) Epoch 8, batch 18550, loss[loss=0.1475, simple_loss=0.219, pruned_loss=0.03801, over 4835.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03566, over 972303.90 frames.], batch size: 15, lr: 2.60e-04 +2022-05-06 05:27:50,215 INFO [train.py:715] (6/8) Epoch 8, batch 18600, loss[loss=0.1598, simple_loss=0.228, pruned_loss=0.04579, over 4810.00 frames.], tot_loss[loss=0.143, simple_loss=0.2153, pruned_loss=0.03537, over 971876.43 frames.], batch size: 13, lr: 2.60e-04 +2022-05-06 05:28:30,420 INFO [train.py:715] (6/8) Epoch 8, batch 18650, loss[loss=0.1464, simple_loss=0.2249, pruned_loss=0.03391, over 4814.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2158, pruned_loss=0.03565, over 972647.40 frames.], batch size: 21, lr: 2.60e-04 +2022-05-06 
05:29:09,926 INFO [train.py:715] (6/8) Epoch 8, batch 18700, loss[loss=0.1428, simple_loss=0.2112, pruned_loss=0.03719, over 4864.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2154, pruned_loss=0.03547, over 972487.51 frames.], batch size: 20, lr: 2.60e-04 +2022-05-06 05:29:49,895 INFO [train.py:715] (6/8) Epoch 8, batch 18750, loss[loss=0.1421, simple_loss=0.21, pruned_loss=0.0371, over 4983.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2142, pruned_loss=0.03474, over 972342.06 frames.], batch size: 35, lr: 2.60e-04 +2022-05-06 05:30:30,988 INFO [train.py:715] (6/8) Epoch 8, batch 18800, loss[loss=0.1358, simple_loss=0.2121, pruned_loss=0.02977, over 4890.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2146, pruned_loss=0.03541, over 971128.81 frames.], batch size: 22, lr: 2.60e-04 +2022-05-06 05:31:10,616 INFO [train.py:715] (6/8) Epoch 8, batch 18850, loss[loss=0.1655, simple_loss=0.2307, pruned_loss=0.05018, over 4907.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2149, pruned_loss=0.03522, over 972618.90 frames.], batch size: 22, lr: 2.60e-04 +2022-05-06 05:31:50,017 INFO [train.py:715] (6/8) Epoch 8, batch 18900, loss[loss=0.2152, simple_loss=0.2793, pruned_loss=0.07555, over 4780.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2165, pruned_loss=0.03608, over 973000.07 frames.], batch size: 17, lr: 2.60e-04 +2022-05-06 05:32:30,292 INFO [train.py:715] (6/8) Epoch 8, batch 18950, loss[loss=0.1279, simple_loss=0.1962, pruned_loss=0.02983, over 4880.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2168, pruned_loss=0.03639, over 973453.99 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:33:10,177 INFO [train.py:715] (6/8) Epoch 8, batch 19000, loss[loss=0.1558, simple_loss=0.2136, pruned_loss=0.04902, over 4738.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2161, pruned_loss=0.03612, over 973354.64 frames.], batch size: 12, lr: 2.60e-04 +2022-05-06 05:33:50,113 INFO [train.py:715] (6/8) Epoch 8, batch 19050, loss[loss=0.1358, simple_loss=0.2157, pruned_loss=0.02793, over 4831.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2156, pruned_loss=0.03562, over 974169.90 frames.], batch size: 26, lr: 2.60e-04 +2022-05-06 05:34:31,417 INFO [train.py:715] (6/8) Epoch 8, batch 19100, loss[loss=0.1397, simple_loss=0.2121, pruned_loss=0.03364, over 4839.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.03545, over 974240.32 frames.], batch size: 30, lr: 2.60e-04 +2022-05-06 05:35:13,336 INFO [train.py:715] (6/8) Epoch 8, batch 19150, loss[loss=0.1223, simple_loss=0.1925, pruned_loss=0.02601, over 4992.00 frames.], tot_loss[loss=0.1437, simple_loss=0.216, pruned_loss=0.03565, over 974385.90 frames.], batch size: 14, lr: 2.60e-04 +2022-05-06 05:35:55,000 INFO [train.py:715] (6/8) Epoch 8, batch 19200, loss[loss=0.1565, simple_loss=0.2215, pruned_loss=0.0458, over 4855.00 frames.], tot_loss[loss=0.1437, simple_loss=0.216, pruned_loss=0.03572, over 974410.01 frames.], batch size: 32, lr: 2.60e-04 +2022-05-06 05:36:35,261 INFO [train.py:715] (6/8) Epoch 8, batch 19250, loss[loss=0.118, simple_loss=0.187, pruned_loss=0.02446, over 4814.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2172, pruned_loss=0.03615, over 974518.28 frames.], batch size: 13, lr: 2.60e-04 +2022-05-06 05:37:17,452 INFO [train.py:715] (6/8) Epoch 8, batch 19300, loss[loss=0.1252, simple_loss=0.1948, pruned_loss=0.02781, over 4765.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2169, pruned_loss=0.0365, over 974338.82 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:37:58,611 INFO 
[train.py:715] (6/8) Epoch 8, batch 19350, loss[loss=0.1505, simple_loss=0.2203, pruned_loss=0.0404, over 4847.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2166, pruned_loss=0.03634, over 974476.39 frames.], batch size: 34, lr: 2.60e-04 +2022-05-06 05:38:39,846 INFO [train.py:715] (6/8) Epoch 8, batch 19400, loss[loss=0.1388, simple_loss=0.2176, pruned_loss=0.03004, over 4737.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.03532, over 973760.98 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:39:21,788 INFO [train.py:715] (6/8) Epoch 8, batch 19450, loss[loss=0.1383, simple_loss=0.2185, pruned_loss=0.02905, over 4763.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2148, pruned_loss=0.03468, over 972652.87 frames.], batch size: 19, lr: 2.60e-04 +2022-05-06 05:40:03,274 INFO [train.py:715] (6/8) Epoch 8, batch 19500, loss[loss=0.1679, simple_loss=0.2326, pruned_loss=0.05166, over 4953.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2148, pruned_loss=0.03469, over 972744.04 frames.], batch size: 15, lr: 2.60e-04 +2022-05-06 05:40:44,564 INFO [train.py:715] (6/8) Epoch 8, batch 19550, loss[loss=0.1208, simple_loss=0.1897, pruned_loss=0.02591, over 4748.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2139, pruned_loss=0.0343, over 972688.60 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:41:25,033 INFO [train.py:715] (6/8) Epoch 8, batch 19600, loss[loss=0.1173, simple_loss=0.1899, pruned_loss=0.02239, over 4737.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2138, pruned_loss=0.03459, over 972407.28 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:42:06,543 INFO [train.py:715] (6/8) Epoch 8, batch 19650, loss[loss=0.1333, simple_loss=0.2094, pruned_loss=0.02858, over 4906.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2138, pruned_loss=0.03452, over 972546.20 frames.], batch size: 19, lr: 2.60e-04 +2022-05-06 05:42:47,231 INFO [train.py:715] (6/8) Epoch 8, batch 19700, loss[loss=0.1336, simple_loss=0.1952, pruned_loss=0.03599, over 4699.00 frames.], tot_loss[loss=0.1408, simple_loss=0.213, pruned_loss=0.03431, over 972106.44 frames.], batch size: 15, lr: 2.60e-04 +2022-05-06 05:43:28,189 INFO [train.py:715] (6/8) Epoch 8, batch 19750, loss[loss=0.1113, simple_loss=0.1771, pruned_loss=0.02272, over 4746.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2142, pruned_loss=0.03463, over 972731.40 frames.], batch size: 12, lr: 2.60e-04 +2022-05-06 05:44:09,860 INFO [train.py:715] (6/8) Epoch 8, batch 19800, loss[loss=0.1366, simple_loss=0.2114, pruned_loss=0.03095, over 4804.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2147, pruned_loss=0.03518, over 972245.33 frames.], batch size: 24, lr: 2.60e-04 +2022-05-06 05:44:50,900 INFO [train.py:715] (6/8) Epoch 8, batch 19850, loss[loss=0.1396, simple_loss=0.2219, pruned_loss=0.02863, over 4905.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2154, pruned_loss=0.03551, over 972303.61 frames.], batch size: 19, lr: 2.60e-04 +2022-05-06 05:45:31,216 INFO [train.py:715] (6/8) Epoch 8, batch 19900, loss[loss=0.1381, simple_loss=0.2003, pruned_loss=0.03798, over 4799.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2155, pruned_loss=0.03553, over 972157.82 frames.], batch size: 12, lr: 2.60e-04 +2022-05-06 05:46:10,977 INFO [train.py:715] (6/8) Epoch 8, batch 19950, loss[loss=0.1173, simple_loss=0.1912, pruned_loss=0.02165, over 4919.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2154, pruned_loss=0.03523, over 972640.12 frames.], batch size: 23, lr: 2.60e-04 +2022-05-06 05:46:51,590 INFO [train.py:715] (6/8) 
Epoch 8, batch 20000, loss[loss=0.1538, simple_loss=0.2426, pruned_loss=0.03247, over 4868.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2152, pruned_loss=0.03501, over 972162.73 frames.], batch size: 32, lr: 2.60e-04 +2022-05-06 05:47:32,117 INFO [train.py:715] (6/8) Epoch 8, batch 20050, loss[loss=0.1251, simple_loss=0.1932, pruned_loss=0.02845, over 4780.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2153, pruned_loss=0.0355, over 971451.23 frames.], batch size: 12, lr: 2.60e-04 +2022-05-06 05:48:12,633 INFO [train.py:715] (6/8) Epoch 8, batch 20100, loss[loss=0.1535, simple_loss=0.2181, pruned_loss=0.04447, over 4885.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2148, pruned_loss=0.03526, over 971880.59 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:48:53,765 INFO [train.py:715] (6/8) Epoch 8, batch 20150, loss[loss=0.1415, simple_loss=0.2142, pruned_loss=0.03436, over 4933.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2153, pruned_loss=0.03554, over 972582.55 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:49:34,572 INFO [train.py:715] (6/8) Epoch 8, batch 20200, loss[loss=0.1305, simple_loss=0.201, pruned_loss=0.02998, over 4816.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2152, pruned_loss=0.03563, over 973111.19 frames.], batch size: 25, lr: 2.60e-04 +2022-05-06 05:50:15,442 INFO [train.py:715] (6/8) Epoch 8, batch 20250, loss[loss=0.1521, simple_loss=0.2092, pruned_loss=0.0475, over 4754.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2152, pruned_loss=0.0358, over 972263.00 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:50:56,711 INFO [train.py:715] (6/8) Epoch 8, batch 20300, loss[loss=0.1438, simple_loss=0.2207, pruned_loss=0.0335, over 4844.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2162, pruned_loss=0.03617, over 971564.60 frames.], batch size: 30, lr: 2.60e-04 +2022-05-06 05:51:37,709 INFO [train.py:715] (6/8) Epoch 8, batch 20350, loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02858, over 4820.00 frames.], tot_loss[loss=0.1443, simple_loss=0.216, pruned_loss=0.03629, over 971750.99 frames.], batch size: 26, lr: 2.59e-04 +2022-05-06 05:52:18,260 INFO [train.py:715] (6/8) Epoch 8, batch 20400, loss[loss=0.1564, simple_loss=0.2264, pruned_loss=0.0432, over 4970.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.03657, over 972328.77 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 05:52:58,521 INFO [train.py:715] (6/8) Epoch 8, batch 20450, loss[loss=0.1446, simple_loss=0.2165, pruned_loss=0.03636, over 4962.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2171, pruned_loss=0.03664, over 971764.37 frames.], batch size: 28, lr: 2.59e-04 +2022-05-06 05:53:39,599 INFO [train.py:715] (6/8) Epoch 8, batch 20500, loss[loss=0.1423, simple_loss=0.2053, pruned_loss=0.03967, over 4861.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2175, pruned_loss=0.03665, over 972301.03 frames.], batch size: 20, lr: 2.59e-04 +2022-05-06 05:54:20,091 INFO [train.py:715] (6/8) Epoch 8, batch 20550, loss[loss=0.1082, simple_loss=0.185, pruned_loss=0.0157, over 4924.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2172, pruned_loss=0.03677, over 972372.27 frames.], batch size: 29, lr: 2.59e-04 +2022-05-06 05:55:00,456 INFO [train.py:715] (6/8) Epoch 8, batch 20600, loss[loss=0.1702, simple_loss=0.2401, pruned_loss=0.05019, over 4769.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2172, pruned_loss=0.03663, over 972001.27 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 05:55:41,414 INFO [train.py:715] (6/8) Epoch 8, batch 20650, 
loss[loss=0.09923, simple_loss=0.1749, pruned_loss=0.0118, over 4811.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03585, over 971605.67 frames.], batch size: 12, lr: 2.59e-04 +2022-05-06 05:56:22,573 INFO [train.py:715] (6/8) Epoch 8, batch 20700, loss[loss=0.1819, simple_loss=0.2472, pruned_loss=0.05827, over 4855.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2154, pruned_loss=0.03544, over 973046.13 frames.], batch size: 20, lr: 2.59e-04 +2022-05-06 05:57:02,760 INFO [train.py:715] (6/8) Epoch 8, batch 20750, loss[loss=0.1724, simple_loss=0.245, pruned_loss=0.04991, over 4880.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2167, pruned_loss=0.03609, over 972101.86 frames.], batch size: 32, lr: 2.59e-04 +2022-05-06 05:57:42,969 INFO [train.py:715] (6/8) Epoch 8, batch 20800, loss[loss=0.1649, simple_loss=0.2321, pruned_loss=0.04878, over 4909.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03563, over 973297.92 frames.], batch size: 19, lr: 2.59e-04 +2022-05-06 05:58:24,024 INFO [train.py:715] (6/8) Epoch 8, batch 20850, loss[loss=0.134, simple_loss=0.204, pruned_loss=0.03205, over 4913.00 frames.], tot_loss[loss=0.1426, simple_loss=0.215, pruned_loss=0.0351, over 972774.02 frames.], batch size: 18, lr: 2.59e-04 +2022-05-06 05:59:04,439 INFO [train.py:715] (6/8) Epoch 8, batch 20900, loss[loss=0.1383, simple_loss=0.2076, pruned_loss=0.03447, over 4794.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2147, pruned_loss=0.0349, over 972284.45 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 05:59:43,025 INFO [train.py:715] (6/8) Epoch 8, batch 20950, loss[loss=0.1471, simple_loss=0.2178, pruned_loss=0.03815, over 4976.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03509, over 972259.52 frames.], batch size: 15, lr: 2.59e-04 +2022-05-06 06:00:22,707 INFO [train.py:715] (6/8) Epoch 8, batch 21000, loss[loss=0.1561, simple_loss=0.2302, pruned_loss=0.04101, over 4755.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03503, over 971566.53 frames.], batch size: 19, lr: 2.59e-04 +2022-05-06 06:00:22,707 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 06:00:32,255 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1072, simple_loss=0.1919, pruned_loss=0.01129, over 914524.00 frames. 
+2022-05-06 06:01:12,649 INFO [train.py:715] (6/8) Epoch 8, batch 21050, loss[loss=0.1602, simple_loss=0.2196, pruned_loss=0.05042, over 4984.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2157, pruned_loss=0.03552, over 972423.27 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 06:01:52,993 INFO [train.py:715] (6/8) Epoch 8, batch 21100, loss[loss=0.1183, simple_loss=0.1928, pruned_loss=0.0219, over 4935.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.03542, over 973226.92 frames.], batch size: 18, lr: 2.59e-04 +2022-05-06 06:02:31,465 INFO [train.py:715] (6/8) Epoch 8, batch 21150, loss[loss=0.146, simple_loss=0.2202, pruned_loss=0.0359, over 4862.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2157, pruned_loss=0.03568, over 973256.28 frames.], batch size: 32, lr: 2.59e-04 +2022-05-06 06:03:10,266 INFO [train.py:715] (6/8) Epoch 8, batch 21200, loss[loss=0.174, simple_loss=0.2469, pruned_loss=0.05049, over 4896.00 frames.], tot_loss[loss=0.1439, simple_loss=0.216, pruned_loss=0.03593, over 973783.08 frames.], batch size: 19, lr: 2.59e-04 +2022-05-06 06:03:49,969 INFO [train.py:715] (6/8) Epoch 8, batch 21250, loss[loss=0.125, simple_loss=0.2052, pruned_loss=0.02237, over 4898.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2171, pruned_loss=0.03654, over 974344.46 frames.], batch size: 19, lr: 2.59e-04 +2022-05-06 06:04:29,230 INFO [train.py:715] (6/8) Epoch 8, batch 21300, loss[loss=0.1473, simple_loss=0.2249, pruned_loss=0.03485, over 4851.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2165, pruned_loss=0.03605, over 974266.53 frames.], batch size: 20, lr: 2.59e-04 +2022-05-06 06:05:07,762 INFO [train.py:715] (6/8) Epoch 8, batch 21350, loss[loss=0.1271, simple_loss=0.1966, pruned_loss=0.02884, over 4799.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2164, pruned_loss=0.036, over 973600.56 frames.], batch size: 12, lr: 2.59e-04 +2022-05-06 06:05:47,412 INFO [train.py:715] (6/8) Epoch 8, batch 21400, loss[loss=0.1488, simple_loss=0.2336, pruned_loss=0.03206, over 4953.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2156, pruned_loss=0.03541, over 973948.74 frames.], batch size: 24, lr: 2.59e-04 +2022-05-06 06:06:27,494 INFO [train.py:715] (6/8) Epoch 8, batch 21450, loss[loss=0.1454, simple_loss=0.2163, pruned_loss=0.03728, over 4789.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.03537, over 972159.44 frames.], batch size: 24, lr: 2.59e-04 +2022-05-06 06:07:06,791 INFO [train.py:715] (6/8) Epoch 8, batch 21500, loss[loss=0.1776, simple_loss=0.2465, pruned_loss=0.05434, over 4975.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2143, pruned_loss=0.03513, over 972471.44 frames.], batch size: 25, lr: 2.59e-04 +2022-05-06 06:07:45,792 INFO [train.py:715] (6/8) Epoch 8, batch 21550, loss[loss=0.148, simple_loss=0.2207, pruned_loss=0.03771, over 4881.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2143, pruned_loss=0.03532, over 972531.53 frames.], batch size: 19, lr: 2.59e-04 +2022-05-06 06:08:25,816 INFO [train.py:715] (6/8) Epoch 8, batch 21600, loss[loss=0.1493, simple_loss=0.2271, pruned_loss=0.03575, over 4768.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2142, pruned_loss=0.03513, over 972074.69 frames.], batch size: 18, lr: 2.59e-04 +2022-05-06 06:09:04,798 INFO [train.py:715] (6/8) Epoch 8, batch 21650, loss[loss=0.1574, simple_loss=0.2172, pruned_loss=0.04883, over 4985.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2144, pruned_loss=0.03457, over 972827.28 frames.], batch size: 26, lr: 2.59e-04 +2022-05-06 06:09:43,512 
INFO [train.py:715] (6/8) Epoch 8, batch 21700, loss[loss=0.1432, simple_loss=0.217, pruned_loss=0.03477, over 4973.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2157, pruned_loss=0.0351, over 972432.28 frames.], batch size: 24, lr: 2.59e-04 +2022-05-06 06:10:23,861 INFO [train.py:715] (6/8) Epoch 8, batch 21750, loss[loss=0.151, simple_loss=0.2284, pruned_loss=0.03686, over 4908.00 frames.], tot_loss[loss=0.1424, simple_loss=0.215, pruned_loss=0.03491, over 972804.93 frames.], batch size: 39, lr: 2.59e-04 +2022-05-06 06:11:03,701 INFO [train.py:715] (6/8) Epoch 8, batch 21800, loss[loss=0.1387, simple_loss=0.2073, pruned_loss=0.03499, over 4981.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.03493, over 972478.98 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 06:11:42,814 INFO [train.py:715] (6/8) Epoch 8, batch 21850, loss[loss=0.1531, simple_loss=0.2233, pruned_loss=0.04144, over 4647.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03508, over 972569.65 frames.], batch size: 13, lr: 2.59e-04 +2022-05-06 06:12:21,179 INFO [train.py:715] (6/8) Epoch 8, batch 21900, loss[loss=0.1504, simple_loss=0.2276, pruned_loss=0.03656, over 4831.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2162, pruned_loss=0.03541, over 972787.59 frames.], batch size: 15, lr: 2.59e-04 +2022-05-06 06:13:00,620 INFO [train.py:715] (6/8) Epoch 8, batch 21950, loss[loss=0.134, simple_loss=0.2121, pruned_loss=0.02796, over 4696.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2151, pruned_loss=0.035, over 971889.88 frames.], batch size: 12, lr: 2.59e-04 +2022-05-06 06:13:39,703 INFO [train.py:715] (6/8) Epoch 8, batch 22000, loss[loss=0.1245, simple_loss=0.1911, pruned_loss=0.0289, over 4914.00 frames.], tot_loss[loss=0.142, simple_loss=0.2144, pruned_loss=0.03473, over 972629.61 frames.], batch size: 17, lr: 2.59e-04 +2022-05-06 06:14:18,331 INFO [train.py:715] (6/8) Epoch 8, batch 22050, loss[loss=0.1598, simple_loss=0.2311, pruned_loss=0.04421, over 4967.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03497, over 972606.98 frames.], batch size: 15, lr: 2.59e-04 +2022-05-06 06:14:58,049 INFO [train.py:715] (6/8) Epoch 8, batch 22100, loss[loss=0.1253, simple_loss=0.198, pruned_loss=0.02628, over 4817.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2145, pruned_loss=0.03461, over 972961.25 frames.], batch size: 26, lr: 2.59e-04 +2022-05-06 06:15:37,425 INFO [train.py:715] (6/8) Epoch 8, batch 22150, loss[loss=0.1845, simple_loss=0.2535, pruned_loss=0.05774, over 4827.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2144, pruned_loss=0.03444, over 973321.38 frames.], batch size: 30, lr: 2.59e-04 +2022-05-06 06:16:16,520 INFO [train.py:715] (6/8) Epoch 8, batch 22200, loss[loss=0.1437, simple_loss=0.2234, pruned_loss=0.03194, over 4987.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2155, pruned_loss=0.03493, over 972448.81 frames.], batch size: 25, lr: 2.59e-04 +2022-05-06 06:16:55,349 INFO [train.py:715] (6/8) Epoch 8, batch 22250, loss[loss=0.1576, simple_loss=0.232, pruned_loss=0.0416, over 4913.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2154, pruned_loss=0.03481, over 971906.49 frames.], batch size: 18, lr: 2.59e-04 +2022-05-06 06:17:34,566 INFO [train.py:715] (6/8) Epoch 8, batch 22300, loss[loss=0.1297, simple_loss=0.207, pruned_loss=0.02617, over 4819.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2159, pruned_loss=0.03513, over 970828.89 frames.], batch size: 27, lr: 2.59e-04 +2022-05-06 06:18:13,312 INFO [train.py:715] (6/8) Epoch 
8, batch 22350, loss[loss=0.1192, simple_loss=0.1861, pruned_loss=0.02621, over 4849.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2163, pruned_loss=0.03543, over 970937.74 frames.], batch size: 32, lr: 2.59e-04 +2022-05-06 06:18:51,907 INFO [train.py:715] (6/8) Epoch 8, batch 22400, loss[loss=0.1586, simple_loss=0.2259, pruned_loss=0.04568, over 4978.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2165, pruned_loss=0.03549, over 971170.68 frames.], batch size: 28, lr: 2.59e-04 +2022-05-06 06:19:31,235 INFO [train.py:715] (6/8) Epoch 8, batch 22450, loss[loss=0.1592, simple_loss=0.235, pruned_loss=0.04169, over 4970.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2169, pruned_loss=0.03573, over 970941.06 frames.], batch size: 15, lr: 2.59e-04 +2022-05-06 06:20:10,737 INFO [train.py:715] (6/8) Epoch 8, batch 22500, loss[loss=0.1641, simple_loss=0.2302, pruned_loss=0.049, over 4980.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2163, pruned_loss=0.03552, over 971811.64 frames.], batch size: 15, lr: 2.59e-04 +2022-05-06 06:20:49,333 INFO [train.py:715] (6/8) Epoch 8, batch 22550, loss[loss=0.1534, simple_loss=0.2305, pruned_loss=0.03815, over 4834.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2158, pruned_loss=0.03551, over 971757.61 frames.], batch size: 15, lr: 2.59e-04 +2022-05-06 06:21:28,253 INFO [train.py:715] (6/8) Epoch 8, batch 22600, loss[loss=0.1415, simple_loss=0.2129, pruned_loss=0.03502, over 4968.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03511, over 971748.61 frames.], batch size: 24, lr: 2.59e-04 +2022-05-06 06:22:07,736 INFO [train.py:715] (6/8) Epoch 8, batch 22650, loss[loss=0.145, simple_loss=0.2197, pruned_loss=0.03516, over 4911.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2151, pruned_loss=0.03489, over 971760.35 frames.], batch size: 18, lr: 2.58e-04 +2022-05-06 06:22:46,457 INFO [train.py:715] (6/8) Epoch 8, batch 22700, loss[loss=0.1525, simple_loss=0.2257, pruned_loss=0.03966, over 4826.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2161, pruned_loss=0.03538, over 972334.79 frames.], batch size: 27, lr: 2.58e-04 +2022-05-06 06:23:24,776 INFO [train.py:715] (6/8) Epoch 8, batch 22750, loss[loss=0.1185, simple_loss=0.2027, pruned_loss=0.01713, over 4811.00 frames.], tot_loss[loss=0.1447, simple_loss=0.217, pruned_loss=0.03619, over 972406.66 frames.], batch size: 26, lr: 2.58e-04 +2022-05-06 06:24:04,594 INFO [train.py:715] (6/8) Epoch 8, batch 22800, loss[loss=0.1259, simple_loss=0.2107, pruned_loss=0.02053, over 4832.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2168, pruned_loss=0.0359, over 972722.65 frames.], batch size: 25, lr: 2.58e-04 +2022-05-06 06:24:43,767 INFO [train.py:715] (6/8) Epoch 8, batch 22850, loss[loss=0.1473, simple_loss=0.2135, pruned_loss=0.04057, over 4863.00 frames.], tot_loss[loss=0.1448, simple_loss=0.217, pruned_loss=0.03629, over 972565.01 frames.], batch size: 32, lr: 2.58e-04 +2022-05-06 06:25:22,845 INFO [train.py:715] (6/8) Epoch 8, batch 22900, loss[loss=0.1424, simple_loss=0.2142, pruned_loss=0.03534, over 4841.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2164, pruned_loss=0.0361, over 971502.61 frames.], batch size: 30, lr: 2.58e-04 +2022-05-06 06:26:01,959 INFO [train.py:715] (6/8) Epoch 8, batch 22950, loss[loss=0.1257, simple_loss=0.1971, pruned_loss=0.02719, over 4893.00 frames.], tot_loss[loss=0.1439, simple_loss=0.216, pruned_loss=0.03591, over 970990.86 frames.], batch size: 17, lr: 2.58e-04 +2022-05-06 06:26:41,735 INFO [train.py:715] (6/8) Epoch 8, batch 23000, 
loss[loss=0.1257, simple_loss=0.1917, pruned_loss=0.02983, over 4772.00 frames.], tot_loss[loss=0.143, simple_loss=0.2152, pruned_loss=0.03538, over 971985.38 frames.], batch size: 18, lr: 2.58e-04 +2022-05-06 06:27:20,529 INFO [train.py:715] (6/8) Epoch 8, batch 23050, loss[loss=0.132, simple_loss=0.2051, pruned_loss=0.02943, over 4792.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2155, pruned_loss=0.03568, over 972185.68 frames.], batch size: 14, lr: 2.58e-04 +2022-05-06 06:27:59,240 INFO [train.py:715] (6/8) Epoch 8, batch 23100, loss[loss=0.1607, simple_loss=0.2297, pruned_loss=0.04586, over 4971.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2145, pruned_loss=0.03547, over 972484.44 frames.], batch size: 28, lr: 2.58e-04 +2022-05-06 06:28:39,378 INFO [train.py:715] (6/8) Epoch 8, batch 23150, loss[loss=0.1943, simple_loss=0.2592, pruned_loss=0.06472, over 4901.00 frames.], tot_loss[loss=0.1421, simple_loss=0.214, pruned_loss=0.03514, over 972784.73 frames.], batch size: 18, lr: 2.58e-04 +2022-05-06 06:29:18,755 INFO [train.py:715] (6/8) Epoch 8, batch 23200, loss[loss=0.1528, simple_loss=0.2189, pruned_loss=0.04337, over 4864.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2143, pruned_loss=0.03536, over 972973.73 frames.], batch size: 22, lr: 2.58e-04 +2022-05-06 06:29:57,398 INFO [train.py:715] (6/8) Epoch 8, batch 23250, loss[loss=0.1251, simple_loss=0.1924, pruned_loss=0.0289, over 4777.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2134, pruned_loss=0.03484, over 973261.74 frames.], batch size: 12, lr: 2.58e-04 +2022-05-06 06:30:36,514 INFO [train.py:715] (6/8) Epoch 8, batch 23300, loss[loss=0.1613, simple_loss=0.2439, pruned_loss=0.03932, over 4791.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2135, pruned_loss=0.03472, over 971518.82 frames.], batch size: 18, lr: 2.58e-04 +2022-05-06 06:31:16,265 INFO [train.py:715] (6/8) Epoch 8, batch 23350, loss[loss=0.1484, simple_loss=0.2158, pruned_loss=0.04046, over 4853.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2128, pruned_loss=0.03441, over 971568.76 frames.], batch size: 20, lr: 2.58e-04 +2022-05-06 06:31:55,027 INFO [train.py:715] (6/8) Epoch 8, batch 23400, loss[loss=0.1457, simple_loss=0.2181, pruned_loss=0.03664, over 4849.00 frames.], tot_loss[loss=0.1412, simple_loss=0.213, pruned_loss=0.03472, over 970986.52 frames.], batch size: 30, lr: 2.58e-04 +2022-05-06 06:32:33,887 INFO [train.py:715] (6/8) Epoch 8, batch 23450, loss[loss=0.1225, simple_loss=0.1912, pruned_loss=0.02694, over 4843.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2125, pruned_loss=0.03426, over 971420.72 frames.], batch size: 13, lr: 2.58e-04 +2022-05-06 06:33:13,364 INFO [train.py:715] (6/8) Epoch 8, batch 23500, loss[loss=0.1275, simple_loss=0.1982, pruned_loss=0.02838, over 4868.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2132, pruned_loss=0.03433, over 971648.91 frames.], batch size: 20, lr: 2.58e-04 +2022-05-06 06:33:52,530 INFO [train.py:715] (6/8) Epoch 8, batch 23550, loss[loss=0.1433, simple_loss=0.2189, pruned_loss=0.03387, over 4757.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2145, pruned_loss=0.03507, over 971562.27 frames.], batch size: 12, lr: 2.58e-04 +2022-05-06 06:34:31,320 INFO [train.py:715] (6/8) Epoch 8, batch 23600, loss[loss=0.1364, simple_loss=0.2133, pruned_loss=0.02973, over 4974.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2143, pruned_loss=0.0354, over 971420.25 frames.], batch size: 24, lr: 2.58e-04 +2022-05-06 06:35:10,240 INFO [train.py:715] (6/8) Epoch 8, batch 23650, loss[loss=0.162, 
simple_loss=0.236, pruned_loss=0.044, over 4803.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2144, pruned_loss=0.03528, over 971344.05 frames.], batch size: 21, lr: 2.58e-04 +2022-05-06 06:35:50,046 INFO [train.py:715] (6/8) Epoch 8, batch 23700, loss[loss=0.1533, simple_loss=0.2124, pruned_loss=0.04714, over 4832.00 frames.], tot_loss[loss=0.143, simple_loss=0.2148, pruned_loss=0.03565, over 972067.73 frames.], batch size: 13, lr: 2.58e-04 +2022-05-06 06:36:28,666 INFO [train.py:715] (6/8) Epoch 8, batch 23750, loss[loss=0.1287, simple_loss=0.2032, pruned_loss=0.0271, over 4960.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2144, pruned_loss=0.03519, over 971221.33 frames.], batch size: 21, lr: 2.58e-04 +2022-05-06 06:37:07,531 INFO [train.py:715] (6/8) Epoch 8, batch 23800, loss[loss=0.1451, simple_loss=0.2084, pruned_loss=0.04092, over 4849.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.0355, over 970938.22 frames.], batch size: 30, lr: 2.58e-04 +2022-05-06 06:37:46,983 INFO [train.py:715] (6/8) Epoch 8, batch 23850, loss[loss=0.1507, simple_loss=0.216, pruned_loss=0.0427, over 4812.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03561, over 971400.79 frames.], batch size: 25, lr: 2.58e-04 +2022-05-06 06:38:26,642 INFO [train.py:715] (6/8) Epoch 8, batch 23900, loss[loss=0.1431, simple_loss=0.2143, pruned_loss=0.03599, over 4957.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2165, pruned_loss=0.03581, over 970828.77 frames.], batch size: 21, lr: 2.58e-04 +2022-05-06 06:39:05,510 INFO [train.py:715] (6/8) Epoch 8, batch 23950, loss[loss=0.1261, simple_loss=0.1925, pruned_loss=0.02984, over 4639.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2158, pruned_loss=0.03603, over 970551.85 frames.], batch size: 13, lr: 2.58e-04 +2022-05-06 06:39:44,888 INFO [train.py:715] (6/8) Epoch 8, batch 24000, loss[loss=0.14, simple_loss=0.2149, pruned_loss=0.03256, over 4823.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2155, pruned_loss=0.03563, over 971122.05 frames.], batch size: 13, lr: 2.58e-04 +2022-05-06 06:39:44,889 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 06:39:54,531 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1075, simple_loss=0.192, pruned_loss=0.01146, over 914524.00 frames. 
+2022-05-06 06:40:33,720 INFO [train.py:715] (6/8) Epoch 8, batch 24050, loss[loss=0.1142, simple_loss=0.1843, pruned_loss=0.02206, over 4978.00 frames.], tot_loss[loss=0.1439, simple_loss=0.216, pruned_loss=0.03583, over 970691.59 frames.], batch size: 26, lr: 2.58e-04 +2022-05-06 06:41:13,151 INFO [train.py:715] (6/8) Epoch 8, batch 24100, loss[loss=0.1333, simple_loss=0.1986, pruned_loss=0.03397, over 4838.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2164, pruned_loss=0.03563, over 972013.08 frames.], batch size: 13, lr: 2.58e-04 +2022-05-06 06:41:52,117 INFO [train.py:715] (6/8) Epoch 8, batch 24150, loss[loss=0.1334, simple_loss=0.2084, pruned_loss=0.02917, over 4821.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2157, pruned_loss=0.03536, over 972421.19 frames.], batch size: 26, lr: 2.58e-04 +2022-05-06 06:42:31,051 INFO [train.py:715] (6/8) Epoch 8, batch 24200, loss[loss=0.1314, simple_loss=0.2066, pruned_loss=0.02812, over 4986.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.03522, over 972436.47 frames.], batch size: 28, lr: 2.58e-04 +2022-05-06 06:43:11,240 INFO [train.py:715] (6/8) Epoch 8, batch 24250, loss[loss=0.14, simple_loss=0.2192, pruned_loss=0.03038, over 4963.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2149, pruned_loss=0.03505, over 972102.27 frames.], batch size: 15, lr: 2.58e-04 +2022-05-06 06:43:50,603 INFO [train.py:715] (6/8) Epoch 8, batch 24300, loss[loss=0.167, simple_loss=0.2506, pruned_loss=0.04174, over 4741.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2151, pruned_loss=0.03514, over 971905.77 frames.], batch size: 16, lr: 2.58e-04 +2022-05-06 06:44:29,316 INFO [train.py:715] (6/8) Epoch 8, batch 24350, loss[loss=0.1489, simple_loss=0.2319, pruned_loss=0.03293, over 4928.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2141, pruned_loss=0.03459, over 971760.15 frames.], batch size: 29, lr: 2.58e-04 +2022-05-06 06:45:08,118 INFO [train.py:715] (6/8) Epoch 8, batch 24400, loss[loss=0.1293, simple_loss=0.2091, pruned_loss=0.02473, over 4864.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2141, pruned_loss=0.03463, over 971814.94 frames.], batch size: 20, lr: 2.58e-04 +2022-05-06 06:45:47,152 INFO [train.py:715] (6/8) Epoch 8, batch 24450, loss[loss=0.1305, simple_loss=0.206, pruned_loss=0.02747, over 4792.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.03454, over 971735.42 frames.], batch size: 24, lr: 2.58e-04 +2022-05-06 06:46:26,138 INFO [train.py:715] (6/8) Epoch 8, batch 24500, loss[loss=0.1584, simple_loss=0.2322, pruned_loss=0.0423, over 4916.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2142, pruned_loss=0.03514, over 972214.27 frames.], batch size: 17, lr: 2.58e-04 +2022-05-06 06:47:04,991 INFO [train.py:715] (6/8) Epoch 8, batch 24550, loss[loss=0.1303, simple_loss=0.2117, pruned_loss=0.0244, over 4988.00 frames.], tot_loss[loss=0.141, simple_loss=0.2133, pruned_loss=0.03431, over 972641.30 frames.], batch size: 28, lr: 2.58e-04 +2022-05-06 06:47:44,931 INFO [train.py:715] (6/8) Epoch 8, batch 24600, loss[loss=0.1354, simple_loss=0.2137, pruned_loss=0.0285, over 4793.00 frames.], tot_loss[loss=0.142, simple_loss=0.2146, pruned_loss=0.0347, over 972636.17 frames.], batch size: 24, lr: 2.58e-04 +2022-05-06 06:48:24,239 INFO [train.py:715] (6/8) Epoch 8, batch 24650, loss[loss=0.166, simple_loss=0.234, pruned_loss=0.04902, over 4860.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2151, pruned_loss=0.03481, over 972565.39 frames.], batch size: 30, lr: 2.58e-04 +2022-05-06 06:49:02,876 INFO 
[train.py:715] (6/8) Epoch 8, batch 24700, loss[loss=0.1497, simple_loss=0.2217, pruned_loss=0.03882, over 4684.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2149, pruned_loss=0.03428, over 972285.78 frames.], batch size: 15, lr: 2.58e-04 +2022-05-06 06:49:42,050 INFO [train.py:715] (6/8) Epoch 8, batch 24750, loss[loss=0.1595, simple_loss=0.2336, pruned_loss=0.04273, over 4737.00 frames.], tot_loss[loss=0.142, simple_loss=0.2151, pruned_loss=0.03448, over 972616.61 frames.], batch size: 16, lr: 2.58e-04 +2022-05-06 06:50:21,625 INFO [train.py:715] (6/8) Epoch 8, batch 24800, loss[loss=0.1405, simple_loss=0.2174, pruned_loss=0.03175, over 4974.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2154, pruned_loss=0.03474, over 972566.44 frames.], batch size: 25, lr: 2.58e-04 +2022-05-06 06:51:00,474 INFO [train.py:715] (6/8) Epoch 8, batch 24850, loss[loss=0.1256, simple_loss=0.1997, pruned_loss=0.02574, over 4840.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2153, pruned_loss=0.03507, over 973677.60 frames.], batch size: 15, lr: 2.58e-04 +2022-05-06 06:51:39,144 INFO [train.py:715] (6/8) Epoch 8, batch 24900, loss[loss=0.1254, simple_loss=0.1931, pruned_loss=0.02884, over 4968.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2143, pruned_loss=0.035, over 973578.69 frames.], batch size: 35, lr: 2.58e-04 +2022-05-06 06:52:19,147 INFO [train.py:715] (6/8) Epoch 8, batch 24950, loss[loss=0.1561, simple_loss=0.2229, pruned_loss=0.04461, over 4801.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2138, pruned_loss=0.03466, over 972187.62 frames.], batch size: 21, lr: 2.58e-04 +2022-05-06 06:52:58,634 INFO [train.py:715] (6/8) Epoch 8, batch 25000, loss[loss=0.1312, simple_loss=0.2015, pruned_loss=0.03043, over 4785.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2138, pruned_loss=0.03485, over 971988.24 frames.], batch size: 14, lr: 2.57e-04 +2022-05-06 06:53:37,568 INFO [train.py:715] (6/8) Epoch 8, batch 25050, loss[loss=0.1535, simple_loss=0.2249, pruned_loss=0.04101, over 4982.00 frames.], tot_loss[loss=0.1421, simple_loss=0.214, pruned_loss=0.03513, over 971867.95 frames.], batch size: 33, lr: 2.57e-04 +2022-05-06 06:54:16,392 INFO [train.py:715] (6/8) Epoch 8, batch 25100, loss[loss=0.134, simple_loss=0.2003, pruned_loss=0.03384, over 4782.00 frames.], tot_loss[loss=0.143, simple_loss=0.2151, pruned_loss=0.03552, over 972075.87 frames.], batch size: 17, lr: 2.57e-04 +2022-05-06 06:54:55,809 INFO [train.py:715] (6/8) Epoch 8, batch 25150, loss[loss=0.1203, simple_loss=0.1936, pruned_loss=0.02351, over 4801.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2144, pruned_loss=0.03516, over 972414.90 frames.], batch size: 21, lr: 2.57e-04 +2022-05-06 06:55:34,833 INFO [train.py:715] (6/8) Epoch 8, batch 25200, loss[loss=0.1315, simple_loss=0.2046, pruned_loss=0.02923, over 4740.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2149, pruned_loss=0.03502, over 972889.07 frames.], batch size: 16, lr: 2.57e-04 +2022-05-06 06:56:13,822 INFO [train.py:715] (6/8) Epoch 8, batch 25250, loss[loss=0.1228, simple_loss=0.1861, pruned_loss=0.02972, over 4777.00 frames.], tot_loss[loss=0.142, simple_loss=0.2143, pruned_loss=0.03481, over 972549.11 frames.], batch size: 17, lr: 2.57e-04 +2022-05-06 06:56:53,391 INFO [train.py:715] (6/8) Epoch 8, batch 25300, loss[loss=0.1523, simple_loss=0.2287, pruned_loss=0.03801, over 4761.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03478, over 972452.88 frames.], batch size: 19, lr: 2.57e-04 +2022-05-06 06:57:32,356 INFO [train.py:715] (6/8) 
Epoch 8, batch 25350, loss[loss=0.1575, simple_loss=0.2261, pruned_loss=0.0445, over 4839.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2139, pruned_loss=0.03468, over 972401.50 frames.], batch size: 30, lr: 2.57e-04 +2022-05-06 06:58:11,171 INFO [train.py:715] (6/8) Epoch 8, batch 25400, loss[loss=0.1671, simple_loss=0.231, pruned_loss=0.05164, over 4864.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2142, pruned_loss=0.03518, over 972151.93 frames.], batch size: 32, lr: 2.57e-04 +2022-05-06 06:58:50,231 INFO [train.py:715] (6/8) Epoch 8, batch 25450, loss[loss=0.1581, simple_loss=0.2263, pruned_loss=0.04492, over 4981.00 frames.], tot_loss[loss=0.1421, simple_loss=0.214, pruned_loss=0.03507, over 973075.50 frames.], batch size: 20, lr: 2.57e-04 +2022-05-06 06:59:30,374 INFO [train.py:715] (6/8) Epoch 8, batch 25500, loss[loss=0.1553, simple_loss=0.2275, pruned_loss=0.04159, over 4828.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2145, pruned_loss=0.03507, over 973063.11 frames.], batch size: 26, lr: 2.57e-04 +2022-05-06 07:00:12,381 INFO [train.py:715] (6/8) Epoch 8, batch 25550, loss[loss=0.1442, simple_loss=0.225, pruned_loss=0.03174, over 4769.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2148, pruned_loss=0.03491, over 973176.36 frames.], batch size: 18, lr: 2.57e-04 +2022-05-06 07:00:51,657 INFO [train.py:715] (6/8) Epoch 8, batch 25600, loss[loss=0.1356, simple_loss=0.2207, pruned_loss=0.02522, over 4955.00 frames.], tot_loss[loss=0.1432, simple_loss=0.216, pruned_loss=0.03518, over 973300.66 frames.], batch size: 29, lr: 2.57e-04 +2022-05-06 07:01:30,735 INFO [train.py:715] (6/8) Epoch 8, batch 25650, loss[loss=0.121, simple_loss=0.1983, pruned_loss=0.02185, over 4804.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2153, pruned_loss=0.03515, over 973813.95 frames.], batch size: 18, lr: 2.57e-04 +2022-05-06 07:02:09,699 INFO [train.py:715] (6/8) Epoch 8, batch 25700, loss[loss=0.1594, simple_loss=0.2292, pruned_loss=0.04476, over 4874.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.03492, over 973957.75 frames.], batch size: 38, lr: 2.57e-04 +2022-05-06 07:02:48,865 INFO [train.py:715] (6/8) Epoch 8, batch 25750, loss[loss=0.1424, simple_loss=0.2048, pruned_loss=0.03995, over 4986.00 frames.], tot_loss[loss=0.1436, simple_loss=0.216, pruned_loss=0.03561, over 974453.72 frames.], batch size: 31, lr: 2.57e-04 +2022-05-06 07:03:27,692 INFO [train.py:715] (6/8) Epoch 8, batch 25800, loss[loss=0.1662, simple_loss=0.2172, pruned_loss=0.05757, over 4653.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03565, over 974354.53 frames.], batch size: 13, lr: 2.57e-04 +2022-05-06 07:04:06,654 INFO [train.py:715] (6/8) Epoch 8, batch 25850, loss[loss=0.1701, simple_loss=0.2342, pruned_loss=0.05304, over 4860.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.0358, over 974378.66 frames.], batch size: 20, lr: 2.57e-04 +2022-05-06 07:04:45,940 INFO [train.py:715] (6/8) Epoch 8, batch 25900, loss[loss=0.1297, simple_loss=0.212, pruned_loss=0.0237, over 4910.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03561, over 973681.63 frames.], batch size: 23, lr: 2.57e-04 +2022-05-06 07:05:24,610 INFO [train.py:715] (6/8) Epoch 8, batch 25950, loss[loss=0.1169, simple_loss=0.1956, pruned_loss=0.01914, over 4905.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2166, pruned_loss=0.03578, over 974354.35 frames.], batch size: 19, lr: 2.57e-04 +2022-05-06 07:06:03,742 INFO [train.py:715] (6/8) Epoch 8, batch 26000, 
loss[loss=0.1615, simple_loss=0.2316, pruned_loss=0.04569, over 4758.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2167, pruned_loss=0.03607, over 973446.45 frames.], batch size: 16, lr: 2.57e-04 +2022-05-06 07:06:42,908 INFO [train.py:715] (6/8) Epoch 8, batch 26050, loss[loss=0.1138, simple_loss=0.1847, pruned_loss=0.02152, over 4947.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2165, pruned_loss=0.03604, over 973818.86 frames.], batch size: 35, lr: 2.57e-04 +2022-05-06 07:07:21,670 INFO [train.py:715] (6/8) Epoch 8, batch 26100, loss[loss=0.1203, simple_loss=0.191, pruned_loss=0.02477, over 4947.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2158, pruned_loss=0.03555, over 973945.28 frames.], batch size: 29, lr: 2.57e-04 +2022-05-06 07:08:01,307 INFO [train.py:715] (6/8) Epoch 8, batch 26150, loss[loss=0.1292, simple_loss=0.2078, pruned_loss=0.02532, over 4902.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2154, pruned_loss=0.03568, over 973718.34 frames.], batch size: 39, lr: 2.57e-04 +2022-05-06 07:08:40,494 INFO [train.py:715] (6/8) Epoch 8, batch 26200, loss[loss=0.1544, simple_loss=0.2233, pruned_loss=0.04274, over 4884.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2146, pruned_loss=0.03557, over 973568.85 frames.], batch size: 22, lr: 2.57e-04 +2022-05-06 07:09:19,622 INFO [train.py:715] (6/8) Epoch 8, batch 26250, loss[loss=0.1313, simple_loss=0.2035, pruned_loss=0.02957, over 4788.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2142, pruned_loss=0.03529, over 973145.57 frames.], batch size: 12, lr: 2.57e-04 +2022-05-06 07:09:57,939 INFO [train.py:715] (6/8) Epoch 8, batch 26300, loss[loss=0.1576, simple_loss=0.2245, pruned_loss=0.04531, over 4868.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2153, pruned_loss=0.03587, over 972598.19 frames.], batch size: 32, lr: 2.57e-04 +2022-05-06 07:10:37,573 INFO [train.py:715] (6/8) Epoch 8, batch 26350, loss[loss=0.1225, simple_loss=0.2074, pruned_loss=0.01878, over 4933.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2159, pruned_loss=0.03601, over 972485.99 frames.], batch size: 23, lr: 2.57e-04 +2022-05-06 07:11:16,889 INFO [train.py:715] (6/8) Epoch 8, batch 26400, loss[loss=0.1435, simple_loss=0.2131, pruned_loss=0.037, over 4985.00 frames.], tot_loss[loss=0.1438, simple_loss=0.216, pruned_loss=0.03579, over 973231.42 frames.], batch size: 31, lr: 2.57e-04 +2022-05-06 07:11:55,837 INFO [train.py:715] (6/8) Epoch 8, batch 26450, loss[loss=0.1464, simple_loss=0.2264, pruned_loss=0.03318, over 4745.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2166, pruned_loss=0.03605, over 973693.91 frames.], batch size: 16, lr: 2.57e-04 +2022-05-06 07:12:34,675 INFO [train.py:715] (6/8) Epoch 8, batch 26500, loss[loss=0.132, simple_loss=0.2046, pruned_loss=0.02967, over 4647.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2168, pruned_loss=0.03593, over 973440.92 frames.], batch size: 13, lr: 2.57e-04 +2022-05-06 07:13:13,275 INFO [train.py:715] (6/8) Epoch 8, batch 26550, loss[loss=0.1256, simple_loss=0.1938, pruned_loss=0.02866, over 4759.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2158, pruned_loss=0.03544, over 973093.03 frames.], batch size: 12, lr: 2.57e-04 +2022-05-06 07:13:52,660 INFO [train.py:715] (6/8) Epoch 8, batch 26600, loss[loss=0.1769, simple_loss=0.242, pruned_loss=0.05595, over 4923.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2158, pruned_loss=0.03545, over 973989.15 frames.], batch size: 39, lr: 2.57e-04 +2022-05-06 07:14:30,717 INFO [train.py:715] (6/8) Epoch 8, batch 26650, loss[loss=0.1088, 
simple_loss=0.1869, pruned_loss=0.0154, over 4767.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2152, pruned_loss=0.035, over 973371.14 frames.], batch size: 14, lr: 2.57e-04 +2022-05-06 07:15:10,079 INFO [train.py:715] (6/8) Epoch 8, batch 26700, loss[loss=0.1496, simple_loss=0.2241, pruned_loss=0.03757, over 4742.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2145, pruned_loss=0.03495, over 973167.88 frames.], batch size: 16, lr: 2.57e-04 +2022-05-06 07:15:49,152 INFO [train.py:715] (6/8) Epoch 8, batch 26750, loss[loss=0.1727, simple_loss=0.2514, pruned_loss=0.047, over 4919.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2147, pruned_loss=0.03551, over 973290.06 frames.], batch size: 23, lr: 2.57e-04 +2022-05-06 07:16:27,935 INFO [train.py:715] (6/8) Epoch 8, batch 26800, loss[loss=0.1568, simple_loss=0.2277, pruned_loss=0.04292, over 4897.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2149, pruned_loss=0.03564, over 972727.00 frames.], batch size: 19, lr: 2.57e-04 +2022-05-06 07:17:07,168 INFO [train.py:715] (6/8) Epoch 8, batch 26850, loss[loss=0.1368, simple_loss=0.2112, pruned_loss=0.03114, over 4825.00 frames.], tot_loss[loss=0.1429, simple_loss=0.215, pruned_loss=0.03545, over 972297.04 frames.], batch size: 15, lr: 2.57e-04 +2022-05-06 07:17:46,419 INFO [train.py:715] (6/8) Epoch 8, batch 26900, loss[loss=0.1433, simple_loss=0.2155, pruned_loss=0.03555, over 4808.00 frames.], tot_loss[loss=0.143, simple_loss=0.2149, pruned_loss=0.03556, over 971889.21 frames.], batch size: 21, lr: 2.57e-04 +2022-05-06 07:18:25,465 INFO [train.py:715] (6/8) Epoch 8, batch 26950, loss[loss=0.1387, simple_loss=0.2163, pruned_loss=0.03051, over 4907.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2146, pruned_loss=0.03538, over 972402.59 frames.], batch size: 18, lr: 2.57e-04 +2022-05-06 07:19:04,354 INFO [train.py:715] (6/8) Epoch 8, batch 27000, loss[loss=0.1307, simple_loss=0.2105, pruned_loss=0.02545, over 4873.00 frames.], tot_loss[loss=0.1432, simple_loss=0.215, pruned_loss=0.03566, over 973421.78 frames.], batch size: 22, lr: 2.57e-04 +2022-05-06 07:19:04,355 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 07:19:13,678 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1072, simple_loss=0.1919, pruned_loss=0.01129, over 914524.00 frames. 
+2022-05-06 07:19:52,527 INFO [train.py:715] (6/8) Epoch 8, batch 27050, loss[loss=0.1322, simple_loss=0.2103, pruned_loss=0.02701, over 4886.00 frames.], tot_loss[loss=0.143, simple_loss=0.2148, pruned_loss=0.03562, over 973533.42 frames.], batch size: 18, lr: 2.57e-04 +2022-05-06 07:20:31,870 INFO [train.py:715] (6/8) Epoch 8, batch 27100, loss[loss=0.1499, simple_loss=0.2179, pruned_loss=0.04095, over 4918.00 frames.], tot_loss[loss=0.142, simple_loss=0.2141, pruned_loss=0.03497, over 973666.36 frames.], batch size: 23, lr: 2.57e-04 +2022-05-06 07:21:10,973 INFO [train.py:715] (6/8) Epoch 8, batch 27150, loss[loss=0.1741, simple_loss=0.246, pruned_loss=0.05108, over 4754.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2142, pruned_loss=0.03505, over 973830.59 frames.], batch size: 19, lr: 2.57e-04 +2022-05-06 07:21:49,183 INFO [train.py:715] (6/8) Epoch 8, batch 27200, loss[loss=0.1172, simple_loss=0.182, pruned_loss=0.02622, over 4786.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2149, pruned_loss=0.03531, over 973975.13 frames.], batch size: 12, lr: 2.57e-04 +2022-05-06 07:22:28,513 INFO [train.py:715] (6/8) Epoch 8, batch 27250, loss[loss=0.1179, simple_loss=0.1869, pruned_loss=0.02447, over 4802.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2144, pruned_loss=0.03511, over 973243.00 frames.], batch size: 12, lr: 2.57e-04 +2022-05-06 07:23:07,828 INFO [train.py:715] (6/8) Epoch 8, batch 27300, loss[loss=0.1632, simple_loss=0.2343, pruned_loss=0.04609, over 4827.00 frames.], tot_loss[loss=0.1427, simple_loss=0.215, pruned_loss=0.03521, over 972957.82 frames.], batch size: 26, lr: 2.57e-04 +2022-05-06 07:23:46,496 INFO [train.py:715] (6/8) Epoch 8, batch 27350, loss[loss=0.1292, simple_loss=0.2035, pruned_loss=0.02749, over 4738.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2146, pruned_loss=0.03523, over 972030.18 frames.], batch size: 16, lr: 2.57e-04 +2022-05-06 07:24:25,185 INFO [train.py:715] (6/8) Epoch 8, batch 27400, loss[loss=0.1792, simple_loss=0.2523, pruned_loss=0.05304, over 4840.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2157, pruned_loss=0.03511, over 972563.83 frames.], batch size: 30, lr: 2.56e-04 +2022-05-06 07:25:04,325 INFO [train.py:715] (6/8) Epoch 8, batch 27450, loss[loss=0.1247, simple_loss=0.2032, pruned_loss=0.02309, over 4912.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2158, pruned_loss=0.03525, over 972421.90 frames.], batch size: 18, lr: 2.56e-04 +2022-05-06 07:25:43,020 INFO [train.py:715] (6/8) Epoch 8, batch 27500, loss[loss=0.1497, simple_loss=0.2242, pruned_loss=0.03763, over 4916.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03529, over 972324.57 frames.], batch size: 18, lr: 2.56e-04 +2022-05-06 07:26:21,675 INFO [train.py:715] (6/8) Epoch 8, batch 27550, loss[loss=0.1468, simple_loss=0.2114, pruned_loss=0.04115, over 4907.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.03542, over 972183.28 frames.], batch size: 17, lr: 2.56e-04 +2022-05-06 07:27:01,335 INFO [train.py:715] (6/8) Epoch 8, batch 27600, loss[loss=0.1382, simple_loss=0.2153, pruned_loss=0.03049, over 4842.00 frames.], tot_loss[loss=0.1427, simple_loss=0.215, pruned_loss=0.03521, over 972729.09 frames.], batch size: 30, lr: 2.56e-04 +2022-05-06 07:27:40,424 INFO [train.py:715] (6/8) Epoch 8, batch 27650, loss[loss=0.1428, simple_loss=0.2214, pruned_loss=0.03211, over 4780.00 frames.], tot_loss[loss=0.1436, simple_loss=0.216, pruned_loss=0.0356, over 972542.14 frames.], batch size: 14, lr: 2.56e-04 +2022-05-06 07:28:19,096 
INFO [train.py:715] (6/8) Epoch 8, batch 27700, loss[loss=0.1602, simple_loss=0.2244, pruned_loss=0.04798, over 4981.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2161, pruned_loss=0.03609, over 972496.13 frames.], batch size: 31, lr: 2.56e-04 +2022-05-06 07:28:58,325 INFO [train.py:715] (6/8) Epoch 8, batch 27750, loss[loss=0.1649, simple_loss=0.242, pruned_loss=0.0439, over 4973.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2164, pruned_loss=0.03612, over 973160.36 frames.], batch size: 25, lr: 2.56e-04 +2022-05-06 07:29:38,022 INFO [train.py:715] (6/8) Epoch 8, batch 27800, loss[loss=0.1319, simple_loss=0.2083, pruned_loss=0.02772, over 4800.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2159, pruned_loss=0.03598, over 973429.92 frames.], batch size: 21, lr: 2.56e-04 +2022-05-06 07:30:16,790 INFO [train.py:715] (6/8) Epoch 8, batch 27850, loss[loss=0.1354, simple_loss=0.2102, pruned_loss=0.03034, over 4791.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2155, pruned_loss=0.03575, over 973467.52 frames.], batch size: 24, lr: 2.56e-04 +2022-05-06 07:30:54,920 INFO [train.py:715] (6/8) Epoch 8, batch 27900, loss[loss=0.114, simple_loss=0.1915, pruned_loss=0.01828, over 4894.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2156, pruned_loss=0.03557, over 973151.85 frames.], batch size: 17, lr: 2.56e-04 +2022-05-06 07:31:34,150 INFO [train.py:715] (6/8) Epoch 8, batch 27950, loss[loss=0.1629, simple_loss=0.2268, pruned_loss=0.04947, over 4774.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03617, over 972209.48 frames.], batch size: 18, lr: 2.56e-04 +2022-05-06 07:32:13,476 INFO [train.py:715] (6/8) Epoch 8, batch 28000, loss[loss=0.1373, simple_loss=0.2026, pruned_loss=0.03601, over 4935.00 frames.], tot_loss[loss=0.1437, simple_loss=0.216, pruned_loss=0.03572, over 972961.89 frames.], batch size: 35, lr: 2.56e-04 +2022-05-06 07:32:51,690 INFO [train.py:715] (6/8) Epoch 8, batch 28050, loss[loss=0.156, simple_loss=0.2284, pruned_loss=0.04179, over 4685.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2161, pruned_loss=0.03571, over 972643.16 frames.], batch size: 15, lr: 2.56e-04 +2022-05-06 07:33:31,447 INFO [train.py:715] (6/8) Epoch 8, batch 28100, loss[loss=0.169, simple_loss=0.23, pruned_loss=0.05394, over 4876.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2166, pruned_loss=0.03614, over 972278.80 frames.], batch size: 22, lr: 2.56e-04 +2022-05-06 07:34:10,515 INFO [train.py:715] (6/8) Epoch 8, batch 28150, loss[loss=0.1471, simple_loss=0.2232, pruned_loss=0.03544, over 4904.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2173, pruned_loss=0.03641, over 972093.78 frames.], batch size: 17, lr: 2.56e-04 +2022-05-06 07:34:49,990 INFO [train.py:715] (6/8) Epoch 8, batch 28200, loss[loss=0.1284, simple_loss=0.212, pruned_loss=0.02241, over 4818.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2167, pruned_loss=0.03587, over 972251.88 frames.], batch size: 13, lr: 2.56e-04 +2022-05-06 07:35:29,404 INFO [train.py:715] (6/8) Epoch 8, batch 28250, loss[loss=0.1298, simple_loss=0.2057, pruned_loss=0.02698, over 4827.00 frames.], tot_loss[loss=0.144, simple_loss=0.2167, pruned_loss=0.03571, over 971792.37 frames.], batch size: 27, lr: 2.56e-04 +2022-05-06 07:36:09,675 INFO [train.py:715] (6/8) Epoch 8, batch 28300, loss[loss=0.1182, simple_loss=0.1881, pruned_loss=0.02417, over 4909.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2173, pruned_loss=0.03599, over 971750.46 frames.], batch size: 19, lr: 2.56e-04 +2022-05-06 07:36:49,592 INFO [train.py:715] (6/8) 
Epoch 8, batch 28350, loss[loss=0.122, simple_loss=0.1974, pruned_loss=0.02332, over 4790.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2169, pruned_loss=0.03611, over 971848.76 frames.], batch size: 14, lr: 2.56e-04 +2022-05-06 07:37:28,940 INFO [train.py:715] (6/8) Epoch 8, batch 28400, loss[loss=0.163, simple_loss=0.2298, pruned_loss=0.0481, over 4934.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2164, pruned_loss=0.03604, over 971573.34 frames.], batch size: 39, lr: 2.56e-04 +2022-05-06 07:38:08,994 INFO [train.py:715] (6/8) Epoch 8, batch 28450, loss[loss=0.1828, simple_loss=0.2408, pruned_loss=0.06235, over 4905.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.0353, over 972265.83 frames.], batch size: 19, lr: 2.56e-04 +2022-05-06 07:38:48,159 INFO [train.py:715] (6/8) Epoch 8, batch 28500, loss[loss=0.1547, simple_loss=0.225, pruned_loss=0.04217, over 4905.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2158, pruned_loss=0.03575, over 972124.06 frames.], batch size: 18, lr: 2.56e-04 +2022-05-06 07:39:26,866 INFO [train.py:715] (6/8) Epoch 8, batch 28550, loss[loss=0.1503, simple_loss=0.2148, pruned_loss=0.04297, over 4854.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.03543, over 973060.77 frames.], batch size: 20, lr: 2.56e-04 +2022-05-06 07:40:05,727 INFO [train.py:715] (6/8) Epoch 8, batch 28600, loss[loss=0.1546, simple_loss=0.2207, pruned_loss=0.04428, over 4855.00 frames.], tot_loss[loss=0.1437, simple_loss=0.216, pruned_loss=0.03573, over 973909.19 frames.], batch size: 32, lr: 2.56e-04 +2022-05-06 07:40:45,400 INFO [train.py:715] (6/8) Epoch 8, batch 28650, loss[loss=0.132, simple_loss=0.212, pruned_loss=0.02598, over 4906.00 frames.], tot_loss[loss=0.144, simple_loss=0.2164, pruned_loss=0.03578, over 973698.58 frames.], batch size: 22, lr: 2.56e-04 +2022-05-06 07:41:24,254 INFO [train.py:715] (6/8) Epoch 8, batch 28700, loss[loss=0.1442, simple_loss=0.2183, pruned_loss=0.03503, over 4957.00 frames.], tot_loss[loss=0.144, simple_loss=0.2161, pruned_loss=0.03595, over 972956.23 frames.], batch size: 14, lr: 2.56e-04 +2022-05-06 07:42:02,599 INFO [train.py:715] (6/8) Epoch 8, batch 28750, loss[loss=0.159, simple_loss=0.2202, pruned_loss=0.04887, over 4877.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2159, pruned_loss=0.03601, over 971707.41 frames.], batch size: 32, lr: 2.56e-04 +2022-05-06 07:42:42,147 INFO [train.py:715] (6/8) Epoch 8, batch 28800, loss[loss=0.1603, simple_loss=0.2312, pruned_loss=0.04473, over 4764.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2156, pruned_loss=0.03569, over 971217.66 frames.], batch size: 14, lr: 2.56e-04 +2022-05-06 07:43:21,541 INFO [train.py:715] (6/8) Epoch 8, batch 28850, loss[loss=0.1291, simple_loss=0.2008, pruned_loss=0.02873, over 4842.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03587, over 971016.85 frames.], batch size: 20, lr: 2.56e-04 +2022-05-06 07:44:00,547 INFO [train.py:715] (6/8) Epoch 8, batch 28900, loss[loss=0.1543, simple_loss=0.2279, pruned_loss=0.04034, over 4810.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2154, pruned_loss=0.03569, over 972034.89 frames.], batch size: 21, lr: 2.56e-04 +2022-05-06 07:44:39,172 INFO [train.py:715] (6/8) Epoch 8, batch 28950, loss[loss=0.1408, simple_loss=0.2094, pruned_loss=0.03611, over 4947.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2169, pruned_loss=0.03648, over 972520.30 frames.], batch size: 21, lr: 2.56e-04 +2022-05-06 07:45:18,517 INFO [train.py:715] (6/8) Epoch 8, batch 29000, 
loss[loss=0.1413, simple_loss=0.2129, pruned_loss=0.03489, over 4990.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2159, pruned_loss=0.03569, over 972446.96 frames.], batch size: 26, lr: 2.56e-04 +2022-05-06 07:45:57,178 INFO [train.py:715] (6/8) Epoch 8, batch 29050, loss[loss=0.125, simple_loss=0.2021, pruned_loss=0.02395, over 4965.00 frames.], tot_loss[loss=0.1441, simple_loss=0.216, pruned_loss=0.03609, over 972647.15 frames.], batch size: 24, lr: 2.56e-04 +2022-05-06 07:46:36,419 INFO [train.py:715] (6/8) Epoch 8, batch 29100, loss[loss=0.1539, simple_loss=0.2291, pruned_loss=0.03937, over 4849.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.036, over 973817.57 frames.], batch size: 30, lr: 2.56e-04 +2022-05-06 07:47:14,941 INFO [train.py:715] (6/8) Epoch 8, batch 29150, loss[loss=0.156, simple_loss=0.2327, pruned_loss=0.03967, over 4955.00 frames.], tot_loss[loss=0.144, simple_loss=0.2161, pruned_loss=0.03597, over 973580.38 frames.], batch size: 24, lr: 2.56e-04 +2022-05-06 07:47:54,242 INFO [train.py:715] (6/8) Epoch 8, batch 29200, loss[loss=0.145, simple_loss=0.223, pruned_loss=0.03355, over 4752.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2151, pruned_loss=0.03575, over 972648.51 frames.], batch size: 16, lr: 2.56e-04 +2022-05-06 07:48:32,865 INFO [train.py:715] (6/8) Epoch 8, batch 29250, loss[loss=0.13, simple_loss=0.1989, pruned_loss=0.03053, over 4903.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2152, pruned_loss=0.03599, over 972283.23 frames.], batch size: 17, lr: 2.56e-04 +2022-05-06 07:49:11,142 INFO [train.py:715] (6/8) Epoch 8, batch 29300, loss[loss=0.1493, simple_loss=0.2329, pruned_loss=0.03285, over 4932.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2149, pruned_loss=0.03589, over 972439.51 frames.], batch size: 29, lr: 2.56e-04 +2022-05-06 07:49:50,326 INFO [train.py:715] (6/8) Epoch 8, batch 29350, loss[loss=0.1887, simple_loss=0.2631, pruned_loss=0.05715, over 4969.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2153, pruned_loss=0.03612, over 972491.32 frames.], batch size: 15, lr: 2.56e-04 +2022-05-06 07:50:29,151 INFO [train.py:715] (6/8) Epoch 8, batch 29400, loss[loss=0.126, simple_loss=0.2008, pruned_loss=0.02562, over 4897.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2159, pruned_loss=0.03625, over 972713.44 frames.], batch size: 29, lr: 2.56e-04 +2022-05-06 07:51:08,796 INFO [train.py:715] (6/8) Epoch 8, batch 29450, loss[loss=0.129, simple_loss=0.2005, pruned_loss=0.02878, over 4848.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2162, pruned_loss=0.03646, over 971733.54 frames.], batch size: 13, lr: 2.56e-04 +2022-05-06 07:51:48,080 INFO [train.py:715] (6/8) Epoch 8, batch 29500, loss[loss=0.1357, simple_loss=0.2117, pruned_loss=0.02984, over 4904.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2171, pruned_loss=0.03725, over 971374.88 frames.], batch size: 22, lr: 2.56e-04 +2022-05-06 07:52:27,551 INFO [train.py:715] (6/8) Epoch 8, batch 29550, loss[loss=0.1281, simple_loss=0.1898, pruned_loss=0.03322, over 4815.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2161, pruned_loss=0.03645, over 971885.89 frames.], batch size: 13, lr: 2.56e-04 +2022-05-06 07:53:06,113 INFO [train.py:715] (6/8) Epoch 8, batch 29600, loss[loss=0.1342, simple_loss=0.2111, pruned_loss=0.02866, over 4818.00 frames.], tot_loss[loss=0.145, simple_loss=0.2165, pruned_loss=0.03674, over 972702.27 frames.], batch size: 25, lr: 2.56e-04 +2022-05-06 07:53:45,384 INFO [train.py:715] (6/8) Epoch 8, batch 29650, loss[loss=0.1887, 
simple_loss=0.2568, pruned_loss=0.06035, over 4805.00 frames.], tot_loss[loss=0.1447, simple_loss=0.216, pruned_loss=0.03672, over 972479.26 frames.], batch size: 15, lr: 2.56e-04 +2022-05-06 07:54:24,986 INFO [train.py:715] (6/8) Epoch 8, batch 29700, loss[loss=0.1492, simple_loss=0.2136, pruned_loss=0.04242, over 4892.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2153, pruned_loss=0.03646, over 973478.88 frames.], batch size: 17, lr: 2.56e-04 +2022-05-06 07:55:03,541 INFO [train.py:715] (6/8) Epoch 8, batch 29750, loss[loss=0.1297, simple_loss=0.206, pruned_loss=0.02665, over 4809.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2156, pruned_loss=0.03607, over 972696.49 frames.], batch size: 21, lr: 2.56e-04 +2022-05-06 07:55:42,379 INFO [train.py:715] (6/8) Epoch 8, batch 29800, loss[loss=0.1547, simple_loss=0.2252, pruned_loss=0.04214, over 4909.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2165, pruned_loss=0.03668, over 972672.92 frames.], batch size: 19, lr: 2.55e-04 +2022-05-06 07:56:21,286 INFO [train.py:715] (6/8) Epoch 8, batch 29850, loss[loss=0.1593, simple_loss=0.2289, pruned_loss=0.04485, over 4782.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2162, pruned_loss=0.03631, over 972979.51 frames.], batch size: 17, lr: 2.55e-04 +2022-05-06 07:57:00,653 INFO [train.py:715] (6/8) Epoch 8, batch 29900, loss[loss=0.144, simple_loss=0.2173, pruned_loss=0.03536, over 4880.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2163, pruned_loss=0.03625, over 973209.08 frames.], batch size: 16, lr: 2.55e-04 +2022-05-06 07:57:39,544 INFO [train.py:715] (6/8) Epoch 8, batch 29950, loss[loss=0.1465, simple_loss=0.2152, pruned_loss=0.03896, over 4775.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2167, pruned_loss=0.03655, over 972736.89 frames.], batch size: 18, lr: 2.55e-04 +2022-05-06 07:58:18,658 INFO [train.py:715] (6/8) Epoch 8, batch 30000, loss[loss=0.1156, simple_loss=0.1908, pruned_loss=0.0202, over 4878.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2161, pruned_loss=0.03621, over 973264.13 frames.], batch size: 16, lr: 2.55e-04 +2022-05-06 07:58:18,659 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 07:58:28,240 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1073, simple_loss=0.1918, pruned_loss=0.01141, over 914524.00 frames. 
+2022-05-06 07:59:07,027 INFO [train.py:715] (6/8) Epoch 8, batch 30050, loss[loss=0.1284, simple_loss=0.2088, pruned_loss=0.02398, over 4761.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2155, pruned_loss=0.03585, over 973994.12 frames.], batch size: 19, lr: 2.55e-04 +2022-05-06 07:59:46,358 INFO [train.py:715] (6/8) Epoch 8, batch 30100, loss[loss=0.1206, simple_loss=0.2059, pruned_loss=0.01762, over 4875.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2156, pruned_loss=0.03555, over 974096.80 frames.], batch size: 32, lr: 2.55e-04 +2022-05-06 08:00:25,660 INFO [train.py:715] (6/8) Epoch 8, batch 30150, loss[loss=0.1872, simple_loss=0.2408, pruned_loss=0.06677, over 4811.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2152, pruned_loss=0.03529, over 973398.04 frames.], batch size: 15, lr: 2.55e-04 +2022-05-06 08:01:04,256 INFO [train.py:715] (6/8) Epoch 8, batch 30200, loss[loss=0.1519, simple_loss=0.2334, pruned_loss=0.03524, over 4938.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2162, pruned_loss=0.03582, over 972680.20 frames.], batch size: 24, lr: 2.55e-04 +2022-05-06 08:01:43,185 INFO [train.py:715] (6/8) Epoch 8, batch 30250, loss[loss=0.1541, simple_loss=0.2126, pruned_loss=0.04781, over 4911.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2156, pruned_loss=0.03536, over 972407.65 frames.], batch size: 23, lr: 2.55e-04 +2022-05-06 08:02:22,873 INFO [train.py:715] (6/8) Epoch 8, batch 30300, loss[loss=0.145, simple_loss=0.2147, pruned_loss=0.0376, over 4894.00 frames.], tot_loss[loss=0.1437, simple_loss=0.216, pruned_loss=0.03572, over 972671.63 frames.], batch size: 16, lr: 2.55e-04 +2022-05-06 08:03:01,870 INFO [train.py:715] (6/8) Epoch 8, batch 30350, loss[loss=0.1559, simple_loss=0.2208, pruned_loss=0.04556, over 4939.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2156, pruned_loss=0.03568, over 972879.21 frames.], batch size: 23, lr: 2.55e-04 +2022-05-06 08:03:40,562 INFO [train.py:715] (6/8) Epoch 8, batch 30400, loss[loss=0.134, simple_loss=0.2095, pruned_loss=0.02927, over 4804.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2158, pruned_loss=0.03574, over 972198.71 frames.], batch size: 24, lr: 2.55e-04 +2022-05-06 08:04:19,872 INFO [train.py:715] (6/8) Epoch 8, batch 30450, loss[loss=0.1437, simple_loss=0.2137, pruned_loss=0.03686, over 4748.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2155, pruned_loss=0.03576, over 972851.08 frames.], batch size: 16, lr: 2.55e-04 +2022-05-06 08:04:58,854 INFO [train.py:715] (6/8) Epoch 8, batch 30500, loss[loss=0.1475, simple_loss=0.2152, pruned_loss=0.03989, over 4825.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03619, over 972825.87 frames.], batch size: 27, lr: 2.55e-04 +2022-05-06 08:05:37,500 INFO [train.py:715] (6/8) Epoch 8, batch 30550, loss[loss=0.1384, simple_loss=0.2153, pruned_loss=0.03078, over 4814.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2152, pruned_loss=0.03556, over 972809.08 frames.], batch size: 14, lr: 2.55e-04 +2022-05-06 08:06:16,539 INFO [train.py:715] (6/8) Epoch 8, batch 30600, loss[loss=0.1243, simple_loss=0.1998, pruned_loss=0.02442, over 4931.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2153, pruned_loss=0.03569, over 973194.89 frames.], batch size: 29, lr: 2.55e-04 +2022-05-06 08:06:56,252 INFO [train.py:715] (6/8) Epoch 8, batch 30650, loss[loss=0.1363, simple_loss=0.1981, pruned_loss=0.03728, over 4821.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2142, pruned_loss=0.0353, over 973647.78 frames.], batch size: 13, lr: 2.55e-04 +2022-05-06 
08:07:35,435 INFO [train.py:715] (6/8) Epoch 8, batch 30700, loss[loss=0.1508, simple_loss=0.2302, pruned_loss=0.03563, over 4790.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2145, pruned_loss=0.03537, over 972656.33 frames.], batch size: 17, lr: 2.55e-04 +2022-05-06 08:08:15,308 INFO [train.py:715] (6/8) Epoch 8, batch 30750, loss[loss=0.1446, simple_loss=0.2138, pruned_loss=0.03768, over 4818.00 frames.], tot_loss[loss=0.1428, simple_loss=0.215, pruned_loss=0.0353, over 972446.38 frames.], batch size: 25, lr: 2.55e-04 +2022-05-06 08:08:55,428 INFO [train.py:715] (6/8) Epoch 8, batch 30800, loss[loss=0.1262, simple_loss=0.1959, pruned_loss=0.0282, over 4738.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2147, pruned_loss=0.03532, over 972622.52 frames.], batch size: 12, lr: 2.55e-04 +2022-05-06 08:09:33,884 INFO [train.py:715] (6/8) Epoch 8, batch 30850, loss[loss=0.1474, simple_loss=0.2086, pruned_loss=0.04307, over 4982.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2147, pruned_loss=0.03521, over 972702.29 frames.], batch size: 35, lr: 2.55e-04 +2022-05-06 08:10:12,785 INFO [train.py:715] (6/8) Epoch 8, batch 30900, loss[loss=0.1329, simple_loss=0.1942, pruned_loss=0.03579, over 4816.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2145, pruned_loss=0.03517, over 973079.04 frames.], batch size: 13, lr: 2.55e-04 +2022-05-06 08:10:52,540 INFO [train.py:715] (6/8) Epoch 8, batch 30950, loss[loss=0.1558, simple_loss=0.2291, pruned_loss=0.04127, over 4884.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2142, pruned_loss=0.03534, over 972262.16 frames.], batch size: 22, lr: 2.55e-04 +2022-05-06 08:11:32,575 INFO [train.py:715] (6/8) Epoch 8, batch 31000, loss[loss=0.1522, simple_loss=0.2236, pruned_loss=0.04042, over 4916.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2144, pruned_loss=0.03558, over 972078.47 frames.], batch size: 19, lr: 2.55e-04 +2022-05-06 08:12:11,806 INFO [train.py:715] (6/8) Epoch 8, batch 31050, loss[loss=0.1415, simple_loss=0.2094, pruned_loss=0.03679, over 4933.00 frames.], tot_loss[loss=0.1422, simple_loss=0.214, pruned_loss=0.03521, over 973034.97 frames.], batch size: 23, lr: 2.55e-04 +2022-05-06 08:12:51,404 INFO [train.py:715] (6/8) Epoch 8, batch 31100, loss[loss=0.1649, simple_loss=0.2385, pruned_loss=0.04565, over 4778.00 frames.], tot_loss[loss=0.1441, simple_loss=0.216, pruned_loss=0.03613, over 972793.87 frames.], batch size: 14, lr: 2.55e-04 +2022-05-06 08:13:30,956 INFO [train.py:715] (6/8) Epoch 8, batch 31150, loss[loss=0.1455, simple_loss=0.2209, pruned_loss=0.03507, over 4981.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2155, pruned_loss=0.03552, over 971769.29 frames.], batch size: 25, lr: 2.55e-04 +2022-05-06 08:14:09,975 INFO [train.py:715] (6/8) Epoch 8, batch 31200, loss[loss=0.1353, simple_loss=0.2097, pruned_loss=0.03044, over 4834.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2143, pruned_loss=0.03498, over 972621.24 frames.], batch size: 30, lr: 2.55e-04 +2022-05-06 08:14:48,714 INFO [train.py:715] (6/8) Epoch 8, batch 31250, loss[loss=0.1389, simple_loss=0.1995, pruned_loss=0.03912, over 4788.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2151, pruned_loss=0.03564, over 972522.07 frames.], batch size: 14, lr: 2.55e-04 +2022-05-06 08:15:28,181 INFO [train.py:715] (6/8) Epoch 8, batch 31300, loss[loss=0.1283, simple_loss=0.2047, pruned_loss=0.02591, over 4786.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2151, pruned_loss=0.0353, over 972179.56 frames.], batch size: 18, lr: 2.55e-04 +2022-05-06 08:16:07,666 INFO 
[train.py:715] (6/8) Epoch 8, batch 31350, loss[loss=0.141, simple_loss=0.219, pruned_loss=0.03156, over 4689.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2141, pruned_loss=0.0348, over 972511.34 frames.], batch size: 15, lr: 2.55e-04 +2022-05-06 08:16:46,305 INFO [train.py:715] (6/8) Epoch 8, batch 31400, loss[loss=0.1334, simple_loss=0.2101, pruned_loss=0.02834, over 4967.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2142, pruned_loss=0.03477, over 973072.14 frames.], batch size: 24, lr: 2.55e-04 +2022-05-06 08:17:25,751 INFO [train.py:715] (6/8) Epoch 8, batch 31450, loss[loss=0.1159, simple_loss=0.18, pruned_loss=0.02589, over 4794.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2139, pruned_loss=0.03461, over 972471.32 frames.], batch size: 17, lr: 2.55e-04 +2022-05-06 08:18:05,874 INFO [train.py:715] (6/8) Epoch 8, batch 31500, loss[loss=0.1362, simple_loss=0.2016, pruned_loss=0.03537, over 4990.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2142, pruned_loss=0.03475, over 974140.60 frames.], batch size: 15, lr: 2.55e-04 +2022-05-06 08:18:45,121 INFO [train.py:715] (6/8) Epoch 8, batch 31550, loss[loss=0.1448, simple_loss=0.2134, pruned_loss=0.03811, over 4817.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2138, pruned_loss=0.0349, over 973721.35 frames.], batch size: 25, lr: 2.55e-04 +2022-05-06 08:19:24,102 INFO [train.py:715] (6/8) Epoch 8, batch 31600, loss[loss=0.1275, simple_loss=0.1971, pruned_loss=0.0289, over 4967.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2131, pruned_loss=0.03454, over 972817.32 frames.], batch size: 15, lr: 2.55e-04 +2022-05-06 08:20:03,758 INFO [train.py:715] (6/8) Epoch 8, batch 31650, loss[loss=0.1298, simple_loss=0.1989, pruned_loss=0.03041, over 4785.00 frames.], tot_loss[loss=0.142, simple_loss=0.2139, pruned_loss=0.03506, over 971982.99 frames.], batch size: 14, lr: 2.55e-04 +2022-05-06 08:20:43,078 INFO [train.py:715] (6/8) Epoch 8, batch 31700, loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.03225, over 4985.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2144, pruned_loss=0.03489, over 972569.02 frames.], batch size: 14, lr: 2.55e-04 +2022-05-06 08:21:22,755 INFO [train.py:715] (6/8) Epoch 8, batch 31750, loss[loss=0.1752, simple_loss=0.2495, pruned_loss=0.05041, over 4767.00 frames.], tot_loss[loss=0.143, simple_loss=0.2153, pruned_loss=0.03539, over 972342.06 frames.], batch size: 14, lr: 2.55e-04 +2022-05-06 08:22:01,971 INFO [train.py:715] (6/8) Epoch 8, batch 31800, loss[loss=0.1658, simple_loss=0.2395, pruned_loss=0.04604, over 4993.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.03601, over 972969.11 frames.], batch size: 16, lr: 2.55e-04 +2022-05-06 08:22:41,014 INFO [train.py:715] (6/8) Epoch 8, batch 31850, loss[loss=0.09374, simple_loss=0.1531, pruned_loss=0.01721, over 4786.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2147, pruned_loss=0.03498, over 972444.81 frames.], batch size: 12, lr: 2.55e-04 +2022-05-06 08:23:19,919 INFO [train.py:715] (6/8) Epoch 8, batch 31900, loss[loss=0.1371, simple_loss=0.2038, pruned_loss=0.03522, over 4832.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2144, pruned_loss=0.03502, over 972756.54 frames.], batch size: 30, lr: 2.55e-04 +2022-05-06 08:23:58,321 INFO [train.py:715] (6/8) Epoch 8, batch 31950, loss[loss=0.179, simple_loss=0.2472, pruned_loss=0.05545, over 4906.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2151, pruned_loss=0.03512, over 972417.81 frames.], batch size: 17, lr: 2.55e-04 +2022-05-06 08:24:37,608 INFO [train.py:715] (6/8) Epoch 8, 
batch 32000, loss[loss=0.1451, simple_loss=0.2172, pruned_loss=0.03646, over 4790.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2158, pruned_loss=0.03526, over 972497.48 frames.], batch size: 14, lr: 2.55e-04 +2022-05-06 08:25:17,171 INFO [train.py:715] (6/8) Epoch 8, batch 32050, loss[loss=0.1242, simple_loss=0.1936, pruned_loss=0.02738, over 4900.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2149, pruned_loss=0.03467, over 972641.62 frames.], batch size: 19, lr: 2.55e-04 +2022-05-06 08:25:55,740 INFO [train.py:715] (6/8) Epoch 8, batch 32100, loss[loss=0.17, simple_loss=0.2453, pruned_loss=0.04734, over 4965.00 frames.], tot_loss[loss=0.143, simple_loss=0.2156, pruned_loss=0.03515, over 972739.04 frames.], batch size: 21, lr: 2.55e-04 +2022-05-06 08:26:34,470 INFO [train.py:715] (6/8) Epoch 8, batch 32150, loss[loss=0.1366, simple_loss=0.2004, pruned_loss=0.03637, over 4789.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2157, pruned_loss=0.03535, over 972706.99 frames.], batch size: 14, lr: 2.55e-04 +2022-05-06 08:27:14,044 INFO [train.py:715] (6/8) Epoch 8, batch 32200, loss[loss=0.1474, simple_loss=0.226, pruned_loss=0.03444, over 4898.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2161, pruned_loss=0.03571, over 971947.22 frames.], batch size: 17, lr: 2.54e-04 +2022-05-06 08:27:52,864 INFO [train.py:715] (6/8) Epoch 8, batch 32250, loss[loss=0.1427, simple_loss=0.2292, pruned_loss=0.02815, over 4781.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2157, pruned_loss=0.03555, over 972554.05 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:28:32,332 INFO [train.py:715] (6/8) Epoch 8, batch 32300, loss[loss=0.14, simple_loss=0.2181, pruned_loss=0.03097, over 4843.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2157, pruned_loss=0.03554, over 972686.09 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:29:11,543 INFO [train.py:715] (6/8) Epoch 8, batch 32350, loss[loss=0.1218, simple_loss=0.1975, pruned_loss=0.02304, over 4779.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2168, pruned_loss=0.03612, over 972878.32 frames.], batch size: 18, lr: 2.54e-04 +2022-05-06 08:29:51,457 INFO [train.py:715] (6/8) Epoch 8, batch 32400, loss[loss=0.1616, simple_loss=0.2242, pruned_loss=0.04946, over 4863.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2169, pruned_loss=0.03626, over 973159.47 frames.], batch size: 20, lr: 2.54e-04 +2022-05-06 08:30:30,384 INFO [train.py:715] (6/8) Epoch 8, batch 32450, loss[loss=0.137, simple_loss=0.2071, pruned_loss=0.03346, over 4844.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2155, pruned_loss=0.03582, over 972453.95 frames.], batch size: 30, lr: 2.54e-04 +2022-05-06 08:31:09,408 INFO [train.py:715] (6/8) Epoch 8, batch 32500, loss[loss=0.1704, simple_loss=0.2568, pruned_loss=0.04193, over 4863.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2158, pruned_loss=0.03572, over 972628.06 frames.], batch size: 32, lr: 2.54e-04 +2022-05-06 08:31:48,953 INFO [train.py:715] (6/8) Epoch 8, batch 32550, loss[loss=0.1416, simple_loss=0.2125, pruned_loss=0.03533, over 4792.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2148, pruned_loss=0.03525, over 971622.19 frames.], batch size: 17, lr: 2.54e-04 +2022-05-06 08:32:27,501 INFO [train.py:715] (6/8) Epoch 8, batch 32600, loss[loss=0.1451, simple_loss=0.2143, pruned_loss=0.03796, over 4894.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03496, over 971189.26 frames.], batch size: 19, lr: 2.54e-04 +2022-05-06 08:33:06,728 INFO [train.py:715] (6/8) Epoch 8, batch 32650, 
loss[loss=0.1283, simple_loss=0.1911, pruned_loss=0.03272, over 4682.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03537, over 972057.20 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:33:45,985 INFO [train.py:715] (6/8) Epoch 8, batch 32700, loss[loss=0.1465, simple_loss=0.2133, pruned_loss=0.03984, over 4785.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.03545, over 971915.49 frames.], batch size: 17, lr: 2.54e-04 +2022-05-06 08:34:26,181 INFO [train.py:715] (6/8) Epoch 8, batch 32750, loss[loss=0.1382, simple_loss=0.2101, pruned_loss=0.03317, over 4793.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2155, pruned_loss=0.03565, over 972592.20 frames.], batch size: 18, lr: 2.54e-04 +2022-05-06 08:35:04,665 INFO [train.py:715] (6/8) Epoch 8, batch 32800, loss[loss=0.1385, simple_loss=0.2151, pruned_loss=0.03092, over 4818.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2158, pruned_loss=0.03555, over 973121.37 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:35:43,309 INFO [train.py:715] (6/8) Epoch 8, batch 32850, loss[loss=0.1678, simple_loss=0.2311, pruned_loss=0.05228, over 4908.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.03532, over 972209.25 frames.], batch size: 17, lr: 2.54e-04 +2022-05-06 08:36:22,466 INFO [train.py:715] (6/8) Epoch 8, batch 32900, loss[loss=0.1458, simple_loss=0.2149, pruned_loss=0.03832, over 4964.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.0353, over 972329.51 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:37:00,743 INFO [train.py:715] (6/8) Epoch 8, batch 32950, loss[loss=0.1335, simple_loss=0.1958, pruned_loss=0.03559, over 4988.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2153, pruned_loss=0.03559, over 972360.59 frames.], batch size: 14, lr: 2.54e-04 +2022-05-06 08:37:39,630 INFO [train.py:715] (6/8) Epoch 8, batch 33000, loss[loss=0.1296, simple_loss=0.2031, pruned_loss=0.028, over 4827.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2147, pruned_loss=0.03517, over 973208.90 frames.], batch size: 26, lr: 2.54e-04 +2022-05-06 08:37:39,630 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 08:37:52,640 INFO [train.py:742] (6/8) Epoch 8, validation: loss=0.1071, simple_loss=0.1917, pruned_loss=0.01126, over 914524.00 frames. 
+2022-05-06 08:38:31,975 INFO [train.py:715] (6/8) Epoch 8, batch 33050, loss[loss=0.1581, simple_loss=0.2349, pruned_loss=0.04065, over 4814.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2149, pruned_loss=0.03519, over 972393.51 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:39:10,830 INFO [train.py:715] (6/8) Epoch 8, batch 33100, loss[loss=0.1619, simple_loss=0.2367, pruned_loss=0.04358, over 4759.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2147, pruned_loss=0.03506, over 972286.08 frames.], batch size: 19, lr: 2.54e-04 +2022-05-06 08:39:50,124 INFO [train.py:715] (6/8) Epoch 8, batch 33150, loss[loss=0.1546, simple_loss=0.2236, pruned_loss=0.04278, over 4705.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03503, over 972501.90 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:40:28,832 INFO [train.py:715] (6/8) Epoch 8, batch 33200, loss[loss=0.1609, simple_loss=0.2187, pruned_loss=0.05152, over 4966.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03498, over 972773.73 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:41:08,508 INFO [train.py:715] (6/8) Epoch 8, batch 33250, loss[loss=0.165, simple_loss=0.2464, pruned_loss=0.04182, over 4748.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2137, pruned_loss=0.03477, over 972852.69 frames.], batch size: 16, lr: 2.54e-04 +2022-05-06 08:41:48,103 INFO [train.py:715] (6/8) Epoch 8, batch 33300, loss[loss=0.1211, simple_loss=0.1915, pruned_loss=0.02533, over 4794.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03499, over 973190.68 frames.], batch size: 12, lr: 2.54e-04 +2022-05-06 08:42:26,900 INFO [train.py:715] (6/8) Epoch 8, batch 33350, loss[loss=0.1581, simple_loss=0.2272, pruned_loss=0.04446, over 4967.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2152, pruned_loss=0.03524, over 972708.74 frames.], batch size: 24, lr: 2.54e-04 +2022-05-06 08:43:06,264 INFO [train.py:715] (6/8) Epoch 8, batch 33400, loss[loss=0.1485, simple_loss=0.2226, pruned_loss=0.03716, over 4875.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2153, pruned_loss=0.03491, over 972582.54 frames.], batch size: 20, lr: 2.54e-04 +2022-05-06 08:43:45,178 INFO [train.py:715] (6/8) Epoch 8, batch 33450, loss[loss=0.1297, simple_loss=0.1983, pruned_loss=0.03052, over 4952.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2148, pruned_loss=0.03471, over 972694.49 frames.], batch size: 28, lr: 2.54e-04 +2022-05-06 08:44:24,012 INFO [train.py:715] (6/8) Epoch 8, batch 33500, loss[loss=0.1501, simple_loss=0.2186, pruned_loss=0.04084, over 4965.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2161, pruned_loss=0.0357, over 971764.60 frames.], batch size: 24, lr: 2.54e-04 +2022-05-06 08:45:05,011 INFO [train.py:715] (6/8) Epoch 8, batch 33550, loss[loss=0.1422, simple_loss=0.2151, pruned_loss=0.03468, over 4763.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.03545, over 971886.21 frames.], batch size: 18, lr: 2.54e-04 +2022-05-06 08:45:44,467 INFO [train.py:715] (6/8) Epoch 8, batch 33600, loss[loss=0.1247, simple_loss=0.2003, pruned_loss=0.02457, over 4776.00 frames.], tot_loss[loss=0.1432, simple_loss=0.215, pruned_loss=0.03569, over 972353.40 frames.], batch size: 19, lr: 2.54e-04 +2022-05-06 08:46:23,911 INFO [train.py:715] (6/8) Epoch 8, batch 33650, loss[loss=0.2053, simple_loss=0.2711, pruned_loss=0.06976, over 4981.00 frames.], tot_loss[loss=0.143, simple_loss=0.2152, pruned_loss=0.03541, over 972813.46 frames.], batch size: 33, lr: 2.54e-04 +2022-05-06 
08:47:02,974 INFO [train.py:715] (6/8) Epoch 8, batch 33700, loss[loss=0.1477, simple_loss=0.216, pruned_loss=0.03968, over 4783.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2147, pruned_loss=0.03501, over 972658.42 frames.], batch size: 17, lr: 2.54e-04 +2022-05-06 08:47:41,967 INFO [train.py:715] (6/8) Epoch 8, batch 33750, loss[loss=0.1417, simple_loss=0.2269, pruned_loss=0.02823, over 4699.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03426, over 972174.88 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:48:20,685 INFO [train.py:715] (6/8) Epoch 8, batch 33800, loss[loss=0.1325, simple_loss=0.2128, pruned_loss=0.02609, over 4774.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2142, pruned_loss=0.03402, over 971734.70 frames.], batch size: 17, lr: 2.54e-04 +2022-05-06 08:48:59,308 INFO [train.py:715] (6/8) Epoch 8, batch 33850, loss[loss=0.1423, simple_loss=0.2041, pruned_loss=0.04022, over 4838.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2145, pruned_loss=0.03414, over 971344.57 frames.], batch size: 13, lr: 2.54e-04 +2022-05-06 08:49:38,119 INFO [train.py:715] (6/8) Epoch 8, batch 33900, loss[loss=0.146, simple_loss=0.2135, pruned_loss=0.03928, over 4795.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2143, pruned_loss=0.03398, over 971181.36 frames.], batch size: 24, lr: 2.54e-04 +2022-05-06 08:50:17,037 INFO [train.py:715] (6/8) Epoch 8, batch 33950, loss[loss=0.1407, simple_loss=0.2174, pruned_loss=0.03203, over 4869.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2151, pruned_loss=0.03436, over 971256.07 frames.], batch size: 16, lr: 2.54e-04 +2022-05-06 08:50:56,635 INFO [train.py:715] (6/8) Epoch 8, batch 34000, loss[loss=0.1509, simple_loss=0.229, pruned_loss=0.03641, over 4821.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2162, pruned_loss=0.03508, over 970836.57 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:51:35,551 INFO [train.py:715] (6/8) Epoch 8, batch 34050, loss[loss=0.1465, simple_loss=0.2207, pruned_loss=0.03614, over 4876.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2169, pruned_loss=0.03572, over 971496.57 frames.], batch size: 32, lr: 2.54e-04 +2022-05-06 08:52:14,818 INFO [train.py:715] (6/8) Epoch 8, batch 34100, loss[loss=0.1336, simple_loss=0.2133, pruned_loss=0.02694, over 4813.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2168, pruned_loss=0.03578, over 971296.68 frames.], batch size: 13, lr: 2.54e-04 +2022-05-06 08:52:53,782 INFO [train.py:715] (6/8) Epoch 8, batch 34150, loss[loss=0.1497, simple_loss=0.2287, pruned_loss=0.0353, over 4912.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03674, over 971516.75 frames.], batch size: 39, lr: 2.54e-04 +2022-05-06 08:53:32,400 INFO [train.py:715] (6/8) Epoch 8, batch 34200, loss[loss=0.1803, simple_loss=0.2533, pruned_loss=0.05366, over 4857.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03639, over 972286.32 frames.], batch size: 20, lr: 2.54e-04 +2022-05-06 08:54:11,304 INFO [train.py:715] (6/8) Epoch 8, batch 34250, loss[loss=0.192, simple_loss=0.2591, pruned_loss=0.06244, over 4989.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2161, pruned_loss=0.03602, over 972578.49 frames.], batch size: 16, lr: 2.54e-04 +2022-05-06 08:54:50,280 INFO [train.py:715] (6/8) Epoch 8, batch 34300, loss[loss=0.1514, simple_loss=0.2243, pruned_loss=0.03924, over 4948.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2165, pruned_loss=0.03606, over 972415.74 frames.], batch size: 29, lr: 2.54e-04 +2022-05-06 08:55:29,030 INFO 
[train.py:715] (6/8) Epoch 8, batch 34350, loss[loss=0.1217, simple_loss=0.1949, pruned_loss=0.02424, over 4814.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.0362, over 972337.04 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:56:07,456 INFO [train.py:715] (6/8) Epoch 8, batch 34400, loss[loss=0.1139, simple_loss=0.1867, pruned_loss=0.02051, over 4802.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2169, pruned_loss=0.03583, over 972088.20 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:56:46,677 INFO [train.py:715] (6/8) Epoch 8, batch 34450, loss[loss=0.171, simple_loss=0.238, pruned_loss=0.05203, over 4945.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2162, pruned_loss=0.03534, over 972347.72 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:57:26,048 INFO [train.py:715] (6/8) Epoch 8, batch 34500, loss[loss=0.1371, simple_loss=0.2163, pruned_loss=0.02889, over 4814.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2167, pruned_loss=0.03576, over 972393.40 frames.], batch size: 25, lr: 2.54e-04 +2022-05-06 08:58:04,292 INFO [train.py:715] (6/8) Epoch 8, batch 34550, loss[loss=0.1433, simple_loss=0.2219, pruned_loss=0.03233, over 4891.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2169, pruned_loss=0.03602, over 973152.61 frames.], batch size: 19, lr: 2.54e-04 +2022-05-06 08:58:42,925 INFO [train.py:715] (6/8) Epoch 8, batch 34600, loss[loss=0.1473, simple_loss=0.2228, pruned_loss=0.03589, over 4827.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2168, pruned_loss=0.03595, over 973102.31 frames.], batch size: 26, lr: 2.54e-04 +2022-05-06 08:59:21,849 INFO [train.py:715] (6/8) Epoch 8, batch 34650, loss[loss=0.1398, simple_loss=0.2173, pruned_loss=0.03111, over 4979.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2166, pruned_loss=0.03556, over 973653.60 frames.], batch size: 28, lr: 2.53e-04 +2022-05-06 09:00:01,505 INFO [train.py:715] (6/8) Epoch 8, batch 34700, loss[loss=0.1183, simple_loss=0.1825, pruned_loss=0.02706, over 4836.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2159, pruned_loss=0.03555, over 973670.96 frames.], batch size: 13, lr: 2.53e-04 +2022-05-06 09:00:38,663 INFO [train.py:715] (6/8) Epoch 8, batch 34750, loss[loss=0.1352, simple_loss=0.2068, pruned_loss=0.03176, over 4864.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2162, pruned_loss=0.03538, over 973283.98 frames.], batch size: 20, lr: 2.53e-04 +2022-05-06 09:01:15,266 INFO [train.py:715] (6/8) Epoch 8, batch 34800, loss[loss=0.129, simple_loss=0.199, pruned_loss=0.02949, over 4805.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2143, pruned_loss=0.035, over 971951.58 frames.], batch size: 12, lr: 2.53e-04 +2022-05-06 09:02:04,657 INFO [train.py:715] (6/8) Epoch 9, batch 0, loss[loss=0.1602, simple_loss=0.2298, pruned_loss=0.04529, over 4984.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2298, pruned_loss=0.04529, over 4984.00 frames.], batch size: 14, lr: 2.42e-04 +2022-05-06 09:02:43,977 INFO [train.py:715] (6/8) Epoch 9, batch 50, loss[loss=0.1265, simple_loss=0.1907, pruned_loss=0.03118, over 4811.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2182, pruned_loss=0.03795, over 219182.41 frames.], batch size: 13, lr: 2.41e-04 +2022-05-06 09:03:23,613 INFO [train.py:715] (6/8) Epoch 9, batch 100, loss[loss=0.1349, simple_loss=0.2116, pruned_loss=0.02915, over 4981.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2168, pruned_loss=0.0362, over 386432.53 frames.], batch size: 24, lr: 2.41e-04 +2022-05-06 09:04:02,102 INFO [train.py:715] (6/8) Epoch 9, batch 
150, loss[loss=0.1716, simple_loss=0.2231, pruned_loss=0.06006, over 4694.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2151, pruned_loss=0.03583, over 516702.93 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:04:42,542 INFO [train.py:715] (6/8) Epoch 9, batch 200, loss[loss=0.121, simple_loss=0.1958, pruned_loss=0.0231, over 4971.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03502, over 617499.72 frames.], batch size: 25, lr: 2.41e-04 +2022-05-06 09:05:21,822 INFO [train.py:715] (6/8) Epoch 9, batch 250, loss[loss=0.1292, simple_loss=0.1963, pruned_loss=0.03107, over 4907.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2142, pruned_loss=0.03514, over 695623.45 frames.], batch size: 18, lr: 2.41e-04 +2022-05-06 09:06:01,096 INFO [train.py:715] (6/8) Epoch 9, batch 300, loss[loss=0.1746, simple_loss=0.2407, pruned_loss=0.05424, over 4991.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2155, pruned_loss=0.03591, over 755872.47 frames.], batch size: 14, lr: 2.41e-04 +2022-05-06 09:06:40,658 INFO [train.py:715] (6/8) Epoch 9, batch 350, loss[loss=0.1434, simple_loss=0.2149, pruned_loss=0.03592, over 4852.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03584, over 804481.40 frames.], batch size: 20, lr: 2.41e-04 +2022-05-06 09:07:20,403 INFO [train.py:715] (6/8) Epoch 9, batch 400, loss[loss=0.1545, simple_loss=0.2202, pruned_loss=0.04434, over 4928.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03582, over 842146.98 frames.], batch size: 35, lr: 2.41e-04 +2022-05-06 09:07:59,739 INFO [train.py:715] (6/8) Epoch 9, batch 450, loss[loss=0.1392, simple_loss=0.2262, pruned_loss=0.02612, over 4953.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2162, pruned_loss=0.03558, over 871857.57 frames.], batch size: 21, lr: 2.41e-04 +2022-05-06 09:08:38,887 INFO [train.py:715] (6/8) Epoch 9, batch 500, loss[loss=0.1217, simple_loss=0.1999, pruned_loss=0.02172, over 4985.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2159, pruned_loss=0.03545, over 894733.52 frames.], batch size: 24, lr: 2.41e-04 +2022-05-06 09:09:19,206 INFO [train.py:715] (6/8) Epoch 9, batch 550, loss[loss=0.1168, simple_loss=0.1878, pruned_loss=0.02286, over 4847.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2145, pruned_loss=0.03484, over 912868.95 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:09:58,810 INFO [train.py:715] (6/8) Epoch 9, batch 600, loss[loss=0.1344, simple_loss=0.2049, pruned_loss=0.03193, over 4749.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2154, pruned_loss=0.0349, over 926086.37 frames.], batch size: 16, lr: 2.41e-04 +2022-05-06 09:10:37,827 INFO [train.py:715] (6/8) Epoch 9, batch 650, loss[loss=0.1729, simple_loss=0.2492, pruned_loss=0.0483, over 4865.00 frames.], tot_loss[loss=0.143, simple_loss=0.2155, pruned_loss=0.03522, over 936856.30 frames.], batch size: 32, lr: 2.41e-04 +2022-05-06 09:11:16,920 INFO [train.py:715] (6/8) Epoch 9, batch 700, loss[loss=0.2046, simple_loss=0.2641, pruned_loss=0.07249, over 4806.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2159, pruned_loss=0.03519, over 945042.52 frames.], batch size: 12, lr: 2.41e-04 +2022-05-06 09:11:56,401 INFO [train.py:715] (6/8) Epoch 9, batch 750, loss[loss=0.1353, simple_loss=0.2063, pruned_loss=0.0321, over 4943.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2162, pruned_loss=0.03552, over 950552.71 frames.], batch size: 21, lr: 2.41e-04 +2022-05-06 09:12:35,542 INFO [train.py:715] (6/8) Epoch 9, batch 800, loss[loss=0.161, simple_loss=0.2402, 
pruned_loss=0.04093, over 4909.00 frames.], tot_loss[loss=0.143, simple_loss=0.2159, pruned_loss=0.03512, over 954921.42 frames.], batch size: 17, lr: 2.41e-04 +2022-05-06 09:13:14,321 INFO [train.py:715] (6/8) Epoch 9, batch 850, loss[loss=0.1209, simple_loss=0.2046, pruned_loss=0.01862, over 4823.00 frames.], tot_loss[loss=0.143, simple_loss=0.2158, pruned_loss=0.03513, over 958715.53 frames.], batch size: 26, lr: 2.41e-04 +2022-05-06 09:13:53,321 INFO [train.py:715] (6/8) Epoch 9, batch 900, loss[loss=0.1251, simple_loss=0.2017, pruned_loss=0.02423, over 4893.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2145, pruned_loss=0.03419, over 962200.35 frames.], batch size: 19, lr: 2.41e-04 +2022-05-06 09:14:32,598 INFO [train.py:715] (6/8) Epoch 9, batch 950, loss[loss=0.1457, simple_loss=0.2196, pruned_loss=0.03592, over 4764.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03405, over 964312.61 frames.], batch size: 19, lr: 2.41e-04 +2022-05-06 09:15:12,211 INFO [train.py:715] (6/8) Epoch 9, batch 1000, loss[loss=0.1246, simple_loss=0.1961, pruned_loss=0.02662, over 4818.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2139, pruned_loss=0.03419, over 966492.43 frames.], batch size: 25, lr: 2.41e-04 +2022-05-06 09:15:50,370 INFO [train.py:715] (6/8) Epoch 9, batch 1050, loss[loss=0.1517, simple_loss=0.2146, pruned_loss=0.0444, over 4762.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2147, pruned_loss=0.03514, over 967416.79 frames.], batch size: 19, lr: 2.41e-04 +2022-05-06 09:16:30,511 INFO [train.py:715] (6/8) Epoch 9, batch 1100, loss[loss=0.151, simple_loss=0.2104, pruned_loss=0.04576, over 4973.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2147, pruned_loss=0.03522, over 968140.68 frames.], batch size: 14, lr: 2.41e-04 +2022-05-06 09:17:10,347 INFO [train.py:715] (6/8) Epoch 9, batch 1150, loss[loss=0.1724, simple_loss=0.2607, pruned_loss=0.04207, over 4871.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2144, pruned_loss=0.03507, over 968545.59 frames.], batch size: 16, lr: 2.41e-04 +2022-05-06 09:17:49,485 INFO [train.py:715] (6/8) Epoch 9, batch 1200, loss[loss=0.1432, simple_loss=0.2139, pruned_loss=0.03626, over 4980.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03551, over 969424.12 frames.], batch size: 31, lr: 2.41e-04 +2022-05-06 09:18:28,822 INFO [train.py:715] (6/8) Epoch 9, batch 1250, loss[loss=0.119, simple_loss=0.1978, pruned_loss=0.02009, over 4902.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.03528, over 969918.08 frames.], batch size: 19, lr: 2.41e-04 +2022-05-06 09:19:08,562 INFO [train.py:715] (6/8) Epoch 9, batch 1300, loss[loss=0.1476, simple_loss=0.21, pruned_loss=0.04262, over 4919.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2148, pruned_loss=0.03514, over 970290.74 frames.], batch size: 19, lr: 2.41e-04 +2022-05-06 09:19:48,102 INFO [train.py:715] (6/8) Epoch 9, batch 1350, loss[loss=0.142, simple_loss=0.2209, pruned_loss=0.03154, over 4765.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2157, pruned_loss=0.0352, over 969870.10 frames.], batch size: 17, lr: 2.41e-04 +2022-05-06 09:20:26,898 INFO [train.py:715] (6/8) Epoch 9, batch 1400, loss[loss=0.1181, simple_loss=0.2004, pruned_loss=0.01784, over 4788.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2156, pruned_loss=0.03564, over 970692.26 frames.], batch size: 17, lr: 2.41e-04 +2022-05-06 09:21:06,501 INFO [train.py:715] (6/8) Epoch 9, batch 1450, loss[loss=0.1547, simple_loss=0.2248, pruned_loss=0.04233, over 4895.00 frames.], 
tot_loss[loss=0.143, simple_loss=0.2152, pruned_loss=0.03544, over 971060.31 frames.], batch size: 22, lr: 2.41e-04 +2022-05-06 09:21:45,312 INFO [train.py:715] (6/8) Epoch 9, batch 1500, loss[loss=0.1141, simple_loss=0.1836, pruned_loss=0.02226, over 4935.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2151, pruned_loss=0.03524, over 971436.04 frames.], batch size: 29, lr: 2.41e-04 +2022-05-06 09:22:24,147 INFO [train.py:715] (6/8) Epoch 9, batch 1550, loss[loss=0.1436, simple_loss=0.2309, pruned_loss=0.0281, over 4941.00 frames.], tot_loss[loss=0.143, simple_loss=0.2153, pruned_loss=0.03532, over 971779.57 frames.], batch size: 21, lr: 2.41e-04 +2022-05-06 09:23:03,179 INFO [train.py:715] (6/8) Epoch 9, batch 1600, loss[loss=0.1164, simple_loss=0.1969, pruned_loss=0.01792, over 4818.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2153, pruned_loss=0.03572, over 971474.70 frames.], batch size: 25, lr: 2.41e-04 +2022-05-06 09:23:42,085 INFO [train.py:715] (6/8) Epoch 9, batch 1650, loss[loss=0.1574, simple_loss=0.2238, pruned_loss=0.04548, over 4902.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2153, pruned_loss=0.03514, over 971436.54 frames.], batch size: 17, lr: 2.41e-04 +2022-05-06 09:24:21,078 INFO [train.py:715] (6/8) Epoch 9, batch 1700, loss[loss=0.1413, simple_loss=0.2122, pruned_loss=0.03521, over 4794.00 frames.], tot_loss[loss=0.1425, simple_loss=0.215, pruned_loss=0.03505, over 972258.49 frames.], batch size: 17, lr: 2.41e-04 +2022-05-06 09:25:00,149 INFO [train.py:715] (6/8) Epoch 9, batch 1750, loss[loss=0.1464, simple_loss=0.2119, pruned_loss=0.04042, over 4706.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2155, pruned_loss=0.03496, over 972121.17 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:25:39,674 INFO [train.py:715] (6/8) Epoch 9, batch 1800, loss[loss=0.1435, simple_loss=0.215, pruned_loss=0.03595, over 4908.00 frames.], tot_loss[loss=0.1433, simple_loss=0.216, pruned_loss=0.03528, over 972509.02 frames.], batch size: 23, lr: 2.41e-04 +2022-05-06 09:26:18,855 INFO [train.py:715] (6/8) Epoch 9, batch 1850, loss[loss=0.1287, simple_loss=0.2061, pruned_loss=0.02562, over 4866.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2158, pruned_loss=0.03531, over 972548.97 frames.], batch size: 16, lr: 2.41e-04 +2022-05-06 09:26:57,988 INFO [train.py:715] (6/8) Epoch 9, batch 1900, loss[loss=0.1744, simple_loss=0.2345, pruned_loss=0.05718, over 4793.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2155, pruned_loss=0.03506, over 972538.37 frames.], batch size: 14, lr: 2.41e-04 +2022-05-06 09:27:37,991 INFO [train.py:715] (6/8) Epoch 9, batch 1950, loss[loss=0.1563, simple_loss=0.2251, pruned_loss=0.04374, over 4837.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03525, over 971480.08 frames.], batch size: 30, lr: 2.41e-04 +2022-05-06 09:28:17,646 INFO [train.py:715] (6/8) Epoch 9, batch 2000, loss[loss=0.1504, simple_loss=0.2311, pruned_loss=0.03487, over 4900.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2156, pruned_loss=0.03571, over 972435.44 frames.], batch size: 19, lr: 2.41e-04 +2022-05-06 09:28:56,801 INFO [train.py:715] (6/8) Epoch 9, batch 2050, loss[loss=0.1646, simple_loss=0.2368, pruned_loss=0.04618, over 4857.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03625, over 972651.18 frames.], batch size: 32, lr: 2.41e-04 +2022-05-06 09:29:35,326 INFO [train.py:715] (6/8) Epoch 9, batch 2100, loss[loss=0.1671, simple_loss=0.2448, pruned_loss=0.04469, over 4928.00 frames.], tot_loss[loss=0.1444, 
simple_loss=0.2165, pruned_loss=0.03611, over 972475.83 frames.], batch size: 18, lr: 2.41e-04 +2022-05-06 09:30:14,644 INFO [train.py:715] (6/8) Epoch 9, batch 2150, loss[loss=0.1389, simple_loss=0.2066, pruned_loss=0.03561, over 4821.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2161, pruned_loss=0.03573, over 972625.23 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:30:53,734 INFO [train.py:715] (6/8) Epoch 9, batch 2200, loss[loss=0.1441, simple_loss=0.2159, pruned_loss=0.03618, over 4802.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2158, pruned_loss=0.03545, over 972219.43 frames.], batch size: 21, lr: 2.41e-04 +2022-05-06 09:31:32,492 INFO [train.py:715] (6/8) Epoch 9, batch 2250, loss[loss=0.1382, simple_loss=0.2097, pruned_loss=0.0333, over 4970.00 frames.], tot_loss[loss=0.144, simple_loss=0.2161, pruned_loss=0.03593, over 972512.45 frames.], batch size: 24, lr: 2.41e-04 +2022-05-06 09:32:11,658 INFO [train.py:715] (6/8) Epoch 9, batch 2300, loss[loss=0.1456, simple_loss=0.2001, pruned_loss=0.04553, over 4966.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2155, pruned_loss=0.0357, over 972193.70 frames.], batch size: 14, lr: 2.41e-04 +2022-05-06 09:32:50,737 INFO [train.py:715] (6/8) Epoch 9, batch 2350, loss[loss=0.1381, simple_loss=0.2046, pruned_loss=0.03577, over 4935.00 frames.], tot_loss[loss=0.1429, simple_loss=0.215, pruned_loss=0.03539, over 972114.37 frames.], batch size: 18, lr: 2.41e-04 +2022-05-06 09:33:30,104 INFO [train.py:715] (6/8) Epoch 9, batch 2400, loss[loss=0.1364, simple_loss=0.2052, pruned_loss=0.03382, over 4806.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2155, pruned_loss=0.03562, over 972031.19 frames.], batch size: 13, lr: 2.41e-04 +2022-05-06 09:34:08,887 INFO [train.py:715] (6/8) Epoch 9, batch 2450, loss[loss=0.1738, simple_loss=0.2425, pruned_loss=0.05257, over 4946.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2151, pruned_loss=0.03558, over 971935.84 frames.], batch size: 35, lr: 2.41e-04 +2022-05-06 09:34:48,502 INFO [train.py:715] (6/8) Epoch 9, batch 2500, loss[loss=0.15, simple_loss=0.2345, pruned_loss=0.03276, over 4914.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2154, pruned_loss=0.03565, over 972225.22 frames.], batch size: 23, lr: 2.41e-04 +2022-05-06 09:35:27,021 INFO [train.py:715] (6/8) Epoch 9, batch 2550, loss[loss=0.1446, simple_loss=0.2243, pruned_loss=0.03248, over 4900.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2166, pruned_loss=0.03591, over 972397.53 frames.], batch size: 22, lr: 2.41e-04 +2022-05-06 09:36:06,039 INFO [train.py:715] (6/8) Epoch 9, batch 2600, loss[loss=0.1183, simple_loss=0.1862, pruned_loss=0.02524, over 4830.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2168, pruned_loss=0.03606, over 972321.09 frames.], batch size: 13, lr: 2.41e-04 +2022-05-06 09:36:45,108 INFO [train.py:715] (6/8) Epoch 9, batch 2650, loss[loss=0.1423, simple_loss=0.2142, pruned_loss=0.03513, over 4915.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2167, pruned_loss=0.03579, over 972853.36 frames.], batch size: 17, lr: 2.41e-04 +2022-05-06 09:37:24,474 INFO [train.py:715] (6/8) Epoch 9, batch 2700, loss[loss=0.1794, simple_loss=0.2318, pruned_loss=0.06348, over 4980.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2162, pruned_loss=0.03551, over 972505.77 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 09:38:03,293 INFO [train.py:715] (6/8) Epoch 9, batch 2750, loss[loss=0.1299, simple_loss=0.2017, pruned_loss=0.02904, over 4789.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2156, 
pruned_loss=0.03507, over 973647.94 frames.], batch size: 21, lr: 2.40e-04 +2022-05-06 09:38:42,269 INFO [train.py:715] (6/8) Epoch 9, batch 2800, loss[loss=0.1775, simple_loss=0.2393, pruned_loss=0.0578, over 4960.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.03496, over 973253.46 frames.], batch size: 35, lr: 2.40e-04 +2022-05-06 09:39:21,840 INFO [train.py:715] (6/8) Epoch 9, batch 2850, loss[loss=0.1289, simple_loss=0.2115, pruned_loss=0.02314, over 4971.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03478, over 973159.88 frames.], batch size: 24, lr: 2.40e-04 +2022-05-06 09:40:00,918 INFO [train.py:715] (6/8) Epoch 9, batch 2900, loss[loss=0.1453, simple_loss=0.2211, pruned_loss=0.03472, over 4878.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2151, pruned_loss=0.03493, over 973440.59 frames.], batch size: 20, lr: 2.40e-04 +2022-05-06 09:40:39,679 INFO [train.py:715] (6/8) Epoch 9, batch 2950, loss[loss=0.1348, simple_loss=0.2106, pruned_loss=0.02948, over 4935.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2142, pruned_loss=0.03428, over 973139.17 frames.], batch size: 23, lr: 2.40e-04 +2022-05-06 09:41:18,903 INFO [train.py:715] (6/8) Epoch 9, batch 3000, loss[loss=0.122, simple_loss=0.1911, pruned_loss=0.02641, over 4753.00 frames.], tot_loss[loss=0.142, simple_loss=0.2145, pruned_loss=0.03473, over 972208.29 frames.], batch size: 19, lr: 2.40e-04 +2022-05-06 09:41:18,904 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 09:41:28,535 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.1069, simple_loss=0.1915, pruned_loss=0.01118, over 914524.00 frames. +2022-05-06 09:42:08,254 INFO [train.py:715] (6/8) Epoch 9, batch 3050, loss[loss=0.12, simple_loss=0.1921, pruned_loss=0.02393, over 4883.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2139, pruned_loss=0.03479, over 973166.40 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 09:42:47,738 INFO [train.py:715] (6/8) Epoch 9, batch 3100, loss[loss=0.1362, simple_loss=0.2009, pruned_loss=0.03574, over 4991.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2138, pruned_loss=0.03497, over 973600.43 frames.], batch size: 14, lr: 2.40e-04 +2022-05-06 09:43:27,216 INFO [train.py:715] (6/8) Epoch 9, batch 3150, loss[loss=0.1418, simple_loss=0.2093, pruned_loss=0.03721, over 4814.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2147, pruned_loss=0.03504, over 972765.36 frames.], batch size: 27, lr: 2.40e-04 +2022-05-06 09:44:06,427 INFO [train.py:715] (6/8) Epoch 9, batch 3200, loss[loss=0.1471, simple_loss=0.2265, pruned_loss=0.03382, over 4828.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2161, pruned_loss=0.03553, over 972289.95 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 09:44:45,584 INFO [train.py:715] (6/8) Epoch 9, batch 3250, loss[loss=0.1262, simple_loss=0.2, pruned_loss=0.02624, over 4754.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2159, pruned_loss=0.03543, over 972299.44 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 09:45:24,839 INFO [train.py:715] (6/8) Epoch 9, batch 3300, loss[loss=0.1439, simple_loss=0.211, pruned_loss=0.03835, over 4741.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03502, over 972581.79 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 09:46:03,662 INFO [train.py:715] (6/8) Epoch 9, batch 3350, loss[loss=0.132, simple_loss=0.2083, pruned_loss=0.02786, over 4747.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03515, over 972757.71 frames.], batch size: 18, lr: 2.40e-04 +2022-05-06 
09:46:42,965 INFO [train.py:715] (6/8) Epoch 9, batch 3400, loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02997, over 4793.00 frames.], tot_loss[loss=0.1425, simple_loss=0.215, pruned_loss=0.03499, over 973026.30 frames.], batch size: 21, lr: 2.40e-04 +2022-05-06 09:47:22,072 INFO [train.py:715] (6/8) Epoch 9, batch 3450, loss[loss=0.1249, simple_loss=0.2052, pruned_loss=0.02226, over 4842.00 frames.], tot_loss[loss=0.142, simple_loss=0.2146, pruned_loss=0.03464, over 972439.99 frames.], batch size: 26, lr: 2.40e-04 +2022-05-06 09:48:00,727 INFO [train.py:715] (6/8) Epoch 9, batch 3500, loss[loss=0.1575, simple_loss=0.2165, pruned_loss=0.04919, over 4766.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2141, pruned_loss=0.0345, over 972074.09 frames.], batch size: 12, lr: 2.40e-04 +2022-05-06 09:48:40,284 INFO [train.py:715] (6/8) Epoch 9, batch 3550, loss[loss=0.1533, simple_loss=0.2196, pruned_loss=0.04351, over 4919.00 frames.], tot_loss[loss=0.142, simple_loss=0.2146, pruned_loss=0.03466, over 972649.37 frames.], batch size: 17, lr: 2.40e-04 +2022-05-06 09:49:19,727 INFO [train.py:715] (6/8) Epoch 9, batch 3600, loss[loss=0.1557, simple_loss=0.2265, pruned_loss=0.04248, over 4844.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2133, pruned_loss=0.03454, over 972310.17 frames.], batch size: 13, lr: 2.40e-04 +2022-05-06 09:49:59,016 INFO [train.py:715] (6/8) Epoch 9, batch 3650, loss[loss=0.1435, simple_loss=0.2215, pruned_loss=0.03272, over 4818.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2131, pruned_loss=0.03426, over 973496.38 frames.], batch size: 25, lr: 2.40e-04 +2022-05-06 09:50:37,660 INFO [train.py:715] (6/8) Epoch 9, batch 3700, loss[loss=0.1027, simple_loss=0.1765, pruned_loss=0.01443, over 4863.00 frames.], tot_loss[loss=0.1414, simple_loss=0.214, pruned_loss=0.03443, over 973249.26 frames.], batch size: 13, lr: 2.40e-04 +2022-05-06 09:51:17,148 INFO [train.py:715] (6/8) Epoch 9, batch 3750, loss[loss=0.1307, simple_loss=0.2033, pruned_loss=0.02906, over 4698.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.03419, over 973200.27 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 09:51:56,922 INFO [train.py:715] (6/8) Epoch 9, batch 3800, loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.0296, over 4791.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2125, pruned_loss=0.03383, over 972945.05 frames.], batch size: 18, lr: 2.40e-04 +2022-05-06 09:52:35,336 INFO [train.py:715] (6/8) Epoch 9, batch 3850, loss[loss=0.1841, simple_loss=0.2509, pruned_loss=0.05864, over 4741.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2127, pruned_loss=0.0337, over 973385.03 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 09:53:14,339 INFO [train.py:715] (6/8) Epoch 9, batch 3900, loss[loss=0.2144, simple_loss=0.2964, pruned_loss=0.06623, over 4754.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2139, pruned_loss=0.03446, over 973341.29 frames.], batch size: 14, lr: 2.40e-04 +2022-05-06 09:53:53,828 INFO [train.py:715] (6/8) Epoch 9, batch 3950, loss[loss=0.1277, simple_loss=0.196, pruned_loss=0.0297, over 4955.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2127, pruned_loss=0.03408, over 972736.65 frames.], batch size: 14, lr: 2.40e-04 +2022-05-06 09:54:33,401 INFO [train.py:715] (6/8) Epoch 9, batch 4000, loss[loss=0.1179, simple_loss=0.1932, pruned_loss=0.02133, over 4853.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2129, pruned_loss=0.03408, over 972711.61 frames.], batch size: 32, lr: 2.40e-04 +2022-05-06 09:55:12,128 INFO [train.py:715] (6/8) 
Epoch 9, batch 4050, loss[loss=0.1131, simple_loss=0.1804, pruned_loss=0.02289, over 4941.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2128, pruned_loss=0.03432, over 971760.87 frames.], batch size: 29, lr: 2.40e-04 +2022-05-06 09:55:52,105 INFO [train.py:715] (6/8) Epoch 9, batch 4100, loss[loss=0.134, simple_loss=0.2124, pruned_loss=0.02784, over 4965.00 frames.], tot_loss[loss=0.1418, simple_loss=0.214, pruned_loss=0.03481, over 972716.68 frames.], batch size: 24, lr: 2.40e-04 +2022-05-06 09:56:30,808 INFO [train.py:715] (6/8) Epoch 9, batch 4150, loss[loss=0.1479, simple_loss=0.2161, pruned_loss=0.03984, over 4950.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2134, pruned_loss=0.03464, over 972678.16 frames.], batch size: 39, lr: 2.40e-04 +2022-05-06 09:57:10,159 INFO [train.py:715] (6/8) Epoch 9, batch 4200, loss[loss=0.1336, simple_loss=0.2087, pruned_loss=0.02926, over 4917.00 frames.], tot_loss[loss=0.142, simple_loss=0.2139, pruned_loss=0.03509, over 972516.46 frames.], batch size: 29, lr: 2.40e-04 +2022-05-06 09:57:49,720 INFO [train.py:715] (6/8) Epoch 9, batch 4250, loss[loss=0.134, simple_loss=0.2055, pruned_loss=0.0313, over 4966.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2141, pruned_loss=0.035, over 971887.69 frames.], batch size: 35, lr: 2.40e-04 +2022-05-06 09:58:29,620 INFO [train.py:715] (6/8) Epoch 9, batch 4300, loss[loss=0.1393, simple_loss=0.212, pruned_loss=0.03327, over 4771.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2146, pruned_loss=0.03508, over 971479.02 frames.], batch size: 18, lr: 2.40e-04 +2022-05-06 09:59:09,601 INFO [train.py:715] (6/8) Epoch 9, batch 4350, loss[loss=0.1333, simple_loss=0.1964, pruned_loss=0.03507, over 4792.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2141, pruned_loss=0.03485, over 971829.79 frames.], batch size: 24, lr: 2.40e-04 +2022-05-06 09:59:48,196 INFO [train.py:715] (6/8) Epoch 9, batch 4400, loss[loss=0.1279, simple_loss=0.2002, pruned_loss=0.02785, over 4764.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2142, pruned_loss=0.03469, over 972308.48 frames.], batch size: 14, lr: 2.40e-04 +2022-05-06 10:00:27,694 INFO [train.py:715] (6/8) Epoch 9, batch 4450, loss[loss=0.1123, simple_loss=0.1828, pruned_loss=0.02091, over 4837.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2144, pruned_loss=0.03489, over 972209.54 frames.], batch size: 13, lr: 2.40e-04 +2022-05-06 10:01:06,481 INFO [train.py:715] (6/8) Epoch 9, batch 4500, loss[loss=0.1578, simple_loss=0.227, pruned_loss=0.04434, over 4969.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2145, pruned_loss=0.03493, over 971141.65 frames.], batch size: 14, lr: 2.40e-04 +2022-05-06 10:01:45,450 INFO [train.py:715] (6/8) Epoch 9, batch 4550, loss[loss=0.1548, simple_loss=0.2357, pruned_loss=0.03694, over 4881.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.03446, over 971935.68 frames.], batch size: 22, lr: 2.40e-04 +2022-05-06 10:02:24,726 INFO [train.py:715] (6/8) Epoch 9, batch 4600, loss[loss=0.1326, simple_loss=0.1987, pruned_loss=0.03322, over 4891.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03408, over 972002.32 frames.], batch size: 22, lr: 2.40e-04 +2022-05-06 10:03:04,294 INFO [train.py:715] (6/8) Epoch 9, batch 4650, loss[loss=0.1316, simple_loss=0.2096, pruned_loss=0.02679, over 4957.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.03405, over 972055.36 frames.], batch size: 24, lr: 2.40e-04 +2022-05-06 10:03:43,903 INFO [train.py:715] (6/8) Epoch 9, batch 4700, loss[loss=0.1711, 
simple_loss=0.2365, pruned_loss=0.0529, over 4986.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2144, pruned_loss=0.03475, over 972594.22 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 10:04:22,850 INFO [train.py:715] (6/8) Epoch 9, batch 4750, loss[loss=0.1205, simple_loss=0.1954, pruned_loss=0.02279, over 4854.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2136, pruned_loss=0.0339, over 973778.08 frames.], batch size: 32, lr: 2.40e-04 +2022-05-06 10:05:02,426 INFO [train.py:715] (6/8) Epoch 9, batch 4800, loss[loss=0.1139, simple_loss=0.1921, pruned_loss=0.01781, over 4862.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03372, over 974176.91 frames.], batch size: 20, lr: 2.40e-04 +2022-05-06 10:05:41,424 INFO [train.py:715] (6/8) Epoch 9, batch 4850, loss[loss=0.1393, simple_loss=0.2111, pruned_loss=0.03375, over 4979.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2126, pruned_loss=0.03383, over 973114.05 frames.], batch size: 25, lr: 2.40e-04 +2022-05-06 10:06:20,856 INFO [train.py:715] (6/8) Epoch 9, batch 4900, loss[loss=0.155, simple_loss=0.2192, pruned_loss=0.0454, over 4911.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2128, pruned_loss=0.03369, over 972743.45 frames.], batch size: 18, lr: 2.40e-04 +2022-05-06 10:06:59,739 INFO [train.py:715] (6/8) Epoch 9, batch 4950, loss[loss=0.1425, simple_loss=0.2114, pruned_loss=0.03674, over 4780.00 frames.], tot_loss[loss=0.141, simple_loss=0.2136, pruned_loss=0.03415, over 972080.41 frames.], batch size: 14, lr: 2.40e-04 +2022-05-06 10:07:39,118 INFO [train.py:715] (6/8) Epoch 9, batch 5000, loss[loss=0.125, simple_loss=0.1956, pruned_loss=0.02722, over 4985.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2135, pruned_loss=0.0337, over 972283.18 frames.], batch size: 31, lr: 2.40e-04 +2022-05-06 10:08:18,418 INFO [train.py:715] (6/8) Epoch 9, batch 5050, loss[loss=0.1265, simple_loss=0.2004, pruned_loss=0.02629, over 4706.00 frames.], tot_loss[loss=0.141, simple_loss=0.2141, pruned_loss=0.03401, over 972139.76 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 10:08:57,173 INFO [train.py:715] (6/8) Epoch 9, batch 5100, loss[loss=0.1433, simple_loss=0.2208, pruned_loss=0.03288, over 4840.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2148, pruned_loss=0.03426, over 972571.06 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 10:09:36,562 INFO [train.py:715] (6/8) Epoch 9, batch 5150, loss[loss=0.1411, simple_loss=0.2207, pruned_loss=0.03074, over 4952.00 frames.], tot_loss[loss=0.141, simple_loss=0.2143, pruned_loss=0.0339, over 972529.17 frames.], batch size: 24, lr: 2.40e-04 +2022-05-06 10:10:15,468 INFO [train.py:715] (6/8) Epoch 9, batch 5200, loss[loss=0.1159, simple_loss=0.1922, pruned_loss=0.0198, over 4757.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2142, pruned_loss=0.0343, over 972194.87 frames.], batch size: 19, lr: 2.40e-04 +2022-05-06 10:10:54,749 INFO [train.py:715] (6/8) Epoch 9, batch 5250, loss[loss=0.1692, simple_loss=0.2301, pruned_loss=0.05412, over 4748.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2144, pruned_loss=0.03417, over 971493.48 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 10:11:33,956 INFO [train.py:715] (6/8) Epoch 9, batch 5300, loss[loss=0.09798, simple_loss=0.1748, pruned_loss=0.01058, over 4992.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2144, pruned_loss=0.03423, over 972742.66 frames.], batch size: 14, lr: 2.39e-04 +2022-05-06 10:12:13,448 INFO [train.py:715] (6/8) Epoch 9, batch 5350, loss[loss=0.1437, simple_loss=0.1971, pruned_loss=0.04518, 
over 4975.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2145, pruned_loss=0.03466, over 972442.05 frames.], batch size: 14, lr: 2.39e-04 +2022-05-06 10:12:52,105 INFO [train.py:715] (6/8) Epoch 9, batch 5400, loss[loss=0.1505, simple_loss=0.23, pruned_loss=0.03548, over 4809.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2143, pruned_loss=0.03444, over 972465.89 frames.], batch size: 25, lr: 2.39e-04 +2022-05-06 10:13:30,900 INFO [train.py:715] (6/8) Epoch 9, batch 5450, loss[loss=0.1379, simple_loss=0.2169, pruned_loss=0.02942, over 4930.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.03431, over 972043.79 frames.], batch size: 29, lr: 2.39e-04 +2022-05-06 10:14:10,211 INFO [train.py:715] (6/8) Epoch 9, batch 5500, loss[loss=0.1474, simple_loss=0.2135, pruned_loss=0.04068, over 4752.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2142, pruned_loss=0.03462, over 973121.65 frames.], batch size: 19, lr: 2.39e-04 +2022-05-06 10:14:49,296 INFO [train.py:715] (6/8) Epoch 9, batch 5550, loss[loss=0.199, simple_loss=0.256, pruned_loss=0.07104, over 4981.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2139, pruned_loss=0.03489, over 973368.43 frames.], batch size: 35, lr: 2.39e-04 +2022-05-06 10:15:28,468 INFO [train.py:715] (6/8) Epoch 9, batch 5600, loss[loss=0.1527, simple_loss=0.2189, pruned_loss=0.04325, over 4911.00 frames.], tot_loss[loss=0.142, simple_loss=0.214, pruned_loss=0.03499, over 972775.88 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:16:07,456 INFO [train.py:715] (6/8) Epoch 9, batch 5650, loss[loss=0.1534, simple_loss=0.2306, pruned_loss=0.03807, over 4779.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2138, pruned_loss=0.03457, over 972276.39 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:16:47,100 INFO [train.py:715] (6/8) Epoch 9, batch 5700, loss[loss=0.1519, simple_loss=0.2201, pruned_loss=0.04186, over 4949.00 frames.], tot_loss[loss=0.141, simple_loss=0.2134, pruned_loss=0.03435, over 973126.87 frames.], batch size: 15, lr: 2.39e-04 +2022-05-06 10:17:26,141 INFO [train.py:715] (6/8) Epoch 9, batch 5750, loss[loss=0.1057, simple_loss=0.1771, pruned_loss=0.01715, over 4933.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2134, pruned_loss=0.03438, over 973239.43 frames.], batch size: 23, lr: 2.39e-04 +2022-05-06 10:18:04,786 INFO [train.py:715] (6/8) Epoch 9, batch 5800, loss[loss=0.1245, simple_loss=0.1882, pruned_loss=0.03038, over 4750.00 frames.], tot_loss[loss=0.142, simple_loss=0.214, pruned_loss=0.03499, over 974180.58 frames.], batch size: 19, lr: 2.39e-04 +2022-05-06 10:18:44,316 INFO [train.py:715] (6/8) Epoch 9, batch 5850, loss[loss=0.1391, simple_loss=0.1997, pruned_loss=0.03927, over 4968.00 frames.], tot_loss[loss=0.142, simple_loss=0.214, pruned_loss=0.03501, over 974227.98 frames.], batch size: 15, lr: 2.39e-04 +2022-05-06 10:19:23,131 INFO [train.py:715] (6/8) Epoch 9, batch 5900, loss[loss=0.1739, simple_loss=0.2527, pruned_loss=0.04757, over 4992.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2134, pruned_loss=0.03451, over 974275.85 frames.], batch size: 15, lr: 2.39e-04 +2022-05-06 10:20:02,780 INFO [train.py:715] (6/8) Epoch 9, batch 5950, loss[loss=0.1443, simple_loss=0.2161, pruned_loss=0.03622, over 4782.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2136, pruned_loss=0.03448, over 973961.52 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:20:41,538 INFO [train.py:715] (6/8) Epoch 9, batch 6000, loss[loss=0.1687, simple_loss=0.2416, pruned_loss=0.04795, over 4763.00 frames.], 
tot_loss[loss=0.1417, simple_loss=0.2142, pruned_loss=0.03461, over 974103.96 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:20:41,539 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 10:20:51,194 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.107, simple_loss=0.1914, pruned_loss=0.0113, over 914524.00 frames. +2022-05-06 10:21:30,887 INFO [train.py:715] (6/8) Epoch 9, batch 6050, loss[loss=0.1142, simple_loss=0.191, pruned_loss=0.01863, over 4803.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2144, pruned_loss=0.03486, over 973866.74 frames.], batch size: 17, lr: 2.39e-04 +2022-05-06 10:22:10,755 INFO [train.py:715] (6/8) Epoch 9, batch 6100, loss[loss=0.1782, simple_loss=0.2371, pruned_loss=0.05962, over 4918.00 frames.], tot_loss[loss=0.142, simple_loss=0.2142, pruned_loss=0.03488, over 973060.43 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:22:49,975 INFO [train.py:715] (6/8) Epoch 9, batch 6150, loss[loss=0.1531, simple_loss=0.2258, pruned_loss=0.04021, over 4781.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2138, pruned_loss=0.03477, over 972277.09 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:23:28,786 INFO [train.py:715] (6/8) Epoch 9, batch 6200, loss[loss=0.1414, simple_loss=0.2226, pruned_loss=0.0301, over 4869.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2136, pruned_loss=0.03456, over 971825.13 frames.], batch size: 16, lr: 2.39e-04 +2022-05-06 10:24:08,420 INFO [train.py:715] (6/8) Epoch 9, batch 6250, loss[loss=0.1352, simple_loss=0.2086, pruned_loss=0.03087, over 4833.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2136, pruned_loss=0.03433, over 971423.74 frames.], batch size: 26, lr: 2.39e-04 +2022-05-06 10:24:47,200 INFO [train.py:715] (6/8) Epoch 9, batch 6300, loss[loss=0.1336, simple_loss=0.1994, pruned_loss=0.03391, over 4924.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2133, pruned_loss=0.03426, over 972350.72 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:25:26,320 INFO [train.py:715] (6/8) Epoch 9, batch 6350, loss[loss=0.1462, simple_loss=0.2139, pruned_loss=0.03926, over 4956.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2137, pruned_loss=0.03458, over 972251.19 frames.], batch size: 35, lr: 2.39e-04 +2022-05-06 10:26:05,952 INFO [train.py:715] (6/8) Epoch 9, batch 6400, loss[loss=0.1378, simple_loss=0.2042, pruned_loss=0.0357, over 4746.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2144, pruned_loss=0.03461, over 970538.88 frames.], batch size: 16, lr: 2.39e-04 +2022-05-06 10:26:46,101 INFO [train.py:715] (6/8) Epoch 9, batch 6450, loss[loss=0.1159, simple_loss=0.1863, pruned_loss=0.02273, over 4802.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2141, pruned_loss=0.03478, over 971654.89 frames.], batch size: 12, lr: 2.39e-04 +2022-05-06 10:27:25,427 INFO [train.py:715] (6/8) Epoch 9, batch 6500, loss[loss=0.1281, simple_loss=0.1954, pruned_loss=0.03042, over 4975.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2139, pruned_loss=0.03473, over 972178.88 frames.], batch size: 14, lr: 2.39e-04 +2022-05-06 10:28:04,261 INFO [train.py:715] (6/8) Epoch 9, batch 6550, loss[loss=0.1279, simple_loss=0.1977, pruned_loss=0.02904, over 4761.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2138, pruned_loss=0.03478, over 972122.12 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:28:44,039 INFO [train.py:715] (6/8) Epoch 9, batch 6600, loss[loss=0.1557, simple_loss=0.2205, pruned_loss=0.04543, over 4644.00 frames.], tot_loss[loss=0.142, simple_loss=0.2141, pruned_loss=0.03497, over 972279.26 
frames.], batch size: 13, lr: 2.39e-04 +2022-05-06 10:29:23,596 INFO [train.py:715] (6/8) Epoch 9, batch 6650, loss[loss=0.1658, simple_loss=0.2276, pruned_loss=0.052, over 4856.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2145, pruned_loss=0.03516, over 971468.56 frames.], batch size: 30, lr: 2.39e-04 +2022-05-06 10:30:02,753 INFO [train.py:715] (6/8) Epoch 9, batch 6700, loss[loss=0.1428, simple_loss=0.22, pruned_loss=0.03274, over 4778.00 frames.], tot_loss[loss=0.1416, simple_loss=0.214, pruned_loss=0.03464, over 971459.64 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:30:44,176 INFO [train.py:715] (6/8) Epoch 9, batch 6750, loss[loss=0.159, simple_loss=0.2374, pruned_loss=0.04032, over 4938.00 frames.], tot_loss[loss=0.1428, simple_loss=0.215, pruned_loss=0.03527, over 972497.08 frames.], batch size: 29, lr: 2.39e-04 +2022-05-06 10:31:23,604 INFO [train.py:715] (6/8) Epoch 9, batch 6800, loss[loss=0.1577, simple_loss=0.2236, pruned_loss=0.04587, over 4791.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2145, pruned_loss=0.03503, over 972626.15 frames.], batch size: 24, lr: 2.39e-04 +2022-05-06 10:32:02,564 INFO [train.py:715] (6/8) Epoch 9, batch 6850, loss[loss=0.1518, simple_loss=0.2273, pruned_loss=0.03816, over 4819.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2145, pruned_loss=0.03487, over 972699.21 frames.], batch size: 15, lr: 2.39e-04 +2022-05-06 10:32:40,758 INFO [train.py:715] (6/8) Epoch 9, batch 6900, loss[loss=0.18, simple_loss=0.2512, pruned_loss=0.05441, over 4892.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03476, over 972495.07 frames.], batch size: 39, lr: 2.39e-04 +2022-05-06 10:33:20,061 INFO [train.py:715] (6/8) Epoch 9, batch 6950, loss[loss=0.1512, simple_loss=0.224, pruned_loss=0.03914, over 4700.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2151, pruned_loss=0.03482, over 970987.75 frames.], batch size: 15, lr: 2.39e-04 +2022-05-06 10:33:59,866 INFO [train.py:715] (6/8) Epoch 9, batch 7000, loss[loss=0.143, simple_loss=0.2266, pruned_loss=0.02973, over 4981.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2156, pruned_loss=0.03466, over 971312.10 frames.], batch size: 28, lr: 2.39e-04 +2022-05-06 10:34:38,729 INFO [train.py:715] (6/8) Epoch 9, batch 7050, loss[loss=0.14, simple_loss=0.2117, pruned_loss=0.03414, over 4830.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2148, pruned_loss=0.03434, over 972256.17 frames.], batch size: 30, lr: 2.39e-04 +2022-05-06 10:35:17,348 INFO [train.py:715] (6/8) Epoch 9, batch 7100, loss[loss=0.1354, simple_loss=0.2064, pruned_loss=0.03217, over 4891.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2155, pruned_loss=0.03463, over 971986.20 frames.], batch size: 22, lr: 2.39e-04 +2022-05-06 10:35:56,811 INFO [train.py:715] (6/8) Epoch 9, batch 7150, loss[loss=0.1386, simple_loss=0.2074, pruned_loss=0.03491, over 4858.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2154, pruned_loss=0.03483, over 972433.70 frames.], batch size: 20, lr: 2.39e-04 +2022-05-06 10:36:35,508 INFO [train.py:715] (6/8) Epoch 9, batch 7200, loss[loss=0.1463, simple_loss=0.2055, pruned_loss=0.04353, over 4806.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2151, pruned_loss=0.03463, over 971400.42 frames.], batch size: 17, lr: 2.39e-04 +2022-05-06 10:37:14,251 INFO [train.py:715] (6/8) Epoch 9, batch 7250, loss[loss=0.1716, simple_loss=0.2284, pruned_loss=0.05735, over 4842.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2147, pruned_loss=0.03447, over 972005.19 frames.], batch size: 13, lr: 2.39e-04 
+2022-05-06 10:37:53,497 INFO [train.py:715] (6/8) Epoch 9, batch 7300, loss[loss=0.174, simple_loss=0.2448, pruned_loss=0.05163, over 4915.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2152, pruned_loss=0.03477, over 972854.48 frames.], batch size: 19, lr: 2.39e-04 +2022-05-06 10:38:32,802 INFO [train.py:715] (6/8) Epoch 9, batch 7350, loss[loss=0.1608, simple_loss=0.2277, pruned_loss=0.04691, over 4885.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2156, pruned_loss=0.03543, over 973270.30 frames.], batch size: 39, lr: 2.39e-04 +2022-05-06 10:39:11,300 INFO [train.py:715] (6/8) Epoch 9, batch 7400, loss[loss=0.123, simple_loss=0.1995, pruned_loss=0.02325, over 4931.00 frames.], tot_loss[loss=0.143, simple_loss=0.2153, pruned_loss=0.03535, over 973604.23 frames.], batch size: 23, lr: 2.39e-04 +2022-05-06 10:39:50,259 INFO [train.py:715] (6/8) Epoch 9, batch 7450, loss[loss=0.1566, simple_loss=0.23, pruned_loss=0.04161, over 4969.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2162, pruned_loss=0.03575, over 974519.22 frames.], batch size: 15, lr: 2.39e-04 +2022-05-06 10:40:30,206 INFO [train.py:715] (6/8) Epoch 9, batch 7500, loss[loss=0.1379, simple_loss=0.2023, pruned_loss=0.03675, over 4830.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.03541, over 974638.17 frames.], batch size: 12, lr: 2.39e-04 +2022-05-06 10:41:09,248 INFO [train.py:715] (6/8) Epoch 9, batch 7550, loss[loss=0.1535, simple_loss=0.2378, pruned_loss=0.03454, over 4986.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2136, pruned_loss=0.03453, over 972869.26 frames.], batch size: 28, lr: 2.39e-04 +2022-05-06 10:41:48,089 INFO [train.py:715] (6/8) Epoch 9, batch 7600, loss[loss=0.1621, simple_loss=0.2247, pruned_loss=0.04976, over 4935.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2147, pruned_loss=0.03507, over 972630.02 frames.], batch size: 23, lr: 2.39e-04 +2022-05-06 10:42:27,543 INFO [train.py:715] (6/8) Epoch 9, batch 7650, loss[loss=0.1619, simple_loss=0.2138, pruned_loss=0.05495, over 4844.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2142, pruned_loss=0.03486, over 971780.97 frames.], batch size: 30, lr: 2.39e-04 +2022-05-06 10:43:06,745 INFO [train.py:715] (6/8) Epoch 9, batch 7700, loss[loss=0.1504, simple_loss=0.2209, pruned_loss=0.03996, over 4887.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2144, pruned_loss=0.03489, over 971684.47 frames.], batch size: 22, lr: 2.39e-04 +2022-05-06 10:43:45,565 INFO [train.py:715] (6/8) Epoch 9, batch 7750, loss[loss=0.1289, simple_loss=0.1943, pruned_loss=0.03174, over 4847.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2138, pruned_loss=0.03472, over 973014.78 frames.], batch size: 13, lr: 2.39e-04 +2022-05-06 10:44:24,379 INFO [train.py:715] (6/8) Epoch 9, batch 7800, loss[loss=0.1223, simple_loss=0.2063, pruned_loss=0.0192, over 4967.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2137, pruned_loss=0.0346, over 973104.41 frames.], batch size: 24, lr: 2.39e-04 +2022-05-06 10:45:04,418 INFO [train.py:715] (6/8) Epoch 9, batch 7850, loss[loss=0.1475, simple_loss=0.2111, pruned_loss=0.0419, over 4690.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2137, pruned_loss=0.03464, over 972280.59 frames.], batch size: 15, lr: 2.39e-04 +2022-05-06 10:45:43,398 INFO [train.py:715] (6/8) Epoch 9, batch 7900, loss[loss=0.1594, simple_loss=0.2181, pruned_loss=0.0503, over 4908.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2141, pruned_loss=0.03455, over 972620.50 frames.], batch size: 17, lr: 2.39e-04 +2022-05-06 10:46:21,528 INFO 
[train.py:715] (6/8) Epoch 9, batch 7950, loss[loss=0.1507, simple_loss=0.2243, pruned_loss=0.03851, over 4975.00 frames.], tot_loss[loss=0.141, simple_loss=0.2135, pruned_loss=0.03426, over 973308.20 frames.], batch size: 24, lr: 2.39e-04 +2022-05-06 10:47:00,917 INFO [train.py:715] (6/8) Epoch 9, batch 8000, loss[loss=0.1539, simple_loss=0.2296, pruned_loss=0.03909, over 4785.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2152, pruned_loss=0.03487, over 973003.36 frames.], batch size: 14, lr: 2.38e-04 +2022-05-06 10:47:39,935 INFO [train.py:715] (6/8) Epoch 9, batch 8050, loss[loss=0.1595, simple_loss=0.2286, pruned_loss=0.04514, over 4984.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2156, pruned_loss=0.03483, over 972932.72 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 10:48:18,561 INFO [train.py:715] (6/8) Epoch 9, batch 8100, loss[loss=0.1386, simple_loss=0.2193, pruned_loss=0.02897, over 4813.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2154, pruned_loss=0.03502, over 971453.42 frames.], batch size: 27, lr: 2.38e-04 +2022-05-06 10:48:57,111 INFO [train.py:715] (6/8) Epoch 9, batch 8150, loss[loss=0.1706, simple_loss=0.2465, pruned_loss=0.0474, over 4801.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2152, pruned_loss=0.03512, over 971496.20 frames.], batch size: 17, lr: 2.38e-04 +2022-05-06 10:49:36,461 INFO [train.py:715] (6/8) Epoch 9, batch 8200, loss[loss=0.1253, simple_loss=0.2025, pruned_loss=0.02405, over 4717.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2153, pruned_loss=0.03474, over 971396.15 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 10:50:15,127 INFO [train.py:715] (6/8) Epoch 9, batch 8250, loss[loss=0.1249, simple_loss=0.2, pruned_loss=0.02487, over 4912.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2153, pruned_loss=0.0348, over 971638.08 frames.], batch size: 17, lr: 2.38e-04 +2022-05-06 10:50:53,698 INFO [train.py:715] (6/8) Epoch 9, batch 8300, loss[loss=0.1704, simple_loss=0.251, pruned_loss=0.04489, over 4899.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2148, pruned_loss=0.03448, over 971857.14 frames.], batch size: 19, lr: 2.38e-04 +2022-05-06 10:51:32,743 INFO [train.py:715] (6/8) Epoch 9, batch 8350, loss[loss=0.1492, simple_loss=0.2243, pruned_loss=0.037, over 4962.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2149, pruned_loss=0.03478, over 971998.70 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 10:52:12,415 INFO [train.py:715] (6/8) Epoch 9, batch 8400, loss[loss=0.1309, simple_loss=0.2086, pruned_loss=0.02654, over 4749.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.03451, over 971104.40 frames.], batch size: 16, lr: 2.38e-04 +2022-05-06 10:52:50,773 INFO [train.py:715] (6/8) Epoch 9, batch 8450, loss[loss=0.1318, simple_loss=0.2008, pruned_loss=0.03145, over 4762.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2141, pruned_loss=0.03438, over 971682.08 frames.], batch size: 19, lr: 2.38e-04 +2022-05-06 10:53:29,413 INFO [train.py:715] (6/8) Epoch 9, batch 8500, loss[loss=0.1322, simple_loss=0.214, pruned_loss=0.02516, over 4938.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2143, pruned_loss=0.03516, over 972314.75 frames.], batch size: 29, lr: 2.38e-04 +2022-05-06 10:54:08,961 INFO [train.py:715] (6/8) Epoch 9, batch 8550, loss[loss=0.1273, simple_loss=0.2001, pruned_loss=0.02723, over 4813.00 frames.], tot_loss[loss=0.1419, simple_loss=0.214, pruned_loss=0.03491, over 972093.38 frames.], batch size: 12, lr: 2.38e-04 +2022-05-06 10:54:48,130 INFO [train.py:715] (6/8) Epoch 9, batch 8600, 
loss[loss=0.1467, simple_loss=0.2257, pruned_loss=0.03387, over 4876.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2134, pruned_loss=0.03449, over 971648.30 frames.], batch size: 16, lr: 2.38e-04 +2022-05-06 10:55:26,987 INFO [train.py:715] (6/8) Epoch 9, batch 8650, loss[loss=0.146, simple_loss=0.2164, pruned_loss=0.03777, over 4932.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2139, pruned_loss=0.03441, over 971797.31 frames.], batch size: 21, lr: 2.38e-04 +2022-05-06 10:56:06,801 INFO [train.py:715] (6/8) Epoch 9, batch 8700, loss[loss=0.1932, simple_loss=0.2711, pruned_loss=0.05768, over 4816.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2142, pruned_loss=0.0346, over 971656.20 frames.], batch size: 13, lr: 2.38e-04 +2022-05-06 10:56:46,703 INFO [train.py:715] (6/8) Epoch 9, batch 8750, loss[loss=0.1288, simple_loss=0.1986, pruned_loss=0.02944, over 4898.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.03423, over 972809.29 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 10:57:25,013 INFO [train.py:715] (6/8) Epoch 9, batch 8800, loss[loss=0.1553, simple_loss=0.2248, pruned_loss=0.04289, over 4804.00 frames.], tot_loss[loss=0.141, simple_loss=0.2138, pruned_loss=0.03412, over 973683.19 frames.], batch size: 14, lr: 2.38e-04 +2022-05-06 10:58:04,393 INFO [train.py:715] (6/8) Epoch 9, batch 8850, loss[loss=0.1079, simple_loss=0.1792, pruned_loss=0.0183, over 4782.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.03386, over 973405.36 frames.], batch size: 14, lr: 2.38e-04 +2022-05-06 10:58:43,841 INFO [train.py:715] (6/8) Epoch 9, batch 8900, loss[loss=0.1501, simple_loss=0.2127, pruned_loss=0.04381, over 4735.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2141, pruned_loss=0.03446, over 972534.62 frames.], batch size: 16, lr: 2.38e-04 +2022-05-06 10:59:22,969 INFO [train.py:715] (6/8) Epoch 9, batch 8950, loss[loss=0.1243, simple_loss=0.1999, pruned_loss=0.02437, over 4829.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2127, pruned_loss=0.03395, over 972593.75 frames.], batch size: 25, lr: 2.38e-04 +2022-05-06 11:00:01,621 INFO [train.py:715] (6/8) Epoch 9, batch 9000, loss[loss=0.1836, simple_loss=0.2371, pruned_loss=0.06506, over 4786.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2134, pruned_loss=0.03398, over 972954.26 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:00:01,622 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 11:00:11,232 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.107, simple_loss=0.1914, pruned_loss=0.0113, over 914524.00 frames. 
+2022-05-06 11:00:49,920 INFO [train.py:715] (6/8) Epoch 9, batch 9050, loss[loss=0.1384, simple_loss=0.2099, pruned_loss=0.03349, over 4857.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2129, pruned_loss=0.03389, over 972638.34 frames.], batch size: 32, lr: 2.38e-04 +2022-05-06 11:01:30,083 INFO [train.py:715] (6/8) Epoch 9, batch 9100, loss[loss=0.1412, simple_loss=0.2206, pruned_loss=0.03087, over 4804.00 frames.], tot_loss[loss=0.1405, simple_loss=0.213, pruned_loss=0.034, over 972905.31 frames.], batch size: 14, lr: 2.38e-04 +2022-05-06 11:02:09,673 INFO [train.py:715] (6/8) Epoch 9, batch 9150, loss[loss=0.1061, simple_loss=0.1788, pruned_loss=0.01667, over 4741.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03374, over 973657.55 frames.], batch size: 12, lr: 2.38e-04 +2022-05-06 11:02:48,634 INFO [train.py:715] (6/8) Epoch 9, batch 9200, loss[loss=0.1857, simple_loss=0.2468, pruned_loss=0.0623, over 4987.00 frames.], tot_loss[loss=0.141, simple_loss=0.2134, pruned_loss=0.0343, over 973374.03 frames.], batch size: 35, lr: 2.38e-04 +2022-05-06 11:03:28,189 INFO [train.py:715] (6/8) Epoch 9, batch 9250, loss[loss=0.1415, simple_loss=0.2137, pruned_loss=0.0346, over 4942.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2133, pruned_loss=0.03423, over 973557.31 frames.], batch size: 21, lr: 2.38e-04 +2022-05-06 11:04:07,602 INFO [train.py:715] (6/8) Epoch 9, batch 9300, loss[loss=0.1235, simple_loss=0.2004, pruned_loss=0.02327, over 4983.00 frames.], tot_loss[loss=0.1403, simple_loss=0.213, pruned_loss=0.03382, over 973377.16 frames.], batch size: 25, lr: 2.38e-04 +2022-05-06 11:04:46,772 INFO [train.py:715] (6/8) Epoch 9, batch 9350, loss[loss=0.1391, simple_loss=0.2099, pruned_loss=0.03417, over 4775.00 frames.], tot_loss[loss=0.1392, simple_loss=0.212, pruned_loss=0.03324, over 973042.85 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:05:25,234 INFO [train.py:715] (6/8) Epoch 9, batch 9400, loss[loss=0.1434, simple_loss=0.2162, pruned_loss=0.03529, over 4906.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03359, over 972808.49 frames.], batch size: 23, lr: 2.38e-04 +2022-05-06 11:06:05,138 INFO [train.py:715] (6/8) Epoch 9, batch 9450, loss[loss=0.1087, simple_loss=0.1848, pruned_loss=0.01635, over 4779.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.03328, over 972165.22 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:06:44,280 INFO [train.py:715] (6/8) Epoch 9, batch 9500, loss[loss=0.1427, simple_loss=0.2024, pruned_loss=0.04152, over 4828.00 frames.], tot_loss[loss=0.1392, simple_loss=0.212, pruned_loss=0.03316, over 972247.64 frames.], batch size: 13, lr: 2.38e-04 +2022-05-06 11:07:22,935 INFO [train.py:715] (6/8) Epoch 9, batch 9550, loss[loss=0.1356, simple_loss=0.212, pruned_loss=0.02964, over 4748.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03332, over 972213.30 frames.], batch size: 16, lr: 2.38e-04 +2022-05-06 11:08:02,132 INFO [train.py:715] (6/8) Epoch 9, batch 9600, loss[loss=0.1276, simple_loss=0.2021, pruned_loss=0.02657, over 4966.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03329, over 972552.72 frames.], batch size: 28, lr: 2.38e-04 +2022-05-06 11:08:41,401 INFO [train.py:715] (6/8) Epoch 9, batch 9650, loss[loss=0.1391, simple_loss=0.2053, pruned_loss=0.03646, over 4934.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2128, pruned_loss=0.03394, over 972998.11 frames.], batch size: 29, lr: 2.38e-04 +2022-05-06 11:09:20,431 INFO [train.py:715] 
(6/8) Epoch 9, batch 9700, loss[loss=0.159, simple_loss=0.2271, pruned_loss=0.04539, over 4884.00 frames.], tot_loss[loss=0.1407, simple_loss=0.213, pruned_loss=0.03423, over 973517.70 frames.], batch size: 19, lr: 2.38e-04 +2022-05-06 11:09:58,457 INFO [train.py:715] (6/8) Epoch 9, batch 9750, loss[loss=0.1522, simple_loss=0.2208, pruned_loss=0.04178, over 4837.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2135, pruned_loss=0.0344, over 972482.29 frames.], batch size: 13, lr: 2.38e-04 +2022-05-06 11:10:38,594 INFO [train.py:715] (6/8) Epoch 9, batch 9800, loss[loss=0.1599, simple_loss=0.2267, pruned_loss=0.04657, over 4900.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2137, pruned_loss=0.03481, over 972431.45 frames.], batch size: 22, lr: 2.38e-04 +2022-05-06 11:11:18,280 INFO [train.py:715] (6/8) Epoch 9, batch 9850, loss[loss=0.1973, simple_loss=0.2475, pruned_loss=0.07356, over 4910.00 frames.], tot_loss[loss=0.143, simple_loss=0.2149, pruned_loss=0.03556, over 972230.29 frames.], batch size: 19, lr: 2.38e-04 +2022-05-06 11:11:56,608 INFO [train.py:715] (6/8) Epoch 9, batch 9900, loss[loss=0.135, simple_loss=0.2145, pruned_loss=0.02772, over 4894.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2142, pruned_loss=0.03518, over 973216.90 frames.], batch size: 19, lr: 2.38e-04 +2022-05-06 11:12:35,818 INFO [train.py:715] (6/8) Epoch 9, batch 9950, loss[loss=0.1652, simple_loss=0.2397, pruned_loss=0.04536, over 4972.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2148, pruned_loss=0.03504, over 973054.57 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 11:13:15,754 INFO [train.py:715] (6/8) Epoch 9, batch 10000, loss[loss=0.1381, simple_loss=0.2154, pruned_loss=0.03038, over 4815.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03517, over 973455.13 frames.], batch size: 21, lr: 2.38e-04 +2022-05-06 11:13:55,093 INFO [train.py:715] (6/8) Epoch 9, batch 10050, loss[loss=0.1056, simple_loss=0.182, pruned_loss=0.01458, over 4929.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2159, pruned_loss=0.03527, over 973619.52 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:14:33,376 INFO [train.py:715] (6/8) Epoch 9, batch 10100, loss[loss=0.1524, simple_loss=0.228, pruned_loss=0.03842, over 4959.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2164, pruned_loss=0.03569, over 973598.10 frames.], batch size: 35, lr: 2.38e-04 +2022-05-06 11:15:12,912 INFO [train.py:715] (6/8) Epoch 9, batch 10150, loss[loss=0.1579, simple_loss=0.2374, pruned_loss=0.03922, over 4921.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.036, over 972749.70 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:15:52,572 INFO [train.py:715] (6/8) Epoch 9, batch 10200, loss[loss=0.1406, simple_loss=0.2043, pruned_loss=0.03844, over 4881.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2156, pruned_loss=0.03571, over 972378.75 frames.], batch size: 16, lr: 2.38e-04 +2022-05-06 11:16:31,365 INFO [train.py:715] (6/8) Epoch 9, batch 10250, loss[loss=0.157, simple_loss=0.2401, pruned_loss=0.03691, over 4988.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2163, pruned_loss=0.03559, over 972474.14 frames.], batch size: 28, lr: 2.38e-04 +2022-05-06 11:17:10,105 INFO [train.py:715] (6/8) Epoch 9, batch 10300, loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03225, over 4819.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2159, pruned_loss=0.03522, over 972082.02 frames.], batch size: 25, lr: 2.38e-04 +2022-05-06 11:17:49,728 INFO [train.py:715] (6/8) Epoch 9, batch 10350, 
loss[loss=0.153, simple_loss=0.2214, pruned_loss=0.04229, over 4982.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2153, pruned_loss=0.03485, over 972095.04 frames.], batch size: 39, lr: 2.38e-04 +2022-05-06 11:18:28,423 INFO [train.py:715] (6/8) Epoch 9, batch 10400, loss[loss=0.1399, simple_loss=0.2171, pruned_loss=0.03141, over 4978.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2149, pruned_loss=0.035, over 972433.82 frames.], batch size: 24, lr: 2.38e-04 +2022-05-06 11:19:06,743 INFO [train.py:715] (6/8) Epoch 9, batch 10450, loss[loss=0.1298, simple_loss=0.2109, pruned_loss=0.02433, over 4832.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2152, pruned_loss=0.03504, over 972615.24 frames.], batch size: 26, lr: 2.38e-04 +2022-05-06 11:19:45,853 INFO [train.py:715] (6/8) Epoch 9, batch 10500, loss[loss=0.1446, simple_loss=0.2089, pruned_loss=0.04018, over 4781.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2151, pruned_loss=0.03494, over 972437.04 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:20:25,285 INFO [train.py:715] (6/8) Epoch 9, batch 10550, loss[loss=0.1198, simple_loss=0.2051, pruned_loss=0.0172, over 4950.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2143, pruned_loss=0.03446, over 972259.84 frames.], batch size: 21, lr: 2.38e-04 +2022-05-06 11:21:04,102 INFO [train.py:715] (6/8) Epoch 9, batch 10600, loss[loss=0.1271, simple_loss=0.1919, pruned_loss=0.03114, over 4976.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.03426, over 973118.04 frames.], batch size: 28, lr: 2.38e-04 +2022-05-06 11:21:42,612 INFO [train.py:715] (6/8) Epoch 9, batch 10650, loss[loss=0.1432, simple_loss=0.2271, pruned_loss=0.02963, over 4815.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2152, pruned_loss=0.03469, over 973062.34 frames.], batch size: 25, lr: 2.38e-04 +2022-05-06 11:22:21,913 INFO [train.py:715] (6/8) Epoch 9, batch 10700, loss[loss=0.1516, simple_loss=0.2259, pruned_loss=0.03869, over 4913.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2153, pruned_loss=0.03447, over 972932.60 frames.], batch size: 18, lr: 2.37e-04 +2022-05-06 11:23:01,948 INFO [train.py:715] (6/8) Epoch 9, batch 10750, loss[loss=0.1804, simple_loss=0.2421, pruned_loss=0.05939, over 4985.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2159, pruned_loss=0.03483, over 973861.84 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:23:40,538 INFO [train.py:715] (6/8) Epoch 9, batch 10800, loss[loss=0.1452, simple_loss=0.2243, pruned_loss=0.03304, over 4795.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03525, over 973806.40 frames.], batch size: 21, lr: 2.37e-04 +2022-05-06 11:24:20,017 INFO [train.py:715] (6/8) Epoch 9, batch 10850, loss[loss=0.1399, simple_loss=0.2037, pruned_loss=0.03801, over 4978.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2161, pruned_loss=0.03528, over 973874.24 frames.], batch size: 33, lr: 2.37e-04 +2022-05-06 11:24:59,844 INFO [train.py:715] (6/8) Epoch 9, batch 10900, loss[loss=0.1652, simple_loss=0.2365, pruned_loss=0.04698, over 4959.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2156, pruned_loss=0.035, over 974153.05 frames.], batch size: 24, lr: 2.37e-04 +2022-05-06 11:25:40,139 INFO [train.py:715] (6/8) Epoch 9, batch 10950, loss[loss=0.1344, simple_loss=0.2113, pruned_loss=0.02873, over 4862.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03478, over 974251.27 frames.], batch size: 32, lr: 2.37e-04 +2022-05-06 11:26:20,018 INFO [train.py:715] (6/8) Epoch 9, batch 11000, loss[loss=0.1523, 
simple_loss=0.2332, pruned_loss=0.0357, over 4855.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2145, pruned_loss=0.03453, over 973773.00 frames.], batch size: 20, lr: 2.37e-04 +2022-05-06 11:27:00,852 INFO [train.py:715] (6/8) Epoch 9, batch 11050, loss[loss=0.1366, simple_loss=0.2066, pruned_loss=0.03333, over 4781.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2145, pruned_loss=0.03428, over 973295.86 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:27:42,117 INFO [train.py:715] (6/8) Epoch 9, batch 11100, loss[loss=0.1312, simple_loss=0.2042, pruned_loss=0.02909, over 4866.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2152, pruned_loss=0.0347, over 972842.00 frames.], batch size: 13, lr: 2.37e-04 +2022-05-06 11:28:22,785 INFO [train.py:715] (6/8) Epoch 9, batch 11150, loss[loss=0.1316, simple_loss=0.21, pruned_loss=0.02656, over 4852.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2152, pruned_loss=0.03463, over 973204.62 frames.], batch size: 20, lr: 2.37e-04 +2022-05-06 11:29:03,605 INFO [train.py:715] (6/8) Epoch 9, batch 11200, loss[loss=0.1257, simple_loss=0.2011, pruned_loss=0.02518, over 4791.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2157, pruned_loss=0.03486, over 973261.20 frames.], batch size: 18, lr: 2.37e-04 +2022-05-06 11:29:45,091 INFO [train.py:715] (6/8) Epoch 9, batch 11250, loss[loss=0.122, simple_loss=0.1973, pruned_loss=0.02337, over 4755.00 frames.], tot_loss[loss=0.143, simple_loss=0.2162, pruned_loss=0.03491, over 972708.79 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:30:26,207 INFO [train.py:715] (6/8) Epoch 9, batch 11300, loss[loss=0.1916, simple_loss=0.2577, pruned_loss=0.06273, over 4947.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2165, pruned_loss=0.03512, over 973645.51 frames.], batch size: 39, lr: 2.37e-04 +2022-05-06 11:31:06,653 INFO [train.py:715] (6/8) Epoch 9, batch 11350, loss[loss=0.1361, simple_loss=0.2155, pruned_loss=0.02834, over 4815.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2156, pruned_loss=0.03459, over 973552.92 frames.], batch size: 26, lr: 2.37e-04 +2022-05-06 11:31:47,935 INFO [train.py:715] (6/8) Epoch 9, batch 11400, loss[loss=0.1489, simple_loss=0.2308, pruned_loss=0.03347, over 4826.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2139, pruned_loss=0.0342, over 973998.18 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:32:29,500 INFO [train.py:715] (6/8) Epoch 9, batch 11450, loss[loss=0.1519, simple_loss=0.2189, pruned_loss=0.04244, over 4751.00 frames.], tot_loss[loss=0.142, simple_loss=0.2149, pruned_loss=0.03461, over 973017.39 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:33:10,080 INFO [train.py:715] (6/8) Epoch 9, batch 11500, loss[loss=0.1479, simple_loss=0.2224, pruned_loss=0.03674, over 4870.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.03432, over 973232.05 frames.], batch size: 22, lr: 2.37e-04 +2022-05-06 11:33:50,773 INFO [train.py:715] (6/8) Epoch 9, batch 11550, loss[loss=0.144, simple_loss=0.214, pruned_loss=0.03699, over 4842.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2133, pruned_loss=0.03406, over 972700.88 frames.], batch size: 30, lr: 2.37e-04 +2022-05-06 11:34:32,087 INFO [train.py:715] (6/8) Epoch 9, batch 11600, loss[loss=0.134, simple_loss=0.2012, pruned_loss=0.03339, over 4943.00 frames.], tot_loss[loss=0.1408, simple_loss=0.213, pruned_loss=0.03426, over 973001.06 frames.], batch size: 21, lr: 2.37e-04 +2022-05-06 11:35:13,602 INFO [train.py:715] (6/8) Epoch 9, batch 11650, loss[loss=0.1413, simple_loss=0.2133, 
pruned_loss=0.03463, over 4896.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2134, pruned_loss=0.03471, over 972565.17 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:35:53,527 INFO [train.py:715] (6/8) Epoch 9, batch 11700, loss[loss=0.1601, simple_loss=0.2275, pruned_loss=0.0463, over 4898.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2142, pruned_loss=0.03461, over 972343.06 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:36:34,984 INFO [train.py:715] (6/8) Epoch 9, batch 11750, loss[loss=0.1509, simple_loss=0.2218, pruned_loss=0.03998, over 4981.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.0343, over 971762.97 frames.], batch size: 31, lr: 2.37e-04 +2022-05-06 11:37:16,473 INFO [train.py:715] (6/8) Epoch 9, batch 11800, loss[loss=0.1266, simple_loss=0.1898, pruned_loss=0.03168, over 4955.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2128, pruned_loss=0.03407, over 971765.76 frames.], batch size: 14, lr: 2.37e-04 +2022-05-06 11:37:56,816 INFO [train.py:715] (6/8) Epoch 9, batch 11850, loss[loss=0.1574, simple_loss=0.2306, pruned_loss=0.04215, over 4949.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.03421, over 972619.35 frames.], batch size: 24, lr: 2.37e-04 +2022-05-06 11:38:37,235 INFO [train.py:715] (6/8) Epoch 9, batch 11900, loss[loss=0.1269, simple_loss=0.2065, pruned_loss=0.02365, over 4817.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.0334, over 972750.16 frames.], batch size: 13, lr: 2.37e-04 +2022-05-06 11:39:18,265 INFO [train.py:715] (6/8) Epoch 9, batch 11950, loss[loss=0.1193, simple_loss=0.1924, pruned_loss=0.02308, over 4929.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03361, over 973731.14 frames.], batch size: 21, lr: 2.37e-04 +2022-05-06 11:39:59,373 INFO [train.py:715] (6/8) Epoch 9, batch 12000, loss[loss=0.1224, simple_loss=0.1974, pruned_loss=0.02374, over 4755.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2122, pruned_loss=0.03355, over 973048.46 frames.], batch size: 17, lr: 2.37e-04 +2022-05-06 11:39:59,374 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 11:40:09,082 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.107, simple_loss=0.1913, pruned_loss=0.01136, over 914524.00 frames. 
+2022-05-06 11:40:50,134 INFO [train.py:715] (6/8) Epoch 9, batch 12050, loss[loss=0.155, simple_loss=0.2291, pruned_loss=0.04043, over 4834.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.0333, over 973152.48 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:41:29,638 INFO [train.py:715] (6/8) Epoch 9, batch 12100, loss[loss=0.1534, simple_loss=0.213, pruned_loss=0.04693, over 4821.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03329, over 973244.91 frames.], batch size: 13, lr: 2.37e-04 +2022-05-06 11:42:10,006 INFO [train.py:715] (6/8) Epoch 9, batch 12150, loss[loss=0.1401, simple_loss=0.215, pruned_loss=0.03265, over 4819.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03345, over 973157.40 frames.], batch size: 27, lr: 2.37e-04 +2022-05-06 11:42:50,022 INFO [train.py:715] (6/8) Epoch 9, batch 12200, loss[loss=0.1367, simple_loss=0.2054, pruned_loss=0.03402, over 4740.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2127, pruned_loss=0.03376, over 972998.45 frames.], batch size: 16, lr: 2.37e-04 +2022-05-06 11:43:29,259 INFO [train.py:715] (6/8) Epoch 9, batch 12250, loss[loss=0.1457, simple_loss=0.2088, pruned_loss=0.0413, over 4755.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2124, pruned_loss=0.03367, over 972497.71 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:44:08,222 INFO [train.py:715] (6/8) Epoch 9, batch 12300, loss[loss=0.1217, simple_loss=0.1915, pruned_loss=0.0259, over 4760.00 frames.], tot_loss[loss=0.14, simple_loss=0.2126, pruned_loss=0.03376, over 972676.73 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:44:47,996 INFO [train.py:715] (6/8) Epoch 9, batch 12350, loss[loss=0.1481, simple_loss=0.2262, pruned_loss=0.03504, over 4767.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03404, over 972420.21 frames.], batch size: 18, lr: 2.37e-04 +2022-05-06 11:45:28,035 INFO [train.py:715] (6/8) Epoch 9, batch 12400, loss[loss=0.1581, simple_loss=0.232, pruned_loss=0.04208, over 4754.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2137, pruned_loss=0.03421, over 972753.41 frames.], batch size: 16, lr: 2.37e-04 +2022-05-06 11:46:07,547 INFO [train.py:715] (6/8) Epoch 9, batch 12450, loss[loss=0.1331, simple_loss=0.2215, pruned_loss=0.0224, over 4884.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2141, pruned_loss=0.03414, over 972338.42 frames.], batch size: 16, lr: 2.37e-04 +2022-05-06 11:46:47,598 INFO [train.py:715] (6/8) Epoch 9, batch 12500, loss[loss=0.149, simple_loss=0.2178, pruned_loss=0.04011, over 4791.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2145, pruned_loss=0.03431, over 972584.73 frames.], batch size: 14, lr: 2.37e-04 +2022-05-06 11:47:27,730 INFO [train.py:715] (6/8) Epoch 9, batch 12550, loss[loss=0.1449, simple_loss=0.2206, pruned_loss=0.03456, over 4858.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2142, pruned_loss=0.03383, over 971821.31 frames.], batch size: 20, lr: 2.37e-04 +2022-05-06 11:48:07,695 INFO [train.py:715] (6/8) Epoch 9, batch 12600, loss[loss=0.1802, simple_loss=0.2307, pruned_loss=0.0649, over 4976.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2154, pruned_loss=0.03475, over 971748.22 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:48:46,464 INFO [train.py:715] (6/8) Epoch 9, batch 12650, loss[loss=0.1621, simple_loss=0.2367, pruned_loss=0.04377, over 4847.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2152, pruned_loss=0.03458, over 972401.73 frames.], batch size: 30, lr: 2.37e-04 +2022-05-06 11:49:26,600 INFO 
[train.py:715] (6/8) Epoch 9, batch 12700, loss[loss=0.1284, simple_loss=0.2126, pruned_loss=0.02207, over 4836.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2152, pruned_loss=0.03468, over 972497.19 frames.], batch size: 26, lr: 2.37e-04 +2022-05-06 11:50:06,590 INFO [train.py:715] (6/8) Epoch 9, batch 12750, loss[loss=0.1368, simple_loss=0.2054, pruned_loss=0.03412, over 4817.00 frames.], tot_loss[loss=0.1418, simple_loss=0.215, pruned_loss=0.03427, over 972573.17 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:50:45,759 INFO [train.py:715] (6/8) Epoch 9, batch 12800, loss[loss=0.1239, simple_loss=0.2009, pruned_loss=0.02349, over 4846.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2142, pruned_loss=0.03415, over 971937.71 frames.], batch size: 13, lr: 2.37e-04 +2022-05-06 11:51:25,608 INFO [train.py:715] (6/8) Epoch 9, batch 12850, loss[loss=0.1797, simple_loss=0.2489, pruned_loss=0.05521, over 4816.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2154, pruned_loss=0.03476, over 972420.39 frames.], batch size: 25, lr: 2.37e-04 +2022-05-06 11:52:05,500 INFO [train.py:715] (6/8) Epoch 9, batch 12900, loss[loss=0.131, simple_loss=0.207, pruned_loss=0.02748, over 4915.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2153, pruned_loss=0.0345, over 972210.54 frames.], batch size: 39, lr: 2.37e-04 +2022-05-06 11:52:45,479 INFO [train.py:715] (6/8) Epoch 9, batch 12950, loss[loss=0.1623, simple_loss=0.2304, pruned_loss=0.04705, over 4984.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2152, pruned_loss=0.03475, over 972277.41 frames.], batch size: 33, lr: 2.37e-04 +2022-05-06 11:53:24,505 INFO [train.py:715] (6/8) Epoch 9, batch 13000, loss[loss=0.1285, simple_loss=0.2025, pruned_loss=0.02728, over 4903.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2153, pruned_loss=0.03462, over 972391.76 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:54:04,859 INFO [train.py:715] (6/8) Epoch 9, batch 13050, loss[loss=0.115, simple_loss=0.1874, pruned_loss=0.02132, over 4885.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2149, pruned_loss=0.0342, over 972234.79 frames.], batch size: 22, lr: 2.37e-04 +2022-05-06 11:54:44,626 INFO [train.py:715] (6/8) Epoch 9, batch 13100, loss[loss=0.1636, simple_loss=0.2321, pruned_loss=0.04758, over 4907.00 frames.], tot_loss[loss=0.143, simple_loss=0.2156, pruned_loss=0.03513, over 971834.28 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:55:23,869 INFO [train.py:715] (6/8) Epoch 9, batch 13150, loss[loss=0.1445, simple_loss=0.2176, pruned_loss=0.03571, over 4993.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2158, pruned_loss=0.03547, over 971739.13 frames.], batch size: 14, lr: 2.37e-04 +2022-05-06 11:56:03,854 INFO [train.py:715] (6/8) Epoch 9, batch 13200, loss[loss=0.14, simple_loss=0.21, pruned_loss=0.03496, over 4783.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.0346, over 971658.87 frames.], batch size: 17, lr: 2.37e-04 +2022-05-06 11:56:44,170 INFO [train.py:715] (6/8) Epoch 9, batch 13250, loss[loss=0.1567, simple_loss=0.218, pruned_loss=0.04764, over 4829.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2142, pruned_loss=0.0347, over 972272.37 frames.], batch size: 26, lr: 2.37e-04 +2022-05-06 11:57:23,742 INFO [train.py:715] (6/8) Epoch 9, batch 13300, loss[loss=0.1287, simple_loss=0.202, pruned_loss=0.02773, over 4757.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2141, pruned_loss=0.0346, over 972679.34 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:58:03,449 INFO [train.py:715] (6/8) Epoch 9, 
batch 13350, loss[loss=0.1198, simple_loss=0.2052, pruned_loss=0.01714, over 4877.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2148, pruned_loss=0.035, over 972760.88 frames.], batch size: 16, lr: 2.37e-04 +2022-05-06 11:58:43,527 INFO [train.py:715] (6/8) Epoch 9, batch 13400, loss[loss=0.1456, simple_loss=0.2168, pruned_loss=0.03722, over 4948.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2145, pruned_loss=0.03524, over 973157.46 frames.], batch size: 21, lr: 2.37e-04 +2022-05-06 11:59:23,801 INFO [train.py:715] (6/8) Epoch 9, batch 13450, loss[loss=0.1467, simple_loss=0.2272, pruned_loss=0.03316, over 4762.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2152, pruned_loss=0.03521, over 972541.53 frames.], batch size: 19, lr: 2.36e-04 +2022-05-06 12:00:02,970 INFO [train.py:715] (6/8) Epoch 9, batch 13500, loss[loss=0.1413, simple_loss=0.2252, pruned_loss=0.02864, over 4884.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2151, pruned_loss=0.03517, over 972769.02 frames.], batch size: 22, lr: 2.36e-04 +2022-05-06 12:00:42,984 INFO [train.py:715] (6/8) Epoch 9, batch 13550, loss[loss=0.1309, simple_loss=0.2079, pruned_loss=0.02696, over 4945.00 frames.], tot_loss[loss=0.1427, simple_loss=0.215, pruned_loss=0.0352, over 972769.10 frames.], batch size: 21, lr: 2.36e-04 +2022-05-06 12:01:22,503 INFO [train.py:715] (6/8) Epoch 9, batch 13600, loss[loss=0.1366, simple_loss=0.2249, pruned_loss=0.02413, over 4905.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.0351, over 973331.38 frames.], batch size: 18, lr: 2.36e-04 +2022-05-06 12:02:01,623 INFO [train.py:715] (6/8) Epoch 9, batch 13650, loss[loss=0.1465, simple_loss=0.2328, pruned_loss=0.03009, over 4880.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2152, pruned_loss=0.0346, over 973288.22 frames.], batch size: 17, lr: 2.36e-04 +2022-05-06 12:02:40,855 INFO [train.py:715] (6/8) Epoch 9, batch 13700, loss[loss=0.1408, simple_loss=0.2155, pruned_loss=0.03308, over 4965.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2161, pruned_loss=0.03482, over 973010.92 frames.], batch size: 15, lr: 2.36e-04 +2022-05-06 12:03:20,738 INFO [train.py:715] (6/8) Epoch 9, batch 13750, loss[loss=0.1172, simple_loss=0.1962, pruned_loss=0.01915, over 4826.00 frames.], tot_loss[loss=0.143, simple_loss=0.2157, pruned_loss=0.0352, over 972527.63 frames.], batch size: 26, lr: 2.36e-04 +2022-05-06 12:03:59,888 INFO [train.py:715] (6/8) Epoch 9, batch 13800, loss[loss=0.1113, simple_loss=0.184, pruned_loss=0.01929, over 4959.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2161, pruned_loss=0.03548, over 973237.48 frames.], batch size: 29, lr: 2.36e-04 +2022-05-06 12:04:38,386 INFO [train.py:715] (6/8) Epoch 9, batch 13850, loss[loss=0.1376, simple_loss=0.2146, pruned_loss=0.03025, over 4932.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2163, pruned_loss=0.03539, over 973212.75 frames.], batch size: 29, lr: 2.36e-04 +2022-05-06 12:05:17,817 INFO [train.py:715] (6/8) Epoch 9, batch 13900, loss[loss=0.1464, simple_loss=0.2068, pruned_loss=0.04302, over 4799.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2154, pruned_loss=0.03486, over 971977.65 frames.], batch size: 12, lr: 2.36e-04 +2022-05-06 12:05:57,961 INFO [train.py:715] (6/8) Epoch 9, batch 13950, loss[loss=0.1253, simple_loss=0.2018, pruned_loss=0.02439, over 4856.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2152, pruned_loss=0.03482, over 971589.54 frames.], batch size: 32, lr: 2.36e-04 +2022-05-06 12:06:36,921 INFO [train.py:715] (6/8) Epoch 9, batch 14000, 
loss[loss=0.155, simple_loss=0.2361, pruned_loss=0.03695, over 4867.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2151, pruned_loss=0.03455, over 972008.19 frames.], batch size: 16, lr: 2.36e-04 +2022-05-06 12:07:16,029 INFO [train.py:715] (6/8) Epoch 9, batch 14050, loss[loss=0.1654, simple_loss=0.2397, pruned_loss=0.04557, over 4966.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.03426, over 972518.08 frames.], batch size: 15, lr: 2.36e-04 +2022-05-06 12:07:55,566 INFO [train.py:715] (6/8) Epoch 9, batch 14100, loss[loss=0.1587, simple_loss=0.2342, pruned_loss=0.0416, over 4935.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2145, pruned_loss=0.03496, over 973507.51 frames.], batch size: 39, lr: 2.36e-04 +2022-05-06 12:08:35,132 INFO [train.py:715] (6/8) Epoch 9, batch 14150, loss[loss=0.1973, simple_loss=0.2505, pruned_loss=0.07206, over 4956.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2143, pruned_loss=0.03504, over 973126.85 frames.], batch size: 39, lr: 2.36e-04 +2022-05-06 12:09:14,480 INFO [train.py:715] (6/8) Epoch 9, batch 14200, loss[loss=0.1235, simple_loss=0.1944, pruned_loss=0.0263, over 4975.00 frames.], tot_loss[loss=0.143, simple_loss=0.215, pruned_loss=0.03544, over 972839.81 frames.], batch size: 28, lr: 2.36e-04 +2022-05-06 12:09:53,803 INFO [train.py:715] (6/8) Epoch 9, batch 14250, loss[loss=0.1702, simple_loss=0.2459, pruned_loss=0.04731, over 4849.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2155, pruned_loss=0.03594, over 972873.55 frames.], batch size: 20, lr: 2.36e-04 +2022-05-06 12:10:33,296 INFO [train.py:715] (6/8) Epoch 9, batch 14300, loss[loss=0.1362, simple_loss=0.2053, pruned_loss=0.03356, over 4873.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2144, pruned_loss=0.03487, over 973471.91 frames.], batch size: 16, lr: 2.36e-04 +2022-05-06 12:11:11,973 INFO [train.py:715] (6/8) Epoch 9, batch 14350, loss[loss=0.137, simple_loss=0.2025, pruned_loss=0.03576, over 4902.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2141, pruned_loss=0.03463, over 973269.22 frames.], batch size: 17, lr: 2.36e-04 +2022-05-06 12:11:50,598 INFO [train.py:715] (6/8) Epoch 9, batch 14400, loss[loss=0.1358, simple_loss=0.2184, pruned_loss=0.02657, over 4756.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2148, pruned_loss=0.03493, over 971894.91 frames.], batch size: 19, lr: 2.36e-04 +2022-05-06 12:12:30,361 INFO [train.py:715] (6/8) Epoch 9, batch 14450, loss[loss=0.1416, simple_loss=0.225, pruned_loss=0.02912, over 4924.00 frames.], tot_loss[loss=0.1425, simple_loss=0.215, pruned_loss=0.03495, over 971576.71 frames.], batch size: 29, lr: 2.36e-04 +2022-05-06 12:13:09,686 INFO [train.py:715] (6/8) Epoch 9, batch 14500, loss[loss=0.1811, simple_loss=0.2466, pruned_loss=0.05778, over 4844.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.03528, over 971230.83 frames.], batch size: 20, lr: 2.36e-04 +2022-05-06 12:13:48,635 INFO [train.py:715] (6/8) Epoch 9, batch 14550, loss[loss=0.1307, simple_loss=0.2075, pruned_loss=0.02697, over 4768.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2148, pruned_loss=0.03503, over 970882.60 frames.], batch size: 19, lr: 2.36e-04 +2022-05-06 12:14:27,684 INFO [train.py:715] (6/8) Epoch 9, batch 14600, loss[loss=0.114, simple_loss=0.1888, pruned_loss=0.01956, over 4799.00 frames.], tot_loss[loss=0.142, simple_loss=0.2143, pruned_loss=0.03491, over 970767.90 frames.], batch size: 24, lr: 2.36e-04 +2022-05-06 12:15:07,388 INFO [train.py:715] (6/8) Epoch 9, batch 14650, loss[loss=0.128, 
simple_loss=0.2036, pruned_loss=0.02622, over 4796.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2147, pruned_loss=0.03493, over 971078.48 frames.], batch size: 24, lr: 2.36e-04 +2022-05-06 12:15:45,920 INFO [train.py:715] (6/8) Epoch 9, batch 14700, loss[loss=0.1413, simple_loss=0.2147, pruned_loss=0.03397, over 4871.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2157, pruned_loss=0.03541, over 971960.24 frames.], batch size: 22, lr: 2.36e-04 +2022-05-06 12:16:27,520 INFO [train.py:715] (6/8) Epoch 9, batch 14750, loss[loss=0.1534, simple_loss=0.2272, pruned_loss=0.03977, over 4934.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2157, pruned_loss=0.0354, over 971306.98 frames.], batch size: 23, lr: 2.36e-04 +2022-05-06 12:17:06,575 INFO [train.py:715] (6/8) Epoch 9, batch 14800, loss[loss=0.18, simple_loss=0.2389, pruned_loss=0.06051, over 4946.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03585, over 972164.44 frames.], batch size: 21, lr: 2.36e-04 +2022-05-06 12:17:45,496 INFO [train.py:715] (6/8) Epoch 9, batch 14850, loss[loss=0.1506, simple_loss=0.2248, pruned_loss=0.03821, over 4908.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03569, over 972374.14 frames.], batch size: 18, lr: 2.36e-04 +2022-05-06 12:18:24,548 INFO [train.py:715] (6/8) Epoch 9, batch 14900, loss[loss=0.1565, simple_loss=0.2359, pruned_loss=0.03858, over 4761.00 frames.], tot_loss[loss=0.1436, simple_loss=0.216, pruned_loss=0.03558, over 972345.27 frames.], batch size: 19, lr: 2.36e-04 +2022-05-06 12:19:03,081 INFO [train.py:715] (6/8) Epoch 9, batch 14950, loss[loss=0.1372, simple_loss=0.2088, pruned_loss=0.03285, over 4935.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2161, pruned_loss=0.0356, over 971984.59 frames.], batch size: 23, lr: 2.36e-04 +2022-05-06 12:19:42,677 INFO [train.py:715] (6/8) Epoch 9, batch 15000, loss[loss=0.1696, simple_loss=0.2354, pruned_loss=0.05186, over 4796.00 frames.], tot_loss[loss=0.1435, simple_loss=0.216, pruned_loss=0.03549, over 971256.20 frames.], batch size: 12, lr: 2.36e-04 +2022-05-06 12:19:42,677 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 12:19:52,344 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.1071, simple_loss=0.1915, pruned_loss=0.01139, over 914524.00 frames. 
+2022-05-06 12:20:32,096 INFO [train.py:715] (6/8) Epoch 9, batch 15050, loss[loss=0.1084, simple_loss=0.1651, pruned_loss=0.02583, over 4853.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2144, pruned_loss=0.03488, over 971292.60 frames.], batch size: 12, lr: 2.36e-04 +2022-05-06 12:21:11,101 INFO [train.py:715] (6/8) Epoch 9, batch 15100, loss[loss=0.1416, simple_loss=0.2255, pruned_loss=0.02889, over 4789.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03519, over 971367.69 frames.], batch size: 17, lr: 2.36e-04 +2022-05-06 12:21:50,202 INFO [train.py:715] (6/8) Epoch 9, batch 15150, loss[loss=0.1377, simple_loss=0.2106, pruned_loss=0.03242, over 4933.00 frames.], tot_loss[loss=0.1424, simple_loss=0.215, pruned_loss=0.03488, over 972585.09 frames.], batch size: 29, lr: 2.36e-04 +2022-05-06 12:22:30,011 INFO [train.py:715] (6/8) Epoch 9, batch 15200, loss[loss=0.1607, simple_loss=0.2267, pruned_loss=0.04739, over 4871.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2151, pruned_loss=0.0349, over 972816.24 frames.], batch size: 32, lr: 2.36e-04 +2022-05-06 12:23:09,334 INFO [train.py:715] (6/8) Epoch 9, batch 15250, loss[loss=0.1614, simple_loss=0.2287, pruned_loss=0.04707, over 4684.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03475, over 971765.10 frames.], batch size: 15, lr: 2.36e-04 +2022-05-06 12:23:48,033 INFO [train.py:715] (6/8) Epoch 9, batch 15300, loss[loss=0.1227, simple_loss=0.1885, pruned_loss=0.02843, over 4890.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2142, pruned_loss=0.03428, over 972112.33 frames.], batch size: 22, lr: 2.36e-04 +2022-05-06 12:24:27,150 INFO [train.py:715] (6/8) Epoch 9, batch 15350, loss[loss=0.1356, simple_loss=0.2087, pruned_loss=0.03131, over 4940.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2156, pruned_loss=0.03476, over 972849.70 frames.], batch size: 21, lr: 2.36e-04 +2022-05-06 12:25:06,187 INFO [train.py:715] (6/8) Epoch 9, batch 15400, loss[loss=0.1086, simple_loss=0.1877, pruned_loss=0.01473, over 4835.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2152, pruned_loss=0.03467, over 972475.74 frames.], batch size: 13, lr: 2.36e-04 +2022-05-06 12:25:44,961 INFO [train.py:715] (6/8) Epoch 9, batch 15450, loss[loss=0.1362, simple_loss=0.2106, pruned_loss=0.03084, over 4940.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2149, pruned_loss=0.03482, over 971787.70 frames.], batch size: 21, lr: 2.36e-04 +2022-05-06 12:26:23,387 INFO [train.py:715] (6/8) Epoch 9, batch 15500, loss[loss=0.1394, simple_loss=0.2233, pruned_loss=0.0278, over 4883.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2156, pruned_loss=0.03543, over 970071.20 frames.], batch size: 22, lr: 2.36e-04 +2022-05-06 12:27:03,116 INFO [train.py:715] (6/8) Epoch 9, batch 15550, loss[loss=0.1537, simple_loss=0.2276, pruned_loss=0.03994, over 4800.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2147, pruned_loss=0.0351, over 969895.98 frames.], batch size: 24, lr: 2.36e-04 +2022-05-06 12:27:41,878 INFO [train.py:715] (6/8) Epoch 9, batch 15600, loss[loss=0.142, simple_loss=0.2093, pruned_loss=0.03737, over 4695.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.0354, over 970673.00 frames.], batch size: 15, lr: 2.36e-04 +2022-05-06 12:28:20,226 INFO [train.py:715] (6/8) Epoch 9, batch 15650, loss[loss=0.1609, simple_loss=0.2291, pruned_loss=0.04637, over 4780.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2154, pruned_loss=0.03524, over 970718.20 frames.], batch size: 18, lr: 2.36e-04 +2022-05-06 12:28:59,315 
INFO [train.py:715] (6/8) Epoch 9, batch 15700, loss[loss=0.1414, simple_loss=0.2103, pruned_loss=0.03624, over 4925.00 frames.], tot_loss[loss=0.143, simple_loss=0.2153, pruned_loss=0.03539, over 970883.99 frames.], batch size: 23, lr: 2.36e-04 +2022-05-06 12:29:39,072 INFO [train.py:715] (6/8) Epoch 9, batch 15750, loss[loss=0.1186, simple_loss=0.1866, pruned_loss=0.02534, over 4790.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2152, pruned_loss=0.035, over 970927.84 frames.], batch size: 14, lr: 2.36e-04 +2022-05-06 12:30:17,868 INFO [train.py:715] (6/8) Epoch 9, batch 15800, loss[loss=0.1387, simple_loss=0.2127, pruned_loss=0.03236, over 4940.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2149, pruned_loss=0.03487, over 971458.67 frames.], batch size: 21, lr: 2.36e-04 +2022-05-06 12:30:56,798 INFO [train.py:715] (6/8) Epoch 9, batch 15850, loss[loss=0.1454, simple_loss=0.2233, pruned_loss=0.03376, over 4886.00 frames.], tot_loss[loss=0.1422, simple_loss=0.215, pruned_loss=0.03473, over 972493.76 frames.], batch size: 16, lr: 2.36e-04 +2022-05-06 12:31:36,401 INFO [train.py:715] (6/8) Epoch 9, batch 15900, loss[loss=0.1757, simple_loss=0.2523, pruned_loss=0.0495, over 4975.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2146, pruned_loss=0.03434, over 972277.64 frames.], batch size: 24, lr: 2.36e-04 +2022-05-06 12:32:15,977 INFO [train.py:715] (6/8) Epoch 9, batch 15950, loss[loss=0.1295, simple_loss=0.1977, pruned_loss=0.03066, over 4771.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2146, pruned_loss=0.03433, over 972329.28 frames.], batch size: 14, lr: 2.36e-04 +2022-05-06 12:32:54,618 INFO [train.py:715] (6/8) Epoch 9, batch 16000, loss[loss=0.1241, simple_loss=0.1873, pruned_loss=0.03048, over 4814.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2146, pruned_loss=0.03418, over 971402.34 frames.], batch size: 26, lr: 2.36e-04 +2022-05-06 12:33:33,297 INFO [train.py:715] (6/8) Epoch 9, batch 16050, loss[loss=0.1109, simple_loss=0.1799, pruned_loss=0.02098, over 4919.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2148, pruned_loss=0.03423, over 971603.60 frames.], batch size: 21, lr: 2.36e-04 +2022-05-06 12:34:12,509 INFO [train.py:715] (6/8) Epoch 9, batch 16100, loss[loss=0.1422, simple_loss=0.2178, pruned_loss=0.0333, over 4957.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2157, pruned_loss=0.03466, over 971824.49 frames.], batch size: 24, lr: 2.36e-04 +2022-05-06 12:34:51,592 INFO [train.py:715] (6/8) Epoch 9, batch 16150, loss[loss=0.1413, simple_loss=0.2022, pruned_loss=0.04022, over 4634.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2158, pruned_loss=0.03462, over 972004.71 frames.], batch size: 13, lr: 2.36e-04 +2022-05-06 12:35:30,769 INFO [train.py:715] (6/8) Epoch 9, batch 16200, loss[loss=0.1271, simple_loss=0.2056, pruned_loss=0.02432, over 4817.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2147, pruned_loss=0.03399, over 971584.03 frames.], batch size: 27, lr: 2.36e-04 +2022-05-06 12:36:10,110 INFO [train.py:715] (6/8) Epoch 9, batch 16250, loss[loss=0.1162, simple_loss=0.1982, pruned_loss=0.01704, over 4894.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2148, pruned_loss=0.03401, over 972553.23 frames.], batch size: 19, lr: 2.35e-04 +2022-05-06 12:36:49,788 INFO [train.py:715] (6/8) Epoch 9, batch 16300, loss[loss=0.1367, simple_loss=0.2055, pruned_loss=0.03393, over 4711.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2145, pruned_loss=0.03381, over 971345.49 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 12:37:27,729 INFO [train.py:715] 
(6/8) Epoch 9, batch 16350, loss[loss=0.1734, simple_loss=0.2456, pruned_loss=0.05057, over 4855.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2154, pruned_loss=0.03472, over 972005.17 frames.], batch size: 34, lr: 2.35e-04 +2022-05-06 12:38:07,163 INFO [train.py:715] (6/8) Epoch 9, batch 16400, loss[loss=0.1689, simple_loss=0.2245, pruned_loss=0.0567, over 4873.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.03494, over 971727.92 frames.], batch size: 16, lr: 2.35e-04 +2022-05-06 12:38:47,056 INFO [train.py:715] (6/8) Epoch 9, batch 16450, loss[loss=0.1475, simple_loss=0.2092, pruned_loss=0.04291, over 4843.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03518, over 972149.87 frames.], batch size: 13, lr: 2.35e-04 +2022-05-06 12:39:25,806 INFO [train.py:715] (6/8) Epoch 9, batch 16500, loss[loss=0.1352, simple_loss=0.2043, pruned_loss=0.03308, over 4974.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2156, pruned_loss=0.03498, over 971948.70 frames.], batch size: 33, lr: 2.35e-04 +2022-05-06 12:40:04,383 INFO [train.py:715] (6/8) Epoch 9, batch 16550, loss[loss=0.1259, simple_loss=0.204, pruned_loss=0.02385, over 4979.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2148, pruned_loss=0.03473, over 972842.24 frames.], batch size: 24, lr: 2.35e-04 +2022-05-06 12:40:43,847 INFO [train.py:715] (6/8) Epoch 9, batch 16600, loss[loss=0.143, simple_loss=0.2046, pruned_loss=0.04068, over 4900.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2145, pruned_loss=0.03464, over 972821.86 frames.], batch size: 17, lr: 2.35e-04 +2022-05-06 12:41:23,438 INFO [train.py:715] (6/8) Epoch 9, batch 16650, loss[loss=0.163, simple_loss=0.2293, pruned_loss=0.04835, over 4933.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2148, pruned_loss=0.03496, over 972846.02 frames.], batch size: 21, lr: 2.35e-04 +2022-05-06 12:42:02,351 INFO [train.py:715] (6/8) Epoch 9, batch 16700, loss[loss=0.1458, simple_loss=0.2164, pruned_loss=0.03761, over 4979.00 frames.], tot_loss[loss=0.1425, simple_loss=0.215, pruned_loss=0.03504, over 972467.40 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 12:42:41,615 INFO [train.py:715] (6/8) Epoch 9, batch 16750, loss[loss=0.1387, simple_loss=0.2157, pruned_loss=0.03084, over 4913.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2144, pruned_loss=0.03451, over 972906.13 frames.], batch size: 18, lr: 2.35e-04 +2022-05-06 12:43:21,413 INFO [train.py:715] (6/8) Epoch 9, batch 16800, loss[loss=0.1268, simple_loss=0.2064, pruned_loss=0.02365, over 4916.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2135, pruned_loss=0.03394, over 972606.95 frames.], batch size: 17, lr: 2.35e-04 +2022-05-06 12:44:01,036 INFO [train.py:715] (6/8) Epoch 9, batch 16850, loss[loss=0.1472, simple_loss=0.2209, pruned_loss=0.03681, over 4971.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.03433, over 972456.10 frames.], batch size: 24, lr: 2.35e-04 +2022-05-06 12:44:40,459 INFO [train.py:715] (6/8) Epoch 9, batch 16900, loss[loss=0.1509, simple_loss=0.2238, pruned_loss=0.03905, over 4895.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2142, pruned_loss=0.0346, over 972609.80 frames.], batch size: 22, lr: 2.35e-04 +2022-05-06 12:45:20,535 INFO [train.py:715] (6/8) Epoch 9, batch 16950, loss[loss=0.1173, simple_loss=0.1895, pruned_loss=0.02258, over 4812.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2135, pruned_loss=0.03435, over 972215.09 frames.], batch size: 26, lr: 2.35e-04 +2022-05-06 12:46:00,235 INFO [train.py:715] (6/8) Epoch 9, batch 
17000, loss[loss=0.1391, simple_loss=0.2029, pruned_loss=0.03762, over 4828.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2139, pruned_loss=0.03495, over 972177.55 frames.], batch size: 30, lr: 2.35e-04 +2022-05-06 12:46:38,808 INFO [train.py:715] (6/8) Epoch 9, batch 17050, loss[loss=0.1452, simple_loss=0.2155, pruned_loss=0.03741, over 4747.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2141, pruned_loss=0.03443, over 971746.98 frames.], batch size: 16, lr: 2.35e-04 +2022-05-06 12:47:18,389 INFO [train.py:715] (6/8) Epoch 9, batch 17100, loss[loss=0.1415, simple_loss=0.2203, pruned_loss=0.03129, over 4969.00 frames.], tot_loss[loss=0.142, simple_loss=0.2143, pruned_loss=0.03487, over 972397.51 frames.], batch size: 25, lr: 2.35e-04 +2022-05-06 12:47:58,066 INFO [train.py:715] (6/8) Epoch 9, batch 17150, loss[loss=0.1532, simple_loss=0.2236, pruned_loss=0.04142, over 4884.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2138, pruned_loss=0.03484, over 972204.34 frames.], batch size: 19, lr: 2.35e-04 +2022-05-06 12:48:37,323 INFO [train.py:715] (6/8) Epoch 9, batch 17200, loss[loss=0.143, simple_loss=0.2234, pruned_loss=0.03129, over 4691.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2137, pruned_loss=0.03449, over 972420.60 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 12:49:15,995 INFO [train.py:715] (6/8) Epoch 9, batch 17250, loss[loss=0.1589, simple_loss=0.2431, pruned_loss=0.0373, over 4905.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2138, pruned_loss=0.03446, over 972757.93 frames.], batch size: 18, lr: 2.35e-04 +2022-05-06 12:49:54,885 INFO [train.py:715] (6/8) Epoch 9, batch 17300, loss[loss=0.1418, simple_loss=0.2145, pruned_loss=0.03452, over 4886.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2142, pruned_loss=0.03497, over 973362.33 frames.], batch size: 16, lr: 2.35e-04 +2022-05-06 12:50:33,973 INFO [train.py:715] (6/8) Epoch 9, batch 17350, loss[loss=0.1769, simple_loss=0.2565, pruned_loss=0.04862, over 4886.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2145, pruned_loss=0.03471, over 973023.80 frames.], batch size: 19, lr: 2.35e-04 +2022-05-06 12:51:13,078 INFO [train.py:715] (6/8) Epoch 9, batch 17400, loss[loss=0.1349, simple_loss=0.2094, pruned_loss=0.03023, over 4787.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.03427, over 972531.08 frames.], batch size: 12, lr: 2.35e-04 +2022-05-06 12:51:52,393 INFO [train.py:715] (6/8) Epoch 9, batch 17450, loss[loss=0.1427, simple_loss=0.219, pruned_loss=0.03323, over 4921.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2145, pruned_loss=0.03414, over 972121.97 frames.], batch size: 23, lr: 2.35e-04 +2022-05-06 12:52:31,598 INFO [train.py:715] (6/8) Epoch 9, batch 17500, loss[loss=0.135, simple_loss=0.199, pruned_loss=0.03551, over 4772.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2144, pruned_loss=0.0345, over 971968.18 frames.], batch size: 18, lr: 2.35e-04 +2022-05-06 12:53:10,815 INFO [train.py:715] (6/8) Epoch 9, batch 17550, loss[loss=0.1648, simple_loss=0.2324, pruned_loss=0.04854, over 4894.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2141, pruned_loss=0.03431, over 972369.80 frames.], batch size: 19, lr: 2.35e-04 +2022-05-06 12:53:49,894 INFO [train.py:715] (6/8) Epoch 9, batch 17600, loss[loss=0.1526, simple_loss=0.2258, pruned_loss=0.03968, over 4906.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2143, pruned_loss=0.03465, over 972458.63 frames.], batch size: 17, lr: 2.35e-04 +2022-05-06 12:54:29,588 INFO [train.py:715] (6/8) Epoch 9, batch 17650, loss[loss=0.1129, 
simple_loss=0.1877, pruned_loss=0.01907, over 4926.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2141, pruned_loss=0.03449, over 972220.14 frames.], batch size: 29, lr: 2.35e-04 +2022-05-06 12:55:08,480 INFO [train.py:715] (6/8) Epoch 9, batch 17700, loss[loss=0.1197, simple_loss=0.2056, pruned_loss=0.01692, over 4796.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2138, pruned_loss=0.03471, over 971257.33 frames.], batch size: 14, lr: 2.35e-04 +2022-05-06 12:55:47,744 INFO [train.py:715] (6/8) Epoch 9, batch 17750, loss[loss=0.1747, simple_loss=0.2489, pruned_loss=0.05023, over 4886.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2134, pruned_loss=0.03466, over 971564.50 frames.], batch size: 39, lr: 2.35e-04 +2022-05-06 12:56:27,548 INFO [train.py:715] (6/8) Epoch 9, batch 17800, loss[loss=0.1435, simple_loss=0.2163, pruned_loss=0.03536, over 4891.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2148, pruned_loss=0.03544, over 972915.71 frames.], batch size: 18, lr: 2.35e-04 +2022-05-06 12:57:06,523 INFO [train.py:715] (6/8) Epoch 9, batch 17850, loss[loss=0.1203, simple_loss=0.1976, pruned_loss=0.02145, over 4957.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2144, pruned_loss=0.03533, over 972637.45 frames.], batch size: 21, lr: 2.35e-04 +2022-05-06 12:57:45,750 INFO [train.py:715] (6/8) Epoch 9, batch 17900, loss[loss=0.1263, simple_loss=0.2013, pruned_loss=0.02567, over 4926.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2152, pruned_loss=0.03577, over 972801.29 frames.], batch size: 29, lr: 2.35e-04 +2022-05-06 12:58:25,614 INFO [train.py:715] (6/8) Epoch 9, batch 17950, loss[loss=0.1219, simple_loss=0.1905, pruned_loss=0.0267, over 4991.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2144, pruned_loss=0.0355, over 972518.70 frames.], batch size: 14, lr: 2.35e-04 +2022-05-06 12:59:04,989 INFO [train.py:715] (6/8) Epoch 9, batch 18000, loss[loss=0.1392, simple_loss=0.2114, pruned_loss=0.03353, over 4816.00 frames.], tot_loss[loss=0.1421, simple_loss=0.214, pruned_loss=0.03514, over 971796.33 frames.], batch size: 27, lr: 2.35e-04 +2022-05-06 12:59:04,990 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 12:59:14,501 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.1068, simple_loss=0.1912, pruned_loss=0.01121, over 914524.00 frames. 
+2022-05-06 12:59:53,954 INFO [train.py:715] (6/8) Epoch 9, batch 18050, loss[loss=0.1438, simple_loss=0.2085, pruned_loss=0.03954, over 4892.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2144, pruned_loss=0.0355, over 971873.70 frames.], batch size: 19, lr: 2.35e-04 +2022-05-06 13:00:33,775 INFO [train.py:715] (6/8) Epoch 9, batch 18100, loss[loss=0.1416, simple_loss=0.2106, pruned_loss=0.03632, over 4746.00 frames.], tot_loss[loss=0.143, simple_loss=0.215, pruned_loss=0.03546, over 971801.22 frames.], batch size: 16, lr: 2.35e-04 +2022-05-06 13:01:13,065 INFO [train.py:715] (6/8) Epoch 9, batch 18150, loss[loss=0.1193, simple_loss=0.1836, pruned_loss=0.02752, over 4819.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2145, pruned_loss=0.03505, over 971300.79 frames.], batch size: 12, lr: 2.35e-04 +2022-05-06 13:01:52,675 INFO [train.py:715] (6/8) Epoch 9, batch 18200, loss[loss=0.1697, simple_loss=0.2385, pruned_loss=0.05047, over 4985.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2138, pruned_loss=0.03469, over 972597.45 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 13:02:31,902 INFO [train.py:715] (6/8) Epoch 9, batch 18250, loss[loss=0.1549, simple_loss=0.2286, pruned_loss=0.04062, over 4790.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2138, pruned_loss=0.03481, over 972361.29 frames.], batch size: 14, lr: 2.35e-04 +2022-05-06 13:03:11,076 INFO [train.py:715] (6/8) Epoch 9, batch 18300, loss[loss=0.1431, simple_loss=0.2167, pruned_loss=0.03474, over 4986.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2142, pruned_loss=0.03514, over 972478.36 frames.], batch size: 35, lr: 2.35e-04 +2022-05-06 13:03:50,432 INFO [train.py:715] (6/8) Epoch 9, batch 18350, loss[loss=0.1306, simple_loss=0.1979, pruned_loss=0.03161, over 4903.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2139, pruned_loss=0.03488, over 972506.02 frames.], batch size: 17, lr: 2.35e-04 +2022-05-06 13:04:29,597 INFO [train.py:715] (6/8) Epoch 9, batch 18400, loss[loss=0.1553, simple_loss=0.242, pruned_loss=0.0343, over 4865.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2139, pruned_loss=0.03479, over 971641.83 frames.], batch size: 20, lr: 2.35e-04 +2022-05-06 13:05:08,638 INFO [train.py:715] (6/8) Epoch 9, batch 18450, loss[loss=0.1227, simple_loss=0.2013, pruned_loss=0.0221, over 4987.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2132, pruned_loss=0.03419, over 971546.85 frames.], batch size: 14, lr: 2.35e-04 +2022-05-06 13:05:47,603 INFO [train.py:715] (6/8) Epoch 9, batch 18500, loss[loss=0.1479, simple_loss=0.2059, pruned_loss=0.04488, over 4855.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03429, over 970832.82 frames.], batch size: 13, lr: 2.35e-04 +2022-05-06 13:06:26,999 INFO [train.py:715] (6/8) Epoch 9, batch 18550, loss[loss=0.1555, simple_loss=0.2216, pruned_loss=0.0447, over 4842.00 frames.], tot_loss[loss=0.141, simple_loss=0.2135, pruned_loss=0.03423, over 971120.03 frames.], batch size: 30, lr: 2.35e-04 +2022-05-06 13:07:06,069 INFO [train.py:715] (6/8) Epoch 9, batch 18600, loss[loss=0.1452, simple_loss=0.2214, pruned_loss=0.03453, over 4912.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2133, pruned_loss=0.03387, over 971332.46 frames.], batch size: 29, lr: 2.35e-04 +2022-05-06 13:07:44,919 INFO [train.py:715] (6/8) Epoch 9, batch 18650, loss[loss=0.1558, simple_loss=0.2342, pruned_loss=0.03871, over 4957.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.03365, over 971728.12 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 13:08:24,474 
INFO [train.py:715] (6/8) Epoch 9, batch 18700, loss[loss=0.1441, simple_loss=0.2107, pruned_loss=0.03874, over 4938.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.03385, over 971701.06 frames.], batch size: 35, lr: 2.35e-04 +2022-05-06 13:09:03,188 INFO [train.py:715] (6/8) Epoch 9, batch 18750, loss[loss=0.1482, simple_loss=0.2095, pruned_loss=0.04341, over 4788.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.0343, over 971191.59 frames.], batch size: 17, lr: 2.35e-04 +2022-05-06 13:09:42,761 INFO [train.py:715] (6/8) Epoch 9, batch 18800, loss[loss=0.1182, simple_loss=0.1957, pruned_loss=0.02034, over 4899.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2145, pruned_loss=0.03434, over 971469.70 frames.], batch size: 19, lr: 2.35e-04 +2022-05-06 13:10:21,586 INFO [train.py:715] (6/8) Epoch 9, batch 18850, loss[loss=0.1438, simple_loss=0.2134, pruned_loss=0.03711, over 4957.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2142, pruned_loss=0.03417, over 971808.54 frames.], batch size: 21, lr: 2.35e-04 +2022-05-06 13:11:00,822 INFO [train.py:715] (6/8) Epoch 9, batch 18900, loss[loss=0.1357, simple_loss=0.2089, pruned_loss=0.03127, over 4779.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03499, over 971348.05 frames.], batch size: 18, lr: 2.35e-04 +2022-05-06 13:11:40,165 INFO [train.py:715] (6/8) Epoch 9, batch 18950, loss[loss=0.1534, simple_loss=0.2292, pruned_loss=0.03879, over 4821.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2145, pruned_loss=0.03502, over 972472.85 frames.], batch size: 25, lr: 2.35e-04 +2022-05-06 13:12:18,871 INFO [train.py:715] (6/8) Epoch 9, batch 19000, loss[loss=0.1643, simple_loss=0.2278, pruned_loss=0.05035, over 4984.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2148, pruned_loss=0.03539, over 972009.70 frames.], batch size: 31, lr: 2.35e-04 +2022-05-06 13:12:58,978 INFO [train.py:715] (6/8) Epoch 9, batch 19050, loss[loss=0.1283, simple_loss=0.1969, pruned_loss=0.02982, over 4755.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2149, pruned_loss=0.03561, over 971782.25 frames.], batch size: 12, lr: 2.34e-04 +2022-05-06 13:13:38,429 INFO [train.py:715] (6/8) Epoch 9, batch 19100, loss[loss=0.1222, simple_loss=0.1993, pruned_loss=0.02254, over 4830.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2148, pruned_loss=0.03535, over 971357.90 frames.], batch size: 26, lr: 2.34e-04 +2022-05-06 13:14:17,257 INFO [train.py:715] (6/8) Epoch 9, batch 19150, loss[loss=0.1625, simple_loss=0.2257, pruned_loss=0.0497, over 4868.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2143, pruned_loss=0.03493, over 971339.13 frames.], batch size: 20, lr: 2.34e-04 +2022-05-06 13:14:57,089 INFO [train.py:715] (6/8) Epoch 9, batch 19200, loss[loss=0.1131, simple_loss=0.1855, pruned_loss=0.02038, over 4869.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2145, pruned_loss=0.03484, over 970701.91 frames.], batch size: 12, lr: 2.34e-04 +2022-05-06 13:15:36,593 INFO [train.py:715] (6/8) Epoch 9, batch 19250, loss[loss=0.1209, simple_loss=0.1927, pruned_loss=0.02458, over 4870.00 frames.], tot_loss[loss=0.1414, simple_loss=0.214, pruned_loss=0.03446, over 971114.00 frames.], batch size: 16, lr: 2.34e-04 +2022-05-06 13:16:15,486 INFO [train.py:715] (6/8) Epoch 9, batch 19300, loss[loss=0.1348, simple_loss=0.2109, pruned_loss=0.02936, over 4914.00 frames.], tot_loss[loss=0.141, simple_loss=0.2138, pruned_loss=0.03417, over 971428.51 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:16:54,064 INFO [train.py:715] 
(6/8) Epoch 9, batch 19350, loss[loss=0.1111, simple_loss=0.1806, pruned_loss=0.02074, over 4791.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03364, over 971332.18 frames.], batch size: 14, lr: 2.34e-04 +2022-05-06 13:17:34,093 INFO [train.py:715] (6/8) Epoch 9, batch 19400, loss[loss=0.1547, simple_loss=0.2241, pruned_loss=0.04266, over 4907.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2136, pruned_loss=0.03431, over 971163.32 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:18:13,123 INFO [train.py:715] (6/8) Epoch 9, batch 19450, loss[loss=0.1357, simple_loss=0.2152, pruned_loss=0.02811, over 4890.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.03427, over 971263.41 frames.], batch size: 17, lr: 2.34e-04 +2022-05-06 13:18:51,813 INFO [train.py:715] (6/8) Epoch 9, batch 19500, loss[loss=0.1196, simple_loss=0.1964, pruned_loss=0.02142, over 4820.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2132, pruned_loss=0.0341, over 971328.28 frames.], batch size: 26, lr: 2.34e-04 +2022-05-06 13:19:30,944 INFO [train.py:715] (6/8) Epoch 9, batch 19550, loss[loss=0.1486, simple_loss=0.2175, pruned_loss=0.03986, over 4958.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.03454, over 971912.92 frames.], batch size: 39, lr: 2.34e-04 +2022-05-06 13:20:10,208 INFO [train.py:715] (6/8) Epoch 9, batch 19600, loss[loss=0.1626, simple_loss=0.2362, pruned_loss=0.04445, over 4777.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2145, pruned_loss=0.03495, over 971905.00 frames.], batch size: 17, lr: 2.34e-04 +2022-05-06 13:20:48,781 INFO [train.py:715] (6/8) Epoch 9, batch 19650, loss[loss=0.1447, simple_loss=0.2205, pruned_loss=0.03441, over 4890.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2149, pruned_loss=0.03491, over 972317.52 frames.], batch size: 22, lr: 2.34e-04 +2022-05-06 13:21:27,273 INFO [train.py:715] (6/8) Epoch 9, batch 19700, loss[loss=0.1672, simple_loss=0.2433, pruned_loss=0.04555, over 4869.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.03449, over 972050.11 frames.], batch size: 32, lr: 2.34e-04 +2022-05-06 13:22:07,183 INFO [train.py:715] (6/8) Epoch 9, batch 19750, loss[loss=0.1395, simple_loss=0.2109, pruned_loss=0.03402, over 4903.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2147, pruned_loss=0.03456, over 972706.12 frames.], batch size: 23, lr: 2.34e-04 +2022-05-06 13:22:46,856 INFO [train.py:715] (6/8) Epoch 9, batch 19800, loss[loss=0.1499, simple_loss=0.2304, pruned_loss=0.0347, over 4793.00 frames.], tot_loss[loss=0.141, simple_loss=0.2138, pruned_loss=0.03416, over 972227.57 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:23:26,651 INFO [train.py:715] (6/8) Epoch 9, batch 19850, loss[loss=0.1269, simple_loss=0.1948, pruned_loss=0.02951, over 4757.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2146, pruned_loss=0.03492, over 971858.48 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:24:06,295 INFO [train.py:715] (6/8) Epoch 9, batch 19900, loss[loss=0.1254, simple_loss=0.201, pruned_loss=0.02494, over 4987.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2145, pruned_loss=0.03511, over 972323.53 frames.], batch size: 28, lr: 2.34e-04 +2022-05-06 13:24:45,453 INFO [train.py:715] (6/8) Epoch 9, batch 19950, loss[loss=0.1239, simple_loss=0.1922, pruned_loss=0.02777, over 4986.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2157, pruned_loss=0.03554, over 973273.83 frames.], batch size: 14, lr: 2.34e-04 +2022-05-06 13:25:24,508 INFO [train.py:715] (6/8) Epoch 9, batch 
20000, loss[loss=0.1402, simple_loss=0.2165, pruned_loss=0.03196, over 4877.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2154, pruned_loss=0.0352, over 973649.22 frames.], batch size: 16, lr: 2.34e-04 +2022-05-06 13:26:02,952 INFO [train.py:715] (6/8) Epoch 9, batch 20050, loss[loss=0.1545, simple_loss=0.221, pruned_loss=0.04393, over 4781.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2148, pruned_loss=0.03518, over 973060.27 frames.], batch size: 14, lr: 2.34e-04 +2022-05-06 13:26:42,421 INFO [train.py:715] (6/8) Epoch 9, batch 20100, loss[loss=0.1355, simple_loss=0.2025, pruned_loss=0.03426, over 4866.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2152, pruned_loss=0.03493, over 971843.63 frames.], batch size: 22, lr: 2.34e-04 +2022-05-06 13:27:21,491 INFO [train.py:715] (6/8) Epoch 9, batch 20150, loss[loss=0.1211, simple_loss=0.19, pruned_loss=0.02608, over 4865.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.0353, over 971835.36 frames.], batch size: 16, lr: 2.34e-04 +2022-05-06 13:27:59,972 INFO [train.py:715] (6/8) Epoch 9, batch 20200, loss[loss=0.1419, simple_loss=0.2229, pruned_loss=0.0304, over 4980.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2159, pruned_loss=0.03557, over 972260.62 frames.], batch size: 28, lr: 2.34e-04 +2022-05-06 13:28:39,474 INFO [train.py:715] (6/8) Epoch 9, batch 20250, loss[loss=0.1342, simple_loss=0.2113, pruned_loss=0.02856, over 4696.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2158, pruned_loss=0.03554, over 972626.48 frames.], batch size: 15, lr: 2.34e-04 +2022-05-06 13:29:18,329 INFO [train.py:715] (6/8) Epoch 9, batch 20300, loss[loss=0.1715, simple_loss=0.2338, pruned_loss=0.05461, over 4790.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2159, pruned_loss=0.03526, over 971954.96 frames.], batch size: 14, lr: 2.34e-04 +2022-05-06 13:29:57,722 INFO [train.py:715] (6/8) Epoch 9, batch 20350, loss[loss=0.1767, simple_loss=0.2488, pruned_loss=0.05235, over 4883.00 frames.], tot_loss[loss=0.143, simple_loss=0.2157, pruned_loss=0.03519, over 971548.34 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:30:37,203 INFO [train.py:715] (6/8) Epoch 9, batch 20400, loss[loss=0.1457, simple_loss=0.2203, pruned_loss=0.0355, over 4786.00 frames.], tot_loss[loss=0.142, simple_loss=0.2146, pruned_loss=0.03467, over 971258.14 frames.], batch size: 17, lr: 2.34e-04 +2022-05-06 13:31:17,089 INFO [train.py:715] (6/8) Epoch 9, batch 20450, loss[loss=0.151, simple_loss=0.2146, pruned_loss=0.04372, over 4990.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2148, pruned_loss=0.03489, over 971882.64 frames.], batch size: 31, lr: 2.34e-04 +2022-05-06 13:31:56,598 INFO [train.py:715] (6/8) Epoch 9, batch 20500, loss[loss=0.1243, simple_loss=0.1984, pruned_loss=0.02512, over 4870.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2142, pruned_loss=0.03437, over 972484.67 frames.], batch size: 16, lr: 2.34e-04 +2022-05-06 13:32:35,671 INFO [train.py:715] (6/8) Epoch 9, batch 20550, loss[loss=0.1375, simple_loss=0.2149, pruned_loss=0.03008, over 4884.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2136, pruned_loss=0.03392, over 972187.49 frames.], batch size: 22, lr: 2.34e-04 +2022-05-06 13:33:14,863 INFO [train.py:715] (6/8) Epoch 9, batch 20600, loss[loss=0.1434, simple_loss=0.2182, pruned_loss=0.03428, over 4847.00 frames.], tot_loss[loss=0.141, simple_loss=0.2138, pruned_loss=0.0341, over 972590.12 frames.], batch size: 30, lr: 2.34e-04 +2022-05-06 13:33:53,312 INFO [train.py:715] (6/8) Epoch 9, batch 20650, loss[loss=0.1626, 
simple_loss=0.216, pruned_loss=0.05458, over 4850.00 frames.], tot_loss[loss=0.1414, simple_loss=0.214, pruned_loss=0.03436, over 972345.79 frames.], batch size: 30, lr: 2.34e-04 +2022-05-06 13:34:32,416 INFO [train.py:715] (6/8) Epoch 9, batch 20700, loss[loss=0.1316, simple_loss=0.209, pruned_loss=0.0271, over 4759.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2138, pruned_loss=0.03424, over 972011.46 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:35:11,249 INFO [train.py:715] (6/8) Epoch 9, batch 20750, loss[loss=0.1736, simple_loss=0.2428, pruned_loss=0.0522, over 4795.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03399, over 971188.54 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:35:50,841 INFO [train.py:715] (6/8) Epoch 9, batch 20800, loss[loss=0.1465, simple_loss=0.2278, pruned_loss=0.03264, over 4771.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2138, pruned_loss=0.03425, over 970996.06 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:36:30,209 INFO [train.py:715] (6/8) Epoch 9, batch 20850, loss[loss=0.1409, simple_loss=0.2217, pruned_loss=0.03004, over 4762.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.03432, over 971216.32 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:37:09,652 INFO [train.py:715] (6/8) Epoch 9, batch 20900, loss[loss=0.1408, simple_loss=0.2169, pruned_loss=0.03238, over 4894.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.03411, over 971720.65 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:37:49,145 INFO [train.py:715] (6/8) Epoch 9, batch 20950, loss[loss=0.1547, simple_loss=0.2345, pruned_loss=0.03747, over 4839.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.03441, over 971665.92 frames.], batch size: 15, lr: 2.34e-04 +2022-05-06 13:38:28,447 INFO [train.py:715] (6/8) Epoch 9, batch 21000, loss[loss=0.1183, simple_loss=0.1828, pruned_loss=0.02691, over 4869.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2141, pruned_loss=0.03445, over 971718.95 frames.], batch size: 20, lr: 2.34e-04 +2022-05-06 13:38:28,448 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 13:38:38,082 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.1069, simple_loss=0.1912, pruned_loss=0.01129, over 914524.00 frames. 
+2022-05-06 13:39:17,243 INFO [train.py:715] (6/8) Epoch 9, batch 21050, loss[loss=0.1385, simple_loss=0.217, pruned_loss=0.03003, over 4964.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2142, pruned_loss=0.03454, over 971855.85 frames.], batch size: 24, lr: 2.34e-04 +2022-05-06 13:39:56,159 INFO [train.py:715] (6/8) Epoch 9, batch 21100, loss[loss=0.1406, simple_loss=0.2122, pruned_loss=0.03453, over 4826.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2149, pruned_loss=0.03491, over 971983.25 frames.], batch size: 30, lr: 2.34e-04 +2022-05-06 13:40:35,522 INFO [train.py:715] (6/8) Epoch 9, batch 21150, loss[loss=0.1526, simple_loss=0.2263, pruned_loss=0.03941, over 4886.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2144, pruned_loss=0.03465, over 972693.00 frames.], batch size: 16, lr: 2.34e-04 +2022-05-06 13:41:14,533 INFO [train.py:715] (6/8) Epoch 9, batch 21200, loss[loss=0.1305, simple_loss=0.1996, pruned_loss=0.03075, over 4854.00 frames.], tot_loss[loss=0.143, simple_loss=0.2156, pruned_loss=0.03518, over 973108.85 frames.], batch size: 32, lr: 2.34e-04 +2022-05-06 13:41:54,099 INFO [train.py:715] (6/8) Epoch 9, batch 21250, loss[loss=0.1413, simple_loss=0.211, pruned_loss=0.03583, over 4927.00 frames.], tot_loss[loss=0.1435, simple_loss=0.216, pruned_loss=0.03544, over 972960.41 frames.], batch size: 29, lr: 2.34e-04 +2022-05-06 13:42:32,489 INFO [train.py:715] (6/8) Epoch 9, batch 21300, loss[loss=0.1415, simple_loss=0.2208, pruned_loss=0.03104, over 4815.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2157, pruned_loss=0.03508, over 972758.48 frames.], batch size: 26, lr: 2.34e-04 +2022-05-06 13:43:11,102 INFO [train.py:715] (6/8) Epoch 9, batch 21350, loss[loss=0.1458, simple_loss=0.2105, pruned_loss=0.04053, over 4930.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2154, pruned_loss=0.03492, over 972620.56 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:43:50,030 INFO [train.py:715] (6/8) Epoch 9, batch 21400, loss[loss=0.1272, simple_loss=0.198, pruned_loss=0.02823, over 4870.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2154, pruned_loss=0.03456, over 973037.66 frames.], batch size: 13, lr: 2.34e-04 +2022-05-06 13:44:28,775 INFO [train.py:715] (6/8) Epoch 9, batch 21450, loss[loss=0.1312, simple_loss=0.197, pruned_loss=0.03268, over 4834.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2145, pruned_loss=0.03439, over 974087.70 frames.], batch size: 25, lr: 2.34e-04 +2022-05-06 13:45:07,169 INFO [train.py:715] (6/8) Epoch 9, batch 21500, loss[loss=0.1711, simple_loss=0.234, pruned_loss=0.05409, over 4797.00 frames.], tot_loss[loss=0.1422, simple_loss=0.215, pruned_loss=0.03468, over 973171.91 frames.], batch size: 24, lr: 2.34e-04 +2022-05-06 13:45:46,287 INFO [train.py:715] (6/8) Epoch 9, batch 21550, loss[loss=0.1303, simple_loss=0.2015, pruned_loss=0.02961, over 4758.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2147, pruned_loss=0.03458, over 973145.84 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:46:25,002 INFO [train.py:715] (6/8) Epoch 9, batch 21600, loss[loss=0.1539, simple_loss=0.2288, pruned_loss=0.03947, over 4939.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2162, pruned_loss=0.03517, over 972140.90 frames.], batch size: 21, lr: 2.34e-04 +2022-05-06 13:47:04,091 INFO [train.py:715] (6/8) Epoch 9, batch 21650, loss[loss=0.1951, simple_loss=0.2726, pruned_loss=0.05876, over 4864.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2156, pruned_loss=0.03482, over 972396.43 frames.], batch size: 16, lr: 2.34e-04 +2022-05-06 13:47:43,368 
INFO [train.py:715] (6/8) Epoch 9, batch 21700, loss[loss=0.1902, simple_loss=0.2522, pruned_loss=0.06412, over 4795.00 frames.], tot_loss[loss=0.1425, simple_loss=0.215, pruned_loss=0.03504, over 972249.74 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:48:22,457 INFO [train.py:715] (6/8) Epoch 9, batch 21750, loss[loss=0.1549, simple_loss=0.2221, pruned_loss=0.04384, over 4866.00 frames.], tot_loss[loss=0.142, simple_loss=0.2147, pruned_loss=0.03465, over 972379.09 frames.], batch size: 13, lr: 2.34e-04 +2022-05-06 13:49:01,580 INFO [train.py:715] (6/8) Epoch 9, batch 21800, loss[loss=0.1217, simple_loss=0.2001, pruned_loss=0.02168, over 4783.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2149, pruned_loss=0.03469, over 973273.79 frames.], batch size: 17, lr: 2.34e-04 +2022-05-06 13:49:41,108 INFO [train.py:715] (6/8) Epoch 9, batch 21850, loss[loss=0.1208, simple_loss=0.1845, pruned_loss=0.02857, over 4770.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.03437, over 973108.81 frames.], batch size: 12, lr: 2.34e-04 +2022-05-06 13:50:20,443 INFO [train.py:715] (6/8) Epoch 9, batch 21900, loss[loss=0.1273, simple_loss=0.1984, pruned_loss=0.02804, over 4840.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2146, pruned_loss=0.03439, over 974062.56 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 13:50:59,012 INFO [train.py:715] (6/8) Epoch 9, batch 21950, loss[loss=0.1287, simple_loss=0.195, pruned_loss=0.03115, over 4979.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.03423, over 973447.79 frames.], batch size: 35, lr: 2.33e-04 +2022-05-06 13:51:37,914 INFO [train.py:715] (6/8) Epoch 9, batch 22000, loss[loss=0.1452, simple_loss=0.2135, pruned_loss=0.03839, over 4823.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2139, pruned_loss=0.03449, over 972779.12 frames.], batch size: 26, lr: 2.33e-04 +2022-05-06 13:52:16,814 INFO [train.py:715] (6/8) Epoch 9, batch 22050, loss[loss=0.1314, simple_loss=0.1965, pruned_loss=0.0332, over 4746.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2142, pruned_loss=0.03438, over 972334.05 frames.], batch size: 16, lr: 2.33e-04 +2022-05-06 13:52:56,514 INFO [train.py:715] (6/8) Epoch 9, batch 22100, loss[loss=0.129, simple_loss=0.207, pruned_loss=0.02548, over 4802.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2147, pruned_loss=0.03428, over 972515.24 frames.], batch size: 25, lr: 2.33e-04 +2022-05-06 13:53:35,801 INFO [train.py:715] (6/8) Epoch 9, batch 22150, loss[loss=0.1561, simple_loss=0.214, pruned_loss=0.04903, over 4957.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2147, pruned_loss=0.03438, over 971490.03 frames.], batch size: 39, lr: 2.33e-04 +2022-05-06 13:54:14,971 INFO [train.py:715] (6/8) Epoch 9, batch 22200, loss[loss=0.1217, simple_loss=0.1864, pruned_loss=0.0285, over 4860.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2143, pruned_loss=0.03441, over 972528.92 frames.], batch size: 13, lr: 2.33e-04 +2022-05-06 13:54:54,446 INFO [train.py:715] (6/8) Epoch 9, batch 22250, loss[loss=0.1606, simple_loss=0.2431, pruned_loss=0.03908, over 4889.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2141, pruned_loss=0.03435, over 972134.61 frames.], batch size: 19, lr: 2.33e-04 +2022-05-06 13:55:33,233 INFO [train.py:715] (6/8) Epoch 9, batch 22300, loss[loss=0.1346, simple_loss=0.214, pruned_loss=0.02762, over 4943.00 frames.], tot_loss[loss=0.1426, simple_loss=0.215, pruned_loss=0.03511, over 972290.83 frames.], batch size: 29, lr: 2.33e-04 +2022-05-06 13:56:11,832 INFO [train.py:715] (6/8) 
Epoch 9, batch 22350, loss[loss=0.1336, simple_loss=0.2093, pruned_loss=0.02891, over 4636.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2157, pruned_loss=0.03525, over 972229.70 frames.], batch size: 13, lr: 2.33e-04 +2022-05-06 13:56:50,722 INFO [train.py:715] (6/8) Epoch 9, batch 22400, loss[loss=0.1745, simple_loss=0.2542, pruned_loss=0.04744, over 4941.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2151, pruned_loss=0.03529, over 971503.26 frames.], batch size: 21, lr: 2.33e-04 +2022-05-06 13:57:29,428 INFO [train.py:715] (6/8) Epoch 9, batch 22450, loss[loss=0.1395, simple_loss=0.2055, pruned_loss=0.03679, over 4863.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03484, over 971635.92 frames.], batch size: 20, lr: 2.33e-04 +2022-05-06 13:58:08,128 INFO [train.py:715] (6/8) Epoch 9, batch 22500, loss[loss=0.1367, simple_loss=0.2089, pruned_loss=0.03228, over 4729.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2141, pruned_loss=0.03467, over 971870.16 frames.], batch size: 16, lr: 2.33e-04 +2022-05-06 13:58:47,017 INFO [train.py:715] (6/8) Epoch 9, batch 22550, loss[loss=0.1306, simple_loss=0.204, pruned_loss=0.02859, over 4842.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03485, over 971970.00 frames.], batch size: 30, lr: 2.33e-04 +2022-05-06 13:59:26,039 INFO [train.py:715] (6/8) Epoch 9, batch 22600, loss[loss=0.132, simple_loss=0.2069, pruned_loss=0.02849, over 4718.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2143, pruned_loss=0.03475, over 972416.80 frames.], batch size: 12, lr: 2.33e-04 +2022-05-06 14:00:05,207 INFO [train.py:715] (6/8) Epoch 9, batch 22650, loss[loss=0.1253, simple_loss=0.1979, pruned_loss=0.02638, over 4898.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2132, pruned_loss=0.03391, over 972618.79 frames.], batch size: 22, lr: 2.33e-04 +2022-05-06 14:00:44,245 INFO [train.py:715] (6/8) Epoch 9, batch 22700, loss[loss=0.1477, simple_loss=0.2181, pruned_loss=0.03863, over 4943.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2138, pruned_loss=0.03396, over 972563.38 frames.], batch size: 21, lr: 2.33e-04 +2022-05-06 14:01:26,080 INFO [train.py:715] (6/8) Epoch 9, batch 22750, loss[loss=0.1505, simple_loss=0.2127, pruned_loss=0.0441, over 4896.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2144, pruned_loss=0.0343, over 972713.36 frames.], batch size: 19, lr: 2.33e-04 +2022-05-06 14:02:04,855 INFO [train.py:715] (6/8) Epoch 9, batch 22800, loss[loss=0.1434, simple_loss=0.2214, pruned_loss=0.03275, over 4883.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2152, pruned_loss=0.03485, over 971976.31 frames.], batch size: 19, lr: 2.33e-04 +2022-05-06 14:02:44,153 INFO [train.py:715] (6/8) Epoch 9, batch 22850, loss[loss=0.1469, simple_loss=0.2101, pruned_loss=0.04188, over 4747.00 frames.], tot_loss[loss=0.143, simple_loss=0.2156, pruned_loss=0.03516, over 972580.13 frames.], batch size: 16, lr: 2.33e-04 +2022-05-06 14:03:22,718 INFO [train.py:715] (6/8) Epoch 9, batch 22900, loss[loss=0.1727, simple_loss=0.235, pruned_loss=0.05523, over 4871.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2154, pruned_loss=0.03508, over 972954.87 frames.], batch size: 32, lr: 2.33e-04 +2022-05-06 14:04:01,805 INFO [train.py:715] (6/8) Epoch 9, batch 22950, loss[loss=0.1356, simple_loss=0.2073, pruned_loss=0.03193, over 4903.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2152, pruned_loss=0.03506, over 972403.34 frames.], batch size: 19, lr: 2.33e-04 +2022-05-06 14:04:40,859 INFO [train.py:715] (6/8) Epoch 9, batch 23000, 
loss[loss=0.1491, simple_loss=0.2231, pruned_loss=0.03755, over 4974.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2153, pruned_loss=0.03514, over 972216.85 frames.], batch size: 25, lr: 2.33e-04 +2022-05-06 14:05:20,250 INFO [train.py:715] (6/8) Epoch 9, batch 23050, loss[loss=0.1459, simple_loss=0.2098, pruned_loss=0.04097, over 4770.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03502, over 972193.60 frames.], batch size: 19, lr: 2.33e-04 +2022-05-06 14:05:59,526 INFO [train.py:715] (6/8) Epoch 9, batch 23100, loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02809, over 4786.00 frames.], tot_loss[loss=0.142, simple_loss=0.2147, pruned_loss=0.03466, over 972612.65 frames.], batch size: 14, lr: 2.33e-04 +2022-05-06 14:06:38,546 INFO [train.py:715] (6/8) Epoch 9, batch 23150, loss[loss=0.1354, simple_loss=0.1949, pruned_loss=0.03792, over 4643.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2136, pruned_loss=0.03455, over 972120.96 frames.], batch size: 13, lr: 2.33e-04 +2022-05-06 14:07:18,157 INFO [train.py:715] (6/8) Epoch 9, batch 23200, loss[loss=0.18, simple_loss=0.2464, pruned_loss=0.05675, over 4972.00 frames.], tot_loss[loss=0.142, simple_loss=0.2142, pruned_loss=0.03487, over 972184.29 frames.], batch size: 24, lr: 2.33e-04 +2022-05-06 14:07:57,914 INFO [train.py:715] (6/8) Epoch 9, batch 23250, loss[loss=0.1596, simple_loss=0.223, pruned_loss=0.04806, over 4743.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2143, pruned_loss=0.03461, over 972983.46 frames.], batch size: 16, lr: 2.33e-04 +2022-05-06 14:08:37,687 INFO [train.py:715] (6/8) Epoch 9, batch 23300, loss[loss=0.1571, simple_loss=0.2328, pruned_loss=0.04071, over 4742.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.03444, over 972511.35 frames.], batch size: 16, lr: 2.33e-04 +2022-05-06 14:09:17,442 INFO [train.py:715] (6/8) Epoch 9, batch 23350, loss[loss=0.1657, simple_loss=0.2374, pruned_loss=0.04705, over 4855.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2148, pruned_loss=0.03453, over 972758.37 frames.], batch size: 34, lr: 2.33e-04 +2022-05-06 14:09:56,739 INFO [train.py:715] (6/8) Epoch 9, batch 23400, loss[loss=0.1344, simple_loss=0.1958, pruned_loss=0.03647, over 4981.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2139, pruned_loss=0.0341, over 972399.47 frames.], batch size: 35, lr: 2.33e-04 +2022-05-06 14:10:35,595 INFO [train.py:715] (6/8) Epoch 9, batch 23450, loss[loss=0.1641, simple_loss=0.2358, pruned_loss=0.04622, over 4781.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2149, pruned_loss=0.03446, over 971476.25 frames.], batch size: 14, lr: 2.33e-04 +2022-05-06 14:11:14,358 INFO [train.py:715] (6/8) Epoch 9, batch 23500, loss[loss=0.1556, simple_loss=0.2249, pruned_loss=0.0431, over 4966.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2146, pruned_loss=0.03415, over 972269.00 frames.], batch size: 39, lr: 2.33e-04 +2022-05-06 14:11:52,881 INFO [train.py:715] (6/8) Epoch 9, batch 23550, loss[loss=0.1119, simple_loss=0.1794, pruned_loss=0.02214, over 4733.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2142, pruned_loss=0.03373, over 971958.57 frames.], batch size: 16, lr: 2.33e-04 +2022-05-06 14:12:32,350 INFO [train.py:715] (6/8) Epoch 9, batch 23600, loss[loss=0.1461, simple_loss=0.2156, pruned_loss=0.03828, over 4829.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2136, pruned_loss=0.0337, over 972397.44 frames.], batch size: 30, lr: 2.33e-04 +2022-05-06 14:13:11,526 INFO [train.py:715] (6/8) Epoch 9, batch 23650, loss[loss=0.112, 
simple_loss=0.18, pruned_loss=0.02205, over 4759.00 frames.], tot_loss[loss=0.1404, simple_loss=0.213, pruned_loss=0.03392, over 972125.28 frames.], batch size: 19, lr: 2.33e-04 +2022-05-06 14:13:50,878 INFO [train.py:715] (6/8) Epoch 9, batch 23700, loss[loss=0.1288, simple_loss=0.2016, pruned_loss=0.02797, over 4901.00 frames.], tot_loss[loss=0.141, simple_loss=0.2133, pruned_loss=0.03436, over 971821.68 frames.], batch size: 39, lr: 2.33e-04 +2022-05-06 14:14:30,054 INFO [train.py:715] (6/8) Epoch 9, batch 23750, loss[loss=0.1467, simple_loss=0.2249, pruned_loss=0.03427, over 4978.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2143, pruned_loss=0.03438, over 972164.12 frames.], batch size: 39, lr: 2.33e-04 +2022-05-06 14:15:09,288 INFO [train.py:715] (6/8) Epoch 9, batch 23800, loss[loss=0.1307, simple_loss=0.2102, pruned_loss=0.02562, over 4880.00 frames.], tot_loss[loss=0.1412, simple_loss=0.214, pruned_loss=0.03418, over 971356.99 frames.], batch size: 22, lr: 2.33e-04 +2022-05-06 14:15:48,394 INFO [train.py:715] (6/8) Epoch 9, batch 23850, loss[loss=0.157, simple_loss=0.219, pruned_loss=0.04746, over 4846.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03409, over 971077.84 frames.], batch size: 32, lr: 2.33e-04 +2022-05-06 14:16:27,647 INFO [train.py:715] (6/8) Epoch 9, batch 23900, loss[loss=0.136, simple_loss=0.2043, pruned_loss=0.03384, over 4773.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2128, pruned_loss=0.03378, over 971616.87 frames.], batch size: 14, lr: 2.33e-04 +2022-05-06 14:17:06,535 INFO [train.py:715] (6/8) Epoch 9, batch 23950, loss[loss=0.1303, simple_loss=0.2083, pruned_loss=0.02617, over 4805.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.03421, over 972422.68 frames.], batch size: 21, lr: 2.33e-04 +2022-05-06 14:17:45,505 INFO [train.py:715] (6/8) Epoch 9, batch 24000, loss[loss=0.1531, simple_loss=0.224, pruned_loss=0.04105, over 4847.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2139, pruned_loss=0.03434, over 972321.75 frames.], batch size: 32, lr: 2.33e-04 +2022-05-06 14:17:45,506 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 14:17:55,356 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.1069, simple_loss=0.1913, pruned_loss=0.01128, over 914524.00 frames. 
+2022-05-06 14:18:34,694 INFO [train.py:715] (6/8) Epoch 9, batch 24050, loss[loss=0.1608, simple_loss=0.2296, pruned_loss=0.04596, over 4805.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2133, pruned_loss=0.03411, over 972850.65 frames.], batch size: 13, lr: 2.33e-04 +2022-05-06 14:19:14,981 INFO [train.py:715] (6/8) Epoch 9, batch 24100, loss[loss=0.1414, simple_loss=0.2188, pruned_loss=0.032, over 4922.00 frames.], tot_loss[loss=0.1415, simple_loss=0.214, pruned_loss=0.03448, over 972653.59 frames.], batch size: 23, lr: 2.33e-04 +2022-05-06 14:19:54,469 INFO [train.py:715] (6/8) Epoch 9, batch 24150, loss[loss=0.1424, simple_loss=0.221, pruned_loss=0.03189, over 4833.00 frames.], tot_loss[loss=0.1414, simple_loss=0.214, pruned_loss=0.03438, over 973167.47 frames.], batch size: 26, lr: 2.33e-04 +2022-05-06 14:20:33,560 INFO [train.py:715] (6/8) Epoch 9, batch 24200, loss[loss=0.1272, simple_loss=0.1854, pruned_loss=0.03451, over 4768.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2144, pruned_loss=0.03468, over 973143.09 frames.], batch size: 12, lr: 2.33e-04 +2022-05-06 14:21:12,486 INFO [train.py:715] (6/8) Epoch 9, batch 24250, loss[loss=0.1513, simple_loss=0.2253, pruned_loss=0.03864, over 4925.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2148, pruned_loss=0.03452, over 972405.31 frames.], batch size: 29, lr: 2.33e-04 +2022-05-06 14:21:52,133 INFO [train.py:715] (6/8) Epoch 9, batch 24300, loss[loss=0.1481, simple_loss=0.2317, pruned_loss=0.03228, over 4787.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2149, pruned_loss=0.03433, over 971700.78 frames.], batch size: 18, lr: 2.33e-04 +2022-05-06 14:22:31,316 INFO [train.py:715] (6/8) Epoch 9, batch 24350, loss[loss=0.1464, simple_loss=0.222, pruned_loss=0.03539, over 4928.00 frames.], tot_loss[loss=0.142, simple_loss=0.2152, pruned_loss=0.03438, over 971509.75 frames.], batch size: 29, lr: 2.33e-04 +2022-05-06 14:23:10,726 INFO [train.py:715] (6/8) Epoch 9, batch 24400, loss[loss=0.1582, simple_loss=0.2298, pruned_loss=0.04327, over 4821.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2139, pruned_loss=0.03396, over 971395.56 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 14:23:50,615 INFO [train.py:715] (6/8) Epoch 9, batch 24450, loss[loss=0.1516, simple_loss=0.2124, pruned_loss=0.04539, over 4971.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03392, over 971661.94 frames.], batch size: 35, lr: 2.33e-04 +2022-05-06 14:24:30,638 INFO [train.py:715] (6/8) Epoch 9, batch 24500, loss[loss=0.1234, simple_loss=0.1988, pruned_loss=0.024, over 4812.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.03351, over 971764.77 frames.], batch size: 25, lr: 2.33e-04 +2022-05-06 14:25:10,997 INFO [train.py:715] (6/8) Epoch 9, batch 24550, loss[loss=0.1371, simple_loss=0.2046, pruned_loss=0.03485, over 4954.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03359, over 971403.92 frames.], batch size: 24, lr: 2.33e-04 +2022-05-06 14:25:50,743 INFO [train.py:715] (6/8) Epoch 9, batch 24600, loss[loss=0.1656, simple_loss=0.2333, pruned_loss=0.04893, over 4948.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2128, pruned_loss=0.03374, over 970961.34 frames.], batch size: 21, lr: 2.33e-04 +2022-05-06 14:26:30,714 INFO [train.py:715] (6/8) Epoch 9, batch 24650, loss[loss=0.1494, simple_loss=0.2167, pruned_loss=0.04104, over 4796.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.03367, over 971118.77 frames.], batch size: 18, lr: 2.33e-04 +2022-05-06 14:27:09,793 INFO 
[train.py:715] (6/8) Epoch 9, batch 24700, loss[loss=0.1666, simple_loss=0.2342, pruned_loss=0.04948, over 4959.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03408, over 970908.79 frames.], batch size: 35, lr: 2.33e-04 +2022-05-06 14:27:48,508 INFO [train.py:715] (6/8) Epoch 9, batch 24750, loss[loss=0.1313, simple_loss=0.2066, pruned_loss=0.028, over 4752.00 frames.], tot_loss[loss=0.1404, simple_loss=0.213, pruned_loss=0.03386, over 971306.70 frames.], batch size: 19, lr: 2.33e-04 +2022-05-06 14:28:28,023 INFO [train.py:715] (6/8) Epoch 9, batch 24800, loss[loss=0.1125, simple_loss=0.1911, pruned_loss=0.01695, over 4747.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03358, over 971315.68 frames.], batch size: 16, lr: 2.32e-04 +2022-05-06 14:29:07,571 INFO [train.py:715] (6/8) Epoch 9, batch 24850, loss[loss=0.1545, simple_loss=0.2318, pruned_loss=0.03857, over 4737.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2132, pruned_loss=0.03403, over 971514.18 frames.], batch size: 16, lr: 2.32e-04 +2022-05-06 14:29:46,974 INFO [train.py:715] (6/8) Epoch 9, batch 24900, loss[loss=0.1367, simple_loss=0.2136, pruned_loss=0.02995, over 4850.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2129, pruned_loss=0.03391, over 971445.36 frames.], batch size: 20, lr: 2.32e-04 +2022-05-06 14:30:26,409 INFO [train.py:715] (6/8) Epoch 9, batch 24950, loss[loss=0.146, simple_loss=0.215, pruned_loss=0.03852, over 4848.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2131, pruned_loss=0.03437, over 971227.93 frames.], batch size: 13, lr: 2.32e-04 +2022-05-06 14:31:06,086 INFO [train.py:715] (6/8) Epoch 9, batch 25000, loss[loss=0.1394, simple_loss=0.2142, pruned_loss=0.03233, over 4983.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2129, pruned_loss=0.03406, over 971613.41 frames.], batch size: 25, lr: 2.32e-04 +2022-05-06 14:31:44,922 INFO [train.py:715] (6/8) Epoch 9, batch 25050, loss[loss=0.1159, simple_loss=0.1885, pruned_loss=0.02161, over 4818.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.0335, over 971070.13 frames.], batch size: 13, lr: 2.32e-04 +2022-05-06 14:32:24,419 INFO [train.py:715] (6/8) Epoch 9, batch 25100, loss[loss=0.1283, simple_loss=0.2056, pruned_loss=0.02549, over 4799.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.0336, over 971201.84 frames.], batch size: 14, lr: 2.32e-04 +2022-05-06 14:33:03,524 INFO [train.py:715] (6/8) Epoch 9, batch 25150, loss[loss=0.1588, simple_loss=0.2319, pruned_loss=0.04284, over 4929.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03358, over 971309.30 frames.], batch size: 29, lr: 2.32e-04 +2022-05-06 14:33:42,583 INFO [train.py:715] (6/8) Epoch 9, batch 25200, loss[loss=0.1399, simple_loss=0.2155, pruned_loss=0.0322, over 4773.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.03349, over 972518.92 frames.], batch size: 12, lr: 2.32e-04 +2022-05-06 14:34:21,841 INFO [train.py:715] (6/8) Epoch 9, batch 25250, loss[loss=0.1497, simple_loss=0.2244, pruned_loss=0.03749, over 4861.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03358, over 972829.24 frames.], batch size: 22, lr: 2.32e-04 +2022-05-06 14:35:00,584 INFO [train.py:715] (6/8) Epoch 9, batch 25300, loss[loss=0.1297, simple_loss=0.2032, pruned_loss=0.02809, over 4844.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03326, over 973084.01 frames.], batch size: 30, lr: 2.32e-04 +2022-05-06 14:35:40,282 INFO [train.py:715] (6/8) Epoch 
9, batch 25350, loss[loss=0.1532, simple_loss=0.2295, pruned_loss=0.03849, over 4941.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2133, pruned_loss=0.03389, over 972599.94 frames.], batch size: 21, lr: 2.32e-04 +2022-05-06 14:36:20,125 INFO [train.py:715] (6/8) Epoch 9, batch 25400, loss[loss=0.1289, simple_loss=0.188, pruned_loss=0.03488, over 4822.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03337, over 972691.54 frames.], batch size: 13, lr: 2.32e-04 +2022-05-06 14:37:00,347 INFO [train.py:715] (6/8) Epoch 9, batch 25450, loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03087, over 4937.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2129, pruned_loss=0.03346, over 972452.03 frames.], batch size: 23, lr: 2.32e-04 +2022-05-06 14:37:38,914 INFO [train.py:715] (6/8) Epoch 9, batch 25500, loss[loss=0.1269, simple_loss=0.2022, pruned_loss=0.02578, over 4943.00 frames.], tot_loss[loss=0.1397, simple_loss=0.213, pruned_loss=0.03324, over 973132.52 frames.], batch size: 23, lr: 2.32e-04 +2022-05-06 14:38:18,074 INFO [train.py:715] (6/8) Epoch 9, batch 25550, loss[loss=0.1393, simple_loss=0.2039, pruned_loss=0.03731, over 4783.00 frames.], tot_loss[loss=0.1397, simple_loss=0.213, pruned_loss=0.03325, over 972723.97 frames.], batch size: 14, lr: 2.32e-04 +2022-05-06 14:38:57,227 INFO [train.py:715] (6/8) Epoch 9, batch 25600, loss[loss=0.1256, simple_loss=0.1959, pruned_loss=0.02763, over 4937.00 frames.], tot_loss[loss=0.14, simple_loss=0.2133, pruned_loss=0.03336, over 971887.36 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 14:39:36,164 INFO [train.py:715] (6/8) Epoch 9, batch 25650, loss[loss=0.1204, simple_loss=0.2012, pruned_loss=0.01984, over 4965.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2127, pruned_loss=0.03328, over 972650.55 frames.], batch size: 35, lr: 2.32e-04 +2022-05-06 14:40:15,299 INFO [train.py:715] (6/8) Epoch 9, batch 25700, loss[loss=0.1719, simple_loss=0.2379, pruned_loss=0.05292, over 4916.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.03411, over 972892.01 frames.], batch size: 39, lr: 2.32e-04 +2022-05-06 14:40:54,416 INFO [train.py:715] (6/8) Epoch 9, batch 25750, loss[loss=0.1176, simple_loss=0.1924, pruned_loss=0.02142, over 4805.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2142, pruned_loss=0.03428, over 972757.53 frames.], batch size: 25, lr: 2.32e-04 +2022-05-06 14:41:33,412 INFO [train.py:715] (6/8) Epoch 9, batch 25800, loss[loss=0.1417, simple_loss=0.2146, pruned_loss=0.03438, over 4932.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2142, pruned_loss=0.03444, over 972790.05 frames.], batch size: 29, lr: 2.32e-04 +2022-05-06 14:42:13,628 INFO [train.py:715] (6/8) Epoch 9, batch 25850, loss[loss=0.1405, simple_loss=0.2329, pruned_loss=0.0241, over 4952.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.03427, over 972612.81 frames.], batch size: 24, lr: 2.32e-04 +2022-05-06 14:42:53,073 INFO [train.py:715] (6/8) Epoch 9, batch 25900, loss[loss=0.1531, simple_loss=0.2254, pruned_loss=0.04036, over 4965.00 frames.], tot_loss[loss=0.141, simple_loss=0.2138, pruned_loss=0.03415, over 973110.20 frames.], batch size: 35, lr: 2.32e-04 +2022-05-06 14:43:32,747 INFO [train.py:715] (6/8) Epoch 9, batch 25950, loss[loss=0.1301, simple_loss=0.2026, pruned_loss=0.02884, over 4844.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2135, pruned_loss=0.03395, over 972665.95 frames.], batch size: 30, lr: 2.32e-04 +2022-05-06 14:44:11,976 INFO [train.py:715] (6/8) Epoch 9, batch 26000, 
loss[loss=0.1547, simple_loss=0.2236, pruned_loss=0.0429, over 4792.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.03384, over 972458.07 frames.], batch size: 24, lr: 2.32e-04 +2022-05-06 14:44:51,307 INFO [train.py:715] (6/8) Epoch 9, batch 26050, loss[loss=0.1382, simple_loss=0.2033, pruned_loss=0.03652, over 4980.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2144, pruned_loss=0.03399, over 972852.48 frames.], batch size: 24, lr: 2.32e-04 +2022-05-06 14:45:30,101 INFO [train.py:715] (6/8) Epoch 9, batch 26100, loss[loss=0.1506, simple_loss=0.2155, pruned_loss=0.04284, over 4972.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.03386, over 973243.14 frames.], batch size: 28, lr: 2.32e-04 +2022-05-06 14:46:09,807 INFO [train.py:715] (6/8) Epoch 9, batch 26150, loss[loss=0.1432, simple_loss=0.2125, pruned_loss=0.03694, over 4867.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2134, pruned_loss=0.03396, over 972695.88 frames.], batch size: 32, lr: 2.32e-04 +2022-05-06 14:46:50,052 INFO [train.py:715] (6/8) Epoch 9, batch 26200, loss[loss=0.1593, simple_loss=0.2253, pruned_loss=0.04664, over 4862.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2138, pruned_loss=0.03421, over 972307.68 frames.], batch size: 30, lr: 2.32e-04 +2022-05-06 14:47:29,918 INFO [train.py:715] (6/8) Epoch 9, batch 26250, loss[loss=0.1611, simple_loss=0.2406, pruned_loss=0.04079, over 4771.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03387, over 971688.39 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 14:48:09,838 INFO [train.py:715] (6/8) Epoch 9, batch 26300, loss[loss=0.1265, simple_loss=0.2017, pruned_loss=0.0257, over 4810.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03352, over 972128.03 frames.], batch size: 26, lr: 2.32e-04 +2022-05-06 14:48:49,368 INFO [train.py:715] (6/8) Epoch 9, batch 26350, loss[loss=0.1318, simple_loss=0.2028, pruned_loss=0.03039, over 4926.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03342, over 971979.77 frames.], batch size: 21, lr: 2.32e-04 +2022-05-06 14:49:28,727 INFO [train.py:715] (6/8) Epoch 9, batch 26400, loss[loss=0.1347, simple_loss=0.195, pruned_loss=0.03725, over 4807.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2122, pruned_loss=0.0336, over 971819.51 frames.], batch size: 13, lr: 2.32e-04 +2022-05-06 14:50:07,638 INFO [train.py:715] (6/8) Epoch 9, batch 26450, loss[loss=0.1413, simple_loss=0.2125, pruned_loss=0.03503, over 4895.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.03349, over 972624.75 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 14:50:46,957 INFO [train.py:715] (6/8) Epoch 9, batch 26500, loss[loss=0.1265, simple_loss=0.2153, pruned_loss=0.01881, over 4987.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2118, pruned_loss=0.03316, over 971833.77 frames.], batch size: 26, lr: 2.32e-04 +2022-05-06 14:51:26,799 INFO [train.py:715] (6/8) Epoch 9, batch 26550, loss[loss=0.1706, simple_loss=0.2366, pruned_loss=0.05228, over 4906.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03296, over 973115.20 frames.], batch size: 19, lr: 2.32e-04 +2022-05-06 14:52:06,135 INFO [train.py:715] (6/8) Epoch 9, batch 26600, loss[loss=0.1155, simple_loss=0.1846, pruned_loss=0.02321, over 4771.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03332, over 972503.55 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 14:52:46,106 INFO [train.py:715] (6/8) Epoch 9, batch 26650, loss[loss=0.1294, 
simple_loss=0.2096, pruned_loss=0.02458, over 4826.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2128, pruned_loss=0.03344, over 972319.39 frames.], batch size: 15, lr: 2.32e-04 +2022-05-06 14:53:25,382 INFO [train.py:715] (6/8) Epoch 9, batch 26700, loss[loss=0.133, simple_loss=0.2047, pruned_loss=0.03064, over 4922.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2139, pruned_loss=0.03454, over 972572.40 frames.], batch size: 29, lr: 2.32e-04 +2022-05-06 14:54:04,746 INFO [train.py:715] (6/8) Epoch 9, batch 26750, loss[loss=0.1413, simple_loss=0.2097, pruned_loss=0.03645, over 4865.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2138, pruned_loss=0.03457, over 972257.38 frames.], batch size: 32, lr: 2.32e-04 +2022-05-06 14:54:43,926 INFO [train.py:715] (6/8) Epoch 9, batch 26800, loss[loss=0.1284, simple_loss=0.1988, pruned_loss=0.02898, over 4783.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2139, pruned_loss=0.03445, over 972449.16 frames.], batch size: 14, lr: 2.32e-04 +2022-05-06 14:55:22,922 INFO [train.py:715] (6/8) Epoch 9, batch 26850, loss[loss=0.113, simple_loss=0.1825, pruned_loss=0.0218, over 4691.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2129, pruned_loss=0.03411, over 972610.64 frames.], batch size: 15, lr: 2.32e-04 +2022-05-06 14:56:02,405 INFO [train.py:715] (6/8) Epoch 9, batch 26900, loss[loss=0.1723, simple_loss=0.2483, pruned_loss=0.04818, over 4929.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03405, over 972045.39 frames.], batch size: 23, lr: 2.32e-04 +2022-05-06 14:56:42,229 INFO [train.py:715] (6/8) Epoch 9, batch 26950, loss[loss=0.1328, simple_loss=0.2086, pruned_loss=0.02854, over 4768.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2141, pruned_loss=0.0347, over 971996.47 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 14:57:21,398 INFO [train.py:715] (6/8) Epoch 9, batch 27000, loss[loss=0.1316, simple_loss=0.2088, pruned_loss=0.02724, over 4788.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.03422, over 972143.11 frames.], batch size: 24, lr: 2.32e-04 +2022-05-06 14:57:21,399 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 14:57:30,964 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.1068, simple_loss=0.1912, pruned_loss=0.01121, over 914524.00 frames. 
+2022-05-06 14:58:10,507 INFO [train.py:715] (6/8) Epoch 9, batch 27050, loss[loss=0.1373, simple_loss=0.2042, pruned_loss=0.03518, over 4799.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2129, pruned_loss=0.03388, over 972469.15 frames.], batch size: 21, lr: 2.32e-04 +2022-05-06 14:58:50,066 INFO [train.py:715] (6/8) Epoch 9, batch 27100, loss[loss=0.1123, simple_loss=0.1913, pruned_loss=0.01671, over 4990.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03373, over 973625.76 frames.], batch size: 15, lr: 2.32e-04 +2022-05-06 14:59:30,123 INFO [train.py:715] (6/8) Epoch 9, batch 27150, loss[loss=0.1293, simple_loss=0.201, pruned_loss=0.02882, over 4818.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03338, over 973391.24 frames.], batch size: 13, lr: 2.32e-04 +2022-05-06 15:00:09,250 INFO [train.py:715] (6/8) Epoch 9, batch 27200, loss[loss=0.1539, simple_loss=0.2218, pruned_loss=0.04301, over 4764.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2132, pruned_loss=0.03407, over 973732.49 frames.], batch size: 19, lr: 2.32e-04 +2022-05-06 15:00:48,163 INFO [train.py:715] (6/8) Epoch 9, batch 27250, loss[loss=0.1076, simple_loss=0.1746, pruned_loss=0.02033, over 4781.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03411, over 973383.15 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 15:01:27,382 INFO [train.py:715] (6/8) Epoch 9, batch 27300, loss[loss=0.161, simple_loss=0.241, pruned_loss=0.04051, over 4764.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.03413, over 974272.81 frames.], batch size: 14, lr: 2.32e-04 +2022-05-06 15:02:06,272 INFO [train.py:715] (6/8) Epoch 9, batch 27350, loss[loss=0.1338, simple_loss=0.2052, pruned_loss=0.03114, over 4768.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2138, pruned_loss=0.0342, over 973660.94 frames.], batch size: 12, lr: 2.32e-04 +2022-05-06 15:02:45,304 INFO [train.py:715] (6/8) Epoch 9, batch 27400, loss[loss=0.126, simple_loss=0.2083, pruned_loss=0.02186, over 4931.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2141, pruned_loss=0.03415, over 973165.44 frames.], batch size: 23, lr: 2.32e-04 +2022-05-06 15:03:24,468 INFO [train.py:715] (6/8) Epoch 9, batch 27450, loss[loss=0.1406, simple_loss=0.2205, pruned_loss=0.03037, over 4904.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2141, pruned_loss=0.03444, over 971962.02 frames.], batch size: 29, lr: 2.32e-04 +2022-05-06 15:04:03,432 INFO [train.py:715] (6/8) Epoch 9, batch 27500, loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03054, over 4813.00 frames.], tot_loss[loss=0.142, simple_loss=0.2147, pruned_loss=0.03469, over 972003.16 frames.], batch size: 27, lr: 2.32e-04 +2022-05-06 15:04:42,465 INFO [train.py:715] (6/8) Epoch 9, batch 27550, loss[loss=0.1894, simple_loss=0.2501, pruned_loss=0.06436, over 4954.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2147, pruned_loss=0.03524, over 972128.32 frames.], batch size: 23, lr: 2.32e-04 +2022-05-06 15:05:21,387 INFO [train.py:715] (6/8) Epoch 9, batch 27600, loss[loss=0.1215, simple_loss=0.1914, pruned_loss=0.02579, over 4782.00 frames.], tot_loss[loss=0.1416, simple_loss=0.214, pruned_loss=0.03458, over 972721.39 frames.], batch size: 14, lr: 2.32e-04 +2022-05-06 15:06:00,165 INFO [train.py:715] (6/8) Epoch 9, batch 27650, loss[loss=0.1476, simple_loss=0.2161, pruned_loss=0.03952, over 4917.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2145, pruned_loss=0.03478, over 972142.14 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 15:06:39,018 
INFO [train.py:715] (6/8) Epoch 9, batch 27700, loss[loss=0.1745, simple_loss=0.2451, pruned_loss=0.05193, over 4936.00 frames.], tot_loss[loss=0.1429, simple_loss=0.215, pruned_loss=0.03541, over 972657.49 frames.], batch size: 23, lr: 2.32e-04 +2022-05-06 15:07:18,268 INFO [train.py:715] (6/8) Epoch 9, batch 27750, loss[loss=0.1577, simple_loss=0.2294, pruned_loss=0.04301, over 4869.00 frames.], tot_loss[loss=0.1431, simple_loss=0.215, pruned_loss=0.03559, over 973330.98 frames.], batch size: 20, lr: 2.31e-04 +2022-05-06 15:07:57,615 INFO [train.py:715] (6/8) Epoch 9, batch 27800, loss[loss=0.1368, simple_loss=0.2082, pruned_loss=0.03273, over 4777.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2143, pruned_loss=0.03498, over 972577.30 frames.], batch size: 14, lr: 2.31e-04 +2022-05-06 15:08:36,549 INFO [train.py:715] (6/8) Epoch 9, batch 27850, loss[loss=0.1107, simple_loss=0.1835, pruned_loss=0.01892, over 4984.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03501, over 972132.34 frames.], batch size: 14, lr: 2.31e-04 +2022-05-06 15:09:16,414 INFO [train.py:715] (6/8) Epoch 9, batch 27900, loss[loss=0.1247, simple_loss=0.2038, pruned_loss=0.02283, over 4848.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2148, pruned_loss=0.03523, over 971170.00 frames.], batch size: 20, lr: 2.31e-04 +2022-05-06 15:09:54,912 INFO [train.py:715] (6/8) Epoch 9, batch 27950, loss[loss=0.1722, simple_loss=0.2396, pruned_loss=0.05244, over 4701.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2145, pruned_loss=0.03487, over 971262.56 frames.], batch size: 15, lr: 2.31e-04 +2022-05-06 15:10:34,271 INFO [train.py:715] (6/8) Epoch 9, batch 28000, loss[loss=0.1376, simple_loss=0.2074, pruned_loss=0.03391, over 4843.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2139, pruned_loss=0.03441, over 971171.02 frames.], batch size: 26, lr: 2.31e-04 +2022-05-06 15:11:13,575 INFO [train.py:715] (6/8) Epoch 9, batch 28050, loss[loss=0.1393, simple_loss=0.2137, pruned_loss=0.03244, over 4914.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2139, pruned_loss=0.03441, over 971178.79 frames.], batch size: 18, lr: 2.31e-04 +2022-05-06 15:11:52,643 INFO [train.py:715] (6/8) Epoch 9, batch 28100, loss[loss=0.1442, simple_loss=0.2158, pruned_loss=0.03631, over 4920.00 frames.], tot_loss[loss=0.1416, simple_loss=0.214, pruned_loss=0.03458, over 971451.61 frames.], batch size: 17, lr: 2.31e-04 +2022-05-06 15:12:31,909 INFO [train.py:715] (6/8) Epoch 9, batch 28150, loss[loss=0.1155, simple_loss=0.1909, pruned_loss=0.02003, over 4830.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2137, pruned_loss=0.03436, over 972109.89 frames.], batch size: 13, lr: 2.31e-04 +2022-05-06 15:13:10,821 INFO [train.py:715] (6/8) Epoch 9, batch 28200, loss[loss=0.1202, simple_loss=0.2052, pruned_loss=0.01761, over 4819.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2134, pruned_loss=0.03401, over 971748.15 frames.], batch size: 27, lr: 2.31e-04 +2022-05-06 15:13:50,248 INFO [train.py:715] (6/8) Epoch 9, batch 28250, loss[loss=0.1515, simple_loss=0.2349, pruned_loss=0.03401, over 4683.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2141, pruned_loss=0.03383, over 971197.20 frames.], batch size: 15, lr: 2.31e-04 +2022-05-06 15:14:28,529 INFO [train.py:715] (6/8) Epoch 9, batch 28300, loss[loss=0.1134, simple_loss=0.1897, pruned_loss=0.01849, over 4854.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2147, pruned_loss=0.03414, over 971390.14 frames.], batch size: 30, lr: 2.31e-04 +2022-05-06 15:15:07,477 INFO [train.py:715] 
(6/8) Epoch 9, batch 28350, loss[loss=0.1163, simple_loss=0.194, pruned_loss=0.01937, over 4880.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2153, pruned_loss=0.03469, over 971485.39 frames.], batch size: 20, lr: 2.31e-04 +2022-05-06 15:15:46,876 INFO [train.py:715] (6/8) Epoch 9, batch 28400, loss[loss=0.1257, simple_loss=0.1983, pruned_loss=0.02658, over 4739.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2155, pruned_loss=0.03495, over 972237.62 frames.], batch size: 16, lr: 2.31e-04 +2022-05-06 15:16:25,953 INFO [train.py:715] (6/8) Epoch 9, batch 28450, loss[loss=0.1604, simple_loss=0.2316, pruned_loss=0.0446, over 4959.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2148, pruned_loss=0.03449, over 971880.14 frames.], batch size: 35, lr: 2.31e-04 +2022-05-06 15:17:04,387 INFO [train.py:715] (6/8) Epoch 9, batch 28500, loss[loss=0.1504, simple_loss=0.2205, pruned_loss=0.04017, over 4877.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03475, over 972529.88 frames.], batch size: 16, lr: 2.31e-04 +2022-05-06 15:17:43,522 INFO [train.py:715] (6/8) Epoch 9, batch 28550, loss[loss=0.1403, simple_loss=0.2107, pruned_loss=0.03491, over 4986.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2143, pruned_loss=0.03464, over 973029.58 frames.], batch size: 35, lr: 2.31e-04 +2022-05-06 15:18:22,915 INFO [train.py:715] (6/8) Epoch 9, batch 28600, loss[loss=0.1389, simple_loss=0.2104, pruned_loss=0.03371, over 4750.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2148, pruned_loss=0.0345, over 973356.92 frames.], batch size: 16, lr: 2.31e-04 +2022-05-06 15:19:01,333 INFO [train.py:715] (6/8) Epoch 9, batch 28650, loss[loss=0.1394, simple_loss=0.2071, pruned_loss=0.03583, over 4827.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03391, over 972857.75 frames.], batch size: 26, lr: 2.31e-04 +2022-05-06 15:19:40,173 INFO [train.py:715] (6/8) Epoch 9, batch 28700, loss[loss=0.1308, simple_loss=0.2157, pruned_loss=0.02292, over 4959.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.0337, over 973221.02 frames.], batch size: 14, lr: 2.31e-04 +2022-05-06 15:20:19,623 INFO [train.py:715] (6/8) Epoch 9, batch 28750, loss[loss=0.2042, simple_loss=0.2605, pruned_loss=0.07396, over 4975.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2133, pruned_loss=0.0334, over 972752.39 frames.], batch size: 15, lr: 2.31e-04 +2022-05-06 15:20:58,325 INFO [train.py:715] (6/8) Epoch 9, batch 28800, loss[loss=0.1467, simple_loss=0.2335, pruned_loss=0.02999, over 4817.00 frames.], tot_loss[loss=0.1396, simple_loss=0.213, pruned_loss=0.03307, over 971811.72 frames.], batch size: 27, lr: 2.31e-04 +2022-05-06 15:21:36,726 INFO [train.py:715] (6/8) Epoch 9, batch 28850, loss[loss=0.1224, simple_loss=0.2024, pruned_loss=0.02123, over 4884.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2129, pruned_loss=0.03275, over 972221.42 frames.], batch size: 22, lr: 2.31e-04 +2022-05-06 15:22:16,104 INFO [train.py:715] (6/8) Epoch 9, batch 28900, loss[loss=0.1537, simple_loss=0.2, pruned_loss=0.05367, over 4831.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2128, pruned_loss=0.03305, over 971438.72 frames.], batch size: 12, lr: 2.31e-04 +2022-05-06 15:22:55,385 INFO [train.py:715] (6/8) Epoch 9, batch 28950, loss[loss=0.1212, simple_loss=0.1968, pruned_loss=0.02278, over 4781.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2126, pruned_loss=0.033, over 971591.74 frames.], batch size: 18, lr: 2.31e-04 +2022-05-06 15:23:33,684 INFO [train.py:715] (6/8) Epoch 9, batch 29000, 
loss[loss=0.133, simple_loss=0.2153, pruned_loss=0.0254, over 4814.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03319, over 970808.92 frames.], batch size: 21, lr: 2.31e-04 +2022-05-06 15:24:12,160 INFO [train.py:715] (6/8) Epoch 9, batch 29050, loss[loss=0.136, simple_loss=0.2126, pruned_loss=0.02969, over 4870.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03295, over 971375.88 frames.], batch size: 22, lr: 2.31e-04 +2022-05-06 15:24:51,098 INFO [train.py:715] (6/8) Epoch 9, batch 29100, loss[loss=0.1259, simple_loss=0.1922, pruned_loss=0.02985, over 4912.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03276, over 971548.61 frames.], batch size: 18, lr: 2.31e-04 +2022-05-06 15:25:30,249 INFO [train.py:715] (6/8) Epoch 9, batch 29150, loss[loss=0.1259, simple_loss=0.2, pruned_loss=0.02592, over 4946.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2125, pruned_loss=0.03288, over 971666.37 frames.], batch size: 21, lr: 2.31e-04 +2022-05-06 15:26:09,099 INFO [train.py:715] (6/8) Epoch 9, batch 29200, loss[loss=0.1187, simple_loss=0.1976, pruned_loss=0.01996, over 4742.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03319, over 972255.97 frames.], batch size: 16, lr: 2.31e-04 +2022-05-06 15:26:48,464 INFO [train.py:715] (6/8) Epoch 9, batch 29250, loss[loss=0.1431, simple_loss=0.2138, pruned_loss=0.03624, over 4887.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03323, over 972149.53 frames.], batch size: 22, lr: 2.31e-04 +2022-05-06 15:27:27,202 INFO [train.py:715] (6/8) Epoch 9, batch 29300, loss[loss=0.1312, simple_loss=0.188, pruned_loss=0.03721, over 4814.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03331, over 972081.27 frames.], batch size: 13, lr: 2.31e-04 +2022-05-06 15:28:06,269 INFO [train.py:715] (6/8) Epoch 9, batch 29350, loss[loss=0.1417, simple_loss=0.2195, pruned_loss=0.03188, over 4884.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2132, pruned_loss=0.03395, over 971756.94 frames.], batch size: 22, lr: 2.31e-04 +2022-05-06 15:28:45,213 INFO [train.py:715] (6/8) Epoch 9, batch 29400, loss[loss=0.1264, simple_loss=0.2007, pruned_loss=0.02607, over 4782.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2129, pruned_loss=0.03398, over 971567.06 frames.], batch size: 18, lr: 2.31e-04 +2022-05-06 15:29:23,949 INFO [train.py:715] (6/8) Epoch 9, batch 29450, loss[loss=0.1219, simple_loss=0.1988, pruned_loss=0.02247, over 4934.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2123, pruned_loss=0.03364, over 972042.54 frames.], batch size: 21, lr: 2.31e-04 +2022-05-06 15:30:02,405 INFO [train.py:715] (6/8) Epoch 9, batch 29500, loss[loss=0.1358, simple_loss=0.2181, pruned_loss=0.02678, over 4780.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2126, pruned_loss=0.03422, over 972489.55 frames.], batch size: 18, lr: 2.31e-04 +2022-05-06 15:30:41,338 INFO [train.py:715] (6/8) Epoch 9, batch 29550, loss[loss=0.1463, simple_loss=0.2252, pruned_loss=0.03373, over 4944.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2127, pruned_loss=0.03379, over 973204.61 frames.], batch size: 21, lr: 2.31e-04 +2022-05-06 15:31:20,278 INFO [train.py:715] (6/8) Epoch 9, batch 29600, loss[loss=0.1402, simple_loss=0.2054, pruned_loss=0.03751, over 4863.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2128, pruned_loss=0.03421, over 973409.11 frames.], batch size: 32, lr: 2.31e-04 +2022-05-06 15:31:59,542 INFO [train.py:715] (6/8) Epoch 9, batch 29650, loss[loss=0.1681, 
simple_loss=0.2354, pruned_loss=0.05037, over 4768.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2131, pruned_loss=0.03401, over 972846.29 frames.], batch size: 19, lr: 2.31e-04 +2022-05-06 15:32:39,148 INFO [train.py:715] (6/8) Epoch 9, batch 29700, loss[loss=0.1596, simple_loss=0.2413, pruned_loss=0.03897, over 4814.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2133, pruned_loss=0.03426, over 972009.20 frames.], batch size: 21, lr: 2.31e-04 +2022-05-06 15:33:17,090 INFO [train.py:715] (6/8) Epoch 9, batch 29750, loss[loss=0.1802, simple_loss=0.2402, pruned_loss=0.06008, over 4812.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2139, pruned_loss=0.03442, over 972224.43 frames.], batch size: 27, lr: 2.31e-04 +2022-05-06 15:33:55,773 INFO [train.py:715] (6/8) Epoch 9, batch 29800, loss[loss=0.1502, simple_loss=0.2259, pruned_loss=0.03729, over 4779.00 frames.], tot_loss[loss=0.1416, simple_loss=0.214, pruned_loss=0.03456, over 971724.03 frames.], batch size: 18, lr: 2.31e-04 +2022-05-06 15:34:34,896 INFO [train.py:715] (6/8) Epoch 9, batch 29850, loss[loss=0.1131, simple_loss=0.1841, pruned_loss=0.02102, over 4875.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2142, pruned_loss=0.03452, over 971617.61 frames.], batch size: 22, lr: 2.31e-04 +2022-05-06 15:35:13,061 INFO [train.py:715] (6/8) Epoch 9, batch 29900, loss[loss=0.115, simple_loss=0.1879, pruned_loss=0.02106, over 4844.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2143, pruned_loss=0.03489, over 971480.66 frames.], batch size: 13, lr: 2.31e-04 +2022-05-06 15:35:52,553 INFO [train.py:715] (6/8) Epoch 9, batch 29950, loss[loss=0.1369, simple_loss=0.2065, pruned_loss=0.03364, over 4957.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03482, over 971705.33 frames.], batch size: 35, lr: 2.31e-04 +2022-05-06 15:36:31,405 INFO [train.py:715] (6/8) Epoch 9, batch 30000, loss[loss=0.1321, simple_loss=0.1957, pruned_loss=0.03421, over 4918.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2136, pruned_loss=0.03397, over 972619.75 frames.], batch size: 18, lr: 2.31e-04 +2022-05-06 15:36:31,406 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 15:36:40,919 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.1068, simple_loss=0.1911, pruned_loss=0.01124, over 914524.00 frames. 
+2022-05-06 15:37:20,167 INFO [train.py:715] (6/8) Epoch 9, batch 30050, loss[loss=0.1573, simple_loss=0.2179, pruned_loss=0.04838, over 4773.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2145, pruned_loss=0.03463, over 971848.54 frames.], batch size: 14, lr: 2.31e-04 +2022-05-06 15:37:58,805 INFO [train.py:715] (6/8) Epoch 9, batch 30100, loss[loss=0.1281, simple_loss=0.2147, pruned_loss=0.02076, over 4938.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2149, pruned_loss=0.03484, over 972535.05 frames.], batch size: 29, lr: 2.31e-04 +2022-05-06 15:38:38,125 INFO [train.py:715] (6/8) Epoch 9, batch 30150, loss[loss=0.1368, simple_loss=0.2209, pruned_loss=0.02632, over 4812.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.0343, over 972812.17 frames.], batch size: 27, lr: 2.31e-04 +2022-05-06 15:39:17,507 INFO [train.py:715] (6/8) Epoch 9, batch 30200, loss[loss=0.1806, simple_loss=0.2428, pruned_loss=0.05917, over 4842.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2144, pruned_loss=0.03428, over 972912.55 frames.], batch size: 30, lr: 2.31e-04 +2022-05-06 15:39:56,687 INFO [train.py:715] (6/8) Epoch 9, batch 30250, loss[loss=0.1606, simple_loss=0.2391, pruned_loss=0.04103, over 4777.00 frames.], tot_loss[loss=0.142, simple_loss=0.2147, pruned_loss=0.03461, over 973156.20 frames.], batch size: 18, lr: 2.31e-04 +2022-05-06 15:40:35,247 INFO [train.py:715] (6/8) Epoch 9, batch 30300, loss[loss=0.1267, simple_loss=0.2059, pruned_loss=0.02376, over 4803.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.03451, over 972560.01 frames.], batch size: 21, lr: 2.31e-04 +2022-05-06 15:41:14,058 INFO [train.py:715] (6/8) Epoch 9, batch 30350, loss[loss=0.1412, simple_loss=0.221, pruned_loss=0.03075, over 4793.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03408, over 972586.68 frames.], batch size: 21, lr: 2.31e-04 +2022-05-06 15:41:53,484 INFO [train.py:715] (6/8) Epoch 9, batch 30400, loss[loss=0.1387, simple_loss=0.218, pruned_loss=0.02967, over 4811.00 frames.], tot_loss[loss=0.141, simple_loss=0.2141, pruned_loss=0.03396, over 971896.84 frames.], batch size: 27, lr: 2.31e-04 +2022-05-06 15:42:32,294 INFO [train.py:715] (6/8) Epoch 9, batch 30450, loss[loss=0.1168, simple_loss=0.197, pruned_loss=0.01829, over 4880.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.03363, over 972388.98 frames.], batch size: 22, lr: 2.31e-04 +2022-05-06 15:43:10,916 INFO [train.py:715] (6/8) Epoch 9, batch 30500, loss[loss=0.1343, simple_loss=0.2196, pruned_loss=0.02448, over 4814.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2147, pruned_loss=0.03407, over 971728.26 frames.], batch size: 25, lr: 2.31e-04 +2022-05-06 15:43:49,987 INFO [train.py:715] (6/8) Epoch 9, batch 30550, loss[loss=0.1133, simple_loss=0.1916, pruned_loss=0.01749, over 4789.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2148, pruned_loss=0.0344, over 972061.35 frames.], batch size: 12, lr: 2.31e-04 +2022-05-06 15:44:28,849 INFO [train.py:715] (6/8) Epoch 9, batch 30600, loss[loss=0.1198, simple_loss=0.1957, pruned_loss=0.02192, over 4849.00 frames.], tot_loss[loss=0.1411, simple_loss=0.214, pruned_loss=0.03404, over 972805.76 frames.], batch size: 15, lr: 2.31e-04 +2022-05-06 15:45:06,880 INFO [train.py:715] (6/8) Epoch 9, batch 30650, loss[loss=0.1299, simple_loss=0.1988, pruned_loss=0.0305, over 4841.00 frames.], tot_loss[loss=0.141, simple_loss=0.214, pruned_loss=0.03401, over 971956.65 frames.], batch size: 30, lr: 2.31e-04 +2022-05-06 15:45:45,881 INFO 
[train.py:715] (6/8) Epoch 9, batch 30700, loss[loss=0.1382, simple_loss=0.2216, pruned_loss=0.02738, over 4778.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03354, over 972002.65 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 15:46:27,573 INFO [train.py:715] (6/8) Epoch 9, batch 30750, loss[loss=0.1287, simple_loss=0.2053, pruned_loss=0.02607, over 4847.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.03388, over 971550.87 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 15:47:06,255 INFO [train.py:715] (6/8) Epoch 9, batch 30800, loss[loss=0.1531, simple_loss=0.2245, pruned_loss=0.04083, over 4763.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2138, pruned_loss=0.03387, over 971915.48 frames.], batch size: 19, lr: 2.30e-04 +2022-05-06 15:47:44,605 INFO [train.py:715] (6/8) Epoch 9, batch 30850, loss[loss=0.1406, simple_loss=0.2084, pruned_loss=0.03637, over 4937.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03379, over 972197.43 frames.], batch size: 29, lr: 2.30e-04 +2022-05-06 15:48:23,857 INFO [train.py:715] (6/8) Epoch 9, batch 30900, loss[loss=0.121, simple_loss=0.1999, pruned_loss=0.02109, over 4820.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03289, over 972569.49 frames.], batch size: 26, lr: 2.30e-04 +2022-05-06 15:49:03,045 INFO [train.py:715] (6/8) Epoch 9, batch 30950, loss[loss=0.1495, simple_loss=0.2184, pruned_loss=0.04035, over 4940.00 frames.], tot_loss[loss=0.14, simple_loss=0.2132, pruned_loss=0.03342, over 972508.81 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 15:49:41,534 INFO [train.py:715] (6/8) Epoch 9, batch 31000, loss[loss=0.1698, simple_loss=0.2298, pruned_loss=0.05487, over 4893.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2135, pruned_loss=0.0337, over 972003.45 frames.], batch size: 16, lr: 2.30e-04 +2022-05-06 15:50:20,507 INFO [train.py:715] (6/8) Epoch 9, batch 31050, loss[loss=0.1634, simple_loss=0.2449, pruned_loss=0.04089, over 4977.00 frames.], tot_loss[loss=0.141, simple_loss=0.214, pruned_loss=0.03398, over 972180.87 frames.], batch size: 25, lr: 2.30e-04 +2022-05-06 15:50:59,765 INFO [train.py:715] (6/8) Epoch 9, batch 31100, loss[loss=0.1589, simple_loss=0.2361, pruned_loss=0.0409, over 4942.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.03371, over 973085.30 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 15:51:38,438 INFO [train.py:715] (6/8) Epoch 9, batch 31150, loss[loss=0.1403, simple_loss=0.2192, pruned_loss=0.03075, over 4774.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2142, pruned_loss=0.03414, over 973381.87 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 15:52:17,025 INFO [train.py:715] (6/8) Epoch 9, batch 31200, loss[loss=0.1483, simple_loss=0.224, pruned_loss=0.03627, over 4881.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03422, over 973053.93 frames.], batch size: 20, lr: 2.30e-04 +2022-05-06 15:52:56,552 INFO [train.py:715] (6/8) Epoch 9, batch 31250, loss[loss=0.1242, simple_loss=0.1935, pruned_loss=0.02739, over 4754.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03496, over 972106.19 frames.], batch size: 16, lr: 2.30e-04 +2022-05-06 15:53:36,001 INFO [train.py:715] (6/8) Epoch 9, batch 31300, loss[loss=0.1643, simple_loss=0.2223, pruned_loss=0.05314, over 4978.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2145, pruned_loss=0.0348, over 972179.23 frames.], batch size: 35, lr: 2.30e-04 +2022-05-06 15:54:14,973 INFO [train.py:715] (6/8) Epoch 
9, batch 31350, loss[loss=0.1417, simple_loss=0.2181, pruned_loss=0.03265, over 4911.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2144, pruned_loss=0.03459, over 972327.02 frames.], batch size: 19, lr: 2.30e-04 +2022-05-06 15:54:53,758 INFO [train.py:715] (6/8) Epoch 9, batch 31400, loss[loss=0.1504, simple_loss=0.2193, pruned_loss=0.04069, over 4797.00 frames.], tot_loss[loss=0.1423, simple_loss=0.215, pruned_loss=0.03481, over 972266.69 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 15:55:32,700 INFO [train.py:715] (6/8) Epoch 9, batch 31450, loss[loss=0.1496, simple_loss=0.2197, pruned_loss=0.03977, over 4692.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.03427, over 971927.35 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 15:56:11,773 INFO [train.py:715] (6/8) Epoch 9, batch 31500, loss[loss=0.1425, simple_loss=0.2216, pruned_loss=0.0317, over 4883.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2157, pruned_loss=0.0351, over 972161.34 frames.], batch size: 22, lr: 2.30e-04 +2022-05-06 15:56:50,184 INFO [train.py:715] (6/8) Epoch 9, batch 31550, loss[loss=0.1196, simple_loss=0.1997, pruned_loss=0.01975, over 4745.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2149, pruned_loss=0.03467, over 972297.83 frames.], batch size: 16, lr: 2.30e-04 +2022-05-06 15:57:29,721 INFO [train.py:715] (6/8) Epoch 9, batch 31600, loss[loss=0.1489, simple_loss=0.2118, pruned_loss=0.04298, over 4750.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2153, pruned_loss=0.03469, over 972574.75 frames.], batch size: 19, lr: 2.30e-04 +2022-05-06 15:58:09,720 INFO [train.py:715] (6/8) Epoch 9, batch 31650, loss[loss=0.1293, simple_loss=0.205, pruned_loss=0.02682, over 4840.00 frames.], tot_loss[loss=0.142, simple_loss=0.2149, pruned_loss=0.03449, over 972036.43 frames.], batch size: 30, lr: 2.30e-04 +2022-05-06 15:58:48,442 INFO [train.py:715] (6/8) Epoch 9, batch 31700, loss[loss=0.1173, simple_loss=0.1899, pruned_loss=0.0223, over 4968.00 frames.], tot_loss[loss=0.1422, simple_loss=0.215, pruned_loss=0.03467, over 971855.68 frames.], batch size: 24, lr: 2.30e-04 +2022-05-06 15:59:27,451 INFO [train.py:715] (6/8) Epoch 9, batch 31750, loss[loss=0.1319, simple_loss=0.2068, pruned_loss=0.02852, over 4818.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2149, pruned_loss=0.03504, over 972262.44 frames.], batch size: 27, lr: 2.30e-04 +2022-05-06 16:00:06,080 INFO [train.py:715] (6/8) Epoch 9, batch 31800, loss[loss=0.1221, simple_loss=0.2019, pruned_loss=0.02119, over 4937.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2147, pruned_loss=0.0346, over 971495.21 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:00:45,145 INFO [train.py:715] (6/8) Epoch 9, batch 31850, loss[loss=0.132, simple_loss=0.2117, pruned_loss=0.02618, over 4757.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2146, pruned_loss=0.03447, over 970665.56 frames.], batch size: 19, lr: 2.30e-04 +2022-05-06 16:01:23,639 INFO [train.py:715] (6/8) Epoch 9, batch 31900, loss[loss=0.1488, simple_loss=0.2273, pruned_loss=0.03515, over 4924.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2141, pruned_loss=0.03458, over 972052.99 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 16:02:02,948 INFO [train.py:715] (6/8) Epoch 9, batch 31950, loss[loss=0.1177, simple_loss=0.1896, pruned_loss=0.02288, over 4802.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2143, pruned_loss=0.03467, over 972009.20 frames.], batch size: 13, lr: 2.30e-04 +2022-05-06 16:02:42,218 INFO [train.py:715] (6/8) Epoch 9, batch 32000, 
loss[loss=0.1267, simple_loss=0.1935, pruned_loss=0.02998, over 4886.00 frames.], tot_loss[loss=0.141, simple_loss=0.2135, pruned_loss=0.03429, over 972184.27 frames.], batch size: 32, lr: 2.30e-04 +2022-05-06 16:03:20,780 INFO [train.py:715] (6/8) Epoch 9, batch 32050, loss[loss=0.1499, simple_loss=0.214, pruned_loss=0.04289, over 4939.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2131, pruned_loss=0.03437, over 972119.55 frames.], batch size: 23, lr: 2.30e-04 +2022-05-06 16:03:59,268 INFO [train.py:715] (6/8) Epoch 9, batch 32100, loss[loss=0.1387, simple_loss=0.2166, pruned_loss=0.03038, over 4748.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2127, pruned_loss=0.03424, over 972615.96 frames.], batch size: 16, lr: 2.30e-04 +2022-05-06 16:04:38,261 INFO [train.py:715] (6/8) Epoch 9, batch 32150, loss[loss=0.1507, simple_loss=0.2262, pruned_loss=0.0376, over 4821.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2127, pruned_loss=0.03454, over 972931.15 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:05:17,702 INFO [train.py:715] (6/8) Epoch 9, batch 32200, loss[loss=0.1752, simple_loss=0.2527, pruned_loss=0.0489, over 4847.00 frames.], tot_loss[loss=0.141, simple_loss=0.2128, pruned_loss=0.03453, over 972075.52 frames.], batch size: 34, lr: 2.30e-04 +2022-05-06 16:05:55,462 INFO [train.py:715] (6/8) Epoch 9, batch 32250, loss[loss=0.1457, simple_loss=0.2241, pruned_loss=0.0336, over 4807.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2143, pruned_loss=0.03504, over 972397.21 frames.], batch size: 25, lr: 2.30e-04 +2022-05-06 16:06:34,664 INFO [train.py:715] (6/8) Epoch 9, batch 32300, loss[loss=0.1437, simple_loss=0.2045, pruned_loss=0.04144, over 4804.00 frames.], tot_loss[loss=0.142, simple_loss=0.2139, pruned_loss=0.03505, over 972028.84 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:07:13,844 INFO [train.py:715] (6/8) Epoch 9, batch 32350, loss[loss=0.1138, simple_loss=0.1822, pruned_loss=0.0227, over 4853.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2147, pruned_loss=0.03529, over 972669.29 frames.], batch size: 30, lr: 2.30e-04 +2022-05-06 16:07:52,331 INFO [train.py:715] (6/8) Epoch 9, batch 32400, loss[loss=0.126, simple_loss=0.1987, pruned_loss=0.02668, over 4846.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2143, pruned_loss=0.03473, over 972591.66 frames.], batch size: 32, lr: 2.30e-04 +2022-05-06 16:08:31,415 INFO [train.py:715] (6/8) Epoch 9, batch 32450, loss[loss=0.1464, simple_loss=0.2204, pruned_loss=0.03625, over 4978.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2146, pruned_loss=0.03508, over 971988.95 frames.], batch size: 35, lr: 2.30e-04 +2022-05-06 16:09:10,517 INFO [train.py:715] (6/8) Epoch 9, batch 32500, loss[loss=0.1182, simple_loss=0.1958, pruned_loss=0.02036, over 4938.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2134, pruned_loss=0.03454, over 972722.89 frames.], batch size: 29, lr: 2.30e-04 +2022-05-06 16:09:49,358 INFO [train.py:715] (6/8) Epoch 9, batch 32550, loss[loss=0.1486, simple_loss=0.2235, pruned_loss=0.03689, over 4894.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2132, pruned_loss=0.03414, over 973123.32 frames.], batch size: 19, lr: 2.30e-04 +2022-05-06 16:10:27,863 INFO [train.py:715] (6/8) Epoch 9, batch 32600, loss[loss=0.1299, simple_loss=0.2075, pruned_loss=0.02609, over 4755.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2137, pruned_loss=0.03441, over 972684.71 frames.], batch size: 16, lr: 2.30e-04 +2022-05-06 16:11:06,894 INFO [train.py:715] (6/8) Epoch 9, batch 32650, loss[loss=0.135, 
simple_loss=0.2149, pruned_loss=0.02758, over 4922.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2137, pruned_loss=0.03447, over 972971.27 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 16:11:45,873 INFO [train.py:715] (6/8) Epoch 9, batch 32700, loss[loss=0.1229, simple_loss=0.2041, pruned_loss=0.02091, over 4752.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2131, pruned_loss=0.0343, over 973329.06 frames.], batch size: 19, lr: 2.30e-04 +2022-05-06 16:12:24,796 INFO [train.py:715] (6/8) Epoch 9, batch 32750, loss[loss=0.12, simple_loss=0.1935, pruned_loss=0.02324, over 4816.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2135, pruned_loss=0.03453, over 973368.53 frames.], batch size: 13, lr: 2.30e-04 +2022-05-06 16:13:03,522 INFO [train.py:715] (6/8) Epoch 9, batch 32800, loss[loss=0.1226, simple_loss=0.1958, pruned_loss=0.02467, over 4948.00 frames.], tot_loss[loss=0.141, simple_loss=0.2135, pruned_loss=0.03431, over 973651.11 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:13:42,565 INFO [train.py:715] (6/8) Epoch 9, batch 32850, loss[loss=0.1507, simple_loss=0.2189, pruned_loss=0.04125, over 4973.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2133, pruned_loss=0.03407, over 973646.87 frames.], batch size: 35, lr: 2.30e-04 +2022-05-06 16:14:21,305 INFO [train.py:715] (6/8) Epoch 9, batch 32900, loss[loss=0.1536, simple_loss=0.2193, pruned_loss=0.04396, over 4953.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03398, over 973415.55 frames.], batch size: 39, lr: 2.30e-04 +2022-05-06 16:14:59,684 INFO [train.py:715] (6/8) Epoch 9, batch 32950, loss[loss=0.1509, simple_loss=0.2318, pruned_loss=0.03503, over 4806.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2138, pruned_loss=0.03377, over 972919.08 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:15:38,639 INFO [train.py:715] (6/8) Epoch 9, batch 33000, loss[loss=0.1235, simple_loss=0.1941, pruned_loss=0.0264, over 4813.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2133, pruned_loss=0.03356, over 971828.71 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 16:15:38,640 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 16:15:48,000 INFO [train.py:742] (6/8) Epoch 9, validation: loss=0.1068, simple_loss=0.1913, pruned_loss=0.01119, over 914524.00 frames. 
+2022-05-06 16:16:27,263 INFO [train.py:715] (6/8) Epoch 9, batch 33050, loss[loss=0.1455, simple_loss=0.221, pruned_loss=0.035, over 4781.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2138, pruned_loss=0.03368, over 970848.02 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 16:17:06,452 INFO [train.py:715] (6/8) Epoch 9, batch 33100, loss[loss=0.1382, simple_loss=0.2152, pruned_loss=0.03065, over 4893.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2135, pruned_loss=0.03369, over 971286.94 frames.], batch size: 19, lr: 2.30e-04 +2022-05-06 16:17:45,626 INFO [train.py:715] (6/8) Epoch 9, batch 33150, loss[loss=0.1333, simple_loss=0.2148, pruned_loss=0.02587, over 4989.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2128, pruned_loss=0.03381, over 972053.21 frames.], batch size: 27, lr: 2.30e-04 +2022-05-06 16:18:25,452 INFO [train.py:715] (6/8) Epoch 9, batch 33200, loss[loss=0.13, simple_loss=0.216, pruned_loss=0.02202, over 4978.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03381, over 973043.24 frames.], batch size: 25, lr: 2.30e-04 +2022-05-06 16:19:05,015 INFO [train.py:715] (6/8) Epoch 9, batch 33250, loss[loss=0.1352, simple_loss=0.2143, pruned_loss=0.02807, over 4787.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2138, pruned_loss=0.0338, over 972791.82 frames.], batch size: 17, lr: 2.30e-04 +2022-05-06 16:19:44,053 INFO [train.py:715] (6/8) Epoch 9, batch 33300, loss[loss=0.1377, simple_loss=0.2132, pruned_loss=0.03104, over 4977.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.03364, over 973702.30 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 16:20:23,553 INFO [train.py:715] (6/8) Epoch 9, batch 33350, loss[loss=0.124, simple_loss=0.1907, pruned_loss=0.02865, over 4694.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2136, pruned_loss=0.03372, over 973320.88 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 16:21:03,301 INFO [train.py:715] (6/8) Epoch 9, batch 33400, loss[loss=0.1245, simple_loss=0.2107, pruned_loss=0.01917, over 4891.00 frames.], tot_loss[loss=0.1408, simple_loss=0.214, pruned_loss=0.03384, over 973134.80 frames.], batch size: 19, lr: 2.30e-04 +2022-05-06 16:21:43,056 INFO [train.py:715] (6/8) Epoch 9, batch 33450, loss[loss=0.1387, simple_loss=0.2187, pruned_loss=0.02936, over 4887.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2145, pruned_loss=0.03418, over 973447.98 frames.], batch size: 19, lr: 2.30e-04 +2022-05-06 16:22:22,075 INFO [train.py:715] (6/8) Epoch 9, batch 33500, loss[loss=0.1322, simple_loss=0.2079, pruned_loss=0.02824, over 4947.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.03426, over 973597.11 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:23:00,827 INFO [train.py:715] (6/8) Epoch 9, batch 33550, loss[loss=0.1706, simple_loss=0.2572, pruned_loss=0.04203, over 4965.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2139, pruned_loss=0.03394, over 972700.13 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 16:23:40,549 INFO [train.py:715] (6/8) Epoch 9, batch 33600, loss[loss=0.1509, simple_loss=0.2216, pruned_loss=0.04008, over 4696.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2131, pruned_loss=0.03373, over 972729.39 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 16:24:19,321 INFO [train.py:715] (6/8) Epoch 9, batch 33650, loss[loss=0.1279, simple_loss=0.2125, pruned_loss=0.02169, over 4916.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03321, over 972579.65 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 16:24:58,236 
INFO [train.py:715] (6/8) Epoch 9, batch 33700, loss[loss=0.1239, simple_loss=0.1884, pruned_loss=0.02976, over 4794.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2115, pruned_loss=0.03302, over 971910.58 frames.], batch size: 12, lr: 2.29e-04 +2022-05-06 16:25:37,408 INFO [train.py:715] (6/8) Epoch 9, batch 33750, loss[loss=0.1323, simple_loss=0.2033, pruned_loss=0.0306, over 4963.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2126, pruned_loss=0.03363, over 971966.64 frames.], batch size: 24, lr: 2.29e-04 +2022-05-06 16:26:16,197 INFO [train.py:715] (6/8) Epoch 9, batch 33800, loss[loss=0.1633, simple_loss=0.2357, pruned_loss=0.04543, over 4927.00 frames.], tot_loss[loss=0.1405, simple_loss=0.213, pruned_loss=0.03404, over 972097.26 frames.], batch size: 29, lr: 2.29e-04 +2022-05-06 16:26:54,913 INFO [train.py:715] (6/8) Epoch 9, batch 33850, loss[loss=0.1168, simple_loss=0.1907, pruned_loss=0.02143, over 4707.00 frames.], tot_loss[loss=0.1403, simple_loss=0.213, pruned_loss=0.03376, over 971666.32 frames.], batch size: 15, lr: 2.29e-04 +2022-05-06 16:27:33,754 INFO [train.py:715] (6/8) Epoch 9, batch 33900, loss[loss=0.1437, simple_loss=0.2156, pruned_loss=0.03589, over 4709.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03359, over 972031.40 frames.], batch size: 15, lr: 2.29e-04 +2022-05-06 16:28:13,485 INFO [train.py:715] (6/8) Epoch 9, batch 33950, loss[loss=0.1425, simple_loss=0.2212, pruned_loss=0.03192, over 4788.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2123, pruned_loss=0.03347, over 972217.10 frames.], batch size: 17, lr: 2.29e-04 +2022-05-06 16:28:52,281 INFO [train.py:715] (6/8) Epoch 9, batch 34000, loss[loss=0.1358, simple_loss=0.2045, pruned_loss=0.03359, over 4862.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.03328, over 971327.64 frames.], batch size: 13, lr: 2.29e-04 +2022-05-06 16:29:31,511 INFO [train.py:715] (6/8) Epoch 9, batch 34050, loss[loss=0.1306, simple_loss=0.2031, pruned_loss=0.02902, over 4843.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2122, pruned_loss=0.0332, over 971539.52 frames.], batch size: 30, lr: 2.29e-04 +2022-05-06 16:30:09,976 INFO [train.py:715] (6/8) Epoch 9, batch 34100, loss[loss=0.1211, simple_loss=0.2071, pruned_loss=0.01753, over 4762.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03297, over 971706.06 frames.], batch size: 18, lr: 2.29e-04 +2022-05-06 16:30:49,075 INFO [train.py:715] (6/8) Epoch 9, batch 34150, loss[loss=0.1861, simple_loss=0.2662, pruned_loss=0.053, over 4885.00 frames.], tot_loss[loss=0.14, simple_loss=0.213, pruned_loss=0.03353, over 972218.57 frames.], batch size: 22, lr: 2.29e-04 +2022-05-06 16:31:27,541 INFO [train.py:715] (6/8) Epoch 9, batch 34200, loss[loss=0.1123, simple_loss=0.1891, pruned_loss=0.01774, over 4965.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03354, over 971959.18 frames.], batch size: 14, lr: 2.29e-04 +2022-05-06 16:32:05,776 INFO [train.py:715] (6/8) Epoch 9, batch 34250, loss[loss=0.1178, simple_loss=0.183, pruned_loss=0.02633, over 4975.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03325, over 972022.09 frames.], batch size: 15, lr: 2.29e-04 +2022-05-06 16:32:45,092 INFO [train.py:715] (6/8) Epoch 9, batch 34300, loss[loss=0.1267, simple_loss=0.2052, pruned_loss=0.02413, over 4778.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2128, pruned_loss=0.03367, over 972045.93 frames.], batch size: 14, lr: 2.29e-04 +2022-05-06 16:33:23,851 INFO [train.py:715] (6/8) 
Epoch 9, batch 34350, loss[loss=0.1702, simple_loss=0.2429, pruned_loss=0.04877, over 4780.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2133, pruned_loss=0.03412, over 972459.30 frames.], batch size: 18, lr: 2.29e-04 +2022-05-06 16:34:02,524 INFO [train.py:715] (6/8) Epoch 9, batch 34400, loss[loss=0.1267, simple_loss=0.2009, pruned_loss=0.02623, over 4819.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03401, over 972294.70 frames.], batch size: 27, lr: 2.29e-04 +2022-05-06 16:34:41,410 INFO [train.py:715] (6/8) Epoch 9, batch 34450, loss[loss=0.1758, simple_loss=0.2462, pruned_loss=0.05263, over 4892.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2138, pruned_loss=0.03398, over 972317.22 frames.], batch size: 16, lr: 2.29e-04 +2022-05-06 16:35:20,344 INFO [train.py:715] (6/8) Epoch 9, batch 34500, loss[loss=0.1593, simple_loss=0.2323, pruned_loss=0.04312, over 4760.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2141, pruned_loss=0.03421, over 972306.99 frames.], batch size: 18, lr: 2.29e-04 +2022-05-06 16:35:59,377 INFO [train.py:715] (6/8) Epoch 9, batch 34550, loss[loss=0.1247, simple_loss=0.1955, pruned_loss=0.02695, over 4778.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2144, pruned_loss=0.03426, over 971543.02 frames.], batch size: 17, lr: 2.29e-04 +2022-05-06 16:36:38,003 INFO [train.py:715] (6/8) Epoch 9, batch 34600, loss[loss=0.145, simple_loss=0.217, pruned_loss=0.03651, over 4917.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2145, pruned_loss=0.03429, over 971849.93 frames.], batch size: 18, lr: 2.29e-04 +2022-05-06 16:37:17,109 INFO [train.py:715] (6/8) Epoch 9, batch 34650, loss[loss=0.148, simple_loss=0.2203, pruned_loss=0.03786, over 4887.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2145, pruned_loss=0.03413, over 972305.47 frames.], batch size: 22, lr: 2.29e-04 +2022-05-06 16:37:56,511 INFO [train.py:715] (6/8) Epoch 9, batch 34700, loss[loss=0.1394, simple_loss=0.2047, pruned_loss=0.03702, over 4911.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2144, pruned_loss=0.0342, over 972220.28 frames.], batch size: 17, lr: 2.29e-04 +2022-05-06 16:38:34,801 INFO [train.py:715] (6/8) Epoch 9, batch 34750, loss[loss=0.134, simple_loss=0.2052, pruned_loss=0.03143, over 4832.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2135, pruned_loss=0.03378, over 972375.02 frames.], batch size: 32, lr: 2.29e-04 +2022-05-06 16:39:12,250 INFO [train.py:715] (6/8) Epoch 9, batch 34800, loss[loss=0.1363, simple_loss=0.2089, pruned_loss=0.0319, over 4747.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2125, pruned_loss=0.03368, over 972121.42 frames.], batch size: 12, lr: 2.29e-04 +2022-05-06 16:40:01,154 INFO [train.py:715] (6/8) Epoch 10, batch 0, loss[loss=0.1491, simple_loss=0.232, pruned_loss=0.03306, over 4818.00 frames.], tot_loss[loss=0.1491, simple_loss=0.232, pruned_loss=0.03306, over 4818.00 frames.], batch size: 26, lr: 2.19e-04 +2022-05-06 16:40:41,037 INFO [train.py:715] (6/8) Epoch 10, batch 50, loss[loss=0.1681, simple_loss=0.231, pruned_loss=0.05265, over 4975.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2127, pruned_loss=0.03179, over 218622.98 frames.], batch size: 39, lr: 2.19e-04 +2022-05-06 16:41:20,752 INFO [train.py:715] (6/8) Epoch 10, batch 100, loss[loss=0.1537, simple_loss=0.2262, pruned_loss=0.04058, over 4907.00 frames.], tot_loss[loss=0.139, simple_loss=0.2128, pruned_loss=0.03257, over 386342.52 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 16:42:00,759 INFO [train.py:715] (6/8) Epoch 10, batch 150, 
loss[loss=0.1258, simple_loss=0.1996, pruned_loss=0.02601, over 4829.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2122, pruned_loss=0.03238, over 516449.58 frames.], batch size: 12, lr: 2.19e-04 +2022-05-06 16:42:41,344 INFO [train.py:715] (6/8) Epoch 10, batch 200, loss[loss=0.1414, simple_loss=0.227, pruned_loss=0.02785, over 4764.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03334, over 617315.99 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 16:43:22,401 INFO [train.py:715] (6/8) Epoch 10, batch 250, loss[loss=0.1767, simple_loss=0.2342, pruned_loss=0.05955, over 4979.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03406, over 695696.53 frames.], batch size: 28, lr: 2.19e-04 +2022-05-06 16:44:03,221 INFO [train.py:715] (6/8) Epoch 10, batch 300, loss[loss=0.1281, simple_loss=0.2052, pruned_loss=0.02549, over 4831.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03353, over 757356.42 frames.], batch size: 27, lr: 2.19e-04 +2022-05-06 16:44:43,668 INFO [train.py:715] (6/8) Epoch 10, batch 350, loss[loss=0.122, simple_loss=0.1883, pruned_loss=0.02788, over 4928.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03255, over 805065.83 frames.], batch size: 18, lr: 2.19e-04 +2022-05-06 16:45:25,024 INFO [train.py:715] (6/8) Epoch 10, batch 400, loss[loss=0.1558, simple_loss=0.2324, pruned_loss=0.03955, over 4950.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2113, pruned_loss=0.0329, over 842525.44 frames.], batch size: 15, lr: 2.19e-04 +2022-05-06 16:46:06,716 INFO [train.py:715] (6/8) Epoch 10, batch 450, loss[loss=0.1618, simple_loss=0.2368, pruned_loss=0.04345, over 4863.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2121, pruned_loss=0.03332, over 871330.06 frames.], batch size: 20, lr: 2.19e-04 +2022-05-06 16:46:47,450 INFO [train.py:715] (6/8) Epoch 10, batch 500, loss[loss=0.1344, simple_loss=0.199, pruned_loss=0.03495, over 4760.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2118, pruned_loss=0.03323, over 893042.99 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 16:47:28,884 INFO [train.py:715] (6/8) Epoch 10, batch 550, loss[loss=0.1224, simple_loss=0.1871, pruned_loss=0.02884, over 4776.00 frames.], tot_loss[loss=0.1407, simple_loss=0.213, pruned_loss=0.03419, over 910637.20 frames.], batch size: 14, lr: 2.19e-04 +2022-05-06 16:48:10,024 INFO [train.py:715] (6/8) Epoch 10, batch 600, loss[loss=0.1269, simple_loss=0.1868, pruned_loss=0.03349, over 4793.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2128, pruned_loss=0.03394, over 923834.71 frames.], batch size: 12, lr: 2.19e-04 +2022-05-06 16:48:50,549 INFO [train.py:715] (6/8) Epoch 10, batch 650, loss[loss=0.1376, simple_loss=0.2134, pruned_loss=0.03088, over 4845.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2126, pruned_loss=0.03331, over 934256.91 frames.], batch size: 30, lr: 2.19e-04 +2022-05-06 16:49:31,187 INFO [train.py:715] (6/8) Epoch 10, batch 700, loss[loss=0.1989, simple_loss=0.2728, pruned_loss=0.06247, over 4832.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03327, over 943777.14 frames.], batch size: 26, lr: 2.19e-04 +2022-05-06 16:50:12,725 INFO [train.py:715] (6/8) Epoch 10, batch 750, loss[loss=0.1396, simple_loss=0.1953, pruned_loss=0.042, over 4755.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.03374, over 948987.78 frames.], batch size: 12, lr: 2.19e-04 +2022-05-06 16:50:54,003 INFO [train.py:715] (6/8) Epoch 10, batch 800, loss[loss=0.1592, simple_loss=0.2331, 
pruned_loss=0.0427, over 4961.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03356, over 954803.73 frames.], batch size: 39, lr: 2.19e-04 +2022-05-06 16:51:34,426 INFO [train.py:715] (6/8) Epoch 10, batch 850, loss[loss=0.1367, simple_loss=0.212, pruned_loss=0.03073, over 4914.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2129, pruned_loss=0.03372, over 958447.20 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 16:52:15,222 INFO [train.py:715] (6/8) Epoch 10, batch 900, loss[loss=0.1436, simple_loss=0.2194, pruned_loss=0.03386, over 4875.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2125, pruned_loss=0.03343, over 961248.15 frames.], batch size: 20, lr: 2.19e-04 +2022-05-06 16:52:55,742 INFO [train.py:715] (6/8) Epoch 10, batch 950, loss[loss=0.1255, simple_loss=0.2005, pruned_loss=0.02522, over 4936.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03392, over 964534.24 frames.], batch size: 21, lr: 2.19e-04 +2022-05-06 16:53:35,736 INFO [train.py:715] (6/8) Epoch 10, batch 1000, loss[loss=0.1368, simple_loss=0.2157, pruned_loss=0.02892, over 4715.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2142, pruned_loss=0.03406, over 966625.90 frames.], batch size: 15, lr: 2.19e-04 +2022-05-06 16:54:14,960 INFO [train.py:715] (6/8) Epoch 10, batch 1050, loss[loss=0.135, simple_loss=0.212, pruned_loss=0.02901, over 4877.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2139, pruned_loss=0.03415, over 968405.56 frames.], batch size: 16, lr: 2.19e-04 +2022-05-06 16:54:55,334 INFO [train.py:715] (6/8) Epoch 10, batch 1100, loss[loss=0.134, simple_loss=0.2045, pruned_loss=0.0317, over 4891.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2139, pruned_loss=0.03426, over 969387.71 frames.], batch size: 22, lr: 2.19e-04 +2022-05-06 16:55:34,631 INFO [train.py:715] (6/8) Epoch 10, batch 1150, loss[loss=0.1332, simple_loss=0.2136, pruned_loss=0.02639, over 4905.00 frames.], tot_loss[loss=0.1414, simple_loss=0.214, pruned_loss=0.03434, over 969753.64 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 16:56:13,830 INFO [train.py:715] (6/8) Epoch 10, batch 1200, loss[loss=0.1409, simple_loss=0.2231, pruned_loss=0.02934, over 4837.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2137, pruned_loss=0.03429, over 970481.60 frames.], batch size: 15, lr: 2.19e-04 +2022-05-06 16:56:53,603 INFO [train.py:715] (6/8) Epoch 10, batch 1250, loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03116, over 4749.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2138, pruned_loss=0.0342, over 970705.43 frames.], batch size: 16, lr: 2.19e-04 +2022-05-06 16:57:32,221 INFO [train.py:715] (6/8) Epoch 10, batch 1300, loss[loss=0.1439, simple_loss=0.2055, pruned_loss=0.0412, over 4841.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2129, pruned_loss=0.03422, over 970084.89 frames.], batch size: 32, lr: 2.19e-04 +2022-05-06 16:58:11,019 INFO [train.py:715] (6/8) Epoch 10, batch 1350, loss[loss=0.1502, simple_loss=0.2204, pruned_loss=0.03995, over 4927.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2135, pruned_loss=0.0345, over 970895.74 frames.], batch size: 23, lr: 2.19e-04 +2022-05-06 16:58:49,197 INFO [train.py:715] (6/8) Epoch 10, batch 1400, loss[loss=0.1424, simple_loss=0.223, pruned_loss=0.03092, over 4814.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2139, pruned_loss=0.03452, over 970921.13 frames.], batch size: 13, lr: 2.19e-04 +2022-05-06 16:59:28,746 INFO [train.py:715] (6/8) Epoch 10, batch 1450, loss[loss=0.1481, simple_loss=0.2277, pruned_loss=0.03425, over 
4877.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2137, pruned_loss=0.0346, over 971480.00 frames.], batch size: 32, lr: 2.19e-04 +2022-05-06 17:00:07,737 INFO [train.py:715] (6/8) Epoch 10, batch 1500, loss[loss=0.1329, simple_loss=0.2062, pruned_loss=0.0298, over 4911.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2138, pruned_loss=0.03451, over 972260.19 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 17:00:46,478 INFO [train.py:715] (6/8) Epoch 10, batch 1550, loss[loss=0.1478, simple_loss=0.2195, pruned_loss=0.03804, over 4794.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2141, pruned_loss=0.03462, over 972825.77 frames.], batch size: 17, lr: 2.19e-04 +2022-05-06 17:01:25,571 INFO [train.py:715] (6/8) Epoch 10, batch 1600, loss[loss=0.1539, simple_loss=0.2286, pruned_loss=0.03959, over 4963.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2133, pruned_loss=0.03418, over 971929.52 frames.], batch size: 15, lr: 2.19e-04 +2022-05-06 17:02:04,990 INFO [train.py:715] (6/8) Epoch 10, batch 1650, loss[loss=0.1723, simple_loss=0.2427, pruned_loss=0.05097, over 4981.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2137, pruned_loss=0.03404, over 971630.27 frames.], batch size: 31, lr: 2.19e-04 +2022-05-06 17:02:43,709 INFO [train.py:715] (6/8) Epoch 10, batch 1700, loss[loss=0.1467, simple_loss=0.2205, pruned_loss=0.03643, over 4951.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.03383, over 971112.41 frames.], batch size: 21, lr: 2.19e-04 +2022-05-06 17:03:22,056 INFO [train.py:715] (6/8) Epoch 10, batch 1750, loss[loss=0.1475, simple_loss=0.2188, pruned_loss=0.03814, over 4920.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2142, pruned_loss=0.03414, over 971616.35 frames.], batch size: 17, lr: 2.19e-04 +2022-05-06 17:04:02,189 INFO [train.py:715] (6/8) Epoch 10, batch 1800, loss[loss=0.1299, simple_loss=0.212, pruned_loss=0.02391, over 4756.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2138, pruned_loss=0.03415, over 971990.96 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 17:04:41,816 INFO [train.py:715] (6/8) Epoch 10, batch 1850, loss[loss=0.1382, simple_loss=0.2129, pruned_loss=0.03178, over 4857.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2126, pruned_loss=0.03365, over 971941.96 frames.], batch size: 20, lr: 2.19e-04 +2022-05-06 17:05:20,551 INFO [train.py:715] (6/8) Epoch 10, batch 1900, loss[loss=0.1219, simple_loss=0.1995, pruned_loss=0.0221, over 4792.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.03334, over 971984.66 frames.], batch size: 24, lr: 2.19e-04 +2022-05-06 17:05:59,513 INFO [train.py:715] (6/8) Epoch 10, batch 1950, loss[loss=0.1256, simple_loss=0.1994, pruned_loss=0.02588, over 4892.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.0336, over 972588.35 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:06:39,839 INFO [train.py:715] (6/8) Epoch 10, batch 2000, loss[loss=0.1515, simple_loss=0.2179, pruned_loss=0.04252, over 4789.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03346, over 973097.80 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:07:19,137 INFO [train.py:715] (6/8) Epoch 10, batch 2050, loss[loss=0.1305, simple_loss=0.2192, pruned_loss=0.02087, over 4822.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2122, pruned_loss=0.03319, over 972827.96 frames.], batch size: 26, lr: 2.18e-04 +2022-05-06 17:07:57,718 INFO [train.py:715] (6/8) Epoch 10, batch 2100, loss[loss=0.1448, simple_loss=0.2218, pruned_loss=0.03385, over 4878.00 frames.], 
tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.0327, over 972978.08 frames.], batch size: 22, lr: 2.18e-04 +2022-05-06 17:08:37,348 INFO [train.py:715] (6/8) Epoch 10, batch 2150, loss[loss=0.1338, simple_loss=0.2117, pruned_loss=0.02797, over 4779.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03308, over 972658.33 frames.], batch size: 12, lr: 2.18e-04 +2022-05-06 17:09:16,484 INFO [train.py:715] (6/8) Epoch 10, batch 2200, loss[loss=0.1658, simple_loss=0.2354, pruned_loss=0.04809, over 4910.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2116, pruned_loss=0.03284, over 972094.83 frames.], batch size: 39, lr: 2.18e-04 +2022-05-06 17:09:55,192 INFO [train.py:715] (6/8) Epoch 10, batch 2250, loss[loss=0.1454, simple_loss=0.2276, pruned_loss=0.03158, over 4928.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2122, pruned_loss=0.03317, over 972387.05 frames.], batch size: 21, lr: 2.18e-04 +2022-05-06 17:10:33,970 INFO [train.py:715] (6/8) Epoch 10, batch 2300, loss[loss=0.1149, simple_loss=0.1945, pruned_loss=0.0176, over 4924.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03309, over 971278.61 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:11:13,695 INFO [train.py:715] (6/8) Epoch 10, batch 2350, loss[loss=0.1232, simple_loss=0.195, pruned_loss=0.02576, over 4971.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03296, over 971592.28 frames.], batch size: 31, lr: 2.18e-04 +2022-05-06 17:11:52,502 INFO [train.py:715] (6/8) Epoch 10, batch 2400, loss[loss=0.1406, simple_loss=0.2171, pruned_loss=0.03199, over 4799.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03255, over 972170.56 frames.], batch size: 24, lr: 2.18e-04 +2022-05-06 17:12:31,240 INFO [train.py:715] (6/8) Epoch 10, batch 2450, loss[loss=0.1208, simple_loss=0.2065, pruned_loss=0.01759, over 4863.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03228, over 972945.04 frames.], batch size: 20, lr: 2.18e-04 +2022-05-06 17:13:10,538 INFO [train.py:715] (6/8) Epoch 10, batch 2500, loss[loss=0.1518, simple_loss=0.2222, pruned_loss=0.04068, over 4918.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2119, pruned_loss=0.03238, over 973804.47 frames.], batch size: 39, lr: 2.18e-04 +2022-05-06 17:13:49,922 INFO [train.py:715] (6/8) Epoch 10, batch 2550, loss[loss=0.1377, simple_loss=0.217, pruned_loss=0.02924, over 4801.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2121, pruned_loss=0.03261, over 972890.06 frames.], batch size: 24, lr: 2.18e-04 +2022-05-06 17:14:29,345 INFO [train.py:715] (6/8) Epoch 10, batch 2600, loss[loss=0.126, simple_loss=0.1965, pruned_loss=0.02772, over 4957.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2133, pruned_loss=0.03344, over 973394.75 frames.], batch size: 14, lr: 2.18e-04 +2022-05-06 17:15:08,460 INFO [train.py:715] (6/8) Epoch 10, batch 2650, loss[loss=0.1738, simple_loss=0.2462, pruned_loss=0.05073, over 4784.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2131, pruned_loss=0.03316, over 974137.37 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:15:47,662 INFO [train.py:715] (6/8) Epoch 10, batch 2700, loss[loss=0.1487, simple_loss=0.2127, pruned_loss=0.04229, over 4903.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2135, pruned_loss=0.03335, over 973358.18 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:16:26,376 INFO [train.py:715] (6/8) Epoch 10, batch 2750, loss[loss=0.1142, simple_loss=0.1865, pruned_loss=0.02092, over 4702.00 frames.], tot_loss[loss=0.1408, 
simple_loss=0.2141, pruned_loss=0.03377, over 972063.28 frames.], batch size: 15, lr: 2.18e-04 +2022-05-06 17:17:05,080 INFO [train.py:715] (6/8) Epoch 10, batch 2800, loss[loss=0.13, simple_loss=0.2016, pruned_loss=0.02921, over 4816.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2145, pruned_loss=0.03412, over 972264.35 frames.], batch size: 14, lr: 2.18e-04 +2022-05-06 17:17:43,818 INFO [train.py:715] (6/8) Epoch 10, batch 2850, loss[loss=0.1165, simple_loss=0.2001, pruned_loss=0.01645, over 4910.00 frames.], tot_loss[loss=0.141, simple_loss=0.214, pruned_loss=0.03396, over 971722.78 frames.], batch size: 29, lr: 2.18e-04 +2022-05-06 17:18:23,064 INFO [train.py:715] (6/8) Epoch 10, batch 2900, loss[loss=0.1228, simple_loss=0.2009, pruned_loss=0.02235, over 4747.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03352, over 971183.75 frames.], batch size: 19, lr: 2.18e-04 +2022-05-06 17:19:02,256 INFO [train.py:715] (6/8) Epoch 10, batch 2950, loss[loss=0.1854, simple_loss=0.2481, pruned_loss=0.06137, over 4702.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03395, over 970946.41 frames.], batch size: 15, lr: 2.18e-04 +2022-05-06 17:19:40,637 INFO [train.py:715] (6/8) Epoch 10, batch 3000, loss[loss=0.146, simple_loss=0.2224, pruned_loss=0.03477, over 4801.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2141, pruned_loss=0.03432, over 971028.53 frames.], batch size: 21, lr: 2.18e-04 +2022-05-06 17:19:40,638 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 17:19:50,100 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1065, simple_loss=0.1908, pruned_loss=0.01113, over 914524.00 frames. +2022-05-06 17:20:28,628 INFO [train.py:715] (6/8) Epoch 10, batch 3050, loss[loss=0.1371, simple_loss=0.2135, pruned_loss=0.03036, over 4871.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03396, over 970867.84 frames.], batch size: 20, lr: 2.18e-04 +2022-05-06 17:21:07,570 INFO [train.py:715] (6/8) Epoch 10, batch 3100, loss[loss=0.1161, simple_loss=0.1862, pruned_loss=0.02298, over 4961.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2144, pruned_loss=0.03403, over 971048.83 frames.], batch size: 35, lr: 2.18e-04 +2022-05-06 17:21:46,737 INFO [train.py:715] (6/8) Epoch 10, batch 3150, loss[loss=0.1248, simple_loss=0.2023, pruned_loss=0.02365, over 4817.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2146, pruned_loss=0.03432, over 970712.21 frames.], batch size: 26, lr: 2.18e-04 +2022-05-06 17:22:25,533 INFO [train.py:715] (6/8) Epoch 10, batch 3200, loss[loss=0.1578, simple_loss=0.2209, pruned_loss=0.04737, over 4768.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2143, pruned_loss=0.03407, over 970710.81 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:23:03,965 INFO [train.py:715] (6/8) Epoch 10, batch 3250, loss[loss=0.1751, simple_loss=0.245, pruned_loss=0.05258, over 4969.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2142, pruned_loss=0.03413, over 971017.05 frames.], batch size: 35, lr: 2.18e-04 +2022-05-06 17:23:44,489 INFO [train.py:715] (6/8) Epoch 10, batch 3300, loss[loss=0.1444, simple_loss=0.2218, pruned_loss=0.03345, over 4791.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.03424, over 971362.17 frames.], batch size: 21, lr: 2.18e-04 +2022-05-06 17:24:24,214 INFO [train.py:715] (6/8) Epoch 10, batch 3350, loss[loss=0.1315, simple_loss=0.2024, pruned_loss=0.03023, over 4971.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2149, pruned_loss=0.03435, over 971447.68 frames.], 
batch size: 14, lr: 2.18e-04 +2022-05-06 17:25:04,073 INFO [train.py:715] (6/8) Epoch 10, batch 3400, loss[loss=0.1384, simple_loss=0.2144, pruned_loss=0.03123, over 4976.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03407, over 971702.41 frames.], batch size: 25, lr: 2.18e-04 +2022-05-06 17:25:44,894 INFO [train.py:715] (6/8) Epoch 10, batch 3450, loss[loss=0.1855, simple_loss=0.2535, pruned_loss=0.05877, over 4986.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2143, pruned_loss=0.03399, over 972031.53 frames.], batch size: 35, lr: 2.18e-04 +2022-05-06 17:26:26,600 INFO [train.py:715] (6/8) Epoch 10, batch 3500, loss[loss=0.1367, simple_loss=0.2193, pruned_loss=0.027, over 4969.00 frames.], tot_loss[loss=0.141, simple_loss=0.2143, pruned_loss=0.03387, over 971928.07 frames.], batch size: 15, lr: 2.18e-04 +2022-05-06 17:27:07,267 INFO [train.py:715] (6/8) Epoch 10, batch 3550, loss[loss=0.1492, simple_loss=0.2219, pruned_loss=0.03829, over 4956.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2138, pruned_loss=0.03384, over 971833.31 frames.], batch size: 35, lr: 2.18e-04 +2022-05-06 17:27:48,547 INFO [train.py:715] (6/8) Epoch 10, batch 3600, loss[loss=0.1547, simple_loss=0.2207, pruned_loss=0.04434, over 4934.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2144, pruned_loss=0.03401, over 972480.93 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:28:29,173 INFO [train.py:715] (6/8) Epoch 10, batch 3650, loss[loss=0.1221, simple_loss=0.1925, pruned_loss=0.02583, over 4864.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2136, pruned_loss=0.03403, over 971564.97 frames.], batch size: 22, lr: 2.18e-04 +2022-05-06 17:29:10,579 INFO [train.py:715] (6/8) Epoch 10, batch 3700, loss[loss=0.1434, simple_loss=0.2232, pruned_loss=0.03178, over 4906.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2132, pruned_loss=0.03383, over 971929.22 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:29:51,157 INFO [train.py:715] (6/8) Epoch 10, batch 3750, loss[loss=0.1408, simple_loss=0.229, pruned_loss=0.02631, over 4906.00 frames.], tot_loss[loss=0.1399, simple_loss=0.213, pruned_loss=0.0334, over 971677.21 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:30:32,381 INFO [train.py:715] (6/8) Epoch 10, batch 3800, loss[loss=0.1586, simple_loss=0.2335, pruned_loss=0.04183, over 4705.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03333, over 971601.59 frames.], batch size: 15, lr: 2.18e-04 +2022-05-06 17:31:13,749 INFO [train.py:715] (6/8) Epoch 10, batch 3850, loss[loss=0.1453, simple_loss=0.2274, pruned_loss=0.03162, over 4962.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03321, over 972731.78 frames.], batch size: 21, lr: 2.18e-04 +2022-05-06 17:31:54,695 INFO [train.py:715] (6/8) Epoch 10, batch 3900, loss[loss=0.1505, simple_loss=0.2257, pruned_loss=0.0377, over 4919.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03342, over 972069.29 frames.], batch size: 23, lr: 2.18e-04 +2022-05-06 17:32:36,893 INFO [train.py:715] (6/8) Epoch 10, batch 3950, loss[loss=0.151, simple_loss=0.2148, pruned_loss=0.04362, over 4931.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.03366, over 972065.06 frames.], batch size: 39, lr: 2.18e-04 +2022-05-06 17:33:16,173 INFO [train.py:715] (6/8) Epoch 10, batch 4000, loss[loss=0.1479, simple_loss=0.2221, pruned_loss=0.03681, over 4779.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.03359, over 971395.94 frames.], batch size: 17, lr: 
2.18e-04 +2022-05-06 17:33:55,838 INFO [train.py:715] (6/8) Epoch 10, batch 4050, loss[loss=0.1185, simple_loss=0.2017, pruned_loss=0.01766, over 4887.00 frames.], tot_loss[loss=0.1409, simple_loss=0.214, pruned_loss=0.0339, over 971868.96 frames.], batch size: 22, lr: 2.18e-04 +2022-05-06 17:34:34,557 INFO [train.py:715] (6/8) Epoch 10, batch 4100, loss[loss=0.1243, simple_loss=0.2053, pruned_loss=0.02168, over 4983.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2145, pruned_loss=0.03402, over 972299.30 frames.], batch size: 28, lr: 2.18e-04 +2022-05-06 17:35:13,435 INFO [train.py:715] (6/8) Epoch 10, batch 4150, loss[loss=0.1335, simple_loss=0.2151, pruned_loss=0.02601, over 4885.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2142, pruned_loss=0.03418, over 972332.71 frames.], batch size: 22, lr: 2.18e-04 +2022-05-06 17:35:52,992 INFO [train.py:715] (6/8) Epoch 10, batch 4200, loss[loss=0.1295, simple_loss=0.205, pruned_loss=0.02694, over 4773.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2133, pruned_loss=0.03387, over 973072.51 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:36:31,677 INFO [train.py:715] (6/8) Epoch 10, batch 4250, loss[loss=0.1345, simple_loss=0.2077, pruned_loss=0.03064, over 4921.00 frames.], tot_loss[loss=0.141, simple_loss=0.214, pruned_loss=0.03403, over 973060.10 frames.], batch size: 23, lr: 2.18e-04 +2022-05-06 17:37:10,489 INFO [train.py:715] (6/8) Epoch 10, batch 4300, loss[loss=0.1698, simple_loss=0.2418, pruned_loss=0.04889, over 4856.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2142, pruned_loss=0.03437, over 972796.85 frames.], batch size: 20, lr: 2.18e-04 +2022-05-06 17:37:49,694 INFO [train.py:715] (6/8) Epoch 10, batch 4350, loss[loss=0.1426, simple_loss=0.2205, pruned_loss=0.03238, over 4915.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2134, pruned_loss=0.03402, over 973407.49 frames.], batch size: 19, lr: 2.18e-04 +2022-05-06 17:38:28,648 INFO [train.py:715] (6/8) Epoch 10, batch 4400, loss[loss=0.1172, simple_loss=0.1952, pruned_loss=0.01961, over 4943.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2125, pruned_loss=0.03344, over 973716.28 frames.], batch size: 24, lr: 2.18e-04 +2022-05-06 17:39:07,613 INFO [train.py:715] (6/8) Epoch 10, batch 4450, loss[loss=0.123, simple_loss=0.1993, pruned_loss=0.02333, over 4862.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.03369, over 972936.81 frames.], batch size: 20, lr: 2.18e-04 +2022-05-06 17:39:46,323 INFO [train.py:715] (6/8) Epoch 10, batch 4500, loss[loss=0.1219, simple_loss=0.1889, pruned_loss=0.02746, over 4810.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.03371, over 972031.88 frames.], batch size: 12, lr: 2.18e-04 +2022-05-06 17:40:25,800 INFO [train.py:715] (6/8) Epoch 10, batch 4550, loss[loss=0.1168, simple_loss=0.1909, pruned_loss=0.02139, over 4880.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2128, pruned_loss=0.03329, over 973011.26 frames.], batch size: 16, lr: 2.18e-04 +2022-05-06 17:41:04,679 INFO [train.py:715] (6/8) Epoch 10, batch 4600, loss[loss=0.1238, simple_loss=0.1934, pruned_loss=0.02712, over 4802.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.0333, over 972252.17 frames.], batch size: 21, lr: 2.18e-04 +2022-05-06 17:41:43,578 INFO [train.py:715] (6/8) Epoch 10, batch 4650, loss[loss=0.1158, simple_loss=0.1904, pruned_loss=0.02058, over 4981.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2136, pruned_loss=0.03388, over 972511.41 frames.], batch size: 15, lr: 2.18e-04 +2022-05-06 
17:42:23,833 INFO [train.py:715] (6/8) Epoch 10, batch 4700, loss[loss=0.1264, simple_loss=0.1985, pruned_loss=0.02713, over 4975.00 frames.], tot_loss[loss=0.14, simple_loss=0.2127, pruned_loss=0.03369, over 972782.28 frames.], batch size: 15, lr: 2.18e-04 +2022-05-06 17:43:03,975 INFO [train.py:715] (6/8) Epoch 10, batch 4750, loss[loss=0.1304, simple_loss=0.2135, pruned_loss=0.02368, over 4770.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03371, over 972237.72 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:43:43,160 INFO [train.py:715] (6/8) Epoch 10, batch 4800, loss[loss=0.1141, simple_loss=0.1899, pruned_loss=0.01919, over 4931.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2144, pruned_loss=0.03465, over 972370.38 frames.], batch size: 29, lr: 2.18e-04 +2022-05-06 17:44:22,999 INFO [train.py:715] (6/8) Epoch 10, batch 4850, loss[loss=0.1038, simple_loss=0.1674, pruned_loss=0.02015, over 4824.00 frames.], tot_loss[loss=0.1405, simple_loss=0.213, pruned_loss=0.034, over 973109.05 frames.], batch size: 13, lr: 2.18e-04 +2022-05-06 17:45:02,950 INFO [train.py:715] (6/8) Epoch 10, batch 4900, loss[loss=0.1619, simple_loss=0.2278, pruned_loss=0.04794, over 4846.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2136, pruned_loss=0.03402, over 972681.57 frames.], batch size: 30, lr: 2.18e-04 +2022-05-06 17:45:42,397 INFO [train.py:715] (6/8) Epoch 10, batch 4950, loss[loss=0.1213, simple_loss=0.192, pruned_loss=0.02532, over 4841.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03403, over 971888.50 frames.], batch size: 13, lr: 2.18e-04 +2022-05-06 17:46:21,437 INFO [train.py:715] (6/8) Epoch 10, batch 5000, loss[loss=0.1282, simple_loss=0.2039, pruned_loss=0.02629, over 4899.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2139, pruned_loss=0.03432, over 971137.64 frames.], batch size: 19, lr: 2.18e-04 +2022-05-06 17:47:00,598 INFO [train.py:715] (6/8) Epoch 10, batch 5050, loss[loss=0.154, simple_loss=0.218, pruned_loss=0.04499, over 4951.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.03408, over 971029.03 frames.], batch size: 35, lr: 2.18e-04 +2022-05-06 17:47:39,527 INFO [train.py:715] (6/8) Epoch 10, batch 5100, loss[loss=0.215, simple_loss=0.279, pruned_loss=0.07547, over 4943.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2133, pruned_loss=0.03406, over 971299.19 frames.], batch size: 39, lr: 2.18e-04 +2022-05-06 17:48:18,802 INFO [train.py:715] (6/8) Epoch 10, batch 5150, loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02993, over 4952.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2128, pruned_loss=0.03377, over 971326.94 frames.], batch size: 21, lr: 2.18e-04 +2022-05-06 17:48:58,638 INFO [train.py:715] (6/8) Epoch 10, batch 5200, loss[loss=0.1354, simple_loss=0.2165, pruned_loss=0.02715, over 4975.00 frames.], tot_loss[loss=0.142, simple_loss=0.2144, pruned_loss=0.03479, over 971747.95 frames.], batch size: 39, lr: 2.17e-04 +2022-05-06 17:49:38,471 INFO [train.py:715] (6/8) Epoch 10, batch 5250, loss[loss=0.1634, simple_loss=0.2297, pruned_loss=0.04849, over 4922.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2147, pruned_loss=0.03481, over 971187.92 frames.], batch size: 17, lr: 2.17e-04 +2022-05-06 17:50:17,853 INFO [train.py:715] (6/8) Epoch 10, batch 5300, loss[loss=0.1272, simple_loss=0.2095, pruned_loss=0.02239, over 4963.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2155, pruned_loss=0.03502, over 971690.69 frames.], batch size: 28, lr: 2.17e-04 +2022-05-06 17:50:57,195 INFO 
[train.py:715] (6/8) Epoch 10, batch 5350, loss[loss=0.1278, simple_loss=0.202, pruned_loss=0.02685, over 4824.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2151, pruned_loss=0.03464, over 971675.32 frames.], batch size: 27, lr: 2.17e-04 +2022-05-06 17:51:37,024 INFO [train.py:715] (6/8) Epoch 10, batch 5400, loss[loss=0.1424, simple_loss=0.2204, pruned_loss=0.03215, over 4872.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2151, pruned_loss=0.03456, over 971885.65 frames.], batch size: 16, lr: 2.17e-04 +2022-05-06 17:52:16,938 INFO [train.py:715] (6/8) Epoch 10, batch 5450, loss[loss=0.1519, simple_loss=0.2315, pruned_loss=0.03612, over 4808.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2146, pruned_loss=0.03389, over 972425.12 frames.], batch size: 21, lr: 2.17e-04 +2022-05-06 17:52:56,345 INFO [train.py:715] (6/8) Epoch 10, batch 5500, loss[loss=0.1204, simple_loss=0.1889, pruned_loss=0.02596, over 4979.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2142, pruned_loss=0.03407, over 972725.48 frames.], batch size: 28, lr: 2.17e-04 +2022-05-06 17:53:36,104 INFO [train.py:715] (6/8) Epoch 10, batch 5550, loss[loss=0.1647, simple_loss=0.2426, pruned_loss=0.04339, over 4969.00 frames.], tot_loss[loss=0.142, simple_loss=0.2152, pruned_loss=0.0344, over 972915.86 frames.], batch size: 28, lr: 2.17e-04 +2022-05-06 17:54:16,053 INFO [train.py:715] (6/8) Epoch 10, batch 5600, loss[loss=0.1267, simple_loss=0.1944, pruned_loss=0.02948, over 4783.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2139, pruned_loss=0.03382, over 972798.26 frames.], batch size: 14, lr: 2.17e-04 +2022-05-06 17:54:55,815 INFO [train.py:715] (6/8) Epoch 10, batch 5650, loss[loss=0.1287, simple_loss=0.2053, pruned_loss=0.02606, over 4824.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2136, pruned_loss=0.03389, over 972797.88 frames.], batch size: 26, lr: 2.17e-04 +2022-05-06 17:55:34,982 INFO [train.py:715] (6/8) Epoch 10, batch 5700, loss[loss=0.1756, simple_loss=0.2463, pruned_loss=0.05244, over 4843.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2136, pruned_loss=0.0339, over 973018.86 frames.], batch size: 26, lr: 2.17e-04 +2022-05-06 17:56:15,023 INFO [train.py:715] (6/8) Epoch 10, batch 5750, loss[loss=0.1376, simple_loss=0.2198, pruned_loss=0.02767, over 4811.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03373, over 972179.75 frames.], batch size: 26, lr: 2.17e-04 +2022-05-06 17:56:54,689 INFO [train.py:715] (6/8) Epoch 10, batch 5800, loss[loss=0.175, simple_loss=0.2501, pruned_loss=0.04993, over 4794.00 frames.], tot_loss[loss=0.141, simple_loss=0.2141, pruned_loss=0.034, over 972884.02 frames.], batch size: 12, lr: 2.17e-04 +2022-05-06 17:57:34,210 INFO [train.py:715] (6/8) Epoch 10, batch 5850, loss[loss=0.1327, simple_loss=0.2105, pruned_loss=0.02741, over 4968.00 frames.], tot_loss[loss=0.1421, simple_loss=0.215, pruned_loss=0.03461, over 972339.88 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 17:58:14,028 INFO [train.py:715] (6/8) Epoch 10, batch 5900, loss[loss=0.1309, simple_loss=0.2091, pruned_loss=0.0263, over 4945.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2142, pruned_loss=0.03436, over 972028.55 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 17:58:53,765 INFO [train.py:715] (6/8) Epoch 10, batch 5950, loss[loss=0.1254, simple_loss=0.2059, pruned_loss=0.02249, over 4890.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2136, pruned_loss=0.03428, over 972599.26 frames.], batch size: 19, lr: 2.17e-04 +2022-05-06 17:59:33,431 INFO [train.py:715] (6/8) Epoch 
10, batch 6000, loss[loss=0.143, simple_loss=0.2094, pruned_loss=0.03833, over 4850.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03342, over 972926.73 frames.], batch size: 20, lr: 2.17e-04 +2022-05-06 17:59:33,431 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 17:59:42,753 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1067, simple_loss=0.1909, pruned_loss=0.01126, over 914524.00 frames. +2022-05-06 18:00:22,328 INFO [train.py:715] (6/8) Epoch 10, batch 6050, loss[loss=0.09783, simple_loss=0.1734, pruned_loss=0.01113, over 4791.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03325, over 972635.88 frames.], batch size: 12, lr: 2.17e-04 +2022-05-06 18:01:00,746 INFO [train.py:715] (6/8) Epoch 10, batch 6100, loss[loss=0.1419, simple_loss=0.22, pruned_loss=0.03196, over 4914.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.03328, over 972982.93 frames.], batch size: 29, lr: 2.17e-04 +2022-05-06 18:01:40,229 INFO [train.py:715] (6/8) Epoch 10, batch 6150, loss[loss=0.1565, simple_loss=0.2307, pruned_loss=0.0411, over 4817.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03352, over 973259.51 frames.], batch size: 25, lr: 2.17e-04 +2022-05-06 18:02:20,087 INFO [train.py:715] (6/8) Epoch 10, batch 6200, loss[loss=0.1307, simple_loss=0.2093, pruned_loss=0.02603, over 4834.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2129, pruned_loss=0.03393, over 972667.52 frames.], batch size: 13, lr: 2.17e-04 +2022-05-06 18:02:59,941 INFO [train.py:715] (6/8) Epoch 10, batch 6250, loss[loss=0.1499, simple_loss=0.2397, pruned_loss=0.03006, over 4743.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03349, over 971894.93 frames.], batch size: 16, lr: 2.17e-04 +2022-05-06 18:03:39,470 INFO [train.py:715] (6/8) Epoch 10, batch 6300, loss[loss=0.1178, simple_loss=0.1875, pruned_loss=0.02405, over 4911.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03333, over 972752.40 frames.], batch size: 17, lr: 2.17e-04 +2022-05-06 18:04:19,282 INFO [train.py:715] (6/8) Epoch 10, batch 6350, loss[loss=0.1561, simple_loss=0.2314, pruned_loss=0.04039, over 4954.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2128, pruned_loss=0.03375, over 973995.53 frames.], batch size: 21, lr: 2.17e-04 +2022-05-06 18:04:58,327 INFO [train.py:715] (6/8) Epoch 10, batch 6400, loss[loss=0.139, simple_loss=0.2026, pruned_loss=0.03774, over 4644.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03388, over 973866.75 frames.], batch size: 13, lr: 2.17e-04 +2022-05-06 18:05:36,735 INFO [train.py:715] (6/8) Epoch 10, batch 6450, loss[loss=0.1353, simple_loss=0.2131, pruned_loss=0.02877, over 4862.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2139, pruned_loss=0.034, over 973321.35 frames.], batch size: 38, lr: 2.17e-04 +2022-05-06 18:06:15,661 INFO [train.py:715] (6/8) Epoch 10, batch 6500, loss[loss=0.1197, simple_loss=0.1997, pruned_loss=0.01986, over 4984.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2137, pruned_loss=0.03359, over 972642.39 frames.], batch size: 25, lr: 2.17e-04 +2022-05-06 18:06:54,777 INFO [train.py:715] (6/8) Epoch 10, batch 6550, loss[loss=0.1324, simple_loss=0.2124, pruned_loss=0.02615, over 4978.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2128, pruned_loss=0.03291, over 972775.28 frames.], batch size: 40, lr: 2.17e-04 +2022-05-06 18:07:33,918 INFO [train.py:715] (6/8) Epoch 10, batch 6600, loss[loss=0.1327, simple_loss=0.21, 
pruned_loss=0.02771, over 4933.00 frames.], tot_loss[loss=0.1396, simple_loss=0.213, pruned_loss=0.03308, over 973796.51 frames.], batch size: 29, lr: 2.17e-04 +2022-05-06 18:08:12,475 INFO [train.py:715] (6/8) Epoch 10, batch 6650, loss[loss=0.1231, simple_loss=0.1888, pruned_loss=0.02873, over 4979.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2141, pruned_loss=0.03375, over 974234.67 frames.], batch size: 25, lr: 2.17e-04 +2022-05-06 18:08:52,602 INFO [train.py:715] (6/8) Epoch 10, batch 6700, loss[loss=0.1037, simple_loss=0.1754, pruned_loss=0.016, over 4791.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03353, over 973632.89 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 18:09:31,855 INFO [train.py:715] (6/8) Epoch 10, batch 6750, loss[loss=0.1389, simple_loss=0.2253, pruned_loss=0.0263, over 4808.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2142, pruned_loss=0.034, over 973470.88 frames.], batch size: 25, lr: 2.17e-04 +2022-05-06 18:10:10,547 INFO [train.py:715] (6/8) Epoch 10, batch 6800, loss[loss=0.1236, simple_loss=0.2036, pruned_loss=0.02184, over 4821.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2142, pruned_loss=0.0336, over 973189.96 frames.], batch size: 26, lr: 2.17e-04 +2022-05-06 18:10:50,417 INFO [train.py:715] (6/8) Epoch 10, batch 6850, loss[loss=0.1416, simple_loss=0.2269, pruned_loss=0.02817, over 4779.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2137, pruned_loss=0.03301, over 973580.42 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 18:11:29,659 INFO [train.py:715] (6/8) Epoch 10, batch 6900, loss[loss=0.1413, simple_loss=0.2134, pruned_loss=0.03464, over 4810.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2132, pruned_loss=0.03281, over 973939.62 frames.], batch size: 26, lr: 2.17e-04 +2022-05-06 18:12:08,738 INFO [train.py:715] (6/8) Epoch 10, batch 6950, loss[loss=0.153, simple_loss=0.2297, pruned_loss=0.03821, over 4757.00 frames.], tot_loss[loss=0.1397, simple_loss=0.213, pruned_loss=0.03316, over 973573.86 frames.], batch size: 16, lr: 2.17e-04 +2022-05-06 18:12:48,644 INFO [train.py:715] (6/8) Epoch 10, batch 7000, loss[loss=0.1245, simple_loss=0.1914, pruned_loss=0.02875, over 4712.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2131, pruned_loss=0.03298, over 972297.44 frames.], batch size: 12, lr: 2.17e-04 +2022-05-06 18:13:28,552 INFO [train.py:715] (6/8) Epoch 10, batch 7050, loss[loss=0.1264, simple_loss=0.2036, pruned_loss=0.02461, over 4845.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2129, pruned_loss=0.03281, over 971370.17 frames.], batch size: 30, lr: 2.17e-04 +2022-05-06 18:14:07,744 INFO [train.py:715] (6/8) Epoch 10, batch 7100, loss[loss=0.1571, simple_loss=0.222, pruned_loss=0.04613, over 4840.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2138, pruned_loss=0.03346, over 971167.71 frames.], batch size: 13, lr: 2.17e-04 +2022-05-06 18:14:46,900 INFO [train.py:715] (6/8) Epoch 10, batch 7150, loss[loss=0.1491, simple_loss=0.2243, pruned_loss=0.03693, over 4970.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2139, pruned_loss=0.03355, over 971826.93 frames.], batch size: 39, lr: 2.17e-04 +2022-05-06 18:15:26,299 INFO [train.py:715] (6/8) Epoch 10, batch 7200, loss[loss=0.1479, simple_loss=0.2182, pruned_loss=0.03881, over 4768.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2136, pruned_loss=0.03376, over 972433.39 frames.], batch size: 19, lr: 2.17e-04 +2022-05-06 18:16:05,439 INFO [train.py:715] (6/8) Epoch 10, batch 7250, loss[loss=0.146, simple_loss=0.2201, pruned_loss=0.03597, over 
4924.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2131, pruned_loss=0.03389, over 972624.89 frames.], batch size: 23, lr: 2.17e-04 +2022-05-06 18:16:44,410 INFO [train.py:715] (6/8) Epoch 10, batch 7300, loss[loss=0.1492, simple_loss=0.2141, pruned_loss=0.04212, over 4948.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2134, pruned_loss=0.03402, over 972805.09 frames.], batch size: 23, lr: 2.17e-04 +2022-05-06 18:17:23,331 INFO [train.py:715] (6/8) Epoch 10, batch 7350, loss[loss=0.1313, simple_loss=0.2008, pruned_loss=0.03087, over 4894.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03399, over 972879.86 frames.], batch size: 17, lr: 2.17e-04 +2022-05-06 18:18:02,743 INFO [train.py:715] (6/8) Epoch 10, batch 7400, loss[loss=0.1339, simple_loss=0.2137, pruned_loss=0.02702, over 4818.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03353, over 972397.68 frames.], batch size: 21, lr: 2.17e-04 +2022-05-06 18:18:41,885 INFO [train.py:715] (6/8) Epoch 10, batch 7450, loss[loss=0.143, simple_loss=0.2157, pruned_loss=0.03512, over 4783.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03378, over 972131.55 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 18:19:20,023 INFO [train.py:715] (6/8) Epoch 10, batch 7500, loss[loss=0.1372, simple_loss=0.2097, pruned_loss=0.03235, over 4939.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2122, pruned_loss=0.03366, over 972339.66 frames.], batch size: 21, lr: 2.17e-04 +2022-05-06 18:19:59,645 INFO [train.py:715] (6/8) Epoch 10, batch 7550, loss[loss=0.1261, simple_loss=0.1947, pruned_loss=0.02879, over 4787.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2118, pruned_loss=0.03333, over 972659.83 frames.], batch size: 17, lr: 2.17e-04 +2022-05-06 18:20:38,459 INFO [train.py:715] (6/8) Epoch 10, batch 7600, loss[loss=0.1036, simple_loss=0.1841, pruned_loss=0.01161, over 4811.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.03351, over 971853.01 frames.], batch size: 25, lr: 2.17e-04 +2022-05-06 18:21:17,039 INFO [train.py:715] (6/8) Epoch 10, batch 7650, loss[loss=0.1302, simple_loss=0.2025, pruned_loss=0.029, over 4775.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03352, over 972213.21 frames.], batch size: 17, lr: 2.17e-04 +2022-05-06 18:21:56,439 INFO [train.py:715] (6/8) Epoch 10, batch 7700, loss[loss=0.1249, simple_loss=0.1979, pruned_loss=0.02595, over 4933.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2132, pruned_loss=0.03377, over 971952.40 frames.], batch size: 29, lr: 2.17e-04 +2022-05-06 18:22:35,796 INFO [train.py:715] (6/8) Epoch 10, batch 7750, loss[loss=0.1116, simple_loss=0.1783, pruned_loss=0.0225, over 4827.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.0336, over 972764.49 frames.], batch size: 12, lr: 2.17e-04 +2022-05-06 18:23:15,171 INFO [train.py:715] (6/8) Epoch 10, batch 7800, loss[loss=0.1373, simple_loss=0.2124, pruned_loss=0.03113, over 4897.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2124, pruned_loss=0.03361, over 973140.79 frames.], batch size: 22, lr: 2.17e-04 +2022-05-06 18:23:53,548 INFO [train.py:715] (6/8) Epoch 10, batch 7850, loss[loss=0.137, simple_loss=0.2064, pruned_loss=0.03376, over 4917.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2127, pruned_loss=0.03414, over 972883.19 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 18:24:33,025 INFO [train.py:715] (6/8) Epoch 10, batch 7900, loss[loss=0.1269, simple_loss=0.1971, pruned_loss=0.02835, over 4777.00 frames.], 
tot_loss[loss=0.14, simple_loss=0.2126, pruned_loss=0.03375, over 972532.22 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 18:25:12,548 INFO [train.py:715] (6/8) Epoch 10, batch 7950, loss[loss=0.1458, simple_loss=0.2326, pruned_loss=0.0295, over 4965.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2131, pruned_loss=0.03393, over 973275.40 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 18:25:51,361 INFO [train.py:715] (6/8) Epoch 10, batch 8000, loss[loss=0.1623, simple_loss=0.2397, pruned_loss=0.04242, over 4784.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2132, pruned_loss=0.03366, over 972881.33 frames.], batch size: 17, lr: 2.17e-04 +2022-05-06 18:26:30,790 INFO [train.py:715] (6/8) Epoch 10, batch 8050, loss[loss=0.1219, simple_loss=0.1977, pruned_loss=0.02307, over 4752.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.03362, over 972365.30 frames.], batch size: 19, lr: 2.17e-04 +2022-05-06 18:27:10,411 INFO [train.py:715] (6/8) Epoch 10, batch 8100, loss[loss=0.1585, simple_loss=0.2202, pruned_loss=0.04839, over 4816.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.03384, over 972266.78 frames.], batch size: 13, lr: 2.17e-04 +2022-05-06 18:27:49,303 INFO [train.py:715] (6/8) Epoch 10, batch 8150, loss[loss=0.1177, simple_loss=0.1988, pruned_loss=0.01832, over 4916.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2136, pruned_loss=0.03346, over 972471.36 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 18:28:27,913 INFO [train.py:715] (6/8) Epoch 10, batch 8200, loss[loss=0.1511, simple_loss=0.2196, pruned_loss=0.04134, over 4738.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2132, pruned_loss=0.03366, over 972294.95 frames.], batch size: 16, lr: 2.17e-04 +2022-05-06 18:29:07,593 INFO [train.py:715] (6/8) Epoch 10, batch 8250, loss[loss=0.1508, simple_loss=0.2152, pruned_loss=0.0432, over 4973.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03334, over 972386.34 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 18:29:46,990 INFO [train.py:715] (6/8) Epoch 10, batch 8300, loss[loss=0.1422, simple_loss=0.2137, pruned_loss=0.03539, over 4964.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.0333, over 972478.11 frames.], batch size: 35, lr: 2.17e-04 +2022-05-06 18:30:25,736 INFO [train.py:715] (6/8) Epoch 10, batch 8350, loss[loss=0.1455, simple_loss=0.2163, pruned_loss=0.03736, over 4777.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03263, over 972291.52 frames.], batch size: 17, lr: 2.17e-04 +2022-05-06 18:31:05,470 INFO [train.py:715] (6/8) Epoch 10, batch 8400, loss[loss=0.1441, simple_loss=0.223, pruned_loss=0.03263, over 4830.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03262, over 972734.83 frames.], batch size: 25, lr: 2.17e-04 +2022-05-06 18:31:44,986 INFO [train.py:715] (6/8) Epoch 10, batch 8450, loss[loss=0.1134, simple_loss=0.1811, pruned_loss=0.0229, over 4969.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03272, over 972569.40 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 18:32:23,262 INFO [train.py:715] (6/8) Epoch 10, batch 8500, loss[loss=0.1265, simple_loss=0.2022, pruned_loss=0.0254, over 4945.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03298, over 972597.83 frames.], batch size: 35, lr: 2.16e-04 +2022-05-06 18:33:02,053 INFO [train.py:715] (6/8) Epoch 10, batch 8550, loss[loss=0.1661, simple_loss=0.2306, pruned_loss=0.05084, over 4768.00 frames.], tot_loss[loss=0.1386, 
simple_loss=0.2119, pruned_loss=0.03271, over 973333.34 frames.], batch size: 14, lr: 2.16e-04 +2022-05-06 18:33:41,303 INFO [train.py:715] (6/8) Epoch 10, batch 8600, loss[loss=0.1279, simple_loss=0.1908, pruned_loss=0.03251, over 4953.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2123, pruned_loss=0.03301, over 973791.33 frames.], batch size: 21, lr: 2.16e-04 +2022-05-06 18:34:19,985 INFO [train.py:715] (6/8) Epoch 10, batch 8650, loss[loss=0.1432, simple_loss=0.2212, pruned_loss=0.03259, over 4772.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2113, pruned_loss=0.03266, over 973349.03 frames.], batch size: 18, lr: 2.16e-04 +2022-05-06 18:34:58,632 INFO [train.py:715] (6/8) Epoch 10, batch 8700, loss[loss=0.1324, simple_loss=0.2172, pruned_loss=0.02385, over 4814.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2108, pruned_loss=0.03266, over 973653.68 frames.], batch size: 25, lr: 2.16e-04 +2022-05-06 18:35:37,458 INFO [train.py:715] (6/8) Epoch 10, batch 8750, loss[loss=0.1704, simple_loss=0.248, pruned_loss=0.04635, over 4777.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2115, pruned_loss=0.03294, over 973668.51 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 18:36:15,830 INFO [train.py:715] (6/8) Epoch 10, batch 8800, loss[loss=0.1728, simple_loss=0.2436, pruned_loss=0.05103, over 4806.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2123, pruned_loss=0.03343, over 973103.70 frames.], batch size: 21, lr: 2.16e-04 +2022-05-06 18:36:54,706 INFO [train.py:715] (6/8) Epoch 10, batch 8850, loss[loss=0.1688, simple_loss=0.2483, pruned_loss=0.04465, over 4904.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.03352, over 973169.81 frames.], batch size: 19, lr: 2.16e-04 +2022-05-06 18:37:34,279 INFO [train.py:715] (6/8) Epoch 10, batch 8900, loss[loss=0.1059, simple_loss=0.1781, pruned_loss=0.01683, over 4911.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2123, pruned_loss=0.03356, over 973183.62 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 18:38:13,788 INFO [train.py:715] (6/8) Epoch 10, batch 8950, loss[loss=0.1248, simple_loss=0.2004, pruned_loss=0.02457, over 4874.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2132, pruned_loss=0.03387, over 972956.74 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 18:38:53,304 INFO [train.py:715] (6/8) Epoch 10, batch 9000, loss[loss=0.1436, simple_loss=0.2133, pruned_loss=0.03695, over 4819.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03379, over 972588.10 frames.], batch size: 13, lr: 2.16e-04 +2022-05-06 18:38:53,305 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 18:39:02,858 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1064, simple_loss=0.1907, pruned_loss=0.01106, over 914524.00 frames. 
+2022-05-06 18:39:42,088 INFO [train.py:715] (6/8) Epoch 10, batch 9050, loss[loss=0.1451, simple_loss=0.2177, pruned_loss=0.03624, over 4941.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03383, over 972956.48 frames.], batch size: 39, lr: 2.16e-04 +2022-05-06 18:40:21,152 INFO [train.py:715] (6/8) Epoch 10, batch 9100, loss[loss=0.1312, simple_loss=0.2115, pruned_loss=0.02538, over 4787.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03331, over 972320.73 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 18:41:01,482 INFO [train.py:715] (6/8) Epoch 10, batch 9150, loss[loss=0.1533, simple_loss=0.2216, pruned_loss=0.04253, over 4953.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03328, over 971780.55 frames.], batch size: 21, lr: 2.16e-04 +2022-05-06 18:41:40,998 INFO [train.py:715] (6/8) Epoch 10, batch 9200, loss[loss=0.1242, simple_loss=0.1993, pruned_loss=0.02458, over 4804.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.0335, over 971340.21 frames.], batch size: 24, lr: 2.16e-04 +2022-05-06 18:42:20,448 INFO [train.py:715] (6/8) Epoch 10, batch 9250, loss[loss=0.1348, simple_loss=0.2064, pruned_loss=0.0316, over 4851.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2133, pruned_loss=0.03383, over 971475.53 frames.], batch size: 30, lr: 2.16e-04 +2022-05-06 18:43:00,266 INFO [train.py:715] (6/8) Epoch 10, batch 9300, loss[loss=0.1432, simple_loss=0.2149, pruned_loss=0.03572, over 4903.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.03447, over 972454.27 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 18:43:39,887 INFO [train.py:715] (6/8) Epoch 10, batch 9350, loss[loss=0.1696, simple_loss=0.2451, pruned_loss=0.04704, over 4905.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.0349, over 972850.23 frames.], batch size: 39, lr: 2.16e-04 +2022-05-06 18:44:19,413 INFO [train.py:715] (6/8) Epoch 10, batch 9400, loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02977, over 4847.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2144, pruned_loss=0.03454, over 972650.19 frames.], batch size: 30, lr: 2.16e-04 +2022-05-06 18:44:58,985 INFO [train.py:715] (6/8) Epoch 10, batch 9450, loss[loss=0.1499, simple_loss=0.2226, pruned_loss=0.03863, over 4892.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03424, over 972978.64 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 18:45:38,378 INFO [train.py:715] (6/8) Epoch 10, batch 9500, loss[loss=0.1253, simple_loss=0.2059, pruned_loss=0.02238, over 4805.00 frames.], tot_loss[loss=0.1409, simple_loss=0.214, pruned_loss=0.03394, over 972998.00 frames.], batch size: 25, lr: 2.16e-04 +2022-05-06 18:46:17,354 INFO [train.py:715] (6/8) Epoch 10, batch 9550, loss[loss=0.1364, simple_loss=0.199, pruned_loss=0.03687, over 4814.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2141, pruned_loss=0.03441, over 973174.97 frames.], batch size: 27, lr: 2.16e-04 +2022-05-06 18:46:55,768 INFO [train.py:715] (6/8) Epoch 10, batch 9600, loss[loss=0.1392, simple_loss=0.2103, pruned_loss=0.03403, over 4954.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.03424, over 972697.08 frames.], batch size: 24, lr: 2.16e-04 +2022-05-06 18:47:34,909 INFO [train.py:715] (6/8) Epoch 10, batch 9650, loss[loss=0.161, simple_loss=0.2391, pruned_loss=0.04141, over 4978.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.03407, over 973370.84 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 18:48:14,559 
INFO [train.py:715] (6/8) Epoch 10, batch 9700, loss[loss=0.1423, simple_loss=0.223, pruned_loss=0.0308, over 4876.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2138, pruned_loss=0.03382, over 974095.82 frames.], batch size: 32, lr: 2.16e-04 +2022-05-06 18:48:52,980 INFO [train.py:715] (6/8) Epoch 10, batch 9750, loss[loss=0.1489, simple_loss=0.217, pruned_loss=0.04042, over 4954.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2134, pruned_loss=0.03343, over 974118.30 frames.], batch size: 14, lr: 2.16e-04 +2022-05-06 18:49:32,217 INFO [train.py:715] (6/8) Epoch 10, batch 9800, loss[loss=0.1205, simple_loss=0.1888, pruned_loss=0.02613, over 4634.00 frames.], tot_loss[loss=0.1399, simple_loss=0.213, pruned_loss=0.03334, over 973988.48 frames.], batch size: 13, lr: 2.16e-04 +2022-05-06 18:50:11,752 INFO [train.py:715] (6/8) Epoch 10, batch 9850, loss[loss=0.1397, simple_loss=0.2029, pruned_loss=0.03823, over 4783.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2123, pruned_loss=0.03306, over 973140.84 frames.], batch size: 18, lr: 2.16e-04 +2022-05-06 18:50:51,055 INFO [train.py:715] (6/8) Epoch 10, batch 9900, loss[loss=0.1536, simple_loss=0.2244, pruned_loss=0.04139, over 4889.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2129, pruned_loss=0.0333, over 973167.74 frames.], batch size: 39, lr: 2.16e-04 +2022-05-06 18:51:30,047 INFO [train.py:715] (6/8) Epoch 10, batch 9950, loss[loss=0.1412, simple_loss=0.212, pruned_loss=0.03515, over 4780.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03354, over 971884.42 frames.], batch size: 18, lr: 2.16e-04 +2022-05-06 18:52:10,245 INFO [train.py:715] (6/8) Epoch 10, batch 10000, loss[loss=0.1188, simple_loss=0.1955, pruned_loss=0.02109, over 4743.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2125, pruned_loss=0.03287, over 972380.06 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 18:52:49,843 INFO [train.py:715] (6/8) Epoch 10, batch 10050, loss[loss=0.1225, simple_loss=0.1991, pruned_loss=0.02296, over 4931.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03265, over 972503.77 frames.], batch size: 21, lr: 2.16e-04 +2022-05-06 18:53:27,871 INFO [train.py:715] (6/8) Epoch 10, batch 10100, loss[loss=0.1712, simple_loss=0.217, pruned_loss=0.06271, over 4831.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2137, pruned_loss=0.03331, over 972928.18 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 18:54:06,608 INFO [train.py:715] (6/8) Epoch 10, batch 10150, loss[loss=0.1295, simple_loss=0.2065, pruned_loss=0.02624, over 4936.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2129, pruned_loss=0.03298, over 972671.27 frames.], batch size: 23, lr: 2.16e-04 +2022-05-06 18:54:46,541 INFO [train.py:715] (6/8) Epoch 10, batch 10200, loss[loss=0.1579, simple_loss=0.2247, pruned_loss=0.0456, over 4862.00 frames.], tot_loss[loss=0.14, simple_loss=0.2131, pruned_loss=0.03339, over 972365.55 frames.], batch size: 32, lr: 2.16e-04 +2022-05-06 18:55:25,660 INFO [train.py:715] (6/8) Epoch 10, batch 10250, loss[loss=0.1381, simple_loss=0.2139, pruned_loss=0.03114, over 4960.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2131, pruned_loss=0.03328, over 972695.98 frames.], batch size: 21, lr: 2.16e-04 +2022-05-06 18:56:04,514 INFO [train.py:715] (6/8) Epoch 10, batch 10300, loss[loss=0.1365, simple_loss=0.218, pruned_loss=0.02745, over 4792.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.0337, over 972255.44 frames.], batch size: 24, lr: 2.16e-04 +2022-05-06 18:56:44,442 INFO [train.py:715] 
(6/8) Epoch 10, batch 10350, loss[loss=0.1164, simple_loss=0.1902, pruned_loss=0.02128, over 4761.00 frames.], tot_loss[loss=0.1392, simple_loss=0.212, pruned_loss=0.0332, over 972148.57 frames.], batch size: 19, lr: 2.16e-04 +2022-05-06 18:57:24,439 INFO [train.py:715] (6/8) Epoch 10, batch 10400, loss[loss=0.143, simple_loss=0.2211, pruned_loss=0.03247, over 4963.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03341, over 972317.06 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 18:58:02,846 INFO [train.py:715] (6/8) Epoch 10, batch 10450, loss[loss=0.1389, simple_loss=0.2189, pruned_loss=0.0295, over 4760.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03325, over 972829.21 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 18:58:41,114 INFO [train.py:715] (6/8) Epoch 10, batch 10500, loss[loss=0.1294, simple_loss=0.2016, pruned_loss=0.02854, over 4958.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.03387, over 973075.29 frames.], batch size: 29, lr: 2.16e-04 +2022-05-06 18:59:20,243 INFO [train.py:715] (6/8) Epoch 10, batch 10550, loss[loss=0.1467, simple_loss=0.2161, pruned_loss=0.03864, over 4705.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03389, over 972910.42 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 18:59:59,205 INFO [train.py:715] (6/8) Epoch 10, batch 10600, loss[loss=0.1237, simple_loss=0.197, pruned_loss=0.02518, over 4994.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03376, over 972231.74 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 19:00:37,421 INFO [train.py:715] (6/8) Epoch 10, batch 10650, loss[loss=0.1564, simple_loss=0.2253, pruned_loss=0.04379, over 4987.00 frames.], tot_loss[loss=0.141, simple_loss=0.2135, pruned_loss=0.03423, over 972065.83 frames.], batch size: 26, lr: 2.16e-04 +2022-05-06 19:01:16,843 INFO [train.py:715] (6/8) Epoch 10, batch 10700, loss[loss=0.1586, simple_loss=0.2296, pruned_loss=0.04383, over 4862.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2141, pruned_loss=0.03465, over 971990.50 frames.], batch size: 20, lr: 2.16e-04 +2022-05-06 19:01:56,167 INFO [train.py:715] (6/8) Epoch 10, batch 10750, loss[loss=0.1182, simple_loss=0.1921, pruned_loss=0.02222, over 4876.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.03398, over 971718.02 frames.], batch size: 22, lr: 2.16e-04 +2022-05-06 19:02:34,992 INFO [train.py:715] (6/8) Epoch 10, batch 10800, loss[loss=0.1541, simple_loss=0.2259, pruned_loss=0.04119, over 4847.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.0342, over 971019.60 frames.], batch size: 32, lr: 2.16e-04 +2022-05-06 19:03:13,439 INFO [train.py:715] (6/8) Epoch 10, batch 10850, loss[loss=0.1285, simple_loss=0.1884, pruned_loss=0.03425, over 4819.00 frames.], tot_loss[loss=0.141, simple_loss=0.2133, pruned_loss=0.03431, over 971417.42 frames.], batch size: 12, lr: 2.16e-04 +2022-05-06 19:03:52,881 INFO [train.py:715] (6/8) Epoch 10, batch 10900, loss[loss=0.1522, simple_loss=0.2253, pruned_loss=0.0395, over 4829.00 frames.], tot_loss[loss=0.141, simple_loss=0.2133, pruned_loss=0.03435, over 972063.64 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 19:04:31,793 INFO [train.py:715] (6/8) Epoch 10, batch 10950, loss[loss=0.1347, simple_loss=0.2071, pruned_loss=0.03117, over 4989.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2132, pruned_loss=0.03401, over 973092.29 frames.], batch size: 20, lr: 2.16e-04 +2022-05-06 19:05:10,348 INFO [train.py:715] (6/8) Epoch 10, 
batch 11000, loss[loss=0.1507, simple_loss=0.2231, pruned_loss=0.03917, over 4898.00 frames.], tot_loss[loss=0.1403, simple_loss=0.213, pruned_loss=0.03385, over 973539.68 frames.], batch size: 22, lr: 2.16e-04 +2022-05-06 19:05:49,502 INFO [train.py:715] (6/8) Epoch 10, batch 11050, loss[loss=0.1729, simple_loss=0.242, pruned_loss=0.05194, over 4786.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2134, pruned_loss=0.03364, over 973620.42 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 19:06:29,283 INFO [train.py:715] (6/8) Epoch 10, batch 11100, loss[loss=0.1543, simple_loss=0.2288, pruned_loss=0.03989, over 4753.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2136, pruned_loss=0.03393, over 973880.35 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 19:07:07,074 INFO [train.py:715] (6/8) Epoch 10, batch 11150, loss[loss=0.1441, simple_loss=0.2156, pruned_loss=0.03632, over 4913.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2138, pruned_loss=0.03419, over 973115.39 frames.], batch size: 23, lr: 2.16e-04 +2022-05-06 19:07:46,335 INFO [train.py:715] (6/8) Epoch 10, batch 11200, loss[loss=0.1207, simple_loss=0.1941, pruned_loss=0.02362, over 4975.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2136, pruned_loss=0.0338, over 973985.34 frames.], batch size: 24, lr: 2.16e-04 +2022-05-06 19:08:25,404 INFO [train.py:715] (6/8) Epoch 10, batch 11250, loss[loss=0.1385, simple_loss=0.2162, pruned_loss=0.03038, over 4962.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2138, pruned_loss=0.03396, over 974156.63 frames.], batch size: 39, lr: 2.16e-04 +2022-05-06 19:09:03,755 INFO [train.py:715] (6/8) Epoch 10, batch 11300, loss[loss=0.1302, simple_loss=0.2049, pruned_loss=0.02774, over 4926.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.0339, over 974128.48 frames.], batch size: 23, lr: 2.16e-04 +2022-05-06 19:09:42,494 INFO [train.py:715] (6/8) Epoch 10, batch 11350, loss[loss=0.1651, simple_loss=0.238, pruned_loss=0.0461, over 4857.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2136, pruned_loss=0.03401, over 973604.32 frames.], batch size: 30, lr: 2.16e-04 +2022-05-06 19:10:21,471 INFO [train.py:715] (6/8) Epoch 10, batch 11400, loss[loss=0.1416, simple_loss=0.2221, pruned_loss=0.03056, over 4776.00 frames.], tot_loss[loss=0.1405, simple_loss=0.213, pruned_loss=0.03401, over 973489.99 frames.], batch size: 14, lr: 2.16e-04 +2022-05-06 19:11:00,940 INFO [train.py:715] (6/8) Epoch 10, batch 11450, loss[loss=0.1465, simple_loss=0.2213, pruned_loss=0.03585, over 4952.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03357, over 973346.77 frames.], batch size: 14, lr: 2.16e-04 +2022-05-06 19:11:38,823 INFO [train.py:715] (6/8) Epoch 10, batch 11500, loss[loss=0.1297, simple_loss=0.2132, pruned_loss=0.02308, over 4923.00 frames.], tot_loss[loss=0.14, simple_loss=0.2127, pruned_loss=0.03361, over 972638.91 frames.], batch size: 23, lr: 2.16e-04 +2022-05-06 19:12:17,873 INFO [train.py:715] (6/8) Epoch 10, batch 11550, loss[loss=0.1821, simple_loss=0.2551, pruned_loss=0.05457, over 4924.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2125, pruned_loss=0.03385, over 972941.22 frames.], batch size: 18, lr: 2.16e-04 +2022-05-06 19:12:57,427 INFO [train.py:715] (6/8) Epoch 10, batch 11600, loss[loss=0.1489, simple_loss=0.2246, pruned_loss=0.03666, over 4751.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2117, pruned_loss=0.03336, over 972914.76 frames.], batch size: 19, lr: 2.16e-04 +2022-05-06 19:13:35,827 INFO [train.py:715] (6/8) Epoch 10, batch 11650, 
loss[loss=0.122, simple_loss=0.1956, pruned_loss=0.02418, over 4792.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2117, pruned_loss=0.03354, over 973041.77 frames.], batch size: 12, lr: 2.16e-04 +2022-05-06 19:14:14,884 INFO [train.py:715] (6/8) Epoch 10, batch 11700, loss[loss=0.1512, simple_loss=0.2262, pruned_loss=0.03816, over 4742.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2117, pruned_loss=0.03342, over 973427.37 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 19:14:53,452 INFO [train.py:715] (6/8) Epoch 10, batch 11750, loss[loss=0.1384, simple_loss=0.2152, pruned_loss=0.03084, over 4707.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2125, pruned_loss=0.03351, over 973557.93 frames.], batch size: 15, lr: 2.15e-04 +2022-05-06 19:15:32,372 INFO [train.py:715] (6/8) Epoch 10, batch 11800, loss[loss=0.1233, simple_loss=0.2006, pruned_loss=0.02295, over 4775.00 frames.], tot_loss[loss=0.1405, simple_loss=0.213, pruned_loss=0.034, over 973126.99 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:16:10,398 INFO [train.py:715] (6/8) Epoch 10, batch 11850, loss[loss=0.1287, simple_loss=0.205, pruned_loss=0.02621, over 4981.00 frames.], tot_loss[loss=0.1405, simple_loss=0.213, pruned_loss=0.03401, over 973385.38 frames.], batch size: 31, lr: 2.15e-04 +2022-05-06 19:16:49,163 INFO [train.py:715] (6/8) Epoch 10, batch 11900, loss[loss=0.1278, simple_loss=0.202, pruned_loss=0.02681, over 4820.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2135, pruned_loss=0.03393, over 973249.51 frames.], batch size: 27, lr: 2.15e-04 +2022-05-06 19:17:30,481 INFO [train.py:715] (6/8) Epoch 10, batch 11950, loss[loss=0.1181, simple_loss=0.1964, pruned_loss=0.01995, over 4821.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2131, pruned_loss=0.03306, over 973011.70 frames.], batch size: 13, lr: 2.15e-04 +2022-05-06 19:18:09,369 INFO [train.py:715] (6/8) Epoch 10, batch 12000, loss[loss=0.1338, simple_loss=0.215, pruned_loss=0.02634, over 4818.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.03307, over 972958.18 frames.], batch size: 26, lr: 2.15e-04 +2022-05-06 19:18:09,369 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 19:18:19,016 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1065, simple_loss=0.1908, pruned_loss=0.01105, over 914524.00 frames. 
+2022-05-06 19:18:57,897 INFO [train.py:715] (6/8) Epoch 10, batch 12050, loss[loss=0.1142, simple_loss=0.1808, pruned_loss=0.02374, over 4845.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2131, pruned_loss=0.03324, over 973219.25 frames.], batch size: 15, lr: 2.15e-04 +2022-05-06 19:19:37,116 INFO [train.py:715] (6/8) Epoch 10, batch 12100, loss[loss=0.1234, simple_loss=0.1913, pruned_loss=0.02773, over 4930.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03297, over 972440.91 frames.], batch size: 29, lr: 2.15e-04 +2022-05-06 19:20:16,372 INFO [train.py:715] (6/8) Epoch 10, batch 12150, loss[loss=0.138, simple_loss=0.2207, pruned_loss=0.02765, over 4882.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.03264, over 972638.35 frames.], batch size: 22, lr: 2.15e-04 +2022-05-06 19:20:55,543 INFO [train.py:715] (6/8) Epoch 10, batch 12200, loss[loss=0.1256, simple_loss=0.1943, pruned_loss=0.02841, over 4931.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2127, pruned_loss=0.03322, over 971943.45 frames.], batch size: 18, lr: 2.15e-04 +2022-05-06 19:21:34,088 INFO [train.py:715] (6/8) Epoch 10, batch 12250, loss[loss=0.1124, simple_loss=0.1784, pruned_loss=0.02323, over 4974.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.0333, over 971366.97 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:22:13,028 INFO [train.py:715] (6/8) Epoch 10, batch 12300, loss[loss=0.1338, simple_loss=0.2126, pruned_loss=0.02751, over 4786.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03325, over 971035.37 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:22:51,961 INFO [train.py:715] (6/8) Epoch 10, batch 12350, loss[loss=0.1331, simple_loss=0.203, pruned_loss=0.0316, over 4868.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03319, over 971285.90 frames.], batch size: 32, lr: 2.15e-04 +2022-05-06 19:23:30,793 INFO [train.py:715] (6/8) Epoch 10, batch 12400, loss[loss=0.1223, simple_loss=0.1975, pruned_loss=0.0235, over 4810.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2116, pruned_loss=0.03284, over 971244.51 frames.], batch size: 26, lr: 2.15e-04 +2022-05-06 19:24:09,219 INFO [train.py:715] (6/8) Epoch 10, batch 12450, loss[loss=0.1268, simple_loss=0.1985, pruned_loss=0.02752, over 4882.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03253, over 971357.88 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:24:48,249 INFO [train.py:715] (6/8) Epoch 10, batch 12500, loss[loss=0.1468, simple_loss=0.2167, pruned_loss=0.0385, over 4754.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2115, pruned_loss=0.03258, over 971391.94 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:25:27,028 INFO [train.py:715] (6/8) Epoch 10, batch 12550, loss[loss=0.1034, simple_loss=0.1735, pruned_loss=0.01663, over 4888.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03307, over 971118.13 frames.], batch size: 19, lr: 2.15e-04 +2022-05-06 19:26:05,183 INFO [train.py:715] (6/8) Epoch 10, batch 12600, loss[loss=0.1436, simple_loss=0.2228, pruned_loss=0.03219, over 4776.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.03363, over 971588.54 frames.], batch size: 18, lr: 2.15e-04 +2022-05-06 19:26:43,474 INFO [train.py:715] (6/8) Epoch 10, batch 12650, loss[loss=0.117, simple_loss=0.1905, pruned_loss=0.02174, over 4862.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2126, pruned_loss=0.0332, over 973107.82 frames.], batch size: 20, lr: 2.15e-04 +2022-05-06 
19:27:22,412 INFO [train.py:715] (6/8) Epoch 10, batch 12700, loss[loss=0.1305, simple_loss=0.2171, pruned_loss=0.02196, over 4970.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2125, pruned_loss=0.03306, over 972602.29 frames.], batch size: 24, lr: 2.15e-04 +2022-05-06 19:28:00,755 INFO [train.py:715] (6/8) Epoch 10, batch 12750, loss[loss=0.1588, simple_loss=0.235, pruned_loss=0.04134, over 4872.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2124, pruned_loss=0.03293, over 972390.43 frames.], batch size: 22, lr: 2.15e-04 +2022-05-06 19:28:39,218 INFO [train.py:715] (6/8) Epoch 10, batch 12800, loss[loss=0.1711, simple_loss=0.2381, pruned_loss=0.05202, over 4919.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03322, over 972474.18 frames.], batch size: 19, lr: 2.15e-04 +2022-05-06 19:29:18,645 INFO [train.py:715] (6/8) Epoch 10, batch 12850, loss[loss=0.1402, simple_loss=0.2182, pruned_loss=0.03106, over 4942.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2127, pruned_loss=0.03329, over 972506.06 frames.], batch size: 21, lr: 2.15e-04 +2022-05-06 19:29:57,813 INFO [train.py:715] (6/8) Epoch 10, batch 12900, loss[loss=0.1772, simple_loss=0.2379, pruned_loss=0.05828, over 4921.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03305, over 972274.52 frames.], batch size: 18, lr: 2.15e-04 +2022-05-06 19:30:36,231 INFO [train.py:715] (6/8) Epoch 10, batch 12950, loss[loss=0.1615, simple_loss=0.2298, pruned_loss=0.04655, over 4947.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.03296, over 972186.36 frames.], batch size: 21, lr: 2.15e-04 +2022-05-06 19:31:14,800 INFO [train.py:715] (6/8) Epoch 10, batch 13000, loss[loss=0.1424, simple_loss=0.2154, pruned_loss=0.03467, over 4790.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2125, pruned_loss=0.03305, over 971905.55 frames.], batch size: 17, lr: 2.15e-04 +2022-05-06 19:31:54,378 INFO [train.py:715] (6/8) Epoch 10, batch 13050, loss[loss=0.1454, simple_loss=0.2003, pruned_loss=0.04525, over 4849.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03332, over 971855.16 frames.], batch size: 13, lr: 2.15e-04 +2022-05-06 19:32:32,861 INFO [train.py:715] (6/8) Epoch 10, batch 13100, loss[loss=0.1238, simple_loss=0.204, pruned_loss=0.02183, over 4797.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.0335, over 971925.36 frames.], batch size: 24, lr: 2.15e-04 +2022-05-06 19:33:11,986 INFO [train.py:715] (6/8) Epoch 10, batch 13150, loss[loss=0.1457, simple_loss=0.2239, pruned_loss=0.03374, over 4744.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2131, pruned_loss=0.03373, over 972078.54 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:33:51,014 INFO [train.py:715] (6/8) Epoch 10, batch 13200, loss[loss=0.1279, simple_loss=0.1977, pruned_loss=0.0291, over 4967.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.03347, over 972609.01 frames.], batch size: 35, lr: 2.15e-04 +2022-05-06 19:34:30,051 INFO [train.py:715] (6/8) Epoch 10, batch 13250, loss[loss=0.1366, simple_loss=0.2153, pruned_loss=0.0289, over 4960.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2121, pruned_loss=0.03365, over 973467.02 frames.], batch size: 24, lr: 2.15e-04 +2022-05-06 19:35:08,699 INFO [train.py:715] (6/8) Epoch 10, batch 13300, loss[loss=0.149, simple_loss=0.224, pruned_loss=0.037, over 4829.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2124, pruned_loss=0.03374, over 972454.05 frames.], batch size: 15, lr: 2.15e-04 +2022-05-06 19:35:47,101 
INFO [train.py:715] (6/8) Epoch 10, batch 13350, loss[loss=0.148, simple_loss=0.2116, pruned_loss=0.0422, over 4967.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2129, pruned_loss=0.03399, over 973231.86 frames.], batch size: 15, lr: 2.15e-04 +2022-05-06 19:36:26,374 INFO [train.py:715] (6/8) Epoch 10, batch 13400, loss[loss=0.1474, simple_loss=0.2199, pruned_loss=0.03739, over 4975.00 frames.], tot_loss[loss=0.14, simple_loss=0.2127, pruned_loss=0.03368, over 973953.25 frames.], batch size: 26, lr: 2.15e-04 +2022-05-06 19:37:04,727 INFO [train.py:715] (6/8) Epoch 10, batch 13450, loss[loss=0.137, simple_loss=0.2054, pruned_loss=0.03428, over 4875.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2125, pruned_loss=0.03339, over 973760.20 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:37:42,968 INFO [train.py:715] (6/8) Epoch 10, batch 13500, loss[loss=0.1523, simple_loss=0.2238, pruned_loss=0.0404, over 4871.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.03367, over 974415.53 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:38:22,036 INFO [train.py:715] (6/8) Epoch 10, batch 13550, loss[loss=0.1453, simple_loss=0.2232, pruned_loss=0.03376, over 4790.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2132, pruned_loss=0.03359, over 973763.70 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:39:00,609 INFO [train.py:715] (6/8) Epoch 10, batch 13600, loss[loss=0.1338, simple_loss=0.2153, pruned_loss=0.02617, over 4811.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03317, over 973693.66 frames.], batch size: 21, lr: 2.15e-04 +2022-05-06 19:39:39,008 INFO [train.py:715] (6/8) Epoch 10, batch 13650, loss[loss=0.1817, simple_loss=0.2444, pruned_loss=0.05946, over 4849.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.03303, over 973247.64 frames.], batch size: 30, lr: 2.15e-04 +2022-05-06 19:40:17,577 INFO [train.py:715] (6/8) Epoch 10, batch 13700, loss[loss=0.1262, simple_loss=0.2026, pruned_loss=0.02491, over 4854.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03277, over 972827.27 frames.], batch size: 30, lr: 2.15e-04 +2022-05-06 19:40:57,645 INFO [train.py:715] (6/8) Epoch 10, batch 13750, loss[loss=0.1662, simple_loss=0.2379, pruned_loss=0.04731, over 4825.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2126, pruned_loss=0.03304, over 973146.01 frames.], batch size: 13, lr: 2.15e-04 +2022-05-06 19:41:37,007 INFO [train.py:715] (6/8) Epoch 10, batch 13800, loss[loss=0.1528, simple_loss=0.2253, pruned_loss=0.04019, over 4846.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2124, pruned_loss=0.03294, over 973436.73 frames.], batch size: 32, lr: 2.15e-04 +2022-05-06 19:42:15,530 INFO [train.py:715] (6/8) Epoch 10, batch 13850, loss[loss=0.1439, simple_loss=0.2071, pruned_loss=0.04034, over 4849.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.0338, over 972052.18 frames.], batch size: 32, lr: 2.15e-04 +2022-05-06 19:42:55,149 INFO [train.py:715] (6/8) Epoch 10, batch 13900, loss[loss=0.1158, simple_loss=0.1968, pruned_loss=0.0174, over 4828.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03299, over 972249.77 frames.], batch size: 26, lr: 2.15e-04 +2022-05-06 19:43:33,830 INFO [train.py:715] (6/8) Epoch 10, batch 13950, loss[loss=0.1237, simple_loss=0.1885, pruned_loss=0.0295, over 4884.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.03307, over 972973.80 frames.], batch size: 32, lr: 2.15e-04 +2022-05-06 19:44:12,833 INFO 
[train.py:715] (6/8) Epoch 10, batch 14000, loss[loss=0.1713, simple_loss=0.2338, pruned_loss=0.05441, over 4923.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2128, pruned_loss=0.03334, over 972268.10 frames.], batch size: 23, lr: 2.15e-04 +2022-05-06 19:44:51,238 INFO [train.py:715] (6/8) Epoch 10, batch 14050, loss[loss=0.1316, simple_loss=0.202, pruned_loss=0.0306, over 4795.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2137, pruned_loss=0.03399, over 971843.35 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:45:30,766 INFO [train.py:715] (6/8) Epoch 10, batch 14100, loss[loss=0.1673, simple_loss=0.225, pruned_loss=0.05476, over 4954.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03399, over 972281.77 frames.], batch size: 15, lr: 2.15e-04 +2022-05-06 19:46:09,126 INFO [train.py:715] (6/8) Epoch 10, batch 14150, loss[loss=0.1514, simple_loss=0.219, pruned_loss=0.04187, over 4837.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2139, pruned_loss=0.03364, over 972456.30 frames.], batch size: 30, lr: 2.15e-04 +2022-05-06 19:46:47,035 INFO [train.py:715] (6/8) Epoch 10, batch 14200, loss[loss=0.15, simple_loss=0.2198, pruned_loss=0.04012, over 4829.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03294, over 971745.50 frames.], batch size: 15, lr: 2.15e-04 +2022-05-06 19:47:26,635 INFO [train.py:715] (6/8) Epoch 10, batch 14250, loss[loss=0.1256, simple_loss=0.1887, pruned_loss=0.03126, over 4875.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03301, over 972037.98 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:48:05,011 INFO [train.py:715] (6/8) Epoch 10, batch 14300, loss[loss=0.1247, simple_loss=0.1991, pruned_loss=0.02515, over 4816.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2123, pruned_loss=0.03306, over 972180.78 frames.], batch size: 13, lr: 2.15e-04 +2022-05-06 19:48:43,130 INFO [train.py:715] (6/8) Epoch 10, batch 14350, loss[loss=0.1293, simple_loss=0.2129, pruned_loss=0.02282, over 4974.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03301, over 973115.35 frames.], batch size: 25, lr: 2.15e-04 +2022-05-06 19:49:21,567 INFO [train.py:715] (6/8) Epoch 10, batch 14400, loss[loss=0.12, simple_loss=0.1887, pruned_loss=0.0256, over 4637.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2133, pruned_loss=0.03346, over 973080.60 frames.], batch size: 13, lr: 2.15e-04 +2022-05-06 19:50:01,193 INFO [train.py:715] (6/8) Epoch 10, batch 14450, loss[loss=0.1396, simple_loss=0.2094, pruned_loss=0.03487, over 4951.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2137, pruned_loss=0.03374, over 972983.90 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:50:39,562 INFO [train.py:715] (6/8) Epoch 10, batch 14500, loss[loss=0.1351, simple_loss=0.221, pruned_loss=0.0246, over 4901.00 frames.], tot_loss[loss=0.1409, simple_loss=0.214, pruned_loss=0.03393, over 973384.03 frames.], batch size: 22, lr: 2.15e-04 +2022-05-06 19:51:17,701 INFO [train.py:715] (6/8) Epoch 10, batch 14550, loss[loss=0.1394, simple_loss=0.2056, pruned_loss=0.03661, over 4914.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2139, pruned_loss=0.03384, over 973541.68 frames.], batch size: 18, lr: 2.15e-04 +2022-05-06 19:51:57,348 INFO [train.py:715] (6/8) Epoch 10, batch 14600, loss[loss=0.1147, simple_loss=0.1833, pruned_loss=0.02299, over 4815.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.03361, over 973181.45 frames.], batch size: 13, lr: 2.15e-04 +2022-05-06 19:52:35,984 INFO [train.py:715] 
(6/8) Epoch 10, batch 14650, loss[loss=0.1234, simple_loss=0.1986, pruned_loss=0.02414, over 4811.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03338, over 973586.58 frames.], batch size: 26, lr: 2.15e-04 +2022-05-06 19:53:14,370 INFO [train.py:715] (6/8) Epoch 10, batch 14700, loss[loss=0.1409, simple_loss=0.2221, pruned_loss=0.02986, over 4915.00 frames.], tot_loss[loss=0.1393, simple_loss=0.212, pruned_loss=0.03333, over 973544.89 frames.], batch size: 29, lr: 2.15e-04 +2022-05-06 19:53:53,326 INFO [train.py:715] (6/8) Epoch 10, batch 14750, loss[loss=0.1392, simple_loss=0.2012, pruned_loss=0.03862, over 4944.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.0329, over 973513.28 frames.], batch size: 35, lr: 2.15e-04 +2022-05-06 19:54:33,138 INFO [train.py:715] (6/8) Epoch 10, batch 14800, loss[loss=0.1231, simple_loss=0.1951, pruned_loss=0.02551, over 4780.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2126, pruned_loss=0.03326, over 973833.49 frames.], batch size: 17, lr: 2.15e-04 +2022-05-06 19:55:12,161 INFO [train.py:715] (6/8) Epoch 10, batch 14850, loss[loss=0.1737, simple_loss=0.2385, pruned_loss=0.05446, over 4734.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2122, pruned_loss=0.03318, over 972972.66 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:55:50,175 INFO [train.py:715] (6/8) Epoch 10, batch 14900, loss[loss=0.1168, simple_loss=0.1896, pruned_loss=0.02198, over 4910.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03293, over 972578.82 frames.], batch size: 19, lr: 2.15e-04 +2022-05-06 19:56:30,295 INFO [train.py:715] (6/8) Epoch 10, batch 14950, loss[loss=0.162, simple_loss=0.217, pruned_loss=0.0535, over 4823.00 frames.], tot_loss[loss=0.14, simple_loss=0.213, pruned_loss=0.03353, over 972657.90 frames.], batch size: 12, lr: 2.15e-04 +2022-05-06 19:57:09,816 INFO [train.py:715] (6/8) Epoch 10, batch 15000, loss[loss=0.1675, simple_loss=0.2363, pruned_loss=0.04937, over 4961.00 frames.], tot_loss[loss=0.1399, simple_loss=0.213, pruned_loss=0.03343, over 971948.73 frames.], batch size: 35, lr: 2.15e-04 +2022-05-06 19:57:09,817 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 19:57:19,461 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1065, simple_loss=0.1909, pruned_loss=0.01111, over 914524.00 frames. 
+2022-05-06 19:57:59,086 INFO [train.py:715] (6/8) Epoch 10, batch 15050, loss[loss=0.1552, simple_loss=0.234, pruned_loss=0.03817, over 4787.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2134, pruned_loss=0.03344, over 971995.29 frames.], batch size: 18, lr: 2.15e-04 +2022-05-06 19:58:38,145 INFO [train.py:715] (6/8) Epoch 10, batch 15100, loss[loss=0.1418, simple_loss=0.218, pruned_loss=0.0328, over 4788.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03342, over 972857.76 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:59:17,367 INFO [train.py:715] (6/8) Epoch 10, batch 15150, loss[loss=0.1268, simple_loss=0.2051, pruned_loss=0.02431, over 4940.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2121, pruned_loss=0.0335, over 972772.85 frames.], batch size: 29, lr: 2.14e-04 +2022-05-06 19:59:56,362 INFO [train.py:715] (6/8) Epoch 10, batch 15200, loss[loss=0.114, simple_loss=0.1905, pruned_loss=0.01869, over 4986.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2121, pruned_loss=0.03349, over 972455.14 frames.], batch size: 28, lr: 2.14e-04 +2022-05-06 20:00:35,743 INFO [train.py:715] (6/8) Epoch 10, batch 15250, loss[loss=0.1528, simple_loss=0.2297, pruned_loss=0.03796, over 4957.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03364, over 972311.74 frames.], batch size: 24, lr: 2.14e-04 +2022-05-06 20:01:14,787 INFO [train.py:715] (6/8) Epoch 10, batch 15300, loss[loss=0.1701, simple_loss=0.2489, pruned_loss=0.04563, over 4805.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03362, over 972039.49 frames.], batch size: 21, lr: 2.14e-04 +2022-05-06 20:01:54,061 INFO [train.py:715] (6/8) Epoch 10, batch 15350, loss[loss=0.1195, simple_loss=0.198, pruned_loss=0.02047, over 4810.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03357, over 971515.81 frames.], batch size: 25, lr: 2.14e-04 +2022-05-06 20:02:34,122 INFO [train.py:715] (6/8) Epoch 10, batch 15400, loss[loss=0.1441, simple_loss=0.2201, pruned_loss=0.0341, over 4791.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2132, pruned_loss=0.03383, over 971838.66 frames.], batch size: 14, lr: 2.14e-04 +2022-05-06 20:03:13,391 INFO [train.py:715] (6/8) Epoch 10, batch 15450, loss[loss=0.1508, simple_loss=0.2241, pruned_loss=0.03871, over 4752.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2137, pruned_loss=0.03379, over 971872.92 frames.], batch size: 19, lr: 2.14e-04 +2022-05-06 20:03:53,466 INFO [train.py:715] (6/8) Epoch 10, batch 15500, loss[loss=0.1367, simple_loss=0.2045, pruned_loss=0.03442, over 4783.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.0333, over 971220.25 frames.], batch size: 14, lr: 2.14e-04 +2022-05-06 20:04:32,489 INFO [train.py:715] (6/8) Epoch 10, batch 15550, loss[loss=0.1769, simple_loss=0.25, pruned_loss=0.05193, over 4975.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.03296, over 971357.55 frames.], batch size: 39, lr: 2.14e-04 +2022-05-06 20:05:11,887 INFO [train.py:715] (6/8) Epoch 10, batch 15600, loss[loss=0.1562, simple_loss=0.2313, pruned_loss=0.04058, over 4984.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2119, pruned_loss=0.03311, over 971701.32 frames.], batch size: 28, lr: 2.14e-04 +2022-05-06 20:05:50,245 INFO [train.py:715] (6/8) Epoch 10, batch 15650, loss[loss=0.1424, simple_loss=0.209, pruned_loss=0.03792, over 4976.00 frames.], tot_loss[loss=0.139, simple_loss=0.2113, pruned_loss=0.03331, over 971716.37 frames.], batch size: 39, lr: 2.14e-04 +2022-05-06 20:06:28,932 
INFO [train.py:715] (6/8) Epoch 10, batch 15700, loss[loss=0.1285, simple_loss=0.2065, pruned_loss=0.02531, over 4812.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2116, pruned_loss=0.03309, over 971128.52 frames.], batch size: 21, lr: 2.14e-04 +2022-05-06 20:07:08,413 INFO [train.py:715] (6/8) Epoch 10, batch 15750, loss[loss=0.1458, simple_loss=0.2229, pruned_loss=0.03428, over 4835.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2121, pruned_loss=0.03354, over 971065.12 frames.], batch size: 25, lr: 2.14e-04 +2022-05-06 20:07:46,973 INFO [train.py:715] (6/8) Epoch 10, batch 15800, loss[loss=0.1502, simple_loss=0.2205, pruned_loss=0.04, over 4783.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2125, pruned_loss=0.03341, over 971090.34 frames.], batch size: 14, lr: 2.14e-04 +2022-05-06 20:08:26,773 INFO [train.py:715] (6/8) Epoch 10, batch 15850, loss[loss=0.1057, simple_loss=0.173, pruned_loss=0.01923, over 4901.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2126, pruned_loss=0.03365, over 971490.55 frames.], batch size: 19, lr: 2.14e-04 +2022-05-06 20:09:05,641 INFO [train.py:715] (6/8) Epoch 10, batch 15900, loss[loss=0.1146, simple_loss=0.1869, pruned_loss=0.02118, over 4985.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03413, over 970679.82 frames.], batch size: 25, lr: 2.14e-04 +2022-05-06 20:09:44,866 INFO [train.py:715] (6/8) Epoch 10, batch 15950, loss[loss=0.1188, simple_loss=0.1928, pruned_loss=0.02244, over 4862.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.03381, over 971669.58 frames.], batch size: 12, lr: 2.14e-04 +2022-05-06 20:10:23,756 INFO [train.py:715] (6/8) Epoch 10, batch 16000, loss[loss=0.1411, simple_loss=0.2161, pruned_loss=0.03308, over 4644.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2144, pruned_loss=0.03466, over 971000.01 frames.], batch size: 13, lr: 2.14e-04 +2022-05-06 20:11:02,662 INFO [train.py:715] (6/8) Epoch 10, batch 16050, loss[loss=0.1578, simple_loss=0.2292, pruned_loss=0.04318, over 4954.00 frames.], tot_loss[loss=0.1423, simple_loss=0.215, pruned_loss=0.0348, over 970650.68 frames.], batch size: 21, lr: 2.14e-04 +2022-05-06 20:11:41,917 INFO [train.py:715] (6/8) Epoch 10, batch 16100, loss[loss=0.1405, simple_loss=0.2322, pruned_loss=0.02444, over 4847.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2145, pruned_loss=0.03385, over 971002.13 frames.], batch size: 20, lr: 2.14e-04 +2022-05-06 20:12:21,125 INFO [train.py:715] (6/8) Epoch 10, batch 16150, loss[loss=0.1391, simple_loss=0.2023, pruned_loss=0.03799, over 4787.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2138, pruned_loss=0.03357, over 971139.39 frames.], batch size: 14, lr: 2.14e-04 +2022-05-06 20:13:01,098 INFO [train.py:715] (6/8) Epoch 10, batch 16200, loss[loss=0.1523, simple_loss=0.2202, pruned_loss=0.04223, over 4862.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2131, pruned_loss=0.03301, over 970991.09 frames.], batch size: 30, lr: 2.14e-04 +2022-05-06 20:13:40,635 INFO [train.py:715] (6/8) Epoch 10, batch 16250, loss[loss=0.1298, simple_loss=0.2096, pruned_loss=0.02505, over 4871.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2147, pruned_loss=0.0339, over 970639.31 frames.], batch size: 22, lr: 2.14e-04 +2022-05-06 20:14:19,848 INFO [train.py:715] (6/8) Epoch 10, batch 16300, loss[loss=0.1297, simple_loss=0.1975, pruned_loss=0.03098, over 4892.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2148, pruned_loss=0.03419, over 971317.26 frames.], batch size: 22, lr: 2.14e-04 +2022-05-06 20:14:59,849 INFO 
[train.py:715] (6/8) Epoch 10, batch 16350, loss[loss=0.1601, simple_loss=0.2376, pruned_loss=0.04128, over 4906.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2138, pruned_loss=0.03342, over 971619.10 frames.], batch size: 17, lr: 2.14e-04 +2022-05-06 20:15:39,246 INFO [train.py:715] (6/8) Epoch 10, batch 16400, loss[loss=0.1447, simple_loss=0.2211, pruned_loss=0.03413, over 4747.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2142, pruned_loss=0.03354, over 971794.83 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:16:18,984 INFO [train.py:715] (6/8) Epoch 10, batch 16450, loss[loss=0.107, simple_loss=0.1766, pruned_loss=0.01867, over 4987.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2143, pruned_loss=0.03393, over 971823.83 frames.], batch size: 31, lr: 2.14e-04 +2022-05-06 20:16:57,469 INFO [train.py:715] (6/8) Epoch 10, batch 16500, loss[loss=0.159, simple_loss=0.221, pruned_loss=0.04851, over 4869.00 frames.], tot_loss[loss=0.14, simple_loss=0.2134, pruned_loss=0.03331, over 972312.21 frames.], batch size: 32, lr: 2.14e-04 +2022-05-06 20:17:36,177 INFO [train.py:715] (6/8) Epoch 10, batch 16550, loss[loss=0.1308, simple_loss=0.202, pruned_loss=0.02977, over 4799.00 frames.], tot_loss[loss=0.14, simple_loss=0.2131, pruned_loss=0.03338, over 971548.16 frames.], batch size: 24, lr: 2.14e-04 +2022-05-06 20:18:15,859 INFO [train.py:715] (6/8) Epoch 10, batch 16600, loss[loss=0.1551, simple_loss=0.2305, pruned_loss=0.03989, over 4908.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2129, pruned_loss=0.03343, over 972039.48 frames.], batch size: 19, lr: 2.14e-04 +2022-05-06 20:18:54,010 INFO [train.py:715] (6/8) Epoch 10, batch 16650, loss[loss=0.1128, simple_loss=0.1825, pruned_loss=0.02152, over 4912.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2128, pruned_loss=0.03341, over 972363.62 frames.], batch size: 17, lr: 2.14e-04 +2022-05-06 20:19:33,371 INFO [train.py:715] (6/8) Epoch 10, batch 16700, loss[loss=0.1586, simple_loss=0.2342, pruned_loss=0.04143, over 4925.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2127, pruned_loss=0.0337, over 972725.81 frames.], batch size: 18, lr: 2.14e-04 +2022-05-06 20:20:12,355 INFO [train.py:715] (6/8) Epoch 10, batch 16750, loss[loss=0.1805, simple_loss=0.241, pruned_loss=0.05997, over 4901.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2122, pruned_loss=0.03374, over 972252.42 frames.], batch size: 19, lr: 2.14e-04 +2022-05-06 20:20:52,513 INFO [train.py:715] (6/8) Epoch 10, batch 16800, loss[loss=0.138, simple_loss=0.2119, pruned_loss=0.03204, over 4965.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03351, over 972877.14 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:21:31,831 INFO [train.py:715] (6/8) Epoch 10, batch 16850, loss[loss=0.1275, simple_loss=0.207, pruned_loss=0.02396, over 4971.00 frames.], tot_loss[loss=0.1398, simple_loss=0.213, pruned_loss=0.0333, over 973364.86 frames.], batch size: 24, lr: 2.14e-04 +2022-05-06 20:22:11,636 INFO [train.py:715] (6/8) Epoch 10, batch 16900, loss[loss=0.1388, simple_loss=0.2081, pruned_loss=0.0347, over 4762.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2139, pruned_loss=0.03373, over 972995.34 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:22:51,675 INFO [train.py:715] (6/8) Epoch 10, batch 16950, loss[loss=0.2043, simple_loss=0.2806, pruned_loss=0.06397, over 4913.00 frames.], tot_loss[loss=0.14, simple_loss=0.2135, pruned_loss=0.03329, over 973592.66 frames.], batch size: 39, lr: 2.14e-04 +2022-05-06 20:23:29,924 INFO [train.py:715] (6/8) 
Epoch 10, batch 17000, loss[loss=0.1239, simple_loss=0.2069, pruned_loss=0.02043, over 4910.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03269, over 972980.91 frames.], batch size: 18, lr: 2.14e-04 +2022-05-06 20:24:09,514 INFO [train.py:715] (6/8) Epoch 10, batch 17050, loss[loss=0.126, simple_loss=0.205, pruned_loss=0.02353, over 4746.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2127, pruned_loss=0.03278, over 972328.20 frames.], batch size: 19, lr: 2.14e-04 +2022-05-06 20:24:48,214 INFO [train.py:715] (6/8) Epoch 10, batch 17100, loss[loss=0.1298, simple_loss=0.2035, pruned_loss=0.02807, over 4898.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2129, pruned_loss=0.03345, over 972919.55 frames.], batch size: 19, lr: 2.14e-04 +2022-05-06 20:25:27,439 INFO [train.py:715] (6/8) Epoch 10, batch 17150, loss[loss=0.1419, simple_loss=0.2089, pruned_loss=0.03748, over 4912.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2119, pruned_loss=0.03326, over 972882.77 frames.], batch size: 17, lr: 2.14e-04 +2022-05-06 20:26:07,400 INFO [train.py:715] (6/8) Epoch 10, batch 17200, loss[loss=0.157, simple_loss=0.2293, pruned_loss=0.04235, over 4811.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03338, over 973767.65 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:26:47,009 INFO [train.py:715] (6/8) Epoch 10, batch 17250, loss[loss=0.1467, simple_loss=0.2196, pruned_loss=0.03692, over 4878.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03388, over 974244.38 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:27:26,664 INFO [train.py:715] (6/8) Epoch 10, batch 17300, loss[loss=0.1382, simple_loss=0.2143, pruned_loss=0.03103, over 4981.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.03416, over 974248.40 frames.], batch size: 25, lr: 2.14e-04 +2022-05-06 20:28:05,427 INFO [train.py:715] (6/8) Epoch 10, batch 17350, loss[loss=0.169, simple_loss=0.2422, pruned_loss=0.04793, over 4948.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2149, pruned_loss=0.03448, over 973832.17 frames.], batch size: 39, lr: 2.14e-04 +2022-05-06 20:28:44,831 INFO [train.py:715] (6/8) Epoch 10, batch 17400, loss[loss=0.1244, simple_loss=0.1996, pruned_loss=0.0246, over 4977.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.034, over 974086.21 frames.], batch size: 25, lr: 2.14e-04 +2022-05-06 20:29:24,007 INFO [train.py:715] (6/8) Epoch 10, batch 17450, loss[loss=0.1199, simple_loss=0.1976, pruned_loss=0.02109, over 4819.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2125, pruned_loss=0.03346, over 973505.51 frames.], batch size: 26, lr: 2.14e-04 +2022-05-06 20:30:02,983 INFO [train.py:715] (6/8) Epoch 10, batch 17500, loss[loss=0.1451, simple_loss=0.213, pruned_loss=0.0386, over 4929.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2134, pruned_loss=0.03395, over 973868.70 frames.], batch size: 18, lr: 2.14e-04 +2022-05-06 20:30:42,973 INFO [train.py:715] (6/8) Epoch 10, batch 17550, loss[loss=0.1048, simple_loss=0.1813, pruned_loss=0.01418, over 4920.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03336, over 973258.34 frames.], batch size: 29, lr: 2.14e-04 +2022-05-06 20:31:21,950 INFO [train.py:715] (6/8) Epoch 10, batch 17600, loss[loss=0.1387, simple_loss=0.2251, pruned_loss=0.02617, over 4879.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2123, pruned_loss=0.03361, over 972011.76 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:32:01,508 INFO [train.py:715] (6/8) Epoch 10, batch 
17650, loss[loss=0.1438, simple_loss=0.2162, pruned_loss=0.03567, over 4757.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03303, over 972414.19 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:32:40,270 INFO [train.py:715] (6/8) Epoch 10, batch 17700, loss[loss=0.1025, simple_loss=0.1691, pruned_loss=0.018, over 4842.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03359, over 972527.47 frames.], batch size: 12, lr: 2.14e-04 +2022-05-06 20:33:20,047 INFO [train.py:715] (6/8) Epoch 10, batch 17750, loss[loss=0.1337, simple_loss=0.2048, pruned_loss=0.03132, over 4706.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.03339, over 972171.21 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:33:59,783 INFO [train.py:715] (6/8) Epoch 10, batch 17800, loss[loss=0.126, simple_loss=0.1899, pruned_loss=0.031, over 4744.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03362, over 972356.55 frames.], batch size: 12, lr: 2.14e-04 +2022-05-06 20:34:38,718 INFO [train.py:715] (6/8) Epoch 10, batch 17850, loss[loss=0.115, simple_loss=0.1833, pruned_loss=0.02331, over 4916.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.03368, over 971901.26 frames.], batch size: 18, lr: 2.14e-04 +2022-05-06 20:35:18,473 INFO [train.py:715] (6/8) Epoch 10, batch 17900, loss[loss=0.15, simple_loss=0.2254, pruned_loss=0.03731, over 4945.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03357, over 972149.80 frames.], batch size: 18, lr: 2.14e-04 +2022-05-06 20:35:57,407 INFO [train.py:715] (6/8) Epoch 10, batch 17950, loss[loss=0.1175, simple_loss=0.1926, pruned_loss=0.02124, over 4791.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03372, over 972839.63 frames.], batch size: 21, lr: 2.14e-04 +2022-05-06 20:36:36,022 INFO [train.py:715] (6/8) Epoch 10, batch 18000, loss[loss=0.1417, simple_loss=0.2165, pruned_loss=0.03349, over 4831.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03358, over 972148.56 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:36:36,023 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 20:36:45,529 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1064, simple_loss=0.1906, pruned_loss=0.01104, over 914524.00 frames. 
+2022-05-06 20:37:24,880 INFO [train.py:715] (6/8) Epoch 10, batch 18050, loss[loss=0.1238, simple_loss=0.2002, pruned_loss=0.02373, over 4851.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2139, pruned_loss=0.03371, over 973056.80 frames.], batch size: 13, lr: 2.14e-04 +2022-05-06 20:38:03,975 INFO [train.py:715] (6/8) Epoch 10, batch 18100, loss[loss=0.137, simple_loss=0.2166, pruned_loss=0.02871, over 4988.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2134, pruned_loss=0.03342, over 972918.08 frames.], batch size: 20, lr: 2.14e-04 +2022-05-06 20:38:43,265 INFO [train.py:715] (6/8) Epoch 10, batch 18150, loss[loss=0.1402, simple_loss=0.2123, pruned_loss=0.03406, over 4968.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.0335, over 972432.41 frames.], batch size: 28, lr: 2.14e-04 +2022-05-06 20:39:21,944 INFO [train.py:715] (6/8) Epoch 10, batch 18200, loss[loss=0.1231, simple_loss=0.1902, pruned_loss=0.02802, over 4797.00 frames.], tot_loss[loss=0.14, simple_loss=0.2132, pruned_loss=0.03344, over 973007.03 frames.], batch size: 12, lr: 2.14e-04 +2022-05-06 20:40:00,621 INFO [train.py:715] (6/8) Epoch 10, batch 18250, loss[loss=0.1251, simple_loss=0.1989, pruned_loss=0.02566, over 4855.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03366, over 972790.17 frames.], batch size: 20, lr: 2.14e-04 +2022-05-06 20:40:40,108 INFO [train.py:715] (6/8) Epoch 10, batch 18300, loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03396, over 4941.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.0336, over 973146.60 frames.], batch size: 29, lr: 2.14e-04 +2022-05-06 20:41:19,473 INFO [train.py:715] (6/8) Epoch 10, batch 18350, loss[loss=0.1823, simple_loss=0.2555, pruned_loss=0.05451, over 4696.00 frames.], tot_loss[loss=0.1388, simple_loss=0.212, pruned_loss=0.03282, over 972116.50 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:41:57,960 INFO [train.py:715] (6/8) Epoch 10, batch 18400, loss[loss=0.1243, simple_loss=0.2074, pruned_loss=0.02059, over 4932.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.03302, over 971687.44 frames.], batch size: 23, lr: 2.14e-04 +2022-05-06 20:42:37,148 INFO [train.py:715] (6/8) Epoch 10, batch 18450, loss[loss=0.1289, simple_loss=0.2068, pruned_loss=0.02556, over 4818.00 frames.], tot_loss[loss=0.1398, simple_loss=0.213, pruned_loss=0.03333, over 971014.00 frames.], batch size: 26, lr: 2.14e-04 +2022-05-06 20:43:16,003 INFO [train.py:715] (6/8) Epoch 10, batch 18500, loss[loss=0.1504, simple_loss=0.2211, pruned_loss=0.0398, over 4910.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2135, pruned_loss=0.03347, over 972065.76 frames.], batch size: 17, lr: 2.14e-04 +2022-05-06 20:43:55,527 INFO [train.py:715] (6/8) Epoch 10, batch 18550, loss[loss=0.1444, simple_loss=0.221, pruned_loss=0.0339, over 4836.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2134, pruned_loss=0.03357, over 970911.79 frames.], batch size: 30, lr: 2.13e-04 +2022-05-06 20:44:33,846 INFO [train.py:715] (6/8) Epoch 10, batch 18600, loss[loss=0.1316, simple_loss=0.2069, pruned_loss=0.02813, over 4983.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2129, pruned_loss=0.03377, over 971759.42 frames.], batch size: 35, lr: 2.13e-04 +2022-05-06 20:45:13,255 INFO [train.py:715] (6/8) Epoch 10, batch 18650, loss[loss=0.1679, simple_loss=0.2292, pruned_loss=0.05329, over 4884.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2123, pruned_loss=0.03363, over 972103.35 frames.], batch size: 39, lr: 2.13e-04 +2022-05-06 
20:45:52,995 INFO [train.py:715] (6/8) Epoch 10, batch 18700, loss[loss=0.1309, simple_loss=0.2188, pruned_loss=0.02145, over 4808.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2123, pruned_loss=0.03356, over 971507.12 frames.], batch size: 24, lr: 2.13e-04 +2022-05-06 20:46:31,256 INFO [train.py:715] (6/8) Epoch 10, batch 18750, loss[loss=0.1421, simple_loss=0.2084, pruned_loss=0.03793, over 4937.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2121, pruned_loss=0.03386, over 972382.25 frames.], batch size: 18, lr: 2.13e-04 +2022-05-06 20:47:10,637 INFO [train.py:715] (6/8) Epoch 10, batch 18800, loss[loss=0.1202, simple_loss=0.1991, pruned_loss=0.0206, over 4747.00 frames.], tot_loss[loss=0.14, simple_loss=0.2123, pruned_loss=0.03384, over 971885.67 frames.], batch size: 12, lr: 2.13e-04 +2022-05-06 20:47:50,113 INFO [train.py:715] (6/8) Epoch 10, batch 18850, loss[loss=0.1237, simple_loss=0.1903, pruned_loss=0.02859, over 4790.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2119, pruned_loss=0.03345, over 971579.69 frames.], batch size: 12, lr: 2.13e-04 +2022-05-06 20:48:29,010 INFO [train.py:715] (6/8) Epoch 10, batch 18900, loss[loss=0.1397, simple_loss=0.2103, pruned_loss=0.03456, over 4967.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2128, pruned_loss=0.03384, over 971805.34 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 20:49:08,064 INFO [train.py:715] (6/8) Epoch 10, batch 18950, loss[loss=0.1491, simple_loss=0.2277, pruned_loss=0.03525, over 4702.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2129, pruned_loss=0.03387, over 970972.74 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 20:49:48,334 INFO [train.py:715] (6/8) Epoch 10, batch 19000, loss[loss=0.1431, simple_loss=0.2143, pruned_loss=0.036, over 4840.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2131, pruned_loss=0.03393, over 970771.70 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 20:50:27,659 INFO [train.py:715] (6/8) Epoch 10, batch 19050, loss[loss=0.1441, simple_loss=0.2055, pruned_loss=0.04137, over 4869.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03378, over 971103.29 frames.], batch size: 16, lr: 2.13e-04 +2022-05-06 20:51:06,470 INFO [train.py:715] (6/8) Epoch 10, batch 19100, loss[loss=0.1905, simple_loss=0.2793, pruned_loss=0.05081, over 4984.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03382, over 971073.56 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 20:51:46,344 INFO [train.py:715] (6/8) Epoch 10, batch 19150, loss[loss=0.1399, simple_loss=0.2159, pruned_loss=0.03191, over 4808.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.03386, over 971020.47 frames.], batch size: 21, lr: 2.13e-04 +2022-05-06 20:52:26,513 INFO [train.py:715] (6/8) Epoch 10, batch 19200, loss[loss=0.1312, simple_loss=0.2028, pruned_loss=0.02981, over 4861.00 frames.], tot_loss[loss=0.1406, simple_loss=0.213, pruned_loss=0.03404, over 971642.83 frames.], batch size: 32, lr: 2.13e-04 +2022-05-06 20:53:06,191 INFO [train.py:715] (6/8) Epoch 10, batch 19250, loss[loss=0.1055, simple_loss=0.1809, pruned_loss=0.01502, over 4824.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.03338, over 971109.86 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 20:53:46,087 INFO [train.py:715] (6/8) Epoch 10, batch 19300, loss[loss=0.1173, simple_loss=0.1896, pruned_loss=0.02248, over 4755.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2135, pruned_loss=0.03378, over 970861.38 frames.], batch size: 12, lr: 2.13e-04 +2022-05-06 
20:54:26,491 INFO [train.py:715] (6/8) Epoch 10, batch 19350, loss[loss=0.1457, simple_loss=0.2096, pruned_loss=0.04086, over 4828.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03394, over 971710.22 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 20:55:06,668 INFO [train.py:715] (6/8) Epoch 10, batch 19400, loss[loss=0.1334, simple_loss=0.2037, pruned_loss=0.03159, over 4987.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2116, pruned_loss=0.0331, over 971486.66 frames.], batch size: 28, lr: 2.13e-04 +2022-05-06 20:55:45,812 INFO [train.py:715] (6/8) Epoch 10, batch 19450, loss[loss=0.1375, simple_loss=0.2208, pruned_loss=0.02705, over 4963.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.0332, over 971493.47 frames.], batch size: 24, lr: 2.13e-04 +2022-05-06 20:56:25,410 INFO [train.py:715] (6/8) Epoch 10, batch 19500, loss[loss=0.1239, simple_loss=0.1966, pruned_loss=0.02557, over 4825.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03311, over 971762.63 frames.], batch size: 26, lr: 2.13e-04 +2022-05-06 20:57:04,608 INFO [train.py:715] (6/8) Epoch 10, batch 19550, loss[loss=0.1224, simple_loss=0.2032, pruned_loss=0.02078, over 4912.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03328, over 971538.46 frames.], batch size: 19, lr: 2.13e-04 +2022-05-06 20:57:43,330 INFO [train.py:715] (6/8) Epoch 10, batch 19600, loss[loss=0.1591, simple_loss=0.2206, pruned_loss=0.04882, over 4771.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2113, pruned_loss=0.03287, over 971998.96 frames.], batch size: 14, lr: 2.13e-04 +2022-05-06 20:58:22,309 INFO [train.py:715] (6/8) Epoch 10, batch 19650, loss[loss=0.1574, simple_loss=0.2297, pruned_loss=0.04251, over 4975.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2117, pruned_loss=0.03299, over 972147.98 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 20:59:01,944 INFO [train.py:715] (6/8) Epoch 10, batch 19700, loss[loss=0.1626, simple_loss=0.2509, pruned_loss=0.03718, over 4981.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2115, pruned_loss=0.03313, over 972018.65 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 20:59:41,298 INFO [train.py:715] (6/8) Epoch 10, batch 19750, loss[loss=0.1456, simple_loss=0.2229, pruned_loss=0.03413, over 4899.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03292, over 972301.10 frames.], batch size: 19, lr: 2.13e-04 +2022-05-06 21:00:19,606 INFO [train.py:715] (6/8) Epoch 10, batch 19800, loss[loss=0.1589, simple_loss=0.2194, pruned_loss=0.04921, over 4867.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2118, pruned_loss=0.03273, over 972081.69 frames.], batch size: 16, lr: 2.13e-04 +2022-05-06 21:00:59,241 INFO [train.py:715] (6/8) Epoch 10, batch 19850, loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03478, over 4980.00 frames.], tot_loss[loss=0.1388, simple_loss=0.212, pruned_loss=0.03282, over 971704.36 frames.], batch size: 25, lr: 2.13e-04 +2022-05-06 21:01:38,758 INFO [train.py:715] (6/8) Epoch 10, batch 19900, loss[loss=0.1567, simple_loss=0.2257, pruned_loss=0.04389, over 4941.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.0325, over 972212.56 frames.], batch size: 14, lr: 2.13e-04 +2022-05-06 21:02:19,873 INFO [train.py:715] (6/8) Epoch 10, batch 19950, loss[loss=0.1153, simple_loss=0.1995, pruned_loss=0.0155, over 4891.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2118, pruned_loss=0.03238, over 971558.66 frames.], batch size: 22, lr: 2.13e-04 +2022-05-06 
21:02:58,932 INFO [train.py:715] (6/8) Epoch 10, batch 20000, loss[loss=0.1529, simple_loss=0.2292, pruned_loss=0.03834, over 4791.00 frames.], tot_loss[loss=0.139, simple_loss=0.2125, pruned_loss=0.03272, over 972357.74 frames.], batch size: 18, lr: 2.13e-04 +2022-05-06 21:03:37,947 INFO [train.py:715] (6/8) Epoch 10, batch 20050, loss[loss=0.1222, simple_loss=0.1902, pruned_loss=0.02712, over 4780.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2131, pruned_loss=0.03318, over 972512.36 frames.], batch size: 17, lr: 2.13e-04 +2022-05-06 21:04:17,428 INFO [train.py:715] (6/8) Epoch 10, batch 20100, loss[loss=0.1191, simple_loss=0.1928, pruned_loss=0.02275, over 4954.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2135, pruned_loss=0.0334, over 972595.04 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 21:04:55,531 INFO [train.py:715] (6/8) Epoch 10, batch 20150, loss[loss=0.1298, simple_loss=0.2102, pruned_loss=0.02467, over 4776.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03353, over 971558.49 frames.], batch size: 18, lr: 2.13e-04 +2022-05-06 21:05:34,944 INFO [train.py:715] (6/8) Epoch 10, batch 20200, loss[loss=0.1432, simple_loss=0.2187, pruned_loss=0.03388, over 4768.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2128, pruned_loss=0.03313, over 971536.42 frames.], batch size: 19, lr: 2.13e-04 +2022-05-06 21:06:13,958 INFO [train.py:715] (6/8) Epoch 10, batch 20250, loss[loss=0.1183, simple_loss=0.1898, pruned_loss=0.02335, over 4945.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03291, over 972121.83 frames.], batch size: 29, lr: 2.13e-04 +2022-05-06 21:06:52,622 INFO [train.py:715] (6/8) Epoch 10, batch 20300, loss[loss=0.1605, simple_loss=0.2123, pruned_loss=0.05435, over 4743.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2138, pruned_loss=0.03343, over 972562.02 frames.], batch size: 12, lr: 2.13e-04 +2022-05-06 21:07:31,404 INFO [train.py:715] (6/8) Epoch 10, batch 20350, loss[loss=0.125, simple_loss=0.1955, pruned_loss=0.0272, over 4788.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2128, pruned_loss=0.03305, over 972144.96 frames.], batch size: 24, lr: 2.13e-04 +2022-05-06 21:08:10,507 INFO [train.py:715] (6/8) Epoch 10, batch 20400, loss[loss=0.1543, simple_loss=0.2313, pruned_loss=0.03862, over 4849.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2134, pruned_loss=0.03386, over 971884.94 frames.], batch size: 32, lr: 2.13e-04 +2022-05-06 21:08:49,426 INFO [train.py:715] (6/8) Epoch 10, batch 20450, loss[loss=0.1424, simple_loss=0.2115, pruned_loss=0.03664, over 4901.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2146, pruned_loss=0.03448, over 973241.92 frames.], batch size: 17, lr: 2.13e-04 +2022-05-06 21:09:27,878 INFO [train.py:715] (6/8) Epoch 10, batch 20500, loss[loss=0.1377, simple_loss=0.2175, pruned_loss=0.02892, over 4864.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2148, pruned_loss=0.03426, over 973266.67 frames.], batch size: 20, lr: 2.13e-04 +2022-05-06 21:10:06,954 INFO [train.py:715] (6/8) Epoch 10, batch 20550, loss[loss=0.1238, simple_loss=0.195, pruned_loss=0.02634, over 4985.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2143, pruned_loss=0.03405, over 973292.49 frames.], batch size: 31, lr: 2.13e-04 +2022-05-06 21:10:46,032 INFO [train.py:715] (6/8) Epoch 10, batch 20600, loss[loss=0.1484, simple_loss=0.2194, pruned_loss=0.03871, over 4787.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2147, pruned_loss=0.03418, over 972756.50 frames.], batch size: 18, lr: 2.13e-04 +2022-05-06 
21:11:25,464 INFO [train.py:715] (6/8) Epoch 10, batch 20650, loss[loss=0.1515, simple_loss=0.2331, pruned_loss=0.03493, over 4961.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2143, pruned_loss=0.03364, over 972915.89 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 21:12:04,261 INFO [train.py:715] (6/8) Epoch 10, batch 20700, loss[loss=0.1514, simple_loss=0.2271, pruned_loss=0.03781, over 4744.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2126, pruned_loss=0.03276, over 973222.92 frames.], batch size: 19, lr: 2.13e-04 +2022-05-06 21:12:44,603 INFO [train.py:715] (6/8) Epoch 10, batch 20750, loss[loss=0.1327, simple_loss=0.2079, pruned_loss=0.02881, over 4888.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2126, pruned_loss=0.03316, over 973024.19 frames.], batch size: 19, lr: 2.13e-04 +2022-05-06 21:13:24,581 INFO [train.py:715] (6/8) Epoch 10, batch 20800, loss[loss=0.1656, simple_loss=0.2206, pruned_loss=0.05531, over 4780.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03321, over 973609.60 frames.], batch size: 17, lr: 2.13e-04 +2022-05-06 21:14:03,354 INFO [train.py:715] (6/8) Epoch 10, batch 20850, loss[loss=0.1689, simple_loss=0.237, pruned_loss=0.05041, over 4849.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03331, over 973450.84 frames.], batch size: 30, lr: 2.13e-04 +2022-05-06 21:14:43,296 INFO [train.py:715] (6/8) Epoch 10, batch 20900, loss[loss=0.164, simple_loss=0.2484, pruned_loss=0.03976, over 4892.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.03349, over 973539.98 frames.], batch size: 38, lr: 2.13e-04 +2022-05-06 21:15:23,754 INFO [train.py:715] (6/8) Epoch 10, batch 20950, loss[loss=0.123, simple_loss=0.1946, pruned_loss=0.02568, over 4954.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03335, over 973765.86 frames.], batch size: 29, lr: 2.13e-04 +2022-05-06 21:16:02,702 INFO [train.py:715] (6/8) Epoch 10, batch 21000, loss[loss=0.107, simple_loss=0.1798, pruned_loss=0.01712, over 4932.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03321, over 973394.60 frames.], batch size: 23, lr: 2.13e-04 +2022-05-06 21:16:02,703 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 21:16:12,203 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1065, simple_loss=0.1909, pruned_loss=0.01111, over 914524.00 frames. 
+2022-05-06 21:16:51,729 INFO [train.py:715] (6/8) Epoch 10, batch 21050, loss[loss=0.1652, simple_loss=0.2435, pruned_loss=0.0435, over 4792.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03298, over 974327.10 frames.], batch size: 21, lr: 2.13e-04 +2022-05-06 21:17:32,538 INFO [train.py:715] (6/8) Epoch 10, batch 21100, loss[loss=0.1289, simple_loss=0.1978, pruned_loss=0.03004, over 4743.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03253, over 974387.60 frames.], batch size: 14, lr: 2.13e-04 +2022-05-06 21:18:14,014 INFO [train.py:715] (6/8) Epoch 10, batch 21150, loss[loss=0.1723, simple_loss=0.2301, pruned_loss=0.05725, over 4847.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.0325, over 973436.20 frames.], batch size: 30, lr: 2.13e-04 +2022-05-06 21:18:55,101 INFO [train.py:715] (6/8) Epoch 10, batch 21200, loss[loss=0.1443, simple_loss=0.2152, pruned_loss=0.0367, over 4906.00 frames.], tot_loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.03231, over 971848.47 frames.], batch size: 19, lr: 2.13e-04 +2022-05-06 21:19:35,766 INFO [train.py:715] (6/8) Epoch 10, batch 21250, loss[loss=0.1675, simple_loss=0.2572, pruned_loss=0.03888, over 4825.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.0325, over 971516.23 frames.], batch size: 21, lr: 2.13e-04 +2022-05-06 21:20:17,431 INFO [train.py:715] (6/8) Epoch 10, batch 21300, loss[loss=0.1644, simple_loss=0.2414, pruned_loss=0.04371, over 4776.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2112, pruned_loss=0.03288, over 971532.43 frames.], batch size: 17, lr: 2.13e-04 +2022-05-06 21:20:58,700 INFO [train.py:715] (6/8) Epoch 10, batch 21350, loss[loss=0.1394, simple_loss=0.2095, pruned_loss=0.03461, over 4638.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2119, pruned_loss=0.03335, over 972010.60 frames.], batch size: 13, lr: 2.13e-04 +2022-05-06 21:21:39,106 INFO [train.py:715] (6/8) Epoch 10, batch 21400, loss[loss=0.1374, simple_loss=0.2065, pruned_loss=0.0341, over 4927.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03305, over 972732.43 frames.], batch size: 29, lr: 2.13e-04 +2022-05-06 21:22:20,538 INFO [train.py:715] (6/8) Epoch 10, batch 21450, loss[loss=0.1389, simple_loss=0.2178, pruned_loss=0.03, over 4937.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03326, over 972577.48 frames.], batch size: 23, lr: 2.13e-04 +2022-05-06 21:23:02,354 INFO [train.py:715] (6/8) Epoch 10, batch 21500, loss[loss=0.1214, simple_loss=0.1982, pruned_loss=0.02229, over 4881.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03347, over 971921.86 frames.], batch size: 22, lr: 2.13e-04 +2022-05-06 21:23:43,381 INFO [train.py:715] (6/8) Epoch 10, batch 21550, loss[loss=0.1431, simple_loss=0.2233, pruned_loss=0.03148, over 4980.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2123, pruned_loss=0.03359, over 971957.46 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 21:24:24,260 INFO [train.py:715] (6/8) Epoch 10, batch 21600, loss[loss=0.1153, simple_loss=0.1939, pruned_loss=0.01841, over 4923.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.03347, over 972808.58 frames.], batch size: 21, lr: 2.13e-04 +2022-05-06 21:25:06,215 INFO [train.py:715] (6/8) Epoch 10, batch 21650, loss[loss=0.126, simple_loss=0.1999, pruned_loss=0.02612, over 4985.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2129, pruned_loss=0.03413, over 973796.37 frames.], batch size: 26, lr: 2.13e-04 +2022-05-06 
21:25:47,750 INFO [train.py:715] (6/8) Epoch 10, batch 21700, loss[loss=0.1447, simple_loss=0.2179, pruned_loss=0.03571, over 4990.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2129, pruned_loss=0.03405, over 973367.84 frames.], batch size: 28, lr: 2.13e-04 +2022-05-06 21:26:28,011 INFO [train.py:715] (6/8) Epoch 10, batch 21750, loss[loss=0.1313, simple_loss=0.2141, pruned_loss=0.02422, over 4983.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2121, pruned_loss=0.03384, over 973815.38 frames.], batch size: 25, lr: 2.13e-04 +2022-05-06 21:27:08,997 INFO [train.py:715] (6/8) Epoch 10, batch 21800, loss[loss=0.1572, simple_loss=0.2286, pruned_loss=0.04294, over 4773.00 frames.], tot_loss[loss=0.14, simple_loss=0.212, pruned_loss=0.03396, over 973495.34 frames.], batch size: 18, lr: 2.13e-04 +2022-05-06 21:27:50,698 INFO [train.py:715] (6/8) Epoch 10, batch 21850, loss[loss=0.1385, simple_loss=0.2088, pruned_loss=0.03408, over 4872.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2122, pruned_loss=0.03378, over 973929.37 frames.], batch size: 22, lr: 2.13e-04 +2022-05-06 21:28:31,167 INFO [train.py:715] (6/8) Epoch 10, batch 21900, loss[loss=0.1342, simple_loss=0.2142, pruned_loss=0.02707, over 4879.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2118, pruned_loss=0.03343, over 973613.46 frames.], batch size: 22, lr: 2.13e-04 +2022-05-06 21:29:11,916 INFO [train.py:715] (6/8) Epoch 10, batch 21950, loss[loss=0.1248, simple_loss=0.1999, pruned_loss=0.02481, over 4834.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2112, pruned_loss=0.03276, over 974152.15 frames.], batch size: 25, lr: 2.13e-04 +2022-05-06 21:29:53,133 INFO [train.py:715] (6/8) Epoch 10, batch 22000, loss[loss=0.1272, simple_loss=0.2082, pruned_loss=0.02307, over 4772.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03243, over 973995.97 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 21:30:33,466 INFO [train.py:715] (6/8) Epoch 10, batch 22050, loss[loss=0.1334, simple_loss=0.205, pruned_loss=0.03094, over 4930.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03244, over 974142.59 frames.], batch size: 23, lr: 2.12e-04 +2022-05-06 21:31:14,082 INFO [train.py:715] (6/8) Epoch 10, batch 22100, loss[loss=0.1403, simple_loss=0.2226, pruned_loss=0.02894, over 4879.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.03229, over 973298.91 frames.], batch size: 16, lr: 2.12e-04 +2022-05-06 21:31:54,937 INFO [train.py:715] (6/8) Epoch 10, batch 22150, loss[loss=0.1584, simple_loss=0.2271, pruned_loss=0.04483, over 4827.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2114, pruned_loss=0.03283, over 972378.77 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:32:36,004 INFO [train.py:715] (6/8) Epoch 10, batch 22200, loss[loss=0.1512, simple_loss=0.2327, pruned_loss=0.03482, over 4684.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03303, over 972479.04 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:33:16,087 INFO [train.py:715] (6/8) Epoch 10, batch 22250, loss[loss=0.1401, simple_loss=0.2198, pruned_loss=0.03018, over 4807.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03286, over 973026.26 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 21:33:56,744 INFO [train.py:715] (6/8) Epoch 10, batch 22300, loss[loss=0.1159, simple_loss=0.1933, pruned_loss=0.01926, over 4792.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03282, over 972895.00 frames.], batch size: 12, lr: 2.12e-04 +2022-05-06 
21:34:37,778 INFO [train.py:715] (6/8) Epoch 10, batch 22350, loss[loss=0.1339, simple_loss=0.2153, pruned_loss=0.02627, over 4949.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03329, over 972308.99 frames.], batch size: 29, lr: 2.12e-04 +2022-05-06 21:35:17,619 INFO [train.py:715] (6/8) Epoch 10, batch 22400, loss[loss=0.1279, simple_loss=0.2045, pruned_loss=0.02568, over 4986.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2128, pruned_loss=0.03309, over 972313.23 frames.], batch size: 28, lr: 2.12e-04 +2022-05-06 21:35:56,791 INFO [train.py:715] (6/8) Epoch 10, batch 22450, loss[loss=0.1532, simple_loss=0.2217, pruned_loss=0.04229, over 4854.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2134, pruned_loss=0.03358, over 972936.56 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:36:36,733 INFO [train.py:715] (6/8) Epoch 10, batch 22500, loss[loss=0.1229, simple_loss=0.2007, pruned_loss=0.02256, over 4835.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03353, over 973252.05 frames.], batch size: 26, lr: 2.12e-04 +2022-05-06 21:37:17,614 INFO [train.py:715] (6/8) Epoch 10, batch 22550, loss[loss=0.1693, simple_loss=0.236, pruned_loss=0.05128, over 4819.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2135, pruned_loss=0.03372, over 973371.36 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:37:56,430 INFO [train.py:715] (6/8) Epoch 10, batch 22600, loss[loss=0.1238, simple_loss=0.1962, pruned_loss=0.02566, over 4892.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2137, pruned_loss=0.03366, over 972960.43 frames.], batch size: 19, lr: 2.12e-04 +2022-05-06 21:38:37,513 INFO [train.py:715] (6/8) Epoch 10, batch 22650, loss[loss=0.1557, simple_loss=0.2342, pruned_loss=0.03859, over 4893.00 frames.], tot_loss[loss=0.14, simple_loss=0.2135, pruned_loss=0.03321, over 973133.30 frames.], batch size: 39, lr: 2.12e-04 +2022-05-06 21:39:19,369 INFO [train.py:715] (6/8) Epoch 10, batch 22700, loss[loss=0.1147, simple_loss=0.1914, pruned_loss=0.01897, over 4786.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2133, pruned_loss=0.03303, over 973026.33 frames.], batch size: 12, lr: 2.12e-04 +2022-05-06 21:40:00,100 INFO [train.py:715] (6/8) Epoch 10, batch 22750, loss[loss=0.1385, simple_loss=0.2089, pruned_loss=0.03408, over 4801.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2133, pruned_loss=0.03297, over 972548.07 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 21:40:41,328 INFO [train.py:715] (6/8) Epoch 10, batch 22800, loss[loss=0.1322, simple_loss=0.2083, pruned_loss=0.02805, over 4964.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2137, pruned_loss=0.0332, over 972461.87 frames.], batch size: 29, lr: 2.12e-04 +2022-05-06 21:41:22,882 INFO [train.py:715] (6/8) Epoch 10, batch 22850, loss[loss=0.1405, simple_loss=0.2123, pruned_loss=0.03438, over 4787.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2143, pruned_loss=0.03355, over 972768.85 frames.], batch size: 17, lr: 2.12e-04 +2022-05-06 21:42:04,583 INFO [train.py:715] (6/8) Epoch 10, batch 22900, loss[loss=0.1265, simple_loss=0.2071, pruned_loss=0.0229, over 4856.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2139, pruned_loss=0.03353, over 973353.81 frames.], batch size: 20, lr: 2.12e-04 +2022-05-06 21:42:45,056 INFO [train.py:715] (6/8) Epoch 10, batch 22950, loss[loss=0.1314, simple_loss=0.2066, pruned_loss=0.02811, over 4932.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2137, pruned_loss=0.0337, over 972942.41 frames.], batch size: 23, lr: 2.12e-04 +2022-05-06 
21:43:27,079 INFO [train.py:715] (6/8) Epoch 10, batch 23000, loss[loss=0.1269, simple_loss=0.2047, pruned_loss=0.02455, over 4930.00 frames.], tot_loss[loss=0.14, simple_loss=0.2134, pruned_loss=0.03323, over 972533.03 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 21:44:09,138 INFO [train.py:715] (6/8) Epoch 10, batch 23050, loss[loss=0.1539, simple_loss=0.2239, pruned_loss=0.04197, over 4977.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2137, pruned_loss=0.03323, over 972042.64 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 21:44:49,660 INFO [train.py:715] (6/8) Epoch 10, batch 23100, loss[loss=0.1377, simple_loss=0.2106, pruned_loss=0.03238, over 4817.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2131, pruned_loss=0.03315, over 972276.68 frames.], batch size: 13, lr: 2.12e-04 +2022-05-06 21:45:30,870 INFO [train.py:715] (6/8) Epoch 10, batch 23150, loss[loss=0.1485, simple_loss=0.2195, pruned_loss=0.03871, over 4899.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2133, pruned_loss=0.03313, over 972753.77 frames.], batch size: 19, lr: 2.12e-04 +2022-05-06 21:46:12,876 INFO [train.py:715] (6/8) Epoch 10, batch 23200, loss[loss=0.1466, simple_loss=0.2199, pruned_loss=0.03661, over 4785.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2136, pruned_loss=0.03352, over 973070.76 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 21:46:54,210 INFO [train.py:715] (6/8) Epoch 10, batch 23250, loss[loss=0.1432, simple_loss=0.2092, pruned_loss=0.03857, over 4636.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2139, pruned_loss=0.03338, over 971906.91 frames.], batch size: 13, lr: 2.12e-04 +2022-05-06 21:47:34,837 INFO [train.py:715] (6/8) Epoch 10, batch 23300, loss[loss=0.1545, simple_loss=0.2269, pruned_loss=0.0411, over 4797.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2133, pruned_loss=0.03331, over 972065.99 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 21:48:16,728 INFO [train.py:715] (6/8) Epoch 10, batch 23350, loss[loss=0.1408, simple_loss=0.2102, pruned_loss=0.03567, over 4771.00 frames.], tot_loss[loss=0.14, simple_loss=0.213, pruned_loss=0.03355, over 971674.30 frames.], batch size: 17, lr: 2.12e-04 +2022-05-06 21:48:58,863 INFO [train.py:715] (6/8) Epoch 10, batch 23400, loss[loss=0.149, simple_loss=0.2257, pruned_loss=0.03618, over 4774.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03346, over 971201.14 frames.], batch size: 16, lr: 2.12e-04 +2022-05-06 21:49:39,775 INFO [train.py:715] (6/8) Epoch 10, batch 23450, loss[loss=0.1279, simple_loss=0.1998, pruned_loss=0.02802, over 4634.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03363, over 971585.82 frames.], batch size: 13, lr: 2.12e-04 +2022-05-06 21:50:20,156 INFO [train.py:715] (6/8) Epoch 10, batch 23500, loss[loss=0.1359, simple_loss=0.2104, pruned_loss=0.03069, over 4935.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03381, over 972166.06 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 21:51:02,211 INFO [train.py:715] (6/8) Epoch 10, batch 23550, loss[loss=0.1456, simple_loss=0.2271, pruned_loss=0.03203, over 4761.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03359, over 972302.63 frames.], batch size: 16, lr: 2.12e-04 +2022-05-06 21:51:43,367 INFO [train.py:715] (6/8) Epoch 10, batch 23600, loss[loss=0.1553, simple_loss=0.2304, pruned_loss=0.04016, over 4778.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2127, pruned_loss=0.03382, over 971316.21 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 21:52:23,129 
INFO [train.py:715] (6/8) Epoch 10, batch 23650, loss[loss=0.1282, simple_loss=0.2029, pruned_loss=0.02676, over 4936.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03395, over 971029.54 frames.], batch size: 29, lr: 2.12e-04 +2022-05-06 21:53:03,643 INFO [train.py:715] (6/8) Epoch 10, batch 23700, loss[loss=0.1351, simple_loss=0.2057, pruned_loss=0.03218, over 4787.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2128, pruned_loss=0.03365, over 970644.78 frames.], batch size: 17, lr: 2.12e-04 +2022-05-06 21:53:44,222 INFO [train.py:715] (6/8) Epoch 10, batch 23750, loss[loss=0.1308, simple_loss=0.1957, pruned_loss=0.03299, over 4755.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.0331, over 969583.05 frames.], batch size: 12, lr: 2.12e-04 +2022-05-06 21:54:24,361 INFO [train.py:715] (6/8) Epoch 10, batch 23800, loss[loss=0.1512, simple_loss=0.2197, pruned_loss=0.04133, over 4932.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.0326, over 969768.37 frames.], batch size: 23, lr: 2.12e-04 +2022-05-06 21:55:04,952 INFO [train.py:715] (6/8) Epoch 10, batch 23850, loss[loss=0.1407, simple_loss=0.215, pruned_loss=0.03319, over 4919.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2125, pruned_loss=0.03291, over 970881.36 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 21:55:46,220 INFO [train.py:715] (6/8) Epoch 10, batch 23900, loss[loss=0.1276, simple_loss=0.2022, pruned_loss=0.02647, over 4782.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.03305, over 971133.71 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 21:56:25,835 INFO [train.py:715] (6/8) Epoch 10, batch 23950, loss[loss=0.1383, simple_loss=0.2133, pruned_loss=0.03159, over 4921.00 frames.], tot_loss[loss=0.14, simple_loss=0.2132, pruned_loss=0.03338, over 971879.42 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 21:57:06,218 INFO [train.py:715] (6/8) Epoch 10, batch 24000, loss[loss=0.1337, simple_loss=0.2109, pruned_loss=0.02828, over 4991.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2134, pruned_loss=0.0334, over 971213.75 frames.], batch size: 25, lr: 2.12e-04 +2022-05-06 21:57:06,219 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 21:57:15,894 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1061, simple_loss=0.1905, pruned_loss=0.01087, over 914524.00 frames. 
+2022-05-06 21:57:55,804 INFO [train.py:715] (6/8) Epoch 10, batch 24050, loss[loss=0.1318, simple_loss=0.2178, pruned_loss=0.02295, over 4892.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03358, over 971477.27 frames.], batch size: 22, lr: 2.12e-04 +2022-05-06 21:58:36,849 INFO [train.py:715] (6/8) Epoch 10, batch 24100, loss[loss=0.1261, simple_loss=0.2033, pruned_loss=0.02445, over 4929.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.03288, over 972971.95 frames.], batch size: 29, lr: 2.12e-04 +2022-05-06 21:59:18,111 INFO [train.py:715] (6/8) Epoch 10, batch 24150, loss[loss=0.1226, simple_loss=0.1946, pruned_loss=0.0253, over 4762.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03261, over 972944.17 frames.], batch size: 12, lr: 2.12e-04 +2022-05-06 21:59:57,437 INFO [train.py:715] (6/8) Epoch 10, batch 24200, loss[loss=0.139, simple_loss=0.2094, pruned_loss=0.03428, over 4894.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03239, over 973367.01 frames.], batch size: 17, lr: 2.12e-04 +2022-05-06 22:00:38,183 INFO [train.py:715] (6/8) Epoch 10, batch 24250, loss[loss=0.1295, simple_loss=0.2069, pruned_loss=0.02603, over 4756.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2118, pruned_loss=0.03267, over 972352.41 frames.], batch size: 19, lr: 2.12e-04 +2022-05-06 22:01:19,299 INFO [train.py:715] (6/8) Epoch 10, batch 24300, loss[loss=0.1252, simple_loss=0.1959, pruned_loss=0.02727, over 4781.00 frames.], tot_loss[loss=0.1381, simple_loss=0.211, pruned_loss=0.03263, over 973141.23 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 22:01:59,418 INFO [train.py:715] (6/8) Epoch 10, batch 24350, loss[loss=0.1142, simple_loss=0.1818, pruned_loss=0.02332, over 4902.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2112, pruned_loss=0.03315, over 973077.57 frames.], batch size: 17, lr: 2.12e-04 +2022-05-06 22:02:39,463 INFO [train.py:715] (6/8) Epoch 10, batch 24400, loss[loss=0.1312, simple_loss=0.2077, pruned_loss=0.02735, over 4980.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03356, over 973247.58 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 22:03:20,176 INFO [train.py:715] (6/8) Epoch 10, batch 24450, loss[loss=0.1306, simple_loss=0.1937, pruned_loss=0.0337, over 4827.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2129, pruned_loss=0.03409, over 973014.91 frames.], batch size: 25, lr: 2.12e-04 +2022-05-06 22:04:01,141 INFO [train.py:715] (6/8) Epoch 10, batch 24500, loss[loss=0.1285, simple_loss=0.1994, pruned_loss=0.02884, over 4898.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2119, pruned_loss=0.03362, over 972546.65 frames.], batch size: 19, lr: 2.12e-04 +2022-05-06 22:04:40,224 INFO [train.py:715] (6/8) Epoch 10, batch 24550, loss[loss=0.1277, simple_loss=0.2012, pruned_loss=0.02708, over 4941.00 frames.], tot_loss[loss=0.1398, simple_loss=0.212, pruned_loss=0.03378, over 972105.52 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 22:05:20,208 INFO [train.py:715] (6/8) Epoch 10, batch 24600, loss[loss=0.1443, simple_loss=0.2119, pruned_loss=0.03833, over 4979.00 frames.], tot_loss[loss=0.1396, simple_loss=0.212, pruned_loss=0.0336, over 971500.69 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 22:06:00,570 INFO [train.py:715] (6/8) Epoch 10, batch 24650, loss[loss=0.1123, simple_loss=0.19, pruned_loss=0.01731, over 4765.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2112, pruned_loss=0.03274, over 971393.25 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 
22:06:39,587 INFO [train.py:715] (6/8) Epoch 10, batch 24700, loss[loss=0.1643, simple_loss=0.2217, pruned_loss=0.05341, over 4962.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03323, over 972019.40 frames.], batch size: 35, lr: 2.12e-04 +2022-05-06 22:07:18,185 INFO [train.py:715] (6/8) Epoch 10, batch 24750, loss[loss=0.1312, simple_loss=0.2023, pruned_loss=0.03005, over 4885.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2125, pruned_loss=0.03349, over 972717.45 frames.], batch size: 22, lr: 2.12e-04 +2022-05-06 22:07:57,679 INFO [train.py:715] (6/8) Epoch 10, batch 24800, loss[loss=0.1558, simple_loss=0.2309, pruned_loss=0.04032, over 4955.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03327, over 972719.84 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 22:08:36,826 INFO [train.py:715] (6/8) Epoch 10, batch 24850, loss[loss=0.1398, simple_loss=0.2116, pruned_loss=0.03398, over 4900.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03292, over 972545.49 frames.], batch size: 17, lr: 2.12e-04 +2022-05-06 22:09:14,899 INFO [train.py:715] (6/8) Epoch 10, batch 24900, loss[loss=0.1461, simple_loss=0.2149, pruned_loss=0.03861, over 4929.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03271, over 972867.51 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 22:09:54,524 INFO [train.py:715] (6/8) Epoch 10, batch 24950, loss[loss=0.1216, simple_loss=0.2009, pruned_loss=0.02109, over 4801.00 frames.], tot_loss[loss=0.1388, simple_loss=0.212, pruned_loss=0.03282, over 972863.37 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 22:10:34,375 INFO [train.py:715] (6/8) Epoch 10, batch 25000, loss[loss=0.1618, simple_loss=0.2278, pruned_loss=0.0479, over 4779.00 frames.], tot_loss[loss=0.139, simple_loss=0.2119, pruned_loss=0.03302, over 973006.49 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 22:11:13,228 INFO [train.py:715] (6/8) Epoch 10, batch 25050, loss[loss=0.1461, simple_loss=0.2229, pruned_loss=0.03467, over 4960.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.03406, over 972387.14 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 22:11:52,697 INFO [train.py:715] (6/8) Epoch 10, batch 25100, loss[loss=0.1397, simple_loss=0.2133, pruned_loss=0.03304, over 4968.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2129, pruned_loss=0.03314, over 972442.68 frames.], batch size: 28, lr: 2.12e-04 +2022-05-06 22:12:32,719 INFO [train.py:715] (6/8) Epoch 10, batch 25150, loss[loss=0.1508, simple_loss=0.2224, pruned_loss=0.03962, over 4923.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2127, pruned_loss=0.03328, over 972513.44 frames.], batch size: 17, lr: 2.12e-04 +2022-05-06 22:13:12,210 INFO [train.py:715] (6/8) Epoch 10, batch 25200, loss[loss=0.1199, simple_loss=0.1989, pruned_loss=0.02043, over 4887.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03323, over 971788.51 frames.], batch size: 22, lr: 2.12e-04 +2022-05-06 22:13:50,341 INFO [train.py:715] (6/8) Epoch 10, batch 25250, loss[loss=0.139, simple_loss=0.2135, pruned_loss=0.03223, over 4937.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03349, over 971612.30 frames.], batch size: 29, lr: 2.12e-04 +2022-05-06 22:14:29,217 INFO [train.py:715] (6/8) Epoch 10, batch 25300, loss[loss=0.1629, simple_loss=0.2199, pruned_loss=0.05296, over 4848.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2123, pruned_loss=0.03315, over 971122.88 frames.], batch size: 30, lr: 2.12e-04 +2022-05-06 
22:15:08,865 INFO [train.py:715] (6/8) Epoch 10, batch 25350, loss[loss=0.1638, simple_loss=0.2351, pruned_loss=0.04628, over 4790.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03339, over 970759.00 frames.], batch size: 17, lr: 2.12e-04 +2022-05-06 22:15:47,382 INFO [train.py:715] (6/8) Epoch 10, batch 25400, loss[loss=0.1393, simple_loss=0.2233, pruned_loss=0.02764, over 4888.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2126, pruned_loss=0.0332, over 970768.91 frames.], batch size: 22, lr: 2.12e-04 +2022-05-06 22:16:26,238 INFO [train.py:715] (6/8) Epoch 10, batch 25450, loss[loss=0.1512, simple_loss=0.2213, pruned_loss=0.04057, over 4695.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.0333, over 970922.66 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 22:17:06,159 INFO [train.py:715] (6/8) Epoch 10, batch 25500, loss[loss=0.1129, simple_loss=0.1882, pruned_loss=0.01883, over 4771.00 frames.], tot_loss[loss=0.14, simple_loss=0.2124, pruned_loss=0.03384, over 971281.97 frames.], batch size: 14, lr: 2.11e-04 +2022-05-06 22:17:45,979 INFO [train.py:715] (6/8) Epoch 10, batch 25550, loss[loss=0.1428, simple_loss=0.2136, pruned_loss=0.03596, over 4872.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03349, over 971113.04 frames.], batch size: 22, lr: 2.11e-04 +2022-05-06 22:18:24,977 INFO [train.py:715] (6/8) Epoch 10, batch 25600, loss[loss=0.1585, simple_loss=0.2401, pruned_loss=0.0385, over 4931.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03295, over 971587.50 frames.], batch size: 23, lr: 2.11e-04 +2022-05-06 22:19:05,133 INFO [train.py:715] (6/8) Epoch 10, batch 25650, loss[loss=0.1012, simple_loss=0.1661, pruned_loss=0.01818, over 4878.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03268, over 971768.54 frames.], batch size: 22, lr: 2.11e-04 +2022-05-06 22:19:45,507 INFO [train.py:715] (6/8) Epoch 10, batch 25700, loss[loss=0.1324, simple_loss=0.2044, pruned_loss=0.0302, over 4815.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03232, over 972039.18 frames.], batch size: 27, lr: 2.11e-04 +2022-05-06 22:20:25,370 INFO [train.py:715] (6/8) Epoch 10, batch 25750, loss[loss=0.1544, simple_loss=0.229, pruned_loss=0.03991, over 4855.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.03238, over 971942.31 frames.], batch size: 20, lr: 2.11e-04 +2022-05-06 22:21:04,773 INFO [train.py:715] (6/8) Epoch 10, batch 25800, loss[loss=0.141, simple_loss=0.2235, pruned_loss=0.02922, over 4953.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03209, over 972168.74 frames.], batch size: 21, lr: 2.11e-04 +2022-05-06 22:21:45,309 INFO [train.py:715] (6/8) Epoch 10, batch 25850, loss[loss=0.1308, simple_loss=0.2002, pruned_loss=0.03068, over 4910.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03298, over 972286.12 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:22:25,227 INFO [train.py:715] (6/8) Epoch 10, batch 25900, loss[loss=0.1677, simple_loss=0.2473, pruned_loss=0.04408, over 4773.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.0334, over 971879.13 frames.], batch size: 17, lr: 2.11e-04 +2022-05-06 22:23:03,947 INFO [train.py:715] (6/8) Epoch 10, batch 25950, loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.03126, over 4819.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03362, over 972006.56 frames.], batch size: 26, lr: 2.11e-04 +2022-05-06 22:23:42,716 
INFO [train.py:715] (6/8) Epoch 10, batch 26000, loss[loss=0.1517, simple_loss=0.2245, pruned_loss=0.0395, over 4921.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.03364, over 971774.84 frames.], batch size: 23, lr: 2.11e-04 +2022-05-06 22:24:21,991 INFO [train.py:715] (6/8) Epoch 10, batch 26050, loss[loss=0.1199, simple_loss=0.1851, pruned_loss=0.0273, over 4937.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2132, pruned_loss=0.03366, over 971742.93 frames.], batch size: 23, lr: 2.11e-04 +2022-05-06 22:25:00,976 INFO [train.py:715] (6/8) Epoch 10, batch 26100, loss[loss=0.1193, simple_loss=0.199, pruned_loss=0.01978, over 4875.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03364, over 971494.01 frames.], batch size: 16, lr: 2.11e-04 +2022-05-06 22:25:40,340 INFO [train.py:715] (6/8) Epoch 10, batch 26150, loss[loss=0.1675, simple_loss=0.2321, pruned_loss=0.05146, over 4698.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03353, over 971679.60 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 22:26:21,098 INFO [train.py:715] (6/8) Epoch 10, batch 26200, loss[loss=0.1293, simple_loss=0.2079, pruned_loss=0.02537, over 4702.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.0337, over 971380.83 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 22:27:00,354 INFO [train.py:715] (6/8) Epoch 10, batch 26250, loss[loss=0.1148, simple_loss=0.1944, pruned_loss=0.01756, over 4852.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2131, pruned_loss=0.0334, over 971453.04 frames.], batch size: 20, lr: 2.11e-04 +2022-05-06 22:27:40,000 INFO [train.py:715] (6/8) Epoch 10, batch 26300, loss[loss=0.1459, simple_loss=0.2137, pruned_loss=0.03906, over 4927.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03316, over 972244.68 frames.], batch size: 39, lr: 2.11e-04 +2022-05-06 22:28:19,566 INFO [train.py:715] (6/8) Epoch 10, batch 26350, loss[loss=0.1426, simple_loss=0.2221, pruned_loss=0.03157, over 4807.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, pruned_loss=0.03244, over 972421.90 frames.], batch size: 26, lr: 2.11e-04 +2022-05-06 22:28:59,169 INFO [train.py:715] (6/8) Epoch 10, batch 26400, loss[loss=0.1204, simple_loss=0.2002, pruned_loss=0.02029, over 4821.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2124, pruned_loss=0.03293, over 972133.37 frames.], batch size: 26, lr: 2.11e-04 +2022-05-06 22:29:38,874 INFO [train.py:715] (6/8) Epoch 10, batch 26450, loss[loss=0.1493, simple_loss=0.2165, pruned_loss=0.04104, over 4984.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2122, pruned_loss=0.03275, over 971936.29 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 22:30:18,685 INFO [train.py:715] (6/8) Epoch 10, batch 26500, loss[loss=0.1306, simple_loss=0.1988, pruned_loss=0.03124, over 4967.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2125, pruned_loss=0.03286, over 973093.47 frames.], batch size: 24, lr: 2.11e-04 +2022-05-06 22:30:59,101 INFO [train.py:715] (6/8) Epoch 10, batch 26550, loss[loss=0.1447, simple_loss=0.227, pruned_loss=0.03123, over 4966.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2125, pruned_loss=0.03249, over 973003.76 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 22:31:37,641 INFO [train.py:715] (6/8) Epoch 10, batch 26600, loss[loss=0.1399, simple_loss=0.215, pruned_loss=0.03239, over 4922.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2132, pruned_loss=0.03306, over 973099.63 frames.], batch size: 29, lr: 2.11e-04 +2022-05-06 22:32:17,165 INFO 
[train.py:715] (6/8) Epoch 10, batch 26650, loss[loss=0.1401, simple_loss=0.2059, pruned_loss=0.03716, over 4849.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2129, pruned_loss=0.03328, over 973116.69 frames.], batch size: 34, lr: 2.11e-04 +2022-05-06 22:32:56,675 INFO [train.py:715] (6/8) Epoch 10, batch 26700, loss[loss=0.1328, simple_loss=0.2151, pruned_loss=0.02519, over 4919.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2128, pruned_loss=0.03332, over 972846.99 frames.], batch size: 21, lr: 2.11e-04 +2022-05-06 22:33:36,179 INFO [train.py:715] (6/8) Epoch 10, batch 26750, loss[loss=0.1242, simple_loss=0.1887, pruned_loss=0.02987, over 4694.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.03354, over 972042.13 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 22:34:14,866 INFO [train.py:715] (6/8) Epoch 10, batch 26800, loss[loss=0.1463, simple_loss=0.2227, pruned_loss=0.03493, over 4933.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2128, pruned_loss=0.03391, over 972029.57 frames.], batch size: 29, lr: 2.11e-04 +2022-05-06 22:34:54,621 INFO [train.py:715] (6/8) Epoch 10, batch 26850, loss[loss=0.1584, simple_loss=0.2354, pruned_loss=0.04074, over 4857.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2126, pruned_loss=0.03401, over 971148.24 frames.], batch size: 20, lr: 2.11e-04 +2022-05-06 22:35:34,131 INFO [train.py:715] (6/8) Epoch 10, batch 26900, loss[loss=0.1504, simple_loss=0.2208, pruned_loss=0.04005, over 4928.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2122, pruned_loss=0.03371, over 971379.35 frames.], batch size: 39, lr: 2.11e-04 +2022-05-06 22:36:12,943 INFO [train.py:715] (6/8) Epoch 10, batch 26950, loss[loss=0.1247, simple_loss=0.2045, pruned_loss=0.02244, over 4929.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2123, pruned_loss=0.03311, over 971443.59 frames.], batch size: 24, lr: 2.11e-04 +2022-05-06 22:36:51,896 INFO [train.py:715] (6/8) Epoch 10, batch 27000, loss[loss=0.1362, simple_loss=0.2082, pruned_loss=0.03208, over 4799.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03356, over 972663.63 frames.], batch size: 13, lr: 2.11e-04 +2022-05-06 22:36:51,896 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 22:37:01,643 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1063, simple_loss=0.1906, pruned_loss=0.01104, over 914524.00 frames. 
+2022-05-06 22:37:41,042 INFO [train.py:715] (6/8) Epoch 10, batch 27050, loss[loss=0.1288, simple_loss=0.2083, pruned_loss=0.02464, over 4803.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.03348, over 973043.90 frames.], batch size: 21, lr: 2.11e-04 +2022-05-06 22:38:21,006 INFO [train.py:715] (6/8) Epoch 10, batch 27100, loss[loss=0.1703, simple_loss=0.2434, pruned_loss=0.04855, over 4730.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2129, pruned_loss=0.03397, over 973256.03 frames.], batch size: 16, lr: 2.11e-04 +2022-05-06 22:38:59,618 INFO [train.py:715] (6/8) Epoch 10, batch 27150, loss[loss=0.1506, simple_loss=0.2223, pruned_loss=0.0395, over 4880.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.03421, over 973438.24 frames.], batch size: 39, lr: 2.11e-04 +2022-05-06 22:39:38,790 INFO [train.py:715] (6/8) Epoch 10, batch 27200, loss[loss=0.1444, simple_loss=0.2183, pruned_loss=0.03523, over 4774.00 frames.], tot_loss[loss=0.1409, simple_loss=0.214, pruned_loss=0.03392, over 973330.44 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:40:18,813 INFO [train.py:715] (6/8) Epoch 10, batch 27250, loss[loss=0.1272, simple_loss=0.2009, pruned_loss=0.02675, over 4984.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.03385, over 974197.21 frames.], batch size: 25, lr: 2.11e-04 +2022-05-06 22:40:58,229 INFO [train.py:715] (6/8) Epoch 10, batch 27300, loss[loss=0.1547, simple_loss=0.2293, pruned_loss=0.04005, over 4775.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03355, over 973313.11 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:41:36,436 INFO [train.py:715] (6/8) Epoch 10, batch 27350, loss[loss=0.1196, simple_loss=0.1944, pruned_loss=0.02237, over 4975.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2129, pruned_loss=0.0333, over 972782.73 frames.], batch size: 25, lr: 2.11e-04 +2022-05-06 22:42:15,731 INFO [train.py:715] (6/8) Epoch 10, batch 27400, loss[loss=0.1647, simple_loss=0.2327, pruned_loss=0.04841, over 4814.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03355, over 972366.27 frames.], batch size: 25, lr: 2.11e-04 +2022-05-06 22:42:55,895 INFO [train.py:715] (6/8) Epoch 10, batch 27450, loss[loss=0.1105, simple_loss=0.196, pruned_loss=0.01245, over 4917.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03298, over 972087.86 frames.], batch size: 23, lr: 2.11e-04 +2022-05-06 22:43:34,160 INFO [train.py:715] (6/8) Epoch 10, batch 27500, loss[loss=0.1539, simple_loss=0.2232, pruned_loss=0.04233, over 4843.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2122, pruned_loss=0.03262, over 972315.24 frames.], batch size: 26, lr: 2.11e-04 +2022-05-06 22:44:13,416 INFO [train.py:715] (6/8) Epoch 10, batch 27550, loss[loss=0.1503, simple_loss=0.2298, pruned_loss=0.0354, over 4861.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.03311, over 971806.30 frames.], batch size: 16, lr: 2.11e-04 +2022-05-06 22:44:52,781 INFO [train.py:715] (6/8) Epoch 10, batch 27600, loss[loss=0.1372, simple_loss=0.2077, pruned_loss=0.03335, over 4893.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03302, over 972480.54 frames.], batch size: 22, lr: 2.11e-04 +2022-05-06 22:45:32,116 INFO [train.py:715] (6/8) Epoch 10, batch 27650, loss[loss=0.1608, simple_loss=0.219, pruned_loss=0.05127, over 4836.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2118, pruned_loss=0.03304, over 972518.77 frames.], batch size: 30, lr: 2.11e-04 +2022-05-06 
22:46:11,030 INFO [train.py:715] (6/8) Epoch 10, batch 27700, loss[loss=0.1202, simple_loss=0.1952, pruned_loss=0.02257, over 4761.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2113, pruned_loss=0.03305, over 971884.30 frames.], batch size: 12, lr: 2.11e-04 +2022-05-06 22:46:51,026 INFO [train.py:715] (6/8) Epoch 10, batch 27750, loss[loss=0.162, simple_loss=0.2298, pruned_loss=0.04714, over 4865.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03337, over 972537.51 frames.], batch size: 39, lr: 2.11e-04 +2022-05-06 22:47:31,104 INFO [train.py:715] (6/8) Epoch 10, batch 27800, loss[loss=0.132, simple_loss=0.1952, pruned_loss=0.03437, over 4856.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2119, pruned_loss=0.03329, over 972257.04 frames.], batch size: 20, lr: 2.11e-04 +2022-05-06 22:48:10,302 INFO [train.py:715] (6/8) Epoch 10, batch 27850, loss[loss=0.1514, simple_loss=0.2269, pruned_loss=0.0379, over 4922.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03312, over 971929.52 frames.], batch size: 17, lr: 2.11e-04 +2022-05-06 22:48:50,679 INFO [train.py:715] (6/8) Epoch 10, batch 27900, loss[loss=0.1125, simple_loss=0.1968, pruned_loss=0.01414, over 4896.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2111, pruned_loss=0.03269, over 972479.18 frames.], batch size: 22, lr: 2.11e-04 +2022-05-06 22:49:34,040 INFO [train.py:715] (6/8) Epoch 10, batch 27950, loss[loss=0.1346, simple_loss=0.1986, pruned_loss=0.03534, over 4940.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2112, pruned_loss=0.03264, over 972514.29 frames.], batch size: 29, lr: 2.11e-04 +2022-05-06 22:50:13,532 INFO [train.py:715] (6/8) Epoch 10, batch 28000, loss[loss=0.1274, simple_loss=0.196, pruned_loss=0.02942, over 4854.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03273, over 972302.21 frames.], batch size: 32, lr: 2.11e-04 +2022-05-06 22:50:53,595 INFO [train.py:715] (6/8) Epoch 10, batch 28050, loss[loss=0.1489, simple_loss=0.2165, pruned_loss=0.04067, over 4848.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03273, over 972642.06 frames.], batch size: 32, lr: 2.11e-04 +2022-05-06 22:51:34,468 INFO [train.py:715] (6/8) Epoch 10, batch 28100, loss[loss=0.1679, simple_loss=0.2387, pruned_loss=0.04855, over 4933.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2118, pruned_loss=0.03316, over 971998.42 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:52:15,153 INFO [train.py:715] (6/8) Epoch 10, batch 28150, loss[loss=0.1167, simple_loss=0.1947, pruned_loss=0.01936, over 4836.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03339, over 972149.45 frames.], batch size: 12, lr: 2.11e-04 +2022-05-06 22:52:54,893 INFO [train.py:715] (6/8) Epoch 10, batch 28200, loss[loss=0.1477, simple_loss=0.2146, pruned_loss=0.04037, over 4876.00 frames.], tot_loss[loss=0.1406, simple_loss=0.213, pruned_loss=0.03414, over 972462.01 frames.], batch size: 30, lr: 2.11e-04 +2022-05-06 22:53:35,218 INFO [train.py:715] (6/8) Epoch 10, batch 28250, loss[loss=0.1307, simple_loss=0.2099, pruned_loss=0.02579, over 4943.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2137, pruned_loss=0.03436, over 971481.14 frames.], batch size: 21, lr: 2.11e-04 +2022-05-06 22:54:16,806 INFO [train.py:715] (6/8) Epoch 10, batch 28300, loss[loss=0.135, simple_loss=0.2101, pruned_loss=0.0299, over 4932.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2137, pruned_loss=0.03421, over 971927.87 frames.], batch size: 29, lr: 2.11e-04 +2022-05-06 22:54:56,899 
INFO [train.py:715] (6/8) Epoch 10, batch 28350, loss[loss=0.1246, simple_loss=0.1948, pruned_loss=0.02716, over 4695.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2123, pruned_loss=0.03345, over 972135.19 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 22:55:37,456 INFO [train.py:715] (6/8) Epoch 10, batch 28400, loss[loss=0.1431, simple_loss=0.2184, pruned_loss=0.03389, over 4764.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03401, over 972815.05 frames.], batch size: 19, lr: 2.11e-04 +2022-05-06 22:56:19,124 INFO [train.py:715] (6/8) Epoch 10, batch 28450, loss[loss=0.1367, simple_loss=0.2148, pruned_loss=0.02933, over 4974.00 frames.], tot_loss[loss=0.141, simple_loss=0.2135, pruned_loss=0.03421, over 973072.65 frames.], batch size: 24, lr: 2.11e-04 +2022-05-06 22:57:00,144 INFO [train.py:715] (6/8) Epoch 10, batch 28500, loss[loss=0.1498, simple_loss=0.215, pruned_loss=0.04224, over 4691.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03358, over 973146.16 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 22:57:40,545 INFO [train.py:715] (6/8) Epoch 10, batch 28550, loss[loss=0.1147, simple_loss=0.1962, pruned_loss=0.01657, over 4888.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.03364, over 973094.67 frames.], batch size: 19, lr: 2.11e-04 +2022-05-06 22:58:21,445 INFO [train.py:715] (6/8) Epoch 10, batch 28600, loss[loss=0.1367, simple_loss=0.2199, pruned_loss=0.02675, over 4944.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2134, pruned_loss=0.03347, over 973375.34 frames.], batch size: 21, lr: 2.11e-04 +2022-05-06 22:59:03,595 INFO [train.py:715] (6/8) Epoch 10, batch 28650, loss[loss=0.1623, simple_loss=0.2263, pruned_loss=0.04909, over 4952.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03311, over 973066.56 frames.], batch size: 23, lr: 2.11e-04 +2022-05-06 22:59:43,749 INFO [train.py:715] (6/8) Epoch 10, batch 28700, loss[loss=0.1368, simple_loss=0.2124, pruned_loss=0.03057, over 4936.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03323, over 973209.81 frames.], batch size: 29, lr: 2.11e-04 +2022-05-06 23:00:24,819 INFO [train.py:715] (6/8) Epoch 10, batch 28750, loss[loss=0.1561, simple_loss=0.2282, pruned_loss=0.042, over 4778.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2124, pruned_loss=0.03356, over 972902.75 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 23:01:05,930 INFO [train.py:715] (6/8) Epoch 10, batch 28800, loss[loss=0.1184, simple_loss=0.1857, pruned_loss=0.02556, over 4883.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2128, pruned_loss=0.03395, over 972050.35 frames.], batch size: 13, lr: 2.11e-04 +2022-05-06 23:01:46,834 INFO [train.py:715] (6/8) Epoch 10, batch 28850, loss[loss=0.1245, simple_loss=0.1882, pruned_loss=0.0304, over 4839.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2122, pruned_loss=0.03362, over 971763.34 frames.], batch size: 13, lr: 2.11e-04 +2022-05-06 23:02:27,352 INFO [train.py:715] (6/8) Epoch 10, batch 28900, loss[loss=0.1496, simple_loss=0.2164, pruned_loss=0.04138, over 4836.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.03349, over 972167.21 frames.], batch size: 30, lr: 2.11e-04 +2022-05-06 23:03:08,205 INFO [train.py:715] (6/8) Epoch 10, batch 28950, loss[loss=0.1216, simple_loss=0.2079, pruned_loss=0.01769, over 4829.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03409, over 972418.20 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 23:03:49,286 INFO 
[train.py:715] (6/8) Epoch 10, batch 29000, loss[loss=0.18, simple_loss=0.2471, pruned_loss=0.05645, over 4914.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2132, pruned_loss=0.03385, over 972233.47 frames.], batch size: 17, lr: 2.11e-04 +2022-05-06 23:04:28,434 INFO [train.py:715] (6/8) Epoch 10, batch 29050, loss[loss=0.1338, simple_loss=0.21, pruned_loss=0.02881, over 4794.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2123, pruned_loss=0.03343, over 972421.76 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:05:07,303 INFO [train.py:715] (6/8) Epoch 10, batch 29100, loss[loss=0.1413, simple_loss=0.209, pruned_loss=0.03682, over 4900.00 frames.], tot_loss[loss=0.1394, simple_loss=0.212, pruned_loss=0.03341, over 972259.22 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:05:47,481 INFO [train.py:715] (6/8) Epoch 10, batch 29150, loss[loss=0.1331, simple_loss=0.2032, pruned_loss=0.03152, over 4925.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2112, pruned_loss=0.03344, over 972360.08 frames.], batch size: 18, lr: 2.10e-04 +2022-05-06 23:06:27,779 INFO [train.py:715] (6/8) Epoch 10, batch 29200, loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03307, over 4944.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2116, pruned_loss=0.03309, over 972855.86 frames.], batch size: 21, lr: 2.10e-04 +2022-05-06 23:07:06,678 INFO [train.py:715] (6/8) Epoch 10, batch 29250, loss[loss=0.168, simple_loss=0.2369, pruned_loss=0.04956, over 4886.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.03358, over 972508.01 frames.], batch size: 39, lr: 2.10e-04 +2022-05-06 23:07:46,921 INFO [train.py:715] (6/8) Epoch 10, batch 29300, loss[loss=0.1474, simple_loss=0.215, pruned_loss=0.03996, over 4984.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2131, pruned_loss=0.03412, over 972510.70 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:08:27,018 INFO [train.py:715] (6/8) Epoch 10, batch 29350, loss[loss=0.1467, simple_loss=0.2221, pruned_loss=0.03568, over 4906.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2142, pruned_loss=0.03442, over 972442.33 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:09:06,026 INFO [train.py:715] (6/8) Epoch 10, batch 29400, loss[loss=0.1215, simple_loss=0.1997, pruned_loss=0.02166, over 4781.00 frames.], tot_loss[loss=0.1411, simple_loss=0.214, pruned_loss=0.03411, over 971794.83 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:09:45,808 INFO [train.py:715] (6/8) Epoch 10, batch 29450, loss[loss=0.1513, simple_loss=0.2296, pruned_loss=0.03651, over 4945.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2132, pruned_loss=0.03389, over 971682.72 frames.], batch size: 21, lr: 2.10e-04 +2022-05-06 23:10:26,004 INFO [train.py:715] (6/8) Epoch 10, batch 29500, loss[loss=0.1536, simple_loss=0.2172, pruned_loss=0.045, over 4967.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2137, pruned_loss=0.03429, over 971550.71 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:11:05,709 INFO [train.py:715] (6/8) Epoch 10, batch 29550, loss[loss=0.1285, simple_loss=0.2104, pruned_loss=0.0233, over 4816.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2136, pruned_loss=0.0344, over 971715.30 frames.], batch size: 25, lr: 2.10e-04 +2022-05-06 23:11:44,348 INFO [train.py:715] (6/8) Epoch 10, batch 29600, loss[loss=0.1239, simple_loss=0.2039, pruned_loss=0.02192, over 4902.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2128, pruned_loss=0.03393, over 973069.53 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:12:23,997 INFO [train.py:715] 
(6/8) Epoch 10, batch 29650, loss[loss=0.152, simple_loss=0.2207, pruned_loss=0.04162, over 4835.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2129, pruned_loss=0.03375, over 972952.06 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:13:03,435 INFO [train.py:715] (6/8) Epoch 10, batch 29700, loss[loss=0.1333, simple_loss=0.2121, pruned_loss=0.02726, over 4646.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2123, pruned_loss=0.03361, over 972373.60 frames.], batch size: 13, lr: 2.10e-04 +2022-05-06 23:13:42,105 INFO [train.py:715] (6/8) Epoch 10, batch 29750, loss[loss=0.1215, simple_loss=0.187, pruned_loss=0.02797, over 4785.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2122, pruned_loss=0.03375, over 972680.98 frames.], batch size: 12, lr: 2.10e-04 +2022-05-06 23:14:21,082 INFO [train.py:715] (6/8) Epoch 10, batch 29800, loss[loss=0.1419, simple_loss=0.2156, pruned_loss=0.03408, over 4880.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.0336, over 973174.87 frames.], batch size: 22, lr: 2.10e-04 +2022-05-06 23:15:00,556 INFO [train.py:715] (6/8) Epoch 10, batch 29850, loss[loss=0.1234, simple_loss=0.2016, pruned_loss=0.02262, over 4751.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2124, pruned_loss=0.0336, over 972859.24 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:15:39,441 INFO [train.py:715] (6/8) Epoch 10, batch 29900, loss[loss=0.1451, simple_loss=0.2141, pruned_loss=0.03806, over 4809.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2129, pruned_loss=0.03379, over 971323.67 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:16:17,898 INFO [train.py:715] (6/8) Epoch 10, batch 29950, loss[loss=0.1265, simple_loss=0.2089, pruned_loss=0.022, over 4928.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2135, pruned_loss=0.03397, over 972328.96 frames.], batch size: 23, lr: 2.10e-04 +2022-05-06 23:16:57,116 INFO [train.py:715] (6/8) Epoch 10, batch 30000, loss[loss=0.1401, simple_loss=0.2162, pruned_loss=0.03199, over 4962.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2132, pruned_loss=0.03393, over 972877.45 frames.], batch size: 21, lr: 2.10e-04 +2022-05-06 23:16:57,116 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 23:17:06,541 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1063, simple_loss=0.1906, pruned_loss=0.01106, over 914524.00 frames. 
+2022-05-06 23:17:46,310 INFO [train.py:715] (6/8) Epoch 10, batch 30050, loss[loss=0.156, simple_loss=0.2305, pruned_loss=0.04079, over 4859.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2129, pruned_loss=0.03389, over 971796.57 frames.], batch size: 20, lr: 2.10e-04 +2022-05-06 23:18:25,806 INFO [train.py:715] (6/8) Epoch 10, batch 30100, loss[loss=0.1811, simple_loss=0.2439, pruned_loss=0.05918, over 4946.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.03414, over 972800.32 frames.], batch size: 39, lr: 2.10e-04 +2022-05-06 23:19:04,201 INFO [train.py:715] (6/8) Epoch 10, batch 30150, loss[loss=0.1144, simple_loss=0.1801, pruned_loss=0.02431, over 4766.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03384, over 972422.72 frames.], batch size: 12, lr: 2.10e-04 +2022-05-06 23:19:44,550 INFO [train.py:715] (6/8) Epoch 10, batch 30200, loss[loss=0.142, simple_loss=0.2082, pruned_loss=0.03797, over 4803.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03384, over 973296.00 frames.], batch size: 14, lr: 2.10e-04 +2022-05-06 23:20:24,573 INFO [train.py:715] (6/8) Epoch 10, batch 30250, loss[loss=0.1215, simple_loss=0.1975, pruned_loss=0.02276, over 4906.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2119, pruned_loss=0.03323, over 973376.91 frames.], batch size: 29, lr: 2.10e-04 +2022-05-06 23:21:02,964 INFO [train.py:715] (6/8) Epoch 10, batch 30300, loss[loss=0.1314, simple_loss=0.2073, pruned_loss=0.02777, over 4904.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.0336, over 973006.75 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:21:41,382 INFO [train.py:715] (6/8) Epoch 10, batch 30350, loss[loss=0.1242, simple_loss=0.2038, pruned_loss=0.02228, over 4835.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03331, over 973224.61 frames.], batch size: 27, lr: 2.10e-04 +2022-05-06 23:22:21,182 INFO [train.py:715] (6/8) Epoch 10, batch 30400, loss[loss=0.1581, simple_loss=0.2318, pruned_loss=0.04219, over 4919.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.0329, over 973228.17 frames.], batch size: 39, lr: 2.10e-04 +2022-05-06 23:23:00,549 INFO [train.py:715] (6/8) Epoch 10, batch 30450, loss[loss=0.1666, simple_loss=0.2392, pruned_loss=0.04702, over 4794.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03285, over 972217.90 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 23:23:38,709 INFO [train.py:715] (6/8) Epoch 10, batch 30500, loss[loss=0.1238, simple_loss=0.1975, pruned_loss=0.02512, over 4747.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03298, over 972246.96 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:24:18,306 INFO [train.py:715] (6/8) Epoch 10, batch 30550, loss[loss=0.1379, simple_loss=0.2106, pruned_loss=0.03257, over 4783.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03278, over 971897.82 frames.], batch size: 13, lr: 2.10e-04 +2022-05-06 23:24:57,944 INFO [train.py:715] (6/8) Epoch 10, batch 30600, loss[loss=0.1422, simple_loss=0.2096, pruned_loss=0.03737, over 4970.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03249, over 971576.47 frames.], batch size: 31, lr: 2.10e-04 +2022-05-06 23:25:36,409 INFO [train.py:715] (6/8) Epoch 10, batch 30650, loss[loss=0.1226, simple_loss=0.1975, pruned_loss=0.02381, over 4959.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.0322, over 971132.27 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 
23:26:15,885 INFO [train.py:715] (6/8) Epoch 10, batch 30700, loss[loss=0.1186, simple_loss=0.1979, pruned_loss=0.01961, over 4884.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.03229, over 970963.88 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:26:55,010 INFO [train.py:715] (6/8) Epoch 10, batch 30750, loss[loss=0.1364, simple_loss=0.2194, pruned_loss=0.02673, over 4854.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2101, pruned_loss=0.03208, over 971185.38 frames.], batch size: 30, lr: 2.10e-04 +2022-05-06 23:27:33,901 INFO [train.py:715] (6/8) Epoch 10, batch 30800, loss[loss=0.1564, simple_loss=0.2334, pruned_loss=0.0397, over 4888.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.032, over 970865.99 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:28:12,412 INFO [train.py:715] (6/8) Epoch 10, batch 30850, loss[loss=0.1291, simple_loss=0.1986, pruned_loss=0.02978, over 4761.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03184, over 971841.10 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:28:52,167 INFO [train.py:715] (6/8) Epoch 10, batch 30900, loss[loss=0.1169, simple_loss=0.1893, pruned_loss=0.02222, over 4963.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2119, pruned_loss=0.03258, over 972425.82 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 23:29:32,114 INFO [train.py:715] (6/8) Epoch 10, batch 30950, loss[loss=0.1382, simple_loss=0.2075, pruned_loss=0.03442, over 4867.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03272, over 973130.64 frames.], batch size: 20, lr: 2.10e-04 +2022-05-06 23:30:11,643 INFO [train.py:715] (6/8) Epoch 10, batch 31000, loss[loss=0.1635, simple_loss=0.2489, pruned_loss=0.03903, over 4828.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.0336, over 972080.66 frames.], batch size: 25, lr: 2.10e-04 +2022-05-06 23:30:50,320 INFO [train.py:715] (6/8) Epoch 10, batch 31050, loss[loss=0.137, simple_loss=0.2019, pruned_loss=0.03602, over 4956.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2123, pruned_loss=0.03345, over 972812.63 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:31:29,594 INFO [train.py:715] (6/8) Epoch 10, batch 31100, loss[loss=0.1593, simple_loss=0.211, pruned_loss=0.05382, over 4788.00 frames.], tot_loss[loss=0.1395, simple_loss=0.212, pruned_loss=0.03351, over 972519.59 frames.], batch size: 12, lr: 2.10e-04 +2022-05-06 23:32:09,330 INFO [train.py:715] (6/8) Epoch 10, batch 31150, loss[loss=0.1353, simple_loss=0.2075, pruned_loss=0.03156, over 4943.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2121, pruned_loss=0.03345, over 973546.79 frames.], batch size: 21, lr: 2.10e-04 +2022-05-06 23:32:47,335 INFO [train.py:715] (6/8) Epoch 10, batch 31200, loss[loss=0.1181, simple_loss=0.2003, pruned_loss=0.01791, over 4961.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2128, pruned_loss=0.03335, over 972846.58 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 23:33:26,830 INFO [train.py:715] (6/8) Epoch 10, batch 31250, loss[loss=0.1345, simple_loss=0.2076, pruned_loss=0.03073, over 4809.00 frames.], tot_loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03286, over 972675.63 frames.], batch size: 21, lr: 2.10e-04 +2022-05-06 23:34:06,254 INFO [train.py:715] (6/8) Epoch 10, batch 31300, loss[loss=0.1169, simple_loss=0.1916, pruned_loss=0.0211, over 4850.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2127, pruned_loss=0.033, over 972647.99 frames.], batch size: 20, lr: 2.10e-04 +2022-05-06 23:34:45,242 
INFO [train.py:715] (6/8) Epoch 10, batch 31350, loss[loss=0.1131, simple_loss=0.1833, pruned_loss=0.02148, over 4858.00 frames.], tot_loss[loss=0.141, simple_loss=0.2144, pruned_loss=0.03376, over 973078.18 frames.], batch size: 13, lr: 2.10e-04 +2022-05-06 23:35:23,741 INFO [train.py:715] (6/8) Epoch 10, batch 31400, loss[loss=0.1519, simple_loss=0.2241, pruned_loss=0.03991, over 4917.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2141, pruned_loss=0.03365, over 972608.66 frames.], batch size: 18, lr: 2.10e-04 +2022-05-06 23:36:02,746 INFO [train.py:715] (6/8) Epoch 10, batch 31450, loss[loss=0.1674, simple_loss=0.2352, pruned_loss=0.04984, over 4749.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2145, pruned_loss=0.03385, over 971766.53 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:36:42,177 INFO [train.py:715] (6/8) Epoch 10, batch 31500, loss[loss=0.1504, simple_loss=0.2153, pruned_loss=0.0427, over 4888.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2144, pruned_loss=0.03369, over 972341.62 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:37:19,855 INFO [train.py:715] (6/8) Epoch 10, batch 31550, loss[loss=0.1247, simple_loss=0.1981, pruned_loss=0.02563, over 4978.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2147, pruned_loss=0.03399, over 973166.71 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 23:37:58,955 INFO [train.py:715] (6/8) Epoch 10, batch 31600, loss[loss=0.1257, simple_loss=0.1959, pruned_loss=0.02771, over 4791.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2139, pruned_loss=0.0338, over 972510.44 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:38:38,096 INFO [train.py:715] (6/8) Epoch 10, batch 31650, loss[loss=0.1287, simple_loss=0.2045, pruned_loss=0.02643, over 4908.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2136, pruned_loss=0.03381, over 973044.67 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:39:17,245 INFO [train.py:715] (6/8) Epoch 10, batch 31700, loss[loss=0.1172, simple_loss=0.192, pruned_loss=0.02119, over 4882.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2137, pruned_loss=0.03377, over 972429.40 frames.], batch size: 22, lr: 2.10e-04 +2022-05-06 23:39:55,913 INFO [train.py:715] (6/8) Epoch 10, batch 31750, loss[loss=0.146, simple_loss=0.2311, pruned_loss=0.03039, over 4915.00 frames.], tot_loss[loss=0.14, simple_loss=0.2132, pruned_loss=0.03339, over 972262.68 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:40:34,952 INFO [train.py:715] (6/8) Epoch 10, batch 31800, loss[loss=0.1513, simple_loss=0.2139, pruned_loss=0.04434, over 4842.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2132, pruned_loss=0.03361, over 973305.60 frames.], batch size: 30, lr: 2.10e-04 +2022-05-06 23:41:14,310 INFO [train.py:715] (6/8) Epoch 10, batch 31850, loss[loss=0.1396, simple_loss=0.2206, pruned_loss=0.02931, over 4774.00 frames.], tot_loss[loss=0.14, simple_loss=0.213, pruned_loss=0.03347, over 973040.52 frames.], batch size: 14, lr: 2.10e-04 +2022-05-06 23:41:52,374 INFO [train.py:715] (6/8) Epoch 10, batch 31900, loss[loss=0.1555, simple_loss=0.23, pruned_loss=0.04049, over 4793.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2134, pruned_loss=0.03314, over 972916.88 frames.], batch size: 21, lr: 2.10e-04 +2022-05-06 23:42:31,529 INFO [train.py:715] (6/8) Epoch 10, batch 31950, loss[loss=0.1443, simple_loss=0.2189, pruned_loss=0.0348, over 4917.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2125, pruned_loss=0.03268, over 973357.22 frames.], batch size: 18, lr: 2.10e-04 +2022-05-06 23:43:10,931 INFO 
[train.py:715] (6/8) Epoch 10, batch 32000, loss[loss=0.1383, simple_loss=0.2111, pruned_loss=0.03271, over 4749.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2119, pruned_loss=0.03243, over 972803.46 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:43:49,603 INFO [train.py:715] (6/8) Epoch 10, batch 32050, loss[loss=0.1627, simple_loss=0.2214, pruned_loss=0.05196, over 4968.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03266, over 972480.67 frames.], batch size: 35, lr: 2.10e-04 +2022-05-06 23:44:27,918 INFO [train.py:715] (6/8) Epoch 10, batch 32100, loss[loss=0.1336, simple_loss=0.2045, pruned_loss=0.03136, over 4906.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.0323, over 972787.26 frames.], batch size: 18, lr: 2.10e-04 +2022-05-06 23:45:06,915 INFO [train.py:715] (6/8) Epoch 10, batch 32150, loss[loss=0.1386, simple_loss=0.2148, pruned_loss=0.03122, over 4939.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2106, pruned_loss=0.0323, over 972908.14 frames.], batch size: 21, lr: 2.10e-04 +2022-05-06 23:45:45,855 INFO [train.py:715] (6/8) Epoch 10, batch 32200, loss[loss=0.1236, simple_loss=0.205, pruned_loss=0.02107, over 4782.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2111, pruned_loss=0.03274, over 972329.83 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:46:23,727 INFO [train.py:715] (6/8) Epoch 10, batch 32250, loss[loss=0.1376, simple_loss=0.2047, pruned_loss=0.03521, over 4735.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2111, pruned_loss=0.0327, over 971651.80 frames.], batch size: 12, lr: 2.10e-04 +2022-05-06 23:47:02,887 INFO [train.py:715] (6/8) Epoch 10, batch 32300, loss[loss=0.1257, simple_loss=0.1881, pruned_loss=0.03165, over 4773.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2113, pruned_loss=0.03304, over 973019.67 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:47:42,102 INFO [train.py:715] (6/8) Epoch 10, batch 32350, loss[loss=0.1485, simple_loss=0.2272, pruned_loss=0.03489, over 4889.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.03291, over 973496.81 frames.], batch size: 22, lr: 2.10e-04 +2022-05-06 23:48:20,904 INFO [train.py:715] (6/8) Epoch 10, batch 32400, loss[loss=0.1747, simple_loss=0.2382, pruned_loss=0.0556, over 4859.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2112, pruned_loss=0.03291, over 973601.97 frames.], batch size: 20, lr: 2.10e-04 +2022-05-06 23:48:59,316 INFO [train.py:715] (6/8) Epoch 10, batch 32450, loss[loss=0.1308, simple_loss=0.2068, pruned_loss=0.02737, over 4888.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2107, pruned_loss=0.03251, over 973032.26 frames.], batch size: 32, lr: 2.10e-04 +2022-05-06 23:49:38,635 INFO [train.py:715] (6/8) Epoch 10, batch 32500, loss[loss=0.15, simple_loss=0.219, pruned_loss=0.04048, over 4885.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2111, pruned_loss=0.03276, over 973575.01 frames.], batch size: 22, lr: 2.10e-04 +2022-05-06 23:50:18,348 INFO [train.py:715] (6/8) Epoch 10, batch 32550, loss[loss=0.1354, simple_loss=0.2124, pruned_loss=0.02917, over 4872.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2106, pruned_loss=0.03264, over 973029.19 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:50:56,262 INFO [train.py:715] (6/8) Epoch 10, batch 32600, loss[loss=0.1337, simple_loss=0.2133, pruned_loss=0.02703, over 4948.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03292, over 972608.06 frames.], batch size: 23, lr: 2.10e-04 +2022-05-06 23:51:35,798 INFO [train.py:715] 
(6/8) Epoch 10, batch 32650, loss[loss=0.132, simple_loss=0.1993, pruned_loss=0.03237, over 4706.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2118, pruned_loss=0.03282, over 972551.16 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:52:15,569 INFO [train.py:715] (6/8) Epoch 10, batch 32700, loss[loss=0.1335, simple_loss=0.2133, pruned_loss=0.02686, over 4812.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.0327, over 972842.46 frames.], batch size: 26, lr: 2.09e-04 +2022-05-06 23:52:53,820 INFO [train.py:715] (6/8) Epoch 10, batch 32750, loss[loss=0.1324, simple_loss=0.1982, pruned_loss=0.03325, over 4877.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2114, pruned_loss=0.03291, over 972823.92 frames.], batch size: 32, lr: 2.09e-04 +2022-05-06 23:53:34,511 INFO [train.py:715] (6/8) Epoch 10, batch 32800, loss[loss=0.1327, simple_loss=0.1969, pruned_loss=0.03427, over 4905.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03309, over 972812.58 frames.], batch size: 19, lr: 2.09e-04 +2022-05-06 23:54:14,777 INFO [train.py:715] (6/8) Epoch 10, batch 32850, loss[loss=0.1276, simple_loss=0.1985, pruned_loss=0.02839, over 4947.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03315, over 972312.20 frames.], batch size: 35, lr: 2.09e-04 +2022-05-06 23:54:54,886 INFO [train.py:715] (6/8) Epoch 10, batch 32900, loss[loss=0.1612, simple_loss=0.2425, pruned_loss=0.03996, over 4770.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2125, pruned_loss=0.03303, over 973028.25 frames.], batch size: 17, lr: 2.09e-04 +2022-05-06 23:55:34,230 INFO [train.py:715] (6/8) Epoch 10, batch 32950, loss[loss=0.1354, simple_loss=0.217, pruned_loss=0.02687, over 4933.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2118, pruned_loss=0.03297, over 972645.86 frames.], batch size: 21, lr: 2.09e-04 +2022-05-06 23:56:14,910 INFO [train.py:715] (6/8) Epoch 10, batch 33000, loss[loss=0.127, simple_loss=0.2075, pruned_loss=0.02325, over 4819.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03309, over 972366.05 frames.], batch size: 15, lr: 2.09e-04 +2022-05-06 23:56:14,910 INFO [train.py:733] (6/8) Computing validation loss +2022-05-06 23:56:24,575 INFO [train.py:742] (6/8) Epoch 10, validation: loss=0.1063, simple_loss=0.1905, pruned_loss=0.01103, over 914524.00 frames. 
+2022-05-06 23:57:03,965 INFO [train.py:715] (6/8) Epoch 10, batch 33050, loss[loss=0.1482, simple_loss=0.2217, pruned_loss=0.03737, over 4924.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03313, over 971888.81 frames.], batch size: 29, lr: 2.09e-04 +2022-05-06 23:57:43,741 INFO [train.py:715] (6/8) Epoch 10, batch 33100, loss[loss=0.1537, simple_loss=0.2294, pruned_loss=0.03899, over 4903.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03368, over 971494.57 frames.], batch size: 19, lr: 2.09e-04 +2022-05-06 23:58:21,694 INFO [train.py:715] (6/8) Epoch 10, batch 33150, loss[loss=0.1207, simple_loss=0.1875, pruned_loss=0.027, over 4934.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2125, pruned_loss=0.03298, over 971571.13 frames.], batch size: 23, lr: 2.09e-04 +2022-05-06 23:59:00,826 INFO [train.py:715] (6/8) Epoch 10, batch 33200, loss[loss=0.1361, simple_loss=0.2043, pruned_loss=0.03391, over 4786.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03303, over 971763.89 frames.], batch size: 17, lr: 2.09e-04 +2022-05-06 23:59:40,446 INFO [train.py:715] (6/8) Epoch 10, batch 33250, loss[loss=0.1414, simple_loss=0.2234, pruned_loss=0.02967, over 4796.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03278, over 971756.89 frames.], batch size: 18, lr: 2.09e-04 +2022-05-07 00:00:18,361 INFO [train.py:715] (6/8) Epoch 10, batch 33300, loss[loss=0.1606, simple_loss=0.2248, pruned_loss=0.04822, over 4735.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.03268, over 972358.60 frames.], batch size: 16, lr: 2.09e-04 +2022-05-07 00:00:57,773 INFO [train.py:715] (6/8) Epoch 10, batch 33350, loss[loss=0.1281, simple_loss=0.2061, pruned_loss=0.02504, over 4739.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03266, over 972720.58 frames.], batch size: 16, lr: 2.09e-04 +2022-05-07 00:01:37,020 INFO [train.py:715] (6/8) Epoch 10, batch 33400, loss[loss=0.1501, simple_loss=0.2133, pruned_loss=0.04344, over 4754.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03301, over 971992.74 frames.], batch size: 19, lr: 2.09e-04 +2022-05-07 00:02:16,545 INFO [train.py:715] (6/8) Epoch 10, batch 33450, loss[loss=0.1463, simple_loss=0.2094, pruned_loss=0.04161, over 4829.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2127, pruned_loss=0.03327, over 971771.19 frames.], batch size: 14, lr: 2.09e-04 +2022-05-07 00:02:54,369 INFO [train.py:715] (6/8) Epoch 10, batch 33500, loss[loss=0.1377, simple_loss=0.2064, pruned_loss=0.03445, over 4954.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2129, pruned_loss=0.03341, over 971607.61 frames.], batch size: 15, lr: 2.09e-04 +2022-05-07 00:03:33,962 INFO [train.py:715] (6/8) Epoch 10, batch 33550, loss[loss=0.1203, simple_loss=0.189, pruned_loss=0.02582, over 4963.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03358, over 971847.20 frames.], batch size: 24, lr: 2.09e-04 +2022-05-07 00:04:13,565 INFO [train.py:715] (6/8) Epoch 10, batch 33600, loss[loss=0.124, simple_loss=0.2018, pruned_loss=0.02312, over 4872.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03346, over 971992.80 frames.], batch size: 16, lr: 2.09e-04 +2022-05-07 00:04:52,116 INFO [train.py:715] (6/8) Epoch 10, batch 33650, loss[loss=0.1347, simple_loss=0.2047, pruned_loss=0.03233, over 4798.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03326, over 972872.63 frames.], batch size: 14, lr: 2.09e-04 +2022-05-07 
00:05:30,843 INFO [train.py:715] (6/8) Epoch 10, batch 33700, loss[loss=0.1244, simple_loss=0.2034, pruned_loss=0.02264, over 4783.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.03306, over 972819.27 frames.], batch size: 17, lr: 2.09e-04 +2022-05-07 00:06:10,498 INFO [train.py:715] (6/8) Epoch 10, batch 33750, loss[loss=0.1453, simple_loss=0.2139, pruned_loss=0.03829, over 4842.00 frames.], tot_loss[loss=0.139, simple_loss=0.2125, pruned_loss=0.03275, over 971112.03 frames.], batch size: 13, lr: 2.09e-04 +2022-05-07 00:06:50,169 INFO [train.py:715] (6/8) Epoch 10, batch 33800, loss[loss=0.1167, simple_loss=0.1925, pruned_loss=0.02045, over 4782.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2134, pruned_loss=0.03298, over 971620.53 frames.], batch size: 12, lr: 2.09e-04 +2022-05-07 00:07:29,175 INFO [train.py:715] (6/8) Epoch 10, batch 33850, loss[loss=0.1263, simple_loss=0.1987, pruned_loss=0.02691, over 4847.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2132, pruned_loss=0.03277, over 971631.33 frames.], batch size: 15, lr: 2.09e-04 +2022-05-07 00:08:08,844 INFO [train.py:715] (6/8) Epoch 10, batch 33900, loss[loss=0.1139, simple_loss=0.1812, pruned_loss=0.02325, over 4912.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2128, pruned_loss=0.03269, over 972194.05 frames.], batch size: 17, lr: 2.09e-04 +2022-05-07 00:08:48,753 INFO [train.py:715] (6/8) Epoch 10, batch 33950, loss[loss=0.1161, simple_loss=0.1941, pruned_loss=0.01903, over 4779.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2118, pruned_loss=0.03243, over 972053.47 frames.], batch size: 14, lr: 2.09e-04 +2022-05-07 00:09:27,308 INFO [train.py:715] (6/8) Epoch 10, batch 34000, loss[loss=0.1265, simple_loss=0.2039, pruned_loss=0.02451, over 4882.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2119, pruned_loss=0.0326, over 972408.25 frames.], batch size: 16, lr: 2.09e-04 +2022-05-07 00:10:06,634 INFO [train.py:715] (6/8) Epoch 10, batch 34050, loss[loss=0.1573, simple_loss=0.2353, pruned_loss=0.03968, over 4825.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03294, over 973064.03 frames.], batch size: 26, lr: 2.09e-04 +2022-05-07 00:10:45,879 INFO [train.py:715] (6/8) Epoch 10, batch 34100, loss[loss=0.13, simple_loss=0.2077, pruned_loss=0.02619, over 4787.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2126, pruned_loss=0.0329, over 972874.26 frames.], batch size: 21, lr: 2.09e-04 +2022-05-07 00:11:25,364 INFO [train.py:715] (6/8) Epoch 10, batch 34150, loss[loss=0.1423, simple_loss=0.2109, pruned_loss=0.0368, over 4789.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2129, pruned_loss=0.03311, over 972627.88 frames.], batch size: 14, lr: 2.09e-04 +2022-05-07 00:12:04,926 INFO [train.py:715] (6/8) Epoch 10, batch 34200, loss[loss=0.1419, simple_loss=0.2167, pruned_loss=0.03351, over 4830.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03311, over 973198.26 frames.], batch size: 15, lr: 2.09e-04 +2022-05-07 00:12:44,146 INFO [train.py:715] (6/8) Epoch 10, batch 34250, loss[loss=0.1357, simple_loss=0.2019, pruned_loss=0.03477, over 4902.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03343, over 973447.10 frames.], batch size: 19, lr: 2.09e-04 +2022-05-07 00:13:23,645 INFO [train.py:715] (6/8) Epoch 10, batch 34300, loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03016, over 4910.00 frames.], tot_loss[loss=0.1393, simple_loss=0.212, pruned_loss=0.03332, over 972971.64 frames.], batch size: 17, lr: 2.09e-04 +2022-05-07 00:14:03,553 
INFO [train.py:715] (6/8) Epoch 10, batch 34350, loss[loss=0.1481, simple_loss=0.2181, pruned_loss=0.03905, over 4910.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03343, over 973118.49 frames.], batch size: 39, lr: 2.09e-04 +2022-05-07 00:14:43,453 INFO [train.py:715] (6/8) Epoch 10, batch 34400, loss[loss=0.1963, simple_loss=0.2487, pruned_loss=0.07194, over 4783.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2119, pruned_loss=0.03324, over 973039.07 frames.], batch size: 14, lr: 2.09e-04 +2022-05-07 00:15:23,602 INFO [train.py:715] (6/8) Epoch 10, batch 34450, loss[loss=0.168, simple_loss=0.2456, pruned_loss=0.04515, over 4921.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2139, pruned_loss=0.0342, over 973115.90 frames.], batch size: 18, lr: 2.09e-04 +2022-05-07 00:16:03,672 INFO [train.py:715] (6/8) Epoch 10, batch 34500, loss[loss=0.1394, simple_loss=0.2003, pruned_loss=0.03922, over 4919.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2138, pruned_loss=0.03389, over 972963.85 frames.], batch size: 19, lr: 2.09e-04 +2022-05-07 00:16:42,869 INFO [train.py:715] (6/8) Epoch 10, batch 34550, loss[loss=0.1146, simple_loss=0.1899, pruned_loss=0.01962, over 4801.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03341, over 972881.94 frames.], batch size: 24, lr: 2.09e-04 +2022-05-07 00:17:23,171 INFO [train.py:715] (6/8) Epoch 10, batch 34600, loss[loss=0.1321, simple_loss=0.2016, pruned_loss=0.03131, over 4850.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03324, over 972711.67 frames.], batch size: 30, lr: 2.09e-04 +2022-05-07 00:18:03,629 INFO [train.py:715] (6/8) Epoch 10, batch 34650, loss[loss=0.1525, simple_loss=0.23, pruned_loss=0.03749, over 4801.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03363, over 972791.58 frames.], batch size: 24, lr: 2.09e-04 +2022-05-07 00:18:42,671 INFO [train.py:715] (6/8) Epoch 10, batch 34700, loss[loss=0.1302, simple_loss=0.1977, pruned_loss=0.03135, over 4937.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.03368, over 972903.53 frames.], batch size: 35, lr: 2.09e-04 +2022-05-07 00:19:21,236 INFO [train.py:715] (6/8) Epoch 10, batch 34750, loss[loss=0.129, simple_loss=0.1976, pruned_loss=0.03014, over 4963.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2136, pruned_loss=0.03405, over 973096.28 frames.], batch size: 14, lr: 2.09e-04 +2022-05-07 00:19:57,690 INFO [train.py:715] (6/8) Epoch 10, batch 34800, loss[loss=0.1042, simple_loss=0.1732, pruned_loss=0.0176, over 4823.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2114, pruned_loss=0.03349, over 972196.67 frames.], batch size: 12, lr: 2.09e-04 +2022-05-07 00:20:47,594 INFO [train.py:715] (6/8) Epoch 11, batch 0, loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02973, over 4857.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02973, over 4857.00 frames.], batch size: 20, lr: 2.00e-04 +2022-05-07 00:21:26,502 INFO [train.py:715] (6/8) Epoch 11, batch 50, loss[loss=0.1609, simple_loss=0.2386, pruned_loss=0.04158, over 4987.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2159, pruned_loss=0.03447, over 219494.80 frames.], batch size: 15, lr: 2.00e-04 +2022-05-07 00:22:06,417 INFO [train.py:715] (6/8) Epoch 11, batch 100, loss[loss=0.153, simple_loss=0.2289, pruned_loss=0.0385, over 4743.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.03458, over 385995.38 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 00:22:46,292 INFO [train.py:715] (6/8) 
Epoch 11, batch 150, loss[loss=0.1271, simple_loss=0.2067, pruned_loss=0.02376, over 4839.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2136, pruned_loss=0.0339, over 516262.21 frames.], batch size: 30, lr: 2.00e-04 +2022-05-07 00:23:26,842 INFO [train.py:715] (6/8) Epoch 11, batch 200, loss[loss=0.183, simple_loss=0.2477, pruned_loss=0.05908, over 4848.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03324, over 617918.94 frames.], batch size: 30, lr: 2.00e-04 +2022-05-07 00:24:06,701 INFO [train.py:715] (6/8) Epoch 11, batch 250, loss[loss=0.1219, simple_loss=0.1863, pruned_loss=0.02871, over 4828.00 frames.], tot_loss[loss=0.1398, simple_loss=0.212, pruned_loss=0.03373, over 696493.31 frames.], batch size: 13, lr: 2.00e-04 +2022-05-07 00:24:45,520 INFO [train.py:715] (6/8) Epoch 11, batch 300, loss[loss=0.148, simple_loss=0.2184, pruned_loss=0.03882, over 4829.00 frames.], tot_loss[loss=0.141, simple_loss=0.2134, pruned_loss=0.03425, over 758135.71 frames.], batch size: 25, lr: 2.00e-04 +2022-05-07 00:25:26,104 INFO [train.py:715] (6/8) Epoch 11, batch 350, loss[loss=0.1309, simple_loss=0.1989, pruned_loss=0.0314, over 4914.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2138, pruned_loss=0.03456, over 805884.30 frames.], batch size: 29, lr: 2.00e-04 +2022-05-07 00:26:05,777 INFO [train.py:715] (6/8) Epoch 11, batch 400, loss[loss=0.1209, simple_loss=0.2035, pruned_loss=0.01919, over 4919.00 frames.], tot_loss[loss=0.1406, simple_loss=0.213, pruned_loss=0.03408, over 842769.33 frames.], batch size: 29, lr: 2.00e-04 +2022-05-07 00:26:46,459 INFO [train.py:715] (6/8) Epoch 11, batch 450, loss[loss=0.1504, simple_loss=0.2162, pruned_loss=0.0423, over 4875.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2117, pruned_loss=0.03333, over 871412.40 frames.], batch size: 30, lr: 2.00e-04 +2022-05-07 00:27:27,797 INFO [train.py:715] (6/8) Epoch 11, batch 500, loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03654, over 4859.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2114, pruned_loss=0.03297, over 894034.52 frames.], batch size: 32, lr: 2.00e-04 +2022-05-07 00:28:09,392 INFO [train.py:715] (6/8) Epoch 11, batch 550, loss[loss=0.124, simple_loss=0.2049, pruned_loss=0.02152, over 4975.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03318, over 912083.22 frames.], batch size: 15, lr: 2.00e-04 +2022-05-07 00:28:50,704 INFO [train.py:715] (6/8) Epoch 11, batch 600, loss[loss=0.1202, simple_loss=0.1901, pruned_loss=0.02514, over 4960.00 frames.], tot_loss[loss=0.1404, simple_loss=0.213, pruned_loss=0.03391, over 924645.12 frames.], batch size: 28, lr: 2.00e-04 +2022-05-07 00:29:32,043 INFO [train.py:715] (6/8) Epoch 11, batch 650, loss[loss=0.1257, simple_loss=0.2022, pruned_loss=0.02466, over 4897.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2133, pruned_loss=0.03426, over 935174.95 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:30:13,298 INFO [train.py:715] (6/8) Epoch 11, batch 700, loss[loss=0.1285, simple_loss=0.2103, pruned_loss=0.02337, over 4954.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2124, pruned_loss=0.03368, over 942877.06 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 00:30:54,883 INFO [train.py:715] (6/8) Epoch 11, batch 750, loss[loss=0.1739, simple_loss=0.2299, pruned_loss=0.05897, over 4744.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2119, pruned_loss=0.03341, over 949644.29 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 00:31:36,035 INFO [train.py:715] (6/8) Epoch 11, batch 800, loss[loss=0.1424, 
simple_loss=0.2151, pruned_loss=0.03483, over 4909.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03272, over 954644.89 frames.], batch size: 18, lr: 2.00e-04 +2022-05-07 00:32:16,768 INFO [train.py:715] (6/8) Epoch 11, batch 850, loss[loss=0.1499, simple_loss=0.2106, pruned_loss=0.04463, over 4789.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2117, pruned_loss=0.0328, over 958256.08 frames.], batch size: 17, lr: 2.00e-04 +2022-05-07 00:32:58,367 INFO [train.py:715] (6/8) Epoch 11, batch 900, loss[loss=0.133, simple_loss=0.2021, pruned_loss=0.03195, over 4802.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2106, pruned_loss=0.03232, over 961033.03 frames.], batch size: 18, lr: 2.00e-04 +2022-05-07 00:33:38,991 INFO [train.py:715] (6/8) Epoch 11, batch 950, loss[loss=0.1277, simple_loss=0.204, pruned_loss=0.02564, over 4867.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03209, over 963683.49 frames.], batch size: 32, lr: 2.00e-04 +2022-05-07 00:34:19,480 INFO [train.py:715] (6/8) Epoch 11, batch 1000, loss[loss=0.1285, simple_loss=0.2062, pruned_loss=0.02543, over 4808.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03265, over 965138.32 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 00:34:58,895 INFO [train.py:715] (6/8) Epoch 11, batch 1050, loss[loss=0.1492, simple_loss=0.2172, pruned_loss=0.0406, over 4884.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03243, over 966173.34 frames.], batch size: 32, lr: 2.00e-04 +2022-05-07 00:35:41,052 INFO [train.py:715] (6/8) Epoch 11, batch 1100, loss[loss=0.1298, simple_loss=0.2072, pruned_loss=0.02623, over 4874.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03253, over 967379.76 frames.], batch size: 22, lr: 2.00e-04 +2022-05-07 00:36:20,723 INFO [train.py:715] (6/8) Epoch 11, batch 1150, loss[loss=0.1464, simple_loss=0.2337, pruned_loss=0.0296, over 4949.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03255, over 969677.70 frames.], batch size: 39, lr: 2.00e-04 +2022-05-07 00:37:00,330 INFO [train.py:715] (6/8) Epoch 11, batch 1200, loss[loss=0.1457, simple_loss=0.2206, pruned_loss=0.03534, over 4751.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2118, pruned_loss=0.03272, over 970093.46 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 00:37:39,163 INFO [train.py:715] (6/8) Epoch 11, batch 1250, loss[loss=0.1317, simple_loss=0.2035, pruned_loss=0.02992, over 4764.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2126, pruned_loss=0.03321, over 970168.59 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:38:18,007 INFO [train.py:715] (6/8) Epoch 11, batch 1300, loss[loss=0.1659, simple_loss=0.2309, pruned_loss=0.0504, over 4857.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03318, over 970665.32 frames.], batch size: 30, lr: 2.00e-04 +2022-05-07 00:38:56,865 INFO [train.py:715] (6/8) Epoch 11, batch 1350, loss[loss=0.1184, simple_loss=0.1862, pruned_loss=0.02527, over 4868.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03314, over 970874.76 frames.], batch size: 22, lr: 2.00e-04 +2022-05-07 00:39:35,884 INFO [train.py:715] (6/8) Epoch 11, batch 1400, loss[loss=0.1234, simple_loss=0.2037, pruned_loss=0.02157, over 4965.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2115, pruned_loss=0.03306, over 970607.11 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 00:40:14,716 INFO [train.py:715] (6/8) Epoch 11, batch 1450, loss[loss=0.1296, simple_loss=0.2084, 
pruned_loss=0.02539, over 4837.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2118, pruned_loss=0.03282, over 970530.01 frames.], batch size: 30, lr: 2.00e-04 +2022-05-07 00:40:53,346 INFO [train.py:715] (6/8) Epoch 11, batch 1500, loss[loss=0.1394, simple_loss=0.2182, pruned_loss=0.03025, over 4899.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2125, pruned_loss=0.033, over 971324.68 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:41:31,715 INFO [train.py:715] (6/8) Epoch 11, batch 1550, loss[loss=0.1395, simple_loss=0.2073, pruned_loss=0.03583, over 4770.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.03235, over 971482.60 frames.], batch size: 14, lr: 2.00e-04 +2022-05-07 00:42:10,773 INFO [train.py:715] (6/8) Epoch 11, batch 1600, loss[loss=0.1502, simple_loss=0.232, pruned_loss=0.03423, over 4923.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03212, over 971540.47 frames.], batch size: 18, lr: 2.00e-04 +2022-05-07 00:42:49,742 INFO [train.py:715] (6/8) Epoch 11, batch 1650, loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02957, over 4738.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03255, over 971911.85 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 00:43:28,111 INFO [train.py:715] (6/8) Epoch 11, batch 1700, loss[loss=0.1742, simple_loss=0.2429, pruned_loss=0.0527, over 4971.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03252, over 972198.79 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 00:44:07,381 INFO [train.py:715] (6/8) Epoch 11, batch 1750, loss[loss=0.134, simple_loss=0.2127, pruned_loss=0.0277, over 4900.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2109, pruned_loss=0.03231, over 971772.13 frames.], batch size: 22, lr: 2.00e-04 +2022-05-07 00:44:46,271 INFO [train.py:715] (6/8) Epoch 11, batch 1800, loss[loss=0.1449, simple_loss=0.2215, pruned_loss=0.03413, over 4966.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2105, pruned_loss=0.03236, over 971594.12 frames.], batch size: 21, lr: 2.00e-04 +2022-05-07 00:45:25,307 INFO [train.py:715] (6/8) Epoch 11, batch 1850, loss[loss=0.1685, simple_loss=0.2327, pruned_loss=0.05217, over 4800.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2111, pruned_loss=0.03268, over 970775.33 frames.], batch size: 14, lr: 2.00e-04 +2022-05-07 00:46:04,488 INFO [train.py:715] (6/8) Epoch 11, batch 1900, loss[loss=0.1212, simple_loss=0.1988, pruned_loss=0.02183, over 4919.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2113, pruned_loss=0.03269, over 971285.06 frames.], batch size: 18, lr: 2.00e-04 +2022-05-07 00:46:43,766 INFO [train.py:715] (6/8) Epoch 11, batch 1950, loss[loss=0.1632, simple_loss=0.238, pruned_loss=0.04414, over 4838.00 frames.], tot_loss[loss=0.139, simple_loss=0.2114, pruned_loss=0.03326, over 971860.15 frames.], batch size: 30, lr: 2.00e-04 +2022-05-07 00:47:23,300 INFO [train.py:715] (6/8) Epoch 11, batch 2000, loss[loss=0.1364, simple_loss=0.2075, pruned_loss=0.03266, over 4701.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2113, pruned_loss=0.03306, over 971568.46 frames.], batch size: 15, lr: 2.00e-04 +2022-05-07 00:48:01,931 INFO [train.py:715] (6/8) Epoch 11, batch 2050, loss[loss=0.1574, simple_loss=0.2329, pruned_loss=0.04101, over 4899.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2116, pruned_loss=0.03292, over 971398.82 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:48:41,071 INFO [train.py:715] (6/8) Epoch 11, batch 2100, loss[loss=0.1348, simple_loss=0.2145, pruned_loss=0.02754, over 
4911.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03281, over 971514.56 frames.], batch size: 29, lr: 2.00e-04 +2022-05-07 00:49:20,362 INFO [train.py:715] (6/8) Epoch 11, batch 2150, loss[loss=0.145, simple_loss=0.2241, pruned_loss=0.0329, over 4641.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.0328, over 971794.26 frames.], batch size: 13, lr: 2.00e-04 +2022-05-07 00:49:59,564 INFO [train.py:715] (6/8) Epoch 11, batch 2200, loss[loss=0.1342, simple_loss=0.207, pruned_loss=0.03071, over 4762.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2104, pruned_loss=0.03275, over 972493.15 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 00:50:38,222 INFO [train.py:715] (6/8) Epoch 11, batch 2250, loss[loss=0.1381, simple_loss=0.2027, pruned_loss=0.03674, over 4748.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2115, pruned_loss=0.03307, over 972766.19 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:51:17,281 INFO [train.py:715] (6/8) Epoch 11, batch 2300, loss[loss=0.1288, simple_loss=0.2047, pruned_loss=0.02647, over 4899.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2114, pruned_loss=0.03298, over 972558.77 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:51:56,681 INFO [train.py:715] (6/8) Epoch 11, batch 2350, loss[loss=0.1656, simple_loss=0.2397, pruned_loss=0.04575, over 4835.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.03273, over 972746.34 frames.], batch size: 15, lr: 2.00e-04 +2022-05-07 00:52:35,085 INFO [train.py:715] (6/8) Epoch 11, batch 2400, loss[loss=0.1222, simple_loss=0.1962, pruned_loss=0.02414, over 4889.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2112, pruned_loss=0.03271, over 972676.75 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 00:53:14,459 INFO [train.py:715] (6/8) Epoch 11, batch 2450, loss[loss=0.1599, simple_loss=0.2249, pruned_loss=0.04742, over 4867.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2114, pruned_loss=0.03302, over 973490.39 frames.], batch size: 30, lr: 2.00e-04 +2022-05-07 00:53:54,032 INFO [train.py:715] (6/8) Epoch 11, batch 2500, loss[loss=0.1445, simple_loss=0.2074, pruned_loss=0.04085, over 4837.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.0333, over 973124.60 frames.], batch size: 13, lr: 2.00e-04 +2022-05-07 00:54:33,183 INFO [train.py:715] (6/8) Epoch 11, batch 2550, loss[loss=0.1452, simple_loss=0.2234, pruned_loss=0.0335, over 4749.00 frames.], tot_loss[loss=0.139, simple_loss=0.2116, pruned_loss=0.03322, over 973305.33 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:55:12,423 INFO [train.py:715] (6/8) Epoch 11, batch 2600, loss[loss=0.1331, simple_loss=0.2184, pruned_loss=0.02393, over 4908.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2118, pruned_loss=0.03356, over 972782.71 frames.], batch size: 17, lr: 2.00e-04 +2022-05-07 00:55:51,265 INFO [train.py:715] (6/8) Epoch 11, batch 2650, loss[loss=0.1163, simple_loss=0.1977, pruned_loss=0.01745, over 4859.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2125, pruned_loss=0.03362, over 973681.21 frames.], batch size: 30, lr: 2.00e-04 +2022-05-07 00:56:30,348 INFO [train.py:715] (6/8) Epoch 11, batch 2700, loss[loss=0.1157, simple_loss=0.1948, pruned_loss=0.0183, over 4824.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03296, over 973466.27 frames.], batch size: 25, lr: 2.00e-04 +2022-05-07 00:57:09,094 INFO [train.py:715] (6/8) Epoch 11, batch 2750, loss[loss=0.1349, simple_loss=0.1992, pruned_loss=0.03531, over 4942.00 frames.], 
tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03295, over 973324.50 frames.], batch size: 21, lr: 2.00e-04 +2022-05-07 00:57:48,074 INFO [train.py:715] (6/8) Epoch 11, batch 2800, loss[loss=0.1299, simple_loss=0.2077, pruned_loss=0.02601, over 4863.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2135, pruned_loss=0.03343, over 973031.04 frames.], batch size: 20, lr: 2.00e-04 +2022-05-07 00:58:27,253 INFO [train.py:715] (6/8) Epoch 11, batch 2850, loss[loss=0.1585, simple_loss=0.231, pruned_loss=0.04295, over 4875.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.03367, over 973515.30 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 00:59:05,711 INFO [train.py:715] (6/8) Epoch 11, batch 2900, loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03354, over 4926.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2129, pruned_loss=0.03351, over 972542.01 frames.], batch size: 23, lr: 2.00e-04 +2022-05-07 00:59:45,167 INFO [train.py:715] (6/8) Epoch 11, batch 2950, loss[loss=0.1206, simple_loss=0.191, pruned_loss=0.02513, over 4935.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.0329, over 972854.52 frames.], batch size: 21, lr: 2.00e-04 +2022-05-07 01:00:25,031 INFO [train.py:715] (6/8) Epoch 11, batch 3000, loss[loss=0.1532, simple_loss=0.2272, pruned_loss=0.03962, over 4746.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03288, over 971714.79 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 01:00:25,032 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 01:00:34,772 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.1061, simple_loss=0.1902, pruned_loss=0.01097, over 914524.00 frames. +2022-05-07 01:01:14,748 INFO [train.py:715] (6/8) Epoch 11, batch 3050, loss[loss=0.1635, simple_loss=0.2359, pruned_loss=0.0456, over 4929.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2115, pruned_loss=0.03301, over 971810.04 frames.], batch size: 23, lr: 2.00e-04 +2022-05-07 01:01:54,033 INFO [train.py:715] (6/8) Epoch 11, batch 3100, loss[loss=0.1387, simple_loss=0.2034, pruned_loss=0.03702, over 4974.00 frames.], tot_loss[loss=0.139, simple_loss=0.2115, pruned_loss=0.03326, over 972400.21 frames.], batch size: 31, lr: 2.00e-04 +2022-05-07 01:02:34,092 INFO [train.py:715] (6/8) Epoch 11, batch 3150, loss[loss=0.161, simple_loss=0.2278, pruned_loss=0.04711, over 4965.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2108, pruned_loss=0.03344, over 972857.91 frames.], batch size: 35, lr: 2.00e-04 +2022-05-07 01:03:13,128 INFO [train.py:715] (6/8) Epoch 11, batch 3200, loss[loss=0.1789, simple_loss=0.2485, pruned_loss=0.05463, over 4894.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2112, pruned_loss=0.03351, over 972696.62 frames.], batch size: 17, lr: 2.00e-04 +2022-05-07 01:03:52,802 INFO [train.py:715] (6/8) Epoch 11, batch 3250, loss[loss=0.1356, simple_loss=0.2115, pruned_loss=0.02985, over 4788.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2119, pruned_loss=0.03375, over 973092.85 frames.], batch size: 14, lr: 2.00e-04 +2022-05-07 01:04:31,532 INFO [train.py:715] (6/8) Epoch 11, batch 3300, loss[loss=0.1543, simple_loss=0.2388, pruned_loss=0.03491, over 4741.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2119, pruned_loss=0.03337, over 971994.49 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 01:05:10,791 INFO [train.py:715] (6/8) Epoch 11, batch 3350, loss[loss=0.1241, simple_loss=0.1887, pruned_loss=0.02977, over 4806.00 frames.], tot_loss[loss=0.1393, simple_loss=0.212, pruned_loss=0.03332, over 
971643.51 frames.], batch size: 12, lr: 2.00e-04 +2022-05-07 01:05:50,446 INFO [train.py:715] (6/8) Epoch 11, batch 3400, loss[loss=0.1111, simple_loss=0.1836, pruned_loss=0.01927, over 4836.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2115, pruned_loss=0.03303, over 972180.06 frames.], batch size: 13, lr: 2.00e-04 +2022-05-07 01:06:29,437 INFO [train.py:715] (6/8) Epoch 11, batch 3450, loss[loss=0.1178, simple_loss=0.1958, pruned_loss=0.01985, over 4868.00 frames.], tot_loss[loss=0.1384, simple_loss=0.211, pruned_loss=0.03285, over 971908.77 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 01:07:08,298 INFO [train.py:715] (6/8) Epoch 11, batch 3500, loss[loss=0.1238, simple_loss=0.201, pruned_loss=0.0233, over 4807.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.0328, over 971682.70 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:07:47,575 INFO [train.py:715] (6/8) Epoch 11, batch 3550, loss[loss=0.1161, simple_loss=0.1829, pruned_loss=0.02468, over 4801.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03274, over 971226.27 frames.], batch size: 12, lr: 1.99e-04 +2022-05-07 01:08:27,196 INFO [train.py:715] (6/8) Epoch 11, batch 3600, loss[loss=0.125, simple_loss=0.2006, pruned_loss=0.02467, over 4934.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.03241, over 971961.08 frames.], batch size: 23, lr: 1.99e-04 +2022-05-07 01:09:05,516 INFO [train.py:715] (6/8) Epoch 11, batch 3650, loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03042, over 4955.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03205, over 972755.74 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:09:45,167 INFO [train.py:715] (6/8) Epoch 11, batch 3700, loss[loss=0.1404, simple_loss=0.2096, pruned_loss=0.03559, over 4898.00 frames.], tot_loss[loss=0.137, simple_loss=0.2102, pruned_loss=0.03194, over 972631.66 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:10:24,606 INFO [train.py:715] (6/8) Epoch 11, batch 3750, loss[loss=0.1145, simple_loss=0.1967, pruned_loss=0.01615, over 4817.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2093, pruned_loss=0.03148, over 971956.76 frames.], batch size: 13, lr: 1.99e-04 +2022-05-07 01:11:03,050 INFO [train.py:715] (6/8) Epoch 11, batch 3800, loss[loss=0.1245, simple_loss=0.2059, pruned_loss=0.02148, over 4802.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2098, pruned_loss=0.03184, over 971658.07 frames.], batch size: 24, lr: 1.99e-04 +2022-05-07 01:11:42,114 INFO [train.py:715] (6/8) Epoch 11, batch 3850, loss[loss=0.151, simple_loss=0.2102, pruned_loss=0.04592, over 4804.00 frames.], tot_loss[loss=0.138, simple_loss=0.2105, pruned_loss=0.03271, over 971542.89 frames.], batch size: 12, lr: 1.99e-04 +2022-05-07 01:12:21,424 INFO [train.py:715] (6/8) Epoch 11, batch 3900, loss[loss=0.1248, simple_loss=0.2004, pruned_loss=0.02457, over 4906.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2111, pruned_loss=0.03302, over 972020.84 frames.], batch size: 17, lr: 1.99e-04 +2022-05-07 01:13:01,150 INFO [train.py:715] (6/8) Epoch 11, batch 3950, loss[loss=0.1291, simple_loss=0.2072, pruned_loss=0.02552, over 4858.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2114, pruned_loss=0.03287, over 971630.66 frames.], batch size: 20, lr: 1.99e-04 +2022-05-07 01:13:39,997 INFO [train.py:715] (6/8) Epoch 11, batch 4000, loss[loss=0.1439, simple_loss=0.2339, pruned_loss=0.02693, over 4798.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03327, over 971212.92 frames.], batch 
size: 21, lr: 1.99e-04 +2022-05-07 01:14:19,839 INFO [train.py:715] (6/8) Epoch 11, batch 4050, loss[loss=0.1428, simple_loss=0.2123, pruned_loss=0.03662, over 4900.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.03362, over 970456.40 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:14:59,480 INFO [train.py:715] (6/8) Epoch 11, batch 4100, loss[loss=0.1273, simple_loss=0.2057, pruned_loss=0.02441, over 4756.00 frames.], tot_loss[loss=0.1397, simple_loss=0.213, pruned_loss=0.03324, over 969921.39 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:15:38,032 INFO [train.py:715] (6/8) Epoch 11, batch 4150, loss[loss=0.1274, simple_loss=0.196, pruned_loss=0.02935, over 4642.00 frames.], tot_loss[loss=0.139, simple_loss=0.2122, pruned_loss=0.03289, over 970040.32 frames.], batch size: 13, lr: 1.99e-04 +2022-05-07 01:16:16,418 INFO [train.py:715] (6/8) Epoch 11, batch 4200, loss[loss=0.1307, simple_loss=0.2069, pruned_loss=0.02724, over 4966.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03254, over 970426.84 frames.], batch size: 28, lr: 1.99e-04 +2022-05-07 01:16:56,681 INFO [train.py:715] (6/8) Epoch 11, batch 4250, loss[loss=0.1508, simple_loss=0.234, pruned_loss=0.03381, over 4760.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03279, over 970000.12 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:17:36,676 INFO [train.py:715] (6/8) Epoch 11, batch 4300, loss[loss=0.1335, simple_loss=0.1986, pruned_loss=0.03422, over 4872.00 frames.], tot_loss[loss=0.139, simple_loss=0.2125, pruned_loss=0.03274, over 971046.49 frames.], batch size: 32, lr: 1.99e-04 +2022-05-07 01:18:15,843 INFO [train.py:715] (6/8) Epoch 11, batch 4350, loss[loss=0.1418, simple_loss=0.2225, pruned_loss=0.03055, over 4800.00 frames.], tot_loss[loss=0.1388, simple_loss=0.212, pruned_loss=0.03279, over 971164.78 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:18:56,202 INFO [train.py:715] (6/8) Epoch 11, batch 4400, loss[loss=0.1183, simple_loss=0.1822, pruned_loss=0.02722, over 4762.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03238, over 971378.49 frames.], batch size: 12, lr: 1.99e-04 +2022-05-07 01:19:36,316 INFO [train.py:715] (6/8) Epoch 11, batch 4450, loss[loss=0.1608, simple_loss=0.2339, pruned_loss=0.0438, over 4731.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03307, over 971550.17 frames.], batch size: 16, lr: 1.99e-04 +2022-05-07 01:20:15,942 INFO [train.py:715] (6/8) Epoch 11, batch 4500, loss[loss=0.1317, simple_loss=0.2044, pruned_loss=0.02952, over 4770.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2112, pruned_loss=0.03297, over 970506.92 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:20:55,961 INFO [train.py:715] (6/8) Epoch 11, batch 4550, loss[loss=0.1197, simple_loss=0.1963, pruned_loss=0.0215, over 4867.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2116, pruned_loss=0.03311, over 970508.98 frames.], batch size: 22, lr: 1.99e-04 +2022-05-07 01:21:36,013 INFO [train.py:715] (6/8) Epoch 11, batch 4600, loss[loss=0.1212, simple_loss=0.1921, pruned_loss=0.02517, over 4765.00 frames.], tot_loss[loss=0.139, simple_loss=0.2119, pruned_loss=0.03308, over 970712.84 frames.], batch size: 16, lr: 1.99e-04 +2022-05-07 01:22:15,481 INFO [train.py:715] (6/8) Epoch 11, batch 4650, loss[loss=0.1728, simple_loss=0.2458, pruned_loss=0.04992, over 4747.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03313, over 971163.38 frames.], batch size: 16, lr: 1.99e-04 
+2022-05-07 01:22:55,198 INFO [train.py:715] (6/8) Epoch 11, batch 4700, loss[loss=0.1468, simple_loss=0.2303, pruned_loss=0.03161, over 4699.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2125, pruned_loss=0.03293, over 971750.22 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:23:35,368 INFO [train.py:715] (6/8) Epoch 11, batch 4750, loss[loss=0.1402, simple_loss=0.2115, pruned_loss=0.0345, over 4905.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03323, over 971431.20 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:24:15,539 INFO [train.py:715] (6/8) Epoch 11, batch 4800, loss[loss=0.1157, simple_loss=0.1979, pruned_loss=0.01676, over 4912.00 frames.], tot_loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03291, over 971757.80 frames.], batch size: 29, lr: 1.99e-04 +2022-05-07 01:24:55,151 INFO [train.py:715] (6/8) Epoch 11, batch 4850, loss[loss=0.1202, simple_loss=0.1974, pruned_loss=0.02153, over 4783.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2125, pruned_loss=0.03306, over 971543.74 frames.], batch size: 17, lr: 1.99e-04 +2022-05-07 01:25:34,940 INFO [train.py:715] (6/8) Epoch 11, batch 4900, loss[loss=0.1198, simple_loss=0.1966, pruned_loss=0.02147, over 4984.00 frames.], tot_loss[loss=0.139, simple_loss=0.2122, pruned_loss=0.03287, over 971655.77 frames.], batch size: 25, lr: 1.99e-04 +2022-05-07 01:26:14,640 INFO [train.py:715] (6/8) Epoch 11, batch 4950, loss[loss=0.1215, simple_loss=0.1949, pruned_loss=0.024, over 4912.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.0323, over 971804.36 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:26:53,440 INFO [train.py:715] (6/8) Epoch 11, batch 5000, loss[loss=0.1333, simple_loss=0.2176, pruned_loss=0.0245, over 4823.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2107, pruned_loss=0.03221, over 970858.76 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:27:31,883 INFO [train.py:715] (6/8) Epoch 11, batch 5050, loss[loss=0.1746, simple_loss=0.2382, pruned_loss=0.05551, over 4955.00 frames.], tot_loss[loss=0.138, simple_loss=0.2107, pruned_loss=0.03266, over 971172.28 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:28:11,145 INFO [train.py:715] (6/8) Epoch 11, batch 5100, loss[loss=0.1417, simple_loss=0.2268, pruned_loss=0.02828, over 4811.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2109, pruned_loss=0.03288, over 971188.91 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:28:50,275 INFO [train.py:715] (6/8) Epoch 11, batch 5150, loss[loss=0.1223, simple_loss=0.1943, pruned_loss=0.02518, over 4737.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2119, pruned_loss=0.03315, over 971585.35 frames.], batch size: 12, lr: 1.99e-04 +2022-05-07 01:29:29,205 INFO [train.py:715] (6/8) Epoch 11, batch 5200, loss[loss=0.127, simple_loss=0.2033, pruned_loss=0.02534, over 4644.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.03237, over 971038.41 frames.], batch size: 13, lr: 1.99e-04 +2022-05-07 01:30:08,611 INFO [train.py:715] (6/8) Epoch 11, batch 5250, loss[loss=0.143, simple_loss=0.2185, pruned_loss=0.03372, over 4906.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2106, pruned_loss=0.03248, over 971812.37 frames.], batch size: 17, lr: 1.99e-04 +2022-05-07 01:30:48,293 INFO [train.py:715] (6/8) Epoch 11, batch 5300, loss[loss=0.1246, simple_loss=0.2047, pruned_loss=0.02222, over 4700.00 frames.], tot_loss[loss=0.1371, simple_loss=0.21, pruned_loss=0.03207, over 971269.67 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:31:27,446 INFO 
[train.py:715] (6/8) Epoch 11, batch 5350, loss[loss=0.152, simple_loss=0.2193, pruned_loss=0.04232, over 4786.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2101, pruned_loss=0.03203, over 971606.97 frames.], batch size: 17, lr: 1.99e-04 +2022-05-07 01:32:06,514 INFO [train.py:715] (6/8) Epoch 11, batch 5400, loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.0308, over 4924.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2099, pruned_loss=0.03172, over 972156.79 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:32:45,901 INFO [train.py:715] (6/8) Epoch 11, batch 5450, loss[loss=0.1339, simple_loss=0.2055, pruned_loss=0.03119, over 4762.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03236, over 972143.62 frames.], batch size: 12, lr: 1.99e-04 +2022-05-07 01:33:25,401 INFO [train.py:715] (6/8) Epoch 11, batch 5500, loss[loss=0.158, simple_loss=0.2284, pruned_loss=0.04383, over 4864.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.03243, over 972149.93 frames.], batch size: 20, lr: 1.99e-04 +2022-05-07 01:34:04,255 INFO [train.py:715] (6/8) Epoch 11, batch 5550, loss[loss=0.1312, simple_loss=0.207, pruned_loss=0.02772, over 4939.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2109, pruned_loss=0.03237, over 972017.99 frames.], batch size: 23, lr: 1.99e-04 +2022-05-07 01:34:42,709 INFO [train.py:715] (6/8) Epoch 11, batch 5600, loss[loss=0.1615, simple_loss=0.2321, pruned_loss=0.04544, over 4856.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2112, pruned_loss=0.03248, over 972713.97 frames.], batch size: 20, lr: 1.99e-04 +2022-05-07 01:35:22,175 INFO [train.py:715] (6/8) Epoch 11, batch 5650, loss[loss=0.1521, simple_loss=0.2183, pruned_loss=0.04299, over 4940.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.03228, over 973281.69 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:36:01,617 INFO [train.py:715] (6/8) Epoch 11, batch 5700, loss[loss=0.1579, simple_loss=0.2373, pruned_loss=0.03926, over 4962.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03245, over 973663.90 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:36:40,402 INFO [train.py:715] (6/8) Epoch 11, batch 5750, loss[loss=0.1634, simple_loss=0.2277, pruned_loss=0.0495, over 4985.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2125, pruned_loss=0.03301, over 974048.77 frames.], batch size: 31, lr: 1.99e-04 +2022-05-07 01:37:19,379 INFO [train.py:715] (6/8) Epoch 11, batch 5800, loss[loss=0.1551, simple_loss=0.2325, pruned_loss=0.03888, over 4770.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2129, pruned_loss=0.03314, over 973009.83 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:37:58,488 INFO [train.py:715] (6/8) Epoch 11, batch 5850, loss[loss=0.174, simple_loss=0.2332, pruned_loss=0.05743, over 4970.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2134, pruned_loss=0.03334, over 972777.90 frames.], batch size: 35, lr: 1.99e-04 +2022-05-07 01:38:37,496 INFO [train.py:715] (6/8) Epoch 11, batch 5900, loss[loss=0.1339, simple_loss=0.2009, pruned_loss=0.03348, over 4903.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2126, pruned_loss=0.03289, over 972156.77 frames.], batch size: 39, lr: 1.99e-04 +2022-05-07 01:39:16,657 INFO [train.py:715] (6/8) Epoch 11, batch 5950, loss[loss=0.1683, simple_loss=0.2388, pruned_loss=0.04895, over 4686.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2134, pruned_loss=0.03346, over 971712.44 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:39:56,448 INFO [train.py:715] (6/8) 
Epoch 11, batch 6000, loss[loss=0.1487, simple_loss=0.2319, pruned_loss=0.03278, over 4700.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2126, pruned_loss=0.03304, over 972300.55 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:39:56,449 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 01:40:06,015 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.1059, simple_loss=0.1901, pruned_loss=0.01082, over 914524.00 frames. +2022-05-07 01:40:45,577 INFO [train.py:715] (6/8) Epoch 11, batch 6050, loss[loss=0.152, simple_loss=0.2201, pruned_loss=0.04195, over 4945.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2126, pruned_loss=0.03303, over 972851.61 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:41:24,991 INFO [train.py:715] (6/8) Epoch 11, batch 6100, loss[loss=0.1234, simple_loss=0.2044, pruned_loss=0.02119, over 4818.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2124, pruned_loss=0.03354, over 972537.62 frames.], batch size: 27, lr: 1.99e-04 +2022-05-07 01:42:03,738 INFO [train.py:715] (6/8) Epoch 11, batch 6150, loss[loss=0.1439, simple_loss=0.2128, pruned_loss=0.03755, over 4953.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03313, over 971514.80 frames.], batch size: 24, lr: 1.99e-04 +2022-05-07 01:42:43,200 INFO [train.py:715] (6/8) Epoch 11, batch 6200, loss[loss=0.1332, simple_loss=0.2113, pruned_loss=0.02749, over 4845.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03313, over 971872.81 frames.], batch size: 30, lr: 1.99e-04 +2022-05-07 01:43:22,227 INFO [train.py:715] (6/8) Epoch 11, batch 6250, loss[loss=0.1272, simple_loss=0.2105, pruned_loss=0.02195, over 4915.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2116, pruned_loss=0.03301, over 971823.35 frames.], batch size: 17, lr: 1.99e-04 +2022-05-07 01:44:01,018 INFO [train.py:715] (6/8) Epoch 11, batch 6300, loss[loss=0.1542, simple_loss=0.2277, pruned_loss=0.0404, over 4854.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03316, over 971977.80 frames.], batch size: 13, lr: 1.99e-04 +2022-05-07 01:44:39,711 INFO [train.py:715] (6/8) Epoch 11, batch 6350, loss[loss=0.1851, simple_loss=0.2532, pruned_loss=0.05846, over 4705.00 frames.], tot_loss[loss=0.1397, simple_loss=0.213, pruned_loss=0.03318, over 972939.51 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:45:20,273 INFO [train.py:715] (6/8) Epoch 11, batch 6400, loss[loss=0.1415, simple_loss=0.2002, pruned_loss=0.04143, over 4834.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2124, pruned_loss=0.0326, over 972811.40 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:45:59,617 INFO [train.py:715] (6/8) Epoch 11, batch 6450, loss[loss=0.1174, simple_loss=0.1924, pruned_loss=0.0212, over 4924.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2122, pruned_loss=0.03222, over 972173.20 frames.], batch size: 35, lr: 1.99e-04 +2022-05-07 01:46:38,693 INFO [train.py:715] (6/8) Epoch 11, batch 6500, loss[loss=0.1304, simple_loss=0.1974, pruned_loss=0.03169, over 4763.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03206, over 971906.55 frames.], batch size: 16, lr: 1.99e-04 +2022-05-07 01:47:18,038 INFO [train.py:715] (6/8) Epoch 11, batch 6550, loss[loss=0.1194, simple_loss=0.1919, pruned_loss=0.02347, over 4807.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03197, over 972969.82 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:47:58,219 INFO [train.py:715] (6/8) Epoch 11, batch 6600, loss[loss=0.1319, simple_loss=0.2083, 
pruned_loss=0.02777, over 4821.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2119, pruned_loss=0.03199, over 972743.31 frames.], batch size: 25, lr: 1.99e-04 +2022-05-07 01:48:38,346 INFO [train.py:715] (6/8) Epoch 11, batch 6650, loss[loss=0.148, simple_loss=0.2249, pruned_loss=0.03549, over 4772.00 frames.], tot_loss[loss=0.138, simple_loss=0.2119, pruned_loss=0.03209, over 972012.49 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:49:17,553 INFO [train.py:715] (6/8) Epoch 11, batch 6700, loss[loss=0.1336, simple_loss=0.1992, pruned_loss=0.03401, over 4764.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2123, pruned_loss=0.03222, over 972073.43 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:49:57,807 INFO [train.py:715] (6/8) Epoch 11, batch 6750, loss[loss=0.1331, simple_loss=0.2161, pruned_loss=0.02511, over 4892.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2123, pruned_loss=0.03253, over 972097.93 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:50:37,610 INFO [train.py:715] (6/8) Epoch 11, batch 6800, loss[loss=0.1338, simple_loss=0.2046, pruned_loss=0.03147, over 4936.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2128, pruned_loss=0.03279, over 971828.33 frames.], batch size: 23, lr: 1.99e-04 +2022-05-07 01:51:16,478 INFO [train.py:715] (6/8) Epoch 11, batch 6850, loss[loss=0.1348, simple_loss=0.2152, pruned_loss=0.02717, over 4986.00 frames.], tot_loss[loss=0.14, simple_loss=0.2132, pruned_loss=0.03336, over 972300.40 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:51:55,545 INFO [train.py:715] (6/8) Epoch 11, batch 6900, loss[loss=0.1538, simple_loss=0.2297, pruned_loss=0.03894, over 4932.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2129, pruned_loss=0.03284, over 971790.58 frames.], batch size: 29, lr: 1.99e-04 +2022-05-07 01:52:34,236 INFO [train.py:715] (6/8) Epoch 11, batch 6950, loss[loss=0.1474, simple_loss=0.2241, pruned_loss=0.03538, over 4926.00 frames.], tot_loss[loss=0.14, simple_loss=0.2136, pruned_loss=0.0332, over 971056.91 frames.], batch size: 23, lr: 1.99e-04 +2022-05-07 01:53:13,693 INFO [train.py:715] (6/8) Epoch 11, batch 7000, loss[loss=0.1367, simple_loss=0.1962, pruned_loss=0.0386, over 4988.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2127, pruned_loss=0.03293, over 971419.33 frames.], batch size: 31, lr: 1.99e-04 +2022-05-07 01:53:52,256 INFO [train.py:715] (6/8) Epoch 11, batch 7050, loss[loss=0.1492, simple_loss=0.2328, pruned_loss=0.03274, over 4818.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2127, pruned_loss=0.03287, over 971843.70 frames.], batch size: 25, lr: 1.99e-04 +2022-05-07 01:54:31,699 INFO [train.py:715] (6/8) Epoch 11, batch 7100, loss[loss=0.1233, simple_loss=0.1985, pruned_loss=0.02412, over 4966.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03258, over 971832.19 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:55:10,748 INFO [train.py:715] (6/8) Epoch 11, batch 7150, loss[loss=0.145, simple_loss=0.2049, pruned_loss=0.04258, over 4869.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.03274, over 972366.73 frames.], batch size: 30, lr: 1.99e-04 +2022-05-07 01:55:49,510 INFO [train.py:715] (6/8) Epoch 11, batch 7200, loss[loss=0.1643, simple_loss=0.2302, pruned_loss=0.04917, over 4878.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03313, over 972775.15 frames.], batch size: 38, lr: 1.99e-04 +2022-05-07 01:56:28,452 INFO [train.py:715] (6/8) Epoch 11, batch 7250, loss[loss=0.1643, simple_loss=0.2252, pruned_loss=0.05167, over 
4846.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2124, pruned_loss=0.03353, over 972357.51 frames.], batch size: 30, lr: 1.99e-04 +2022-05-07 01:57:07,430 INFO [train.py:715] (6/8) Epoch 11, batch 7300, loss[loss=0.1644, simple_loss=0.2384, pruned_loss=0.04523, over 4906.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2128, pruned_loss=0.03387, over 973197.90 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:57:46,541 INFO [train.py:715] (6/8) Epoch 11, batch 7350, loss[loss=0.1295, simple_loss=0.2051, pruned_loss=0.02699, over 4787.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2141, pruned_loss=0.03403, over 972705.14 frames.], batch size: 24, lr: 1.99e-04 +2022-05-07 01:58:25,305 INFO [train.py:715] (6/8) Epoch 11, batch 7400, loss[loss=0.1128, simple_loss=0.1784, pruned_loss=0.02363, over 4770.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03357, over 971773.86 frames.], batch size: 12, lr: 1.98e-04 +2022-05-07 01:59:04,703 INFO [train.py:715] (6/8) Epoch 11, batch 7450, loss[loss=0.1511, simple_loss=0.2205, pruned_loss=0.04085, over 4876.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03363, over 971367.60 frames.], batch size: 32, lr: 1.98e-04 +2022-05-07 01:59:43,840 INFO [train.py:715] (6/8) Epoch 11, batch 7500, loss[loss=0.1519, simple_loss=0.2213, pruned_loss=0.04122, over 4784.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.03349, over 971153.33 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:00:23,096 INFO [train.py:715] (6/8) Epoch 11, batch 7550, loss[loss=0.1525, simple_loss=0.2158, pruned_loss=0.04459, over 4769.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.03346, over 970333.39 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:01:02,846 INFO [train.py:715] (6/8) Epoch 11, batch 7600, loss[loss=0.1122, simple_loss=0.1902, pruned_loss=0.01705, over 4872.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03254, over 970974.34 frames.], batch size: 20, lr: 1.98e-04 +2022-05-07 02:01:42,517 INFO [train.py:715] (6/8) Epoch 11, batch 7650, loss[loss=0.1752, simple_loss=0.2367, pruned_loss=0.05685, over 4841.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2119, pruned_loss=0.03255, over 970656.52 frames.], batch size: 30, lr: 1.98e-04 +2022-05-07 02:02:22,057 INFO [train.py:715] (6/8) Epoch 11, batch 7700, loss[loss=0.1604, simple_loss=0.2259, pruned_loss=0.0474, over 4895.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.03286, over 970289.14 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:03:01,235 INFO [train.py:715] (6/8) Epoch 11, batch 7750, loss[loss=0.1359, simple_loss=0.2181, pruned_loss=0.02679, over 4863.00 frames.], tot_loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03281, over 970181.35 frames.], batch size: 20, lr: 1.98e-04 +2022-05-07 02:03:40,569 INFO [train.py:715] (6/8) Epoch 11, batch 7800, loss[loss=0.1266, simple_loss=0.2008, pruned_loss=0.02626, over 4946.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2119, pruned_loss=0.03255, over 971008.67 frames.], batch size: 35, lr: 1.98e-04 +2022-05-07 02:04:19,856 INFO [train.py:715] (6/8) Epoch 11, batch 7850, loss[loss=0.1054, simple_loss=0.1697, pruned_loss=0.02057, over 4729.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2122, pruned_loss=0.03246, over 970989.45 frames.], batch size: 12, lr: 1.98e-04 +2022-05-07 02:04:58,996 INFO [train.py:715] (6/8) Epoch 11, batch 7900, loss[loss=0.121, simple_loss=0.1997, pruned_loss=0.0212, over 4826.00 frames.], 
tot_loss[loss=0.139, simple_loss=0.2127, pruned_loss=0.03263, over 971900.16 frames.], batch size: 26, lr: 1.98e-04 +2022-05-07 02:05:37,734 INFO [train.py:715] (6/8) Epoch 11, batch 7950, loss[loss=0.1209, simple_loss=0.1952, pruned_loss=0.0233, over 4974.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2121, pruned_loss=0.03231, over 972369.78 frames.], batch size: 24, lr: 1.98e-04 +2022-05-07 02:06:18,364 INFO [train.py:715] (6/8) Epoch 11, batch 8000, loss[loss=0.1283, simple_loss=0.2054, pruned_loss=0.02563, over 4874.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03313, over 972956.32 frames.], batch size: 20, lr: 1.98e-04 +2022-05-07 02:06:57,624 INFO [train.py:715] (6/8) Epoch 11, batch 8050, loss[loss=0.1321, simple_loss=0.215, pruned_loss=0.02466, over 4797.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03326, over 972950.01 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:07:37,891 INFO [train.py:715] (6/8) Epoch 11, batch 8100, loss[loss=0.1707, simple_loss=0.2363, pruned_loss=0.05255, over 4964.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03334, over 972857.56 frames.], batch size: 35, lr: 1.98e-04 +2022-05-07 02:08:17,873 INFO [train.py:715] (6/8) Epoch 11, batch 8150, loss[loss=0.151, simple_loss=0.2235, pruned_loss=0.03921, over 4833.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2119, pruned_loss=0.03328, over 972731.72 frames.], batch size: 30, lr: 1.98e-04 +2022-05-07 02:08:57,400 INFO [train.py:715] (6/8) Epoch 11, batch 8200, loss[loss=0.1274, simple_loss=0.1984, pruned_loss=0.02824, over 4749.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03313, over 972898.79 frames.], batch size: 16, lr: 1.98e-04 +2022-05-07 02:09:36,724 INFO [train.py:715] (6/8) Epoch 11, batch 8250, loss[loss=0.142, simple_loss=0.2146, pruned_loss=0.03473, over 4864.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03284, over 972665.55 frames.], batch size: 32, lr: 1.98e-04 +2022-05-07 02:10:15,063 INFO [train.py:715] (6/8) Epoch 11, batch 8300, loss[loss=0.1403, simple_loss=0.2171, pruned_loss=0.03175, over 4823.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2118, pruned_loss=0.03278, over 972356.45 frames.], batch size: 25, lr: 1.98e-04 +2022-05-07 02:10:54,959 INFO [train.py:715] (6/8) Epoch 11, batch 8350, loss[loss=0.1458, simple_loss=0.22, pruned_loss=0.0358, over 4888.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03276, over 972925.80 frames.], batch size: 38, lr: 1.98e-04 +2022-05-07 02:11:34,528 INFO [train.py:715] (6/8) Epoch 11, batch 8400, loss[loss=0.1248, simple_loss=0.2041, pruned_loss=0.02274, over 4969.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2119, pruned_loss=0.03255, over 973277.89 frames.], batch size: 24, lr: 1.98e-04 +2022-05-07 02:12:13,505 INFO [train.py:715] (6/8) Epoch 11, batch 8450, loss[loss=0.1417, simple_loss=0.2138, pruned_loss=0.03484, over 4987.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.03233, over 973683.70 frames.], batch size: 31, lr: 1.98e-04 +2022-05-07 02:12:52,196 INFO [train.py:715] (6/8) Epoch 11, batch 8500, loss[loss=0.1367, simple_loss=0.2186, pruned_loss=0.02743, over 4880.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.0329, over 972836.15 frames.], batch size: 20, lr: 1.98e-04 +2022-05-07 02:13:32,009 INFO [train.py:715] (6/8) Epoch 11, batch 8550, loss[loss=0.1239, simple_loss=0.1976, pruned_loss=0.0251, over 4846.00 frames.], tot_loss[loss=0.1392, 
simple_loss=0.2122, pruned_loss=0.03315, over 973010.49 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:14:11,215 INFO [train.py:715] (6/8) Epoch 11, batch 8600, loss[loss=0.1482, simple_loss=0.2331, pruned_loss=0.03163, over 4950.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03352, over 973847.78 frames.], batch size: 24, lr: 1.98e-04 +2022-05-07 02:14:49,544 INFO [train.py:715] (6/8) Epoch 11, batch 8650, loss[loss=0.1356, simple_loss=0.21, pruned_loss=0.0306, over 4768.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2139, pruned_loss=0.03388, over 973722.30 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:15:29,404 INFO [train.py:715] (6/8) Epoch 11, batch 8700, loss[loss=0.1453, simple_loss=0.2105, pruned_loss=0.04008, over 4972.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.03387, over 973537.59 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:16:08,723 INFO [train.py:715] (6/8) Epoch 11, batch 8750, loss[loss=0.1352, simple_loss=0.2027, pruned_loss=0.03383, over 4949.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03391, over 973982.54 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:16:47,706 INFO [train.py:715] (6/8) Epoch 11, batch 8800, loss[loss=0.1222, simple_loss=0.1944, pruned_loss=0.025, over 4896.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.0335, over 974090.99 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:17:26,840 INFO [train.py:715] (6/8) Epoch 11, batch 8850, loss[loss=0.1493, simple_loss=0.2332, pruned_loss=0.03271, over 4929.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03266, over 974393.79 frames.], batch size: 29, lr: 1.98e-04 +2022-05-07 02:18:06,535 INFO [train.py:715] (6/8) Epoch 11, batch 8900, loss[loss=0.1534, simple_loss=0.229, pruned_loss=0.03889, over 4794.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03263, over 973803.62 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:18:46,168 INFO [train.py:715] (6/8) Epoch 11, batch 8950, loss[loss=0.1406, simple_loss=0.2107, pruned_loss=0.0353, over 4764.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.0331, over 973570.04 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:19:25,278 INFO [train.py:715] (6/8) Epoch 11, batch 9000, loss[loss=0.1701, simple_loss=0.2393, pruned_loss=0.0505, over 4910.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03265, over 973813.39 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:19:25,278 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 02:19:34,857 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.1061, simple_loss=0.1903, pruned_loss=0.011, over 914524.00 frames. 
+2022-05-07 02:20:13,753 INFO [train.py:715] (6/8) Epoch 11, batch 9050, loss[loss=0.1408, simple_loss=0.204, pruned_loss=0.03881, over 4980.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03245, over 973873.97 frames.], batch size: 31, lr: 1.98e-04 +2022-05-07 02:20:55,920 INFO [train.py:715] (6/8) Epoch 11, batch 9100, loss[loss=0.1644, simple_loss=0.2425, pruned_loss=0.04313, over 4774.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03252, over 973442.00 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:21:35,543 INFO [train.py:715] (6/8) Epoch 11, batch 9150, loss[loss=0.1365, simple_loss=0.216, pruned_loss=0.02848, over 4943.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.03263, over 973429.83 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:22:15,056 INFO [train.py:715] (6/8) Epoch 11, batch 9200, loss[loss=0.1202, simple_loss=0.2034, pruned_loss=0.01845, over 4913.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2129, pruned_loss=0.03321, over 972506.31 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:22:54,637 INFO [train.py:715] (6/8) Epoch 11, batch 9250, loss[loss=0.1571, simple_loss=0.2396, pruned_loss=0.03731, over 4697.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2135, pruned_loss=0.03341, over 972691.07 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:23:33,875 INFO [train.py:715] (6/8) Epoch 11, batch 9300, loss[loss=0.1354, simple_loss=0.2183, pruned_loss=0.0263, over 4868.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2131, pruned_loss=0.03298, over 972538.98 frames.], batch size: 20, lr: 1.98e-04 +2022-05-07 02:24:12,711 INFO [train.py:715] (6/8) Epoch 11, batch 9350, loss[loss=0.1255, simple_loss=0.206, pruned_loss=0.0225, over 4827.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2137, pruned_loss=0.03309, over 972270.41 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:24:51,485 INFO [train.py:715] (6/8) Epoch 11, batch 9400, loss[loss=0.1376, simple_loss=0.2093, pruned_loss=0.03293, over 4765.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2135, pruned_loss=0.03282, over 972510.18 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:25:31,002 INFO [train.py:715] (6/8) Epoch 11, batch 9450, loss[loss=0.1339, simple_loss=0.2124, pruned_loss=0.02768, over 4759.00 frames.], tot_loss[loss=0.1403, simple_loss=0.214, pruned_loss=0.03327, over 972405.23 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:26:10,040 INFO [train.py:715] (6/8) Epoch 11, batch 9500, loss[loss=0.1443, simple_loss=0.2052, pruned_loss=0.04165, over 4978.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2134, pruned_loss=0.0331, over 972171.24 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:26:48,576 INFO [train.py:715] (6/8) Epoch 11, batch 9550, loss[loss=0.1139, simple_loss=0.1937, pruned_loss=0.01704, over 4877.00 frames.], tot_loss[loss=0.1396, simple_loss=0.213, pruned_loss=0.03307, over 972158.49 frames.], batch size: 22, lr: 1.98e-04 +2022-05-07 02:27:28,240 INFO [train.py:715] (6/8) Epoch 11, batch 9600, loss[loss=0.1475, simple_loss=0.2191, pruned_loss=0.03793, over 4909.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.03285, over 971896.02 frames.], batch size: 23, lr: 1.98e-04 +2022-05-07 02:28:07,059 INFO [train.py:715] (6/8) Epoch 11, batch 9650, loss[loss=0.1315, simple_loss=0.2042, pruned_loss=0.0294, over 4973.00 frames.], tot_loss[loss=0.139, simple_loss=0.2119, pruned_loss=0.03303, over 971730.44 frames.], batch size: 27, lr: 1.98e-04 +2022-05-07 02:28:45,586 
INFO [train.py:715] (6/8) Epoch 11, batch 9700, loss[loss=0.1126, simple_loss=0.1911, pruned_loss=0.0171, over 4884.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03331, over 972676.86 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:29:24,607 INFO [train.py:715] (6/8) Epoch 11, batch 9750, loss[loss=0.1414, simple_loss=0.2221, pruned_loss=0.03035, over 4743.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2129, pruned_loss=0.03338, over 973098.57 frames.], batch size: 16, lr: 1.98e-04 +2022-05-07 02:30:03,697 INFO [train.py:715] (6/8) Epoch 11, batch 9800, loss[loss=0.1196, simple_loss=0.1904, pruned_loss=0.02434, over 4822.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03326, over 973394.23 frames.], batch size: 26, lr: 1.98e-04 +2022-05-07 02:30:43,345 INFO [train.py:715] (6/8) Epoch 11, batch 9850, loss[loss=0.1355, simple_loss=0.2196, pruned_loss=0.02569, over 4951.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2126, pruned_loss=0.03312, over 973154.67 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:31:22,303 INFO [train.py:715] (6/8) Epoch 11, batch 9900, loss[loss=0.1547, simple_loss=0.2267, pruned_loss=0.04131, over 4910.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2127, pruned_loss=0.03286, over 973737.72 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:32:02,542 INFO [train.py:715] (6/8) Epoch 11, batch 9950, loss[loss=0.1362, simple_loss=0.2133, pruned_loss=0.02955, over 4903.00 frames.], tot_loss[loss=0.1384, simple_loss=0.212, pruned_loss=0.03235, over 973949.72 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:32:41,873 INFO [train.py:715] (6/8) Epoch 11, batch 10000, loss[loss=0.139, simple_loss=0.201, pruned_loss=0.03852, over 4689.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03216, over 972519.10 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:33:21,580 INFO [train.py:715] (6/8) Epoch 11, batch 10050, loss[loss=0.1208, simple_loss=0.1956, pruned_loss=0.02297, over 4954.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2122, pruned_loss=0.03229, over 972670.18 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:33:59,723 INFO [train.py:715] (6/8) Epoch 11, batch 10100, loss[loss=0.1204, simple_loss=0.2005, pruned_loss=0.02011, over 4767.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2136, pruned_loss=0.0329, over 972613.85 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:34:38,759 INFO [train.py:715] (6/8) Epoch 11, batch 10150, loss[loss=0.1826, simple_loss=0.2437, pruned_loss=0.06073, over 4868.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2139, pruned_loss=0.03336, over 972156.20 frames.], batch size: 38, lr: 1.98e-04 +2022-05-07 02:35:17,189 INFO [train.py:715] (6/8) Epoch 11, batch 10200, loss[loss=0.1412, simple_loss=0.2211, pruned_loss=0.03065, over 4892.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2138, pruned_loss=0.03357, over 972096.49 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:35:55,360 INFO [train.py:715] (6/8) Epoch 11, batch 10250, loss[loss=0.1405, simple_loss=0.2082, pruned_loss=0.03637, over 4752.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2125, pruned_loss=0.03305, over 972452.73 frames.], batch size: 16, lr: 1.98e-04 +2022-05-07 02:36:34,758 INFO [train.py:715] (6/8) Epoch 11, batch 10300, loss[loss=0.1416, simple_loss=0.2025, pruned_loss=0.04038, over 4957.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2134, pruned_loss=0.03387, over 971253.48 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:37:13,486 INFO 
[train.py:715] (6/8) Epoch 11, batch 10350, loss[loss=0.1345, simple_loss=0.2186, pruned_loss=0.02526, over 4980.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2137, pruned_loss=0.03371, over 971843.92 frames.], batch size: 24, lr: 1.98e-04 +2022-05-07 02:37:52,312 INFO [train.py:715] (6/8) Epoch 11, batch 10400, loss[loss=0.1392, simple_loss=0.2125, pruned_loss=0.03294, over 4788.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2129, pruned_loss=0.03318, over 971208.20 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:38:30,789 INFO [train.py:715] (6/8) Epoch 11, batch 10450, loss[loss=0.1341, simple_loss=0.2014, pruned_loss=0.03334, over 4808.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2126, pruned_loss=0.03311, over 971595.45 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:39:09,431 INFO [train.py:715] (6/8) Epoch 11, batch 10500, loss[loss=0.1363, simple_loss=0.2087, pruned_loss=0.03193, over 4993.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2117, pruned_loss=0.03257, over 970968.87 frames.], batch size: 20, lr: 1.98e-04 +2022-05-07 02:39:48,490 INFO [train.py:715] (6/8) Epoch 11, batch 10550, loss[loss=0.1169, simple_loss=0.191, pruned_loss=0.02142, over 4922.00 frames.], tot_loss[loss=0.139, simple_loss=0.2124, pruned_loss=0.03281, over 971644.49 frames.], batch size: 29, lr: 1.98e-04 +2022-05-07 02:40:27,837 INFO [train.py:715] (6/8) Epoch 11, batch 10600, loss[loss=0.1592, simple_loss=0.2214, pruned_loss=0.04854, over 4642.00 frames.], tot_loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03288, over 971963.98 frames.], batch size: 13, lr: 1.98e-04 +2022-05-07 02:41:06,627 INFO [train.py:715] (6/8) Epoch 11, batch 10650, loss[loss=0.1471, simple_loss=0.2272, pruned_loss=0.03352, over 4948.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2127, pruned_loss=0.03296, over 972548.69 frames.], batch size: 29, lr: 1.98e-04 +2022-05-07 02:41:45,852 INFO [train.py:715] (6/8) Epoch 11, batch 10700, loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02941, over 4747.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03274, over 972035.10 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:42:25,055 INFO [train.py:715] (6/8) Epoch 11, batch 10750, loss[loss=0.1646, simple_loss=0.2362, pruned_loss=0.04649, over 4751.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2124, pruned_loss=0.03293, over 971560.55 frames.], batch size: 16, lr: 1.98e-04 +2022-05-07 02:43:03,973 INFO [train.py:715] (6/8) Epoch 11, batch 10800, loss[loss=0.1242, simple_loss=0.2097, pruned_loss=0.0194, over 4807.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2133, pruned_loss=0.03303, over 971309.72 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:43:43,677 INFO [train.py:715] (6/8) Epoch 11, batch 10850, loss[loss=0.1244, simple_loss=0.1931, pruned_loss=0.02787, over 4702.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2128, pruned_loss=0.03294, over 971920.77 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:44:23,472 INFO [train.py:715] (6/8) Epoch 11, batch 10900, loss[loss=0.1163, simple_loss=0.1921, pruned_loss=0.0202, over 4958.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.03249, over 971664.33 frames.], batch size: 35, lr: 1.98e-04 +2022-05-07 02:45:02,834 INFO [train.py:715] (6/8) Epoch 11, batch 10950, loss[loss=0.1331, simple_loss=0.2124, pruned_loss=0.02687, over 4985.00 frames.], tot_loss[loss=0.138, simple_loss=0.2112, pruned_loss=0.03236, over 971726.89 frames.], batch size: 25, lr: 1.98e-04 +2022-05-07 02:45:42,049 INFO 
[train.py:715] (6/8) Epoch 11, batch 11000, loss[loss=0.1297, simple_loss=0.2045, pruned_loss=0.02749, over 4931.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03214, over 972590.34 frames.], batch size: 23, lr: 1.98e-04 +2022-05-07 02:46:21,454 INFO [train.py:715] (6/8) Epoch 11, batch 11050, loss[loss=0.1655, simple_loss=0.24, pruned_loss=0.04548, over 4917.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2121, pruned_loss=0.03254, over 971963.49 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:47:00,459 INFO [train.py:715] (6/8) Epoch 11, batch 11100, loss[loss=0.1474, simple_loss=0.208, pruned_loss=0.04342, over 4917.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03305, over 971706.71 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:47:39,069 INFO [train.py:715] (6/8) Epoch 11, batch 11150, loss[loss=0.1319, simple_loss=0.2038, pruned_loss=0.02995, over 4763.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2125, pruned_loss=0.03299, over 971417.95 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:48:18,474 INFO [train.py:715] (6/8) Epoch 11, batch 11200, loss[loss=0.1538, simple_loss=0.2315, pruned_loss=0.03807, over 4800.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2128, pruned_loss=0.03294, over 971253.72 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:48:57,590 INFO [train.py:715] (6/8) Epoch 11, batch 11250, loss[loss=0.1327, simple_loss=0.2144, pruned_loss=0.02548, over 4979.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2125, pruned_loss=0.03259, over 971306.79 frames.], batch size: 39, lr: 1.98e-04 +2022-05-07 02:49:35,931 INFO [train.py:715] (6/8) Epoch 11, batch 11300, loss[loss=0.1356, simple_loss=0.2161, pruned_loss=0.02758, over 4720.00 frames.], tot_loss[loss=0.1382, simple_loss=0.212, pruned_loss=0.03226, over 970591.97 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:50:14,826 INFO [train.py:715] (6/8) Epoch 11, batch 11350, loss[loss=0.1477, simple_loss=0.2181, pruned_loss=0.0387, over 4780.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03173, over 970787.04 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 02:50:54,374 INFO [train.py:715] (6/8) Epoch 11, batch 11400, loss[loss=0.1371, simple_loss=0.2099, pruned_loss=0.03217, over 4848.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03146, over 970659.98 frames.], batch size: 30, lr: 1.97e-04 +2022-05-07 02:51:32,953 INFO [train.py:715] (6/8) Epoch 11, batch 11450, loss[loss=0.1539, simple_loss=0.225, pruned_loss=0.04141, over 4964.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2111, pruned_loss=0.03217, over 971578.17 frames.], batch size: 35, lr: 1.97e-04 +2022-05-07 02:52:11,283 INFO [train.py:715] (6/8) Epoch 11, batch 11500, loss[loss=0.1306, simple_loss=0.1959, pruned_loss=0.0327, over 4988.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03202, over 971956.72 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 02:52:50,114 INFO [train.py:715] (6/8) Epoch 11, batch 11550, loss[loss=0.1225, simple_loss=0.2061, pruned_loss=0.01946, over 4930.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.0319, over 970810.00 frames.], batch size: 23, lr: 1.97e-04 +2022-05-07 02:53:29,305 INFO [train.py:715] (6/8) Epoch 11, batch 11600, loss[loss=0.1377, simple_loss=0.2023, pruned_loss=0.03658, over 4946.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03212, over 970857.89 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 02:54:08,235 INFO [train.py:715] 
(6/8) Epoch 11, batch 11650, loss[loss=0.1828, simple_loss=0.2429, pruned_loss=0.06135, over 4788.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03225, over 970313.98 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 02:54:46,495 INFO [train.py:715] (6/8) Epoch 11, batch 11700, loss[loss=0.1531, simple_loss=0.2225, pruned_loss=0.04184, over 4763.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.032, over 970530.95 frames.], batch size: 19, lr: 1.97e-04 +2022-05-07 02:55:25,412 INFO [train.py:715] (6/8) Epoch 11, batch 11750, loss[loss=0.1354, simple_loss=0.1991, pruned_loss=0.03591, over 4734.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03235, over 971066.13 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 02:56:04,628 INFO [train.py:715] (6/8) Epoch 11, batch 11800, loss[loss=0.1776, simple_loss=0.2357, pruned_loss=0.05972, over 4879.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03222, over 971812.93 frames.], batch size: 32, lr: 1.97e-04 +2022-05-07 02:56:43,715 INFO [train.py:715] (6/8) Epoch 11, batch 11850, loss[loss=0.1314, simple_loss=0.202, pruned_loss=0.0304, over 4780.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03178, over 972507.98 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 02:57:23,415 INFO [train.py:715] (6/8) Epoch 11, batch 11900, loss[loss=0.1422, simple_loss=0.2113, pruned_loss=0.03655, over 4950.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2107, pruned_loss=0.03151, over 972868.30 frames.], batch size: 35, lr: 1.97e-04 +2022-05-07 02:58:03,754 INFO [train.py:715] (6/8) Epoch 11, batch 11950, loss[loss=0.1445, simple_loss=0.2153, pruned_loss=0.03684, over 4943.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.03157, over 973077.89 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 02:58:43,548 INFO [train.py:715] (6/8) Epoch 11, batch 12000, loss[loss=0.1719, simple_loss=0.2399, pruned_loss=0.05193, over 4974.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2114, pruned_loss=0.032, over 973434.91 frames.], batch size: 35, lr: 1.97e-04 +2022-05-07 02:58:43,549 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 02:58:53,275 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.1061, simple_loss=0.1902, pruned_loss=0.01096, over 914524.00 frames. 
+2022-05-07 02:59:33,215 INFO [train.py:715] (6/8) Epoch 11, batch 12050, loss[loss=0.1409, simple_loss=0.2197, pruned_loss=0.03108, over 4818.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2127, pruned_loss=0.03248, over 974240.40 frames.], batch size: 25, lr: 1.97e-04 +2022-05-07 03:00:12,650 INFO [train.py:715] (6/8) Epoch 11, batch 12100, loss[loss=0.1363, simple_loss=0.211, pruned_loss=0.03075, over 4937.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2131, pruned_loss=0.03249, over 973431.17 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:00:51,875 INFO [train.py:715] (6/8) Epoch 11, batch 12150, loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03064, over 4942.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2113, pruned_loss=0.03178, over 972911.19 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:01:31,405 INFO [train.py:715] (6/8) Epoch 11, batch 12200, loss[loss=0.1612, simple_loss=0.2186, pruned_loss=0.05192, over 4811.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2119, pruned_loss=0.03233, over 972995.99 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:02:09,904 INFO [train.py:715] (6/8) Epoch 11, batch 12250, loss[loss=0.1352, simple_loss=0.2095, pruned_loss=0.03038, over 4911.00 frames.], tot_loss[loss=0.1375, simple_loss=0.211, pruned_loss=0.03205, over 972676.01 frames.], batch size: 17, lr: 1.97e-04 +2022-05-07 03:02:49,516 INFO [train.py:715] (6/8) Epoch 11, batch 12300, loss[loss=0.1207, simple_loss=0.2024, pruned_loss=0.01948, over 4951.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.03234, over 971967.53 frames.], batch size: 29, lr: 1.97e-04 +2022-05-07 03:03:29,339 INFO [train.py:715] (6/8) Epoch 11, batch 12350, loss[loss=0.134, simple_loss=0.2153, pruned_loss=0.02636, over 4694.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2113, pruned_loss=0.03211, over 972377.71 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 03:04:08,697 INFO [train.py:715] (6/8) Epoch 11, batch 12400, loss[loss=0.1539, simple_loss=0.2172, pruned_loss=0.04534, over 4861.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03162, over 972175.99 frames.], batch size: 30, lr: 1.97e-04 +2022-05-07 03:04:46,933 INFO [train.py:715] (6/8) Epoch 11, batch 12450, loss[loss=0.1202, simple_loss=0.1892, pruned_loss=0.02565, over 4886.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.0318, over 971835.23 frames.], batch size: 19, lr: 1.97e-04 +2022-05-07 03:05:26,169 INFO [train.py:715] (6/8) Epoch 11, batch 12500, loss[loss=0.149, simple_loss=0.2228, pruned_loss=0.03765, over 4989.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03162, over 971918.88 frames.], batch size: 26, lr: 1.97e-04 +2022-05-07 03:06:05,438 INFO [train.py:715] (6/8) Epoch 11, batch 12550, loss[loss=0.1235, simple_loss=0.2003, pruned_loss=0.02337, over 4921.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03162, over 971703.01 frames.], batch size: 23, lr: 1.97e-04 +2022-05-07 03:06:44,094 INFO [train.py:715] (6/8) Epoch 11, batch 12600, loss[loss=0.1339, simple_loss=0.2123, pruned_loss=0.02773, over 4776.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03143, over 971585.08 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:07:23,082 INFO [train.py:715] (6/8) Epoch 11, batch 12650, loss[loss=0.1398, simple_loss=0.2139, pruned_loss=0.03287, over 4746.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2114, pruned_loss=0.03206, over 970693.45 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 
03:08:02,195 INFO [train.py:715] (6/8) Epoch 11, batch 12700, loss[loss=0.1332, simple_loss=0.2079, pruned_loss=0.02927, over 4796.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03216, over 970977.40 frames.], batch size: 24, lr: 1.97e-04 +2022-05-07 03:08:40,891 INFO [train.py:715] (6/8) Epoch 11, batch 12750, loss[loss=0.167, simple_loss=0.2367, pruned_loss=0.04862, over 4936.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03218, over 972625.51 frames.], batch size: 23, lr: 1.97e-04 +2022-05-07 03:09:19,303 INFO [train.py:715] (6/8) Epoch 11, batch 12800, loss[loss=0.1558, simple_loss=0.229, pruned_loss=0.04133, over 4791.00 frames.], tot_loss[loss=0.138, simple_loss=0.2115, pruned_loss=0.03223, over 972546.16 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:09:58,876 INFO [train.py:715] (6/8) Epoch 11, batch 12850, loss[loss=0.119, simple_loss=0.195, pruned_loss=0.02149, over 4805.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03215, over 972025.98 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:10:38,290 INFO [train.py:715] (6/8) Epoch 11, batch 12900, loss[loss=0.1302, simple_loss=0.2065, pruned_loss=0.02696, over 4904.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.03177, over 972001.80 frames.], batch size: 23, lr: 1.97e-04 +2022-05-07 03:11:17,933 INFO [train.py:715] (6/8) Epoch 11, batch 12950, loss[loss=0.1485, simple_loss=0.2246, pruned_loss=0.03617, over 4763.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03161, over 972284.61 frames.], batch size: 19, lr: 1.97e-04 +2022-05-07 03:11:56,712 INFO [train.py:715] (6/8) Epoch 11, batch 13000, loss[loss=0.1512, simple_loss=0.2295, pruned_loss=0.03643, over 4965.00 frames.], tot_loss[loss=0.1375, simple_loss=0.211, pruned_loss=0.03201, over 971445.16 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 03:12:36,379 INFO [train.py:715] (6/8) Epoch 11, batch 13050, loss[loss=0.1366, simple_loss=0.2168, pruned_loss=0.02823, over 4820.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2116, pruned_loss=0.03211, over 971091.22 frames.], batch size: 13, lr: 1.97e-04 +2022-05-07 03:13:15,475 INFO [train.py:715] (6/8) Epoch 11, batch 13100, loss[loss=0.1314, simple_loss=0.2139, pruned_loss=0.0245, over 4762.00 frames.], tot_loss[loss=0.1373, simple_loss=0.211, pruned_loss=0.03183, over 971283.74 frames.], batch size: 19, lr: 1.97e-04 +2022-05-07 03:13:53,588 INFO [train.py:715] (6/8) Epoch 11, batch 13150, loss[loss=0.1117, simple_loss=0.1842, pruned_loss=0.01961, over 4921.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03191, over 971902.01 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:14:32,699 INFO [train.py:715] (6/8) Epoch 11, batch 13200, loss[loss=0.1375, simple_loss=0.2044, pruned_loss=0.03532, over 4853.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03164, over 971383.70 frames.], batch size: 30, lr: 1.97e-04 +2022-05-07 03:15:11,061 INFO [train.py:715] (6/8) Epoch 11, batch 13250, loss[loss=0.1288, simple_loss=0.2032, pruned_loss=0.02721, over 4931.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2093, pruned_loss=0.03163, over 971450.03 frames.], batch size: 35, lr: 1.97e-04 +2022-05-07 03:15:50,455 INFO [train.py:715] (6/8) Epoch 11, batch 13300, loss[loss=0.1306, simple_loss=0.2076, pruned_loss=0.02681, over 4976.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2094, pruned_loss=0.0314, over 971503.05 frames.], batch size: 31, lr: 1.97e-04 +2022-05-07 03:16:29,355 
INFO [train.py:715] (6/8) Epoch 11, batch 13350, loss[loss=0.1842, simple_loss=0.2648, pruned_loss=0.05179, over 4964.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.03167, over 971374.78 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 03:17:08,600 INFO [train.py:715] (6/8) Epoch 11, batch 13400, loss[loss=0.09992, simple_loss=0.1685, pruned_loss=0.01566, over 4841.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2094, pruned_loss=0.03154, over 971815.93 frames.], batch size: 12, lr: 1.97e-04 +2022-05-07 03:17:47,310 INFO [train.py:715] (6/8) Epoch 11, batch 13450, loss[loss=0.1529, simple_loss=0.2301, pruned_loss=0.03785, over 4815.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.03171, over 972822.97 frames.], batch size: 25, lr: 1.97e-04 +2022-05-07 03:18:26,311 INFO [train.py:715] (6/8) Epoch 11, batch 13500, loss[loss=0.1409, simple_loss=0.211, pruned_loss=0.0354, over 4814.00 frames.], tot_loss[loss=0.1369, simple_loss=0.21, pruned_loss=0.03191, over 973138.08 frames.], batch size: 26, lr: 1.97e-04 +2022-05-07 03:19:05,024 INFO [train.py:715] (6/8) Epoch 11, batch 13550, loss[loss=0.154, simple_loss=0.2294, pruned_loss=0.03929, over 4967.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03255, over 972719.31 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:19:44,150 INFO [train.py:715] (6/8) Epoch 11, batch 13600, loss[loss=0.1248, simple_loss=0.2046, pruned_loss=0.02247, over 4866.00 frames.], tot_loss[loss=0.1371, simple_loss=0.21, pruned_loss=0.03209, over 972637.37 frames.], batch size: 20, lr: 1.97e-04 +2022-05-07 03:20:22,539 INFO [train.py:715] (6/8) Epoch 11, batch 13650, loss[loss=0.1482, simple_loss=0.2194, pruned_loss=0.03847, over 4925.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2103, pruned_loss=0.03234, over 971739.35 frames.], batch size: 23, lr: 1.97e-04 +2022-05-07 03:21:00,723 INFO [train.py:715] (6/8) Epoch 11, batch 13700, loss[loss=0.1234, simple_loss=0.1982, pruned_loss=0.02431, over 4646.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03253, over 971248.18 frames.], batch size: 13, lr: 1.97e-04 +2022-05-07 03:21:39,815 INFO [train.py:715] (6/8) Epoch 11, batch 13750, loss[loss=0.1537, simple_loss=0.2215, pruned_loss=0.043, over 4847.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.0329, over 971476.46 frames.], batch size: 32, lr: 1.97e-04 +2022-05-07 03:22:19,175 INFO [train.py:715] (6/8) Epoch 11, batch 13800, loss[loss=0.1158, simple_loss=0.1885, pruned_loss=0.0215, over 4911.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2126, pruned_loss=0.03321, over 972024.97 frames.], batch size: 23, lr: 1.97e-04 +2022-05-07 03:22:57,643 INFO [train.py:715] (6/8) Epoch 11, batch 13850, loss[loss=0.1297, simple_loss=0.2133, pruned_loss=0.02311, over 4939.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03317, over 972742.43 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:23:37,053 INFO [train.py:715] (6/8) Epoch 11, batch 13900, loss[loss=0.1173, simple_loss=0.1955, pruned_loss=0.01956, over 4890.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03274, over 973138.14 frames.], batch size: 22, lr: 1.97e-04 +2022-05-07 03:24:15,992 INFO [train.py:715] (6/8) Epoch 11, batch 13950, loss[loss=0.1235, simple_loss=0.1948, pruned_loss=0.02608, over 4981.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03286, over 972388.89 frames.], batch size: 31, lr: 1.97e-04 +2022-05-07 03:24:55,162 INFO 
[train.py:715] (6/8) Epoch 11, batch 14000, loss[loss=0.1312, simple_loss=0.2132, pruned_loss=0.02457, over 4832.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.03293, over 971904.95 frames.], batch size: 27, lr: 1.97e-04 +2022-05-07 03:25:34,610 INFO [train.py:715] (6/8) Epoch 11, batch 14050, loss[loss=0.1614, simple_loss=0.2253, pruned_loss=0.0487, over 4771.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2122, pruned_loss=0.03362, over 972193.34 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:26:14,332 INFO [train.py:715] (6/8) Epoch 11, batch 14100, loss[loss=0.1302, simple_loss=0.1935, pruned_loss=0.03344, over 4764.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2132, pruned_loss=0.03409, over 971993.02 frames.], batch size: 17, lr: 1.97e-04 +2022-05-07 03:26:53,600 INFO [train.py:715] (6/8) Epoch 11, batch 14150, loss[loss=0.1507, simple_loss=0.2341, pruned_loss=0.03362, over 4790.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2128, pruned_loss=0.03371, over 971873.09 frames.], batch size: 17, lr: 1.97e-04 +2022-05-07 03:27:32,870 INFO [train.py:715] (6/8) Epoch 11, batch 14200, loss[loss=0.1257, simple_loss=0.1917, pruned_loss=0.02982, over 4935.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.0336, over 971759.67 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:28:13,019 INFO [train.py:715] (6/8) Epoch 11, batch 14250, loss[loss=0.1276, simple_loss=0.1979, pruned_loss=0.02868, over 4782.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2129, pruned_loss=0.03302, over 972664.42 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 03:28:53,023 INFO [train.py:715] (6/8) Epoch 11, batch 14300, loss[loss=0.171, simple_loss=0.2463, pruned_loss=0.04787, over 4845.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.03303, over 972343.11 frames.], batch size: 30, lr: 1.97e-04 +2022-05-07 03:29:32,288 INFO [train.py:715] (6/8) Epoch 11, batch 14350, loss[loss=0.1532, simple_loss=0.2226, pruned_loss=0.04184, over 4842.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2128, pruned_loss=0.03273, over 971896.18 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 03:30:12,239 INFO [train.py:715] (6/8) Epoch 11, batch 14400, loss[loss=0.1392, simple_loss=0.2133, pruned_loss=0.03259, over 4934.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2127, pruned_loss=0.03295, over 971949.17 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:30:52,510 INFO [train.py:715] (6/8) Epoch 11, batch 14450, loss[loss=0.1343, simple_loss=0.2061, pruned_loss=0.03124, over 4881.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03269, over 972444.79 frames.], batch size: 20, lr: 1.97e-04 +2022-05-07 03:31:31,924 INFO [train.py:715] (6/8) Epoch 11, batch 14500, loss[loss=0.1303, simple_loss=0.1984, pruned_loss=0.03114, over 4770.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03251, over 972102.56 frames.], batch size: 19, lr: 1.97e-04 +2022-05-07 03:32:11,425 INFO [train.py:715] (6/8) Epoch 11, batch 14550, loss[loss=0.1425, simple_loss=0.2144, pruned_loss=0.03526, over 4944.00 frames.], tot_loss[loss=0.139, simple_loss=0.2126, pruned_loss=0.03269, over 971117.26 frames.], batch size: 35, lr: 1.97e-04 +2022-05-07 03:32:51,264 INFO [train.py:715] (6/8) Epoch 11, batch 14600, loss[loss=0.136, simple_loss=0.2071, pruned_loss=0.03242, over 4920.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2131, pruned_loss=0.03287, over 972158.63 frames.], batch size: 29, lr: 1.97e-04 +2022-05-07 03:33:30,640 INFO 
[train.py:715] (6/8) Epoch 11, batch 14650, loss[loss=0.1342, simple_loss=0.2036, pruned_loss=0.03244, over 4890.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03255, over 972641.88 frames.], batch size: 22, lr: 1.97e-04 +2022-05-07 03:34:09,057 INFO [train.py:715] (6/8) Epoch 11, batch 14700, loss[loss=0.1432, simple_loss=0.2235, pruned_loss=0.03142, over 4888.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03206, over 972747.99 frames.], batch size: 22, lr: 1.97e-04 +2022-05-07 03:34:48,553 INFO [train.py:715] (6/8) Epoch 11, batch 14750, loss[loss=0.1253, simple_loss=0.2034, pruned_loss=0.02359, over 4835.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2114, pruned_loss=0.03211, over 972937.06 frames.], batch size: 26, lr: 1.97e-04 +2022-05-07 03:35:27,683 INFO [train.py:715] (6/8) Epoch 11, batch 14800, loss[loss=0.1367, simple_loss=0.208, pruned_loss=0.03265, over 4867.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03301, over 972266.06 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 03:36:06,361 INFO [train.py:715] (6/8) Epoch 11, batch 14850, loss[loss=0.1296, simple_loss=0.1983, pruned_loss=0.0305, over 4972.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03324, over 972472.33 frames.], batch size: 25, lr: 1.97e-04 +2022-05-07 03:36:45,863 INFO [train.py:715] (6/8) Epoch 11, batch 14900, loss[loss=0.1532, simple_loss=0.2222, pruned_loss=0.04206, over 4863.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2132, pruned_loss=0.03313, over 972624.63 frames.], batch size: 30, lr: 1.97e-04 +2022-05-07 03:37:25,088 INFO [train.py:715] (6/8) Epoch 11, batch 14950, loss[loss=0.1184, simple_loss=0.2003, pruned_loss=0.01825, over 4935.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2129, pruned_loss=0.03302, over 972144.47 frames.], batch size: 23, lr: 1.97e-04 +2022-05-07 03:38:03,593 INFO [train.py:715] (6/8) Epoch 11, batch 15000, loss[loss=0.1142, simple_loss=0.1843, pruned_loss=0.02206, over 4892.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2136, pruned_loss=0.03351, over 973060.44 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 03:38:03,594 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 03:38:13,230 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.106, simple_loss=0.1901, pruned_loss=0.01091, over 914524.00 frames. 
+2022-05-07 03:38:52,004 INFO [train.py:715] (6/8) Epoch 11, batch 15050, loss[loss=0.1642, simple_loss=0.2509, pruned_loss=0.03869, over 4822.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2138, pruned_loss=0.03342, over 973728.03 frames.], batch size: 25, lr: 1.97e-04 +2022-05-07 03:39:30,961 INFO [train.py:715] (6/8) Epoch 11, batch 15100, loss[loss=0.19, simple_loss=0.2481, pruned_loss=0.06593, over 4900.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2131, pruned_loss=0.03296, over 973769.59 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:40:10,675 INFO [train.py:715] (6/8) Epoch 11, batch 15150, loss[loss=0.1261, simple_loss=0.2036, pruned_loss=0.02428, over 4975.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2136, pruned_loss=0.03362, over 974617.97 frames.], batch size: 35, lr: 1.97e-04 +2022-05-07 03:40:49,843 INFO [train.py:715] (6/8) Epoch 11, batch 15200, loss[loss=0.1407, simple_loss=0.2199, pruned_loss=0.03077, over 4965.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2138, pruned_loss=0.03376, over 974029.13 frames.], batch size: 29, lr: 1.97e-04 +2022-05-07 03:41:28,413 INFO [train.py:715] (6/8) Epoch 11, batch 15250, loss[loss=0.1464, simple_loss=0.2199, pruned_loss=0.03643, over 4901.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03404, over 973549.93 frames.], batch size: 17, lr: 1.97e-04 +2022-05-07 03:42:07,671 INFO [train.py:715] (6/8) Epoch 11, batch 15300, loss[loss=0.1208, simple_loss=0.1965, pruned_loss=0.0225, over 4968.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03328, over 973453.46 frames.], batch size: 24, lr: 1.97e-04 +2022-05-07 03:42:46,994 INFO [train.py:715] (6/8) Epoch 11, batch 15350, loss[loss=0.1418, simple_loss=0.2071, pruned_loss=0.03825, over 4709.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2126, pruned_loss=0.0333, over 972713.27 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 03:43:25,866 INFO [train.py:715] (6/8) Epoch 11, batch 15400, loss[loss=0.1289, simple_loss=0.1988, pruned_loss=0.02951, over 4785.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2123, pruned_loss=0.03301, over 973304.50 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 03:44:04,609 INFO [train.py:715] (6/8) Epoch 11, batch 15450, loss[loss=0.1456, simple_loss=0.2173, pruned_loss=0.03696, over 4748.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03292, over 971904.09 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 03:44:44,029 INFO [train.py:715] (6/8) Epoch 11, batch 15500, loss[loss=0.2055, simple_loss=0.2801, pruned_loss=0.0654, over 4898.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2113, pruned_loss=0.03288, over 972225.01 frames.], batch size: 39, lr: 1.96e-04 +2022-05-07 03:45:23,173 INFO [train.py:715] (6/8) Epoch 11, batch 15550, loss[loss=0.1271, simple_loss=0.2098, pruned_loss=0.02217, over 4872.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2112, pruned_loss=0.03285, over 972025.22 frames.], batch size: 30, lr: 1.96e-04 +2022-05-07 03:46:01,710 INFO [train.py:715] (6/8) Epoch 11, batch 15600, loss[loss=0.1387, simple_loss=0.2184, pruned_loss=0.02955, over 4909.00 frames.], tot_loss[loss=0.139, simple_loss=0.2119, pruned_loss=0.033, over 972413.83 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 03:46:40,884 INFO [train.py:715] (6/8) Epoch 11, batch 15650, loss[loss=0.1295, simple_loss=0.2065, pruned_loss=0.02625, over 4867.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.0328, over 972655.66 frames.], batch size: 12, lr: 1.96e-04 +2022-05-07 
03:47:19,844 INFO [train.py:715] (6/8) Epoch 11, batch 15700, loss[loss=0.1245, simple_loss=0.1961, pruned_loss=0.0264, over 4965.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2118, pruned_loss=0.03306, over 972099.73 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 03:47:58,646 INFO [train.py:715] (6/8) Epoch 11, batch 15750, loss[loss=0.1254, simple_loss=0.1881, pruned_loss=0.03136, over 4869.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03269, over 972800.70 frames.], batch size: 32, lr: 1.96e-04 +2022-05-07 03:48:37,397 INFO [train.py:715] (6/8) Epoch 11, batch 15800, loss[loss=0.1406, simple_loss=0.2096, pruned_loss=0.03584, over 4876.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2116, pruned_loss=0.03276, over 973607.48 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 03:49:16,759 INFO [train.py:715] (6/8) Epoch 11, batch 15850, loss[loss=0.1286, simple_loss=0.2058, pruned_loss=0.02571, over 4774.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03205, over 973390.34 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 03:49:55,695 INFO [train.py:715] (6/8) Epoch 11, batch 15900, loss[loss=0.1795, simple_loss=0.2392, pruned_loss=0.05993, over 4789.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2118, pruned_loss=0.03231, over 972462.93 frames.], batch size: 17, lr: 1.96e-04 +2022-05-07 03:50:34,610 INFO [train.py:715] (6/8) Epoch 11, batch 15950, loss[loss=0.1316, simple_loss=0.2017, pruned_loss=0.03074, over 4949.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2117, pruned_loss=0.03222, over 971894.70 frames.], batch size: 29, lr: 1.96e-04 +2022-05-07 03:51:13,826 INFO [train.py:715] (6/8) Epoch 11, batch 16000, loss[loss=0.1667, simple_loss=0.2487, pruned_loss=0.04238, over 4720.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, pruned_loss=0.03248, over 972063.20 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 03:51:53,251 INFO [train.py:715] (6/8) Epoch 11, batch 16050, loss[loss=0.1371, simple_loss=0.1951, pruned_loss=0.03952, over 4755.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03279, over 972109.52 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 03:52:31,940 INFO [train.py:715] (6/8) Epoch 11, batch 16100, loss[loss=0.1519, simple_loss=0.2305, pruned_loss=0.03663, over 4805.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.0328, over 971832.16 frames.], batch size: 26, lr: 1.96e-04 +2022-05-07 03:53:10,817 INFO [train.py:715] (6/8) Epoch 11, batch 16150, loss[loss=0.132, simple_loss=0.2012, pruned_loss=0.03141, over 4816.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03215, over 971722.99 frames.], batch size: 13, lr: 1.96e-04 +2022-05-07 03:53:50,405 INFO [train.py:715] (6/8) Epoch 11, batch 16200, loss[loss=0.1526, simple_loss=0.2219, pruned_loss=0.04164, over 4751.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, pruned_loss=0.03245, over 972104.17 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 03:54:29,889 INFO [train.py:715] (6/8) Epoch 11, batch 16250, loss[loss=0.1376, simple_loss=0.2084, pruned_loss=0.03339, over 4758.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2111, pruned_loss=0.03213, over 972171.11 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 03:55:08,238 INFO [train.py:715] (6/8) Epoch 11, batch 16300, loss[loss=0.1409, simple_loss=0.2009, pruned_loss=0.04042, over 4752.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03227, over 972247.38 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 
03:55:47,435 INFO [train.py:715] (6/8) Epoch 11, batch 16350, loss[loss=0.1532, simple_loss=0.2379, pruned_loss=0.03431, over 4808.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.03264, over 972322.43 frames.], batch size: 21, lr: 1.96e-04 +2022-05-07 03:56:26,684 INFO [train.py:715] (6/8) Epoch 11, batch 16400, loss[loss=0.1592, simple_loss=0.2315, pruned_loss=0.04345, over 4912.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2122, pruned_loss=0.03267, over 972309.60 frames.], batch size: 17, lr: 1.96e-04 +2022-05-07 03:57:05,183 INFO [train.py:715] (6/8) Epoch 11, batch 16450, loss[loss=0.1644, simple_loss=0.2258, pruned_loss=0.05156, over 4774.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.03245, over 971961.56 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 03:57:44,152 INFO [train.py:715] (6/8) Epoch 11, batch 16500, loss[loss=0.1383, simple_loss=0.2144, pruned_loss=0.03115, over 4891.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03271, over 972428.01 frames.], batch size: 22, lr: 1.96e-04 +2022-05-07 03:58:23,674 INFO [train.py:715] (6/8) Epoch 11, batch 16550, loss[loss=0.129, simple_loss=0.1931, pruned_loss=0.03242, over 4853.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.03231, over 972265.18 frames.], batch size: 32, lr: 1.96e-04 +2022-05-07 03:59:02,828 INFO [train.py:715] (6/8) Epoch 11, batch 16600, loss[loss=0.1305, simple_loss=0.2103, pruned_loss=0.02531, over 4810.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03232, over 972428.02 frames.], batch size: 21, lr: 1.96e-04 +2022-05-07 03:59:41,213 INFO [train.py:715] (6/8) Epoch 11, batch 16650, loss[loss=0.1541, simple_loss=0.2398, pruned_loss=0.0342, over 4907.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03245, over 971995.94 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:00:20,434 INFO [train.py:715] (6/8) Epoch 11, batch 16700, loss[loss=0.1164, simple_loss=0.187, pruned_loss=0.0229, over 4940.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03252, over 972210.73 frames.], batch size: 29, lr: 1.96e-04 +2022-05-07 04:00:59,402 INFO [train.py:715] (6/8) Epoch 11, batch 16750, loss[loss=0.1499, simple_loss=0.2242, pruned_loss=0.03776, over 4922.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.0319, over 972476.07 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:01:38,342 INFO [train.py:715] (6/8) Epoch 11, batch 16800, loss[loss=0.1299, simple_loss=0.2081, pruned_loss=0.02579, over 4894.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03266, over 972530.68 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 04:02:17,999 INFO [train.py:715] (6/8) Epoch 11, batch 16850, loss[loss=0.1336, simple_loss=0.2141, pruned_loss=0.02654, over 4815.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03237, over 972497.58 frames.], batch size: 25, lr: 1.96e-04 +2022-05-07 04:02:57,561 INFO [train.py:715] (6/8) Epoch 11, batch 16900, loss[loss=0.1418, simple_loss=0.2162, pruned_loss=0.03367, over 4939.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03286, over 971794.17 frames.], batch size: 23, lr: 1.96e-04 +2022-05-07 04:03:37,030 INFO [train.py:715] (6/8) Epoch 11, batch 16950, loss[loss=0.1233, simple_loss=0.2039, pruned_loss=0.02139, over 4984.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2111, pruned_loss=0.0325, over 972311.54 frames.], batch size: 25, lr: 1.96e-04 +2022-05-07 04:04:15,766 
INFO [train.py:715] (6/8) Epoch 11, batch 17000, loss[loss=0.145, simple_loss=0.2267, pruned_loss=0.0317, over 4925.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03275, over 972297.85 frames.], batch size: 23, lr: 1.96e-04 +2022-05-07 04:04:55,496 INFO [train.py:715] (6/8) Epoch 11, batch 17050, loss[loss=0.1442, simple_loss=0.2136, pruned_loss=0.03743, over 4856.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2118, pruned_loss=0.03256, over 973278.33 frames.], batch size: 34, lr: 1.96e-04 +2022-05-07 04:05:38,135 INFO [train.py:715] (6/8) Epoch 11, batch 17100, loss[loss=0.1872, simple_loss=0.2877, pruned_loss=0.04333, over 4895.00 frames.], tot_loss[loss=0.1382, simple_loss=0.212, pruned_loss=0.03215, over 973252.49 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 04:06:17,132 INFO [train.py:715] (6/8) Epoch 11, batch 17150, loss[loss=0.122, simple_loss=0.2057, pruned_loss=0.0192, over 4859.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2122, pruned_loss=0.03237, over 972859.63 frames.], batch size: 22, lr: 1.96e-04 +2022-05-07 04:06:56,396 INFO [train.py:715] (6/8) Epoch 11, batch 17200, loss[loss=0.1365, simple_loss=0.2195, pruned_loss=0.02673, over 4813.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2117, pruned_loss=0.03167, over 972932.75 frames.], batch size: 25, lr: 1.96e-04 +2022-05-07 04:07:35,866 INFO [train.py:715] (6/8) Epoch 11, batch 17250, loss[loss=0.1647, simple_loss=0.2332, pruned_loss=0.04808, over 4792.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03183, over 972810.89 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:08:14,918 INFO [train.py:715] (6/8) Epoch 11, batch 17300, loss[loss=0.154, simple_loss=0.2269, pruned_loss=0.04052, over 4928.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.0321, over 972557.19 frames.], batch size: 29, lr: 1.96e-04 +2022-05-07 04:08:53,647 INFO [train.py:715] (6/8) Epoch 11, batch 17350, loss[loss=0.1222, simple_loss=0.1991, pruned_loss=0.0227, over 4959.00 frames.], tot_loss[loss=0.1382, simple_loss=0.212, pruned_loss=0.03221, over 971834.84 frames.], batch size: 24, lr: 1.96e-04 +2022-05-07 04:09:33,978 INFO [train.py:715] (6/8) Epoch 11, batch 17400, loss[loss=0.1244, simple_loss=0.2105, pruned_loss=0.0192, over 4807.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2124, pruned_loss=0.03275, over 971931.77 frames.], batch size: 25, lr: 1.96e-04 +2022-05-07 04:10:14,473 INFO [train.py:715] (6/8) Epoch 11, batch 17450, loss[loss=0.1763, simple_loss=0.2453, pruned_loss=0.05364, over 4775.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03313, over 971953.58 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:10:53,787 INFO [train.py:715] (6/8) Epoch 11, batch 17500, loss[loss=0.1397, simple_loss=0.216, pruned_loss=0.03174, over 4695.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2129, pruned_loss=0.03322, over 972919.76 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:11:33,224 INFO [train.py:715] (6/8) Epoch 11, batch 17550, loss[loss=0.1112, simple_loss=0.1828, pruned_loss=0.01977, over 4767.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2129, pruned_loss=0.033, over 971875.56 frames.], batch size: 12, lr: 1.96e-04 +2022-05-07 04:12:12,577 INFO [train.py:715] (6/8) Epoch 11, batch 17600, loss[loss=0.1332, simple_loss=0.2126, pruned_loss=0.02686, over 4792.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2132, pruned_loss=0.03308, over 971380.19 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:12:51,731 INFO [train.py:715] 
(6/8) Epoch 11, batch 17650, loss[loss=0.1335, simple_loss=0.2121, pruned_loss=0.0274, over 4788.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2126, pruned_loss=0.03282, over 972282.66 frames.], batch size: 17, lr: 1.96e-04 +2022-05-07 04:13:29,971 INFO [train.py:715] (6/8) Epoch 11, batch 17700, loss[loss=0.1582, simple_loss=0.2246, pruned_loss=0.04587, over 4772.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03311, over 972968.60 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:14:09,456 INFO [train.py:715] (6/8) Epoch 11, batch 17750, loss[loss=0.1722, simple_loss=0.2428, pruned_loss=0.05083, over 4692.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2128, pruned_loss=0.03335, over 973011.43 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:14:49,016 INFO [train.py:715] (6/8) Epoch 11, batch 17800, loss[loss=0.143, simple_loss=0.2174, pruned_loss=0.03429, over 4962.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2132, pruned_loss=0.03327, over 973432.77 frames.], batch size: 39, lr: 1.96e-04 +2022-05-07 04:15:27,265 INFO [train.py:715] (6/8) Epoch 11, batch 17850, loss[loss=0.1712, simple_loss=0.2461, pruned_loss=0.04819, over 4957.00 frames.], tot_loss[loss=0.1397, simple_loss=0.213, pruned_loss=0.03318, over 973456.41 frames.], batch size: 35, lr: 1.96e-04 +2022-05-07 04:16:06,258 INFO [train.py:715] (6/8) Epoch 11, batch 17900, loss[loss=0.1222, simple_loss=0.1919, pruned_loss=0.02628, over 4784.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.03281, over 973495.10 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:16:45,881 INFO [train.py:715] (6/8) Epoch 11, batch 17950, loss[loss=0.163, simple_loss=0.2312, pruned_loss=0.04737, over 4974.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03317, over 973075.29 frames.], batch size: 39, lr: 1.96e-04 +2022-05-07 04:17:24,875 INFO [train.py:715] (6/8) Epoch 11, batch 18000, loss[loss=0.1493, simple_loss=0.2273, pruned_loss=0.03562, over 4960.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03315, over 973232.49 frames.], batch size: 35, lr: 1.96e-04 +2022-05-07 04:17:24,875 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 04:17:34,462 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.1061, simple_loss=0.1903, pruned_loss=0.01092, over 914524.00 frames. 
+2022-05-07 04:18:14,141 INFO [train.py:715] (6/8) Epoch 11, batch 18050, loss[loss=0.1259, simple_loss=0.1971, pruned_loss=0.02739, over 4787.00 frames.], tot_loss[loss=0.1388, simple_loss=0.212, pruned_loss=0.03273, over 973412.80 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:18:53,409 INFO [train.py:715] (6/8) Epoch 11, batch 18100, loss[loss=0.1483, simple_loss=0.2245, pruned_loss=0.03607, over 4883.00 frames.], tot_loss[loss=0.1386, simple_loss=0.212, pruned_loss=0.03263, over 973953.78 frames.], batch size: 22, lr: 1.96e-04 +2022-05-07 04:19:32,615 INFO [train.py:715] (6/8) Epoch 11, batch 18150, loss[loss=0.1348, simple_loss=0.2151, pruned_loss=0.02725, over 4905.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03246, over 972695.68 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 04:20:12,193 INFO [train.py:715] (6/8) Epoch 11, batch 18200, loss[loss=0.1383, simple_loss=0.2067, pruned_loss=0.03499, over 4850.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03263, over 972281.58 frames.], batch size: 30, lr: 1.96e-04 +2022-05-07 04:20:50,627 INFO [train.py:715] (6/8) Epoch 11, batch 18250, loss[loss=0.1507, simple_loss=0.2215, pruned_loss=0.03995, over 4695.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2123, pruned_loss=0.03268, over 972059.44 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:21:29,929 INFO [train.py:715] (6/8) Epoch 11, batch 18300, loss[loss=0.1178, simple_loss=0.193, pruned_loss=0.0213, over 4983.00 frames.], tot_loss[loss=0.1384, simple_loss=0.212, pruned_loss=0.03246, over 972429.48 frames.], batch size: 31, lr: 1.96e-04 +2022-05-07 04:22:09,176 INFO [train.py:715] (6/8) Epoch 11, batch 18350, loss[loss=0.1285, simple_loss=0.2037, pruned_loss=0.02663, over 4788.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03272, over 972371.76 frames.], batch size: 24, lr: 1.96e-04 +2022-05-07 04:22:47,573 INFO [train.py:715] (6/8) Epoch 11, batch 18400, loss[loss=0.1336, simple_loss=0.1974, pruned_loss=0.03486, over 4954.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03286, over 971553.84 frames.], batch size: 21, lr: 1.96e-04 +2022-05-07 04:23:25,988 INFO [train.py:715] (6/8) Epoch 11, batch 18450, loss[loss=0.1477, simple_loss=0.2169, pruned_loss=0.03924, over 4927.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03264, over 971649.38 frames.], batch size: 21, lr: 1.96e-04 +2022-05-07 04:24:05,024 INFO [train.py:715] (6/8) Epoch 11, batch 18500, loss[loss=0.1297, simple_loss=0.1947, pruned_loss=0.03236, over 4781.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.03286, over 971383.65 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:24:44,463 INFO [train.py:715] (6/8) Epoch 11, batch 18550, loss[loss=0.1066, simple_loss=0.1863, pruned_loss=0.01347, over 4880.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03274, over 972197.45 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 04:25:22,565 INFO [train.py:715] (6/8) Epoch 11, batch 18600, loss[loss=0.1297, simple_loss=0.2093, pruned_loss=0.02506, over 4745.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2122, pruned_loss=0.03249, over 971867.80 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 04:26:01,411 INFO [train.py:715] (6/8) Epoch 11, batch 18650, loss[loss=0.1508, simple_loss=0.2289, pruned_loss=0.03638, over 4906.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2119, pruned_loss=0.03218, over 971221.09 frames.], batch size: 17, lr: 1.96e-04 +2022-05-07 
04:26:40,669 INFO [train.py:715] (6/8) Epoch 11, batch 18700, loss[loss=0.1404, simple_loss=0.2085, pruned_loss=0.03611, over 4968.00 frames.], tot_loss[loss=0.1384, simple_loss=0.212, pruned_loss=0.03237, over 972186.95 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:27:18,911 INFO [train.py:715] (6/8) Epoch 11, batch 18750, loss[loss=0.1425, simple_loss=0.2114, pruned_loss=0.0368, over 4979.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2128, pruned_loss=0.03274, over 971453.75 frames.], batch size: 25, lr: 1.96e-04 +2022-05-07 04:27:57,980 INFO [train.py:715] (6/8) Epoch 11, batch 18800, loss[loss=0.1354, simple_loss=0.2035, pruned_loss=0.03365, over 4984.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03281, over 972158.30 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:28:36,593 INFO [train.py:715] (6/8) Epoch 11, batch 18850, loss[loss=0.1368, simple_loss=0.2033, pruned_loss=0.03513, over 4857.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03306, over 973208.89 frames.], batch size: 30, lr: 1.96e-04 +2022-05-07 04:29:16,486 INFO [train.py:715] (6/8) Epoch 11, batch 18900, loss[loss=0.1403, simple_loss=0.2039, pruned_loss=0.0383, over 4816.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03298, over 972667.08 frames.], batch size: 13, lr: 1.96e-04 +2022-05-07 04:29:55,267 INFO [train.py:715] (6/8) Epoch 11, batch 18950, loss[loss=0.1591, simple_loss=0.2242, pruned_loss=0.04694, over 4894.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.0328, over 972289.52 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 04:30:34,362 INFO [train.py:715] (6/8) Epoch 11, batch 19000, loss[loss=0.1714, simple_loss=0.2475, pruned_loss=0.04764, over 4972.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2127, pruned_loss=0.03284, over 972519.02 frames.], batch size: 35, lr: 1.96e-04 +2022-05-07 04:31:13,456 INFO [train.py:715] (6/8) Epoch 11, batch 19050, loss[loss=0.1503, simple_loss=0.2339, pruned_loss=0.03336, over 4821.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2122, pruned_loss=0.0326, over 972347.65 frames.], batch size: 27, lr: 1.96e-04 +2022-05-07 04:31:52,058 INFO [train.py:715] (6/8) Epoch 11, batch 19100, loss[loss=0.1359, simple_loss=0.2157, pruned_loss=0.02804, over 4868.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.0324, over 972692.53 frames.], batch size: 38, lr: 1.96e-04 +2022-05-07 04:32:31,180 INFO [train.py:715] (6/8) Epoch 11, batch 19150, loss[loss=0.1315, simple_loss=0.2184, pruned_loss=0.02234, over 4776.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.0324, over 971818.33 frames.], batch size: 17, lr: 1.96e-04 +2022-05-07 04:33:10,079 INFO [train.py:715] (6/8) Epoch 11, batch 19200, loss[loss=0.1222, simple_loss=0.1935, pruned_loss=0.02543, over 4877.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.03264, over 972369.42 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 04:33:49,486 INFO [train.py:715] (6/8) Epoch 11, batch 19250, loss[loss=0.1195, simple_loss=0.1961, pruned_loss=0.02147, over 4959.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2107, pruned_loss=0.03215, over 973024.79 frames.], batch size: 24, lr: 1.96e-04 +2022-05-07 04:34:27,828 INFO [train.py:715] (6/8) Epoch 11, batch 19300, loss[loss=0.1286, simple_loss=0.2057, pruned_loss=0.02572, over 4803.00 frames.], tot_loss[loss=0.1375, simple_loss=0.211, pruned_loss=0.032, over 972322.01 frames.], batch size: 24, lr: 1.96e-04 +2022-05-07 04:35:06,982 INFO 
[train.py:715] (6/8) Epoch 11, batch 19350, loss[loss=0.1656, simple_loss=0.2354, pruned_loss=0.04788, over 4770.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2118, pruned_loss=0.03228, over 971787.41 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:35:46,161 INFO [train.py:715] (6/8) Epoch 11, batch 19400, loss[loss=0.1268, simple_loss=0.2101, pruned_loss=0.02178, over 4776.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03268, over 971316.13 frames.], batch size: 17, lr: 1.96e-04 +2022-05-07 04:36:24,111 INFO [train.py:715] (6/8) Epoch 11, batch 19450, loss[loss=0.1477, simple_loss=0.2118, pruned_loss=0.04183, over 4767.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2113, pruned_loss=0.03264, over 971141.53 frames.], batch size: 18, lr: 1.95e-04 +2022-05-07 04:37:03,253 INFO [train.py:715] (6/8) Epoch 11, batch 19500, loss[loss=0.1481, simple_loss=0.2208, pruned_loss=0.03773, over 4785.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2108, pruned_loss=0.03229, over 971226.64 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 04:37:42,219 INFO [train.py:715] (6/8) Epoch 11, batch 19550, loss[loss=0.1514, simple_loss=0.2184, pruned_loss=0.04218, over 4988.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03222, over 971649.99 frames.], batch size: 26, lr: 1.95e-04 +2022-05-07 04:38:20,965 INFO [train.py:715] (6/8) Epoch 11, batch 19600, loss[loss=0.1521, simple_loss=0.2279, pruned_loss=0.03813, over 4889.00 frames.], tot_loss[loss=0.138, simple_loss=0.2112, pruned_loss=0.03239, over 970947.00 frames.], batch size: 39, lr: 1.95e-04 +2022-05-07 04:38:59,548 INFO [train.py:715] (6/8) Epoch 11, batch 19650, loss[loss=0.1423, simple_loss=0.2222, pruned_loss=0.03123, over 4748.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03214, over 970910.42 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 04:39:38,337 INFO [train.py:715] (6/8) Epoch 11, batch 19700, loss[loss=0.1189, simple_loss=0.185, pruned_loss=0.02633, over 4851.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2115, pruned_loss=0.03258, over 970959.39 frames.], batch size: 12, lr: 1.95e-04 +2022-05-07 04:40:17,422 INFO [train.py:715] (6/8) Epoch 11, batch 19750, loss[loss=0.1199, simple_loss=0.1792, pruned_loss=0.03028, over 4818.00 frames.], tot_loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03281, over 971507.63 frames.], batch size: 12, lr: 1.95e-04 +2022-05-07 04:40:55,512 INFO [train.py:715] (6/8) Epoch 11, batch 19800, loss[loss=0.1481, simple_loss=0.2236, pruned_loss=0.0363, over 4906.00 frames.], tot_loss[loss=0.1388, simple_loss=0.212, pruned_loss=0.03281, over 971802.20 frames.], batch size: 17, lr: 1.95e-04 +2022-05-07 04:41:35,008 INFO [train.py:715] (6/8) Epoch 11, batch 19850, loss[loss=0.1218, simple_loss=0.1952, pruned_loss=0.02427, over 4789.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.0324, over 971713.34 frames.], batch size: 21, lr: 1.95e-04 +2022-05-07 04:42:14,377 INFO [train.py:715] (6/8) Epoch 11, batch 19900, loss[loss=0.1159, simple_loss=0.1987, pruned_loss=0.01657, over 4933.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2117, pruned_loss=0.03276, over 971721.70 frames.], batch size: 29, lr: 1.95e-04 +2022-05-07 04:42:53,605 INFO [train.py:715] (6/8) Epoch 11, batch 19950, loss[loss=0.1177, simple_loss=0.1907, pruned_loss=0.02232, over 4810.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.03266, over 972320.08 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 04:43:32,805 INFO 
[train.py:715] (6/8) Epoch 11, batch 20000, loss[loss=0.1344, simple_loss=0.2107, pruned_loss=0.02899, over 4862.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2103, pruned_loss=0.03191, over 972627.80 frames.], batch size: 30, lr: 1.95e-04 +2022-05-07 04:44:11,790 INFO [train.py:715] (6/8) Epoch 11, batch 20050, loss[loss=0.1707, simple_loss=0.2321, pruned_loss=0.05464, over 4991.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03221, over 973680.75 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 04:44:51,032 INFO [train.py:715] (6/8) Epoch 11, batch 20100, loss[loss=0.1546, simple_loss=0.2219, pruned_loss=0.04366, over 4814.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2108, pruned_loss=0.03231, over 973012.00 frames.], batch size: 26, lr: 1.95e-04 +2022-05-07 04:45:29,361 INFO [train.py:715] (6/8) Epoch 11, batch 20150, loss[loss=0.1531, simple_loss=0.2202, pruned_loss=0.04296, over 4787.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2108, pruned_loss=0.03282, over 972614.89 frames.], batch size: 18, lr: 1.95e-04 +2022-05-07 04:46:08,145 INFO [train.py:715] (6/8) Epoch 11, batch 20200, loss[loss=0.1302, simple_loss=0.2006, pruned_loss=0.02994, over 4936.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2106, pruned_loss=0.03261, over 973534.60 frames.], batch size: 35, lr: 1.95e-04 +2022-05-07 04:46:46,984 INFO [train.py:715] (6/8) Epoch 11, batch 20250, loss[loss=0.1367, simple_loss=0.2186, pruned_loss=0.02738, over 4832.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2109, pruned_loss=0.03274, over 972870.39 frames.], batch size: 26, lr: 1.95e-04 +2022-05-07 04:47:25,724 INFO [train.py:715] (6/8) Epoch 11, batch 20300, loss[loss=0.162, simple_loss=0.2277, pruned_loss=0.04818, over 4860.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2114, pruned_loss=0.03311, over 972603.96 frames.], batch size: 30, lr: 1.95e-04 +2022-05-07 04:48:04,824 INFO [train.py:715] (6/8) Epoch 11, batch 20350, loss[loss=0.1376, simple_loss=0.2164, pruned_loss=0.0294, over 4881.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2118, pruned_loss=0.03351, over 972131.49 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 04:48:43,801 INFO [train.py:715] (6/8) Epoch 11, batch 20400, loss[loss=0.1567, simple_loss=0.2378, pruned_loss=0.03777, over 4912.00 frames.], tot_loss[loss=0.1398, simple_loss=0.212, pruned_loss=0.03382, over 972900.43 frames.], batch size: 23, lr: 1.95e-04 +2022-05-07 04:49:23,228 INFO [train.py:715] (6/8) Epoch 11, batch 20450, loss[loss=0.1179, simple_loss=0.1905, pruned_loss=0.02263, over 4748.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2119, pruned_loss=0.03314, over 973262.82 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 04:50:01,763 INFO [train.py:715] (6/8) Epoch 11, batch 20500, loss[loss=0.1369, simple_loss=0.1987, pruned_loss=0.03752, over 4790.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2114, pruned_loss=0.03313, over 972472.51 frames.], batch size: 13, lr: 1.95e-04 +2022-05-07 04:50:41,081 INFO [train.py:715] (6/8) Epoch 11, batch 20550, loss[loss=0.1311, simple_loss=0.2226, pruned_loss=0.01981, over 4923.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2116, pruned_loss=0.0328, over 973498.75 frames.], batch size: 29, lr: 1.95e-04 +2022-05-07 04:51:19,714 INFO [train.py:715] (6/8) Epoch 11, batch 20600, loss[loss=0.1261, simple_loss=0.2107, pruned_loss=0.02075, over 4807.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03284, over 973181.23 frames.], batch size: 25, lr: 1.95e-04 +2022-05-07 04:51:57,492 INFO 
[train.py:715] (6/8) Epoch 11, batch 20650, loss[loss=0.1377, simple_loss=0.222, pruned_loss=0.02669, over 4914.00 frames.], tot_loss[loss=0.1387, simple_loss=0.212, pruned_loss=0.03265, over 973453.84 frames.], batch size: 29, lr: 1.95e-04 +2022-05-07 04:52:36,870 INFO [train.py:715] (6/8) Epoch 11, batch 20700, loss[loss=0.1165, simple_loss=0.1904, pruned_loss=0.02129, over 4755.00 frames.], tot_loss[loss=0.1387, simple_loss=0.212, pruned_loss=0.03273, over 973066.87 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 04:53:16,100 INFO [train.py:715] (6/8) Epoch 11, batch 20750, loss[loss=0.143, simple_loss=0.2189, pruned_loss=0.03354, over 4688.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03251, over 972620.80 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 04:53:54,800 INFO [train.py:715] (6/8) Epoch 11, batch 20800, loss[loss=0.1426, simple_loss=0.2123, pruned_loss=0.03643, over 4883.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03222, over 972417.94 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 04:54:33,172 INFO [train.py:715] (6/8) Epoch 11, batch 20850, loss[loss=0.1446, simple_loss=0.2191, pruned_loss=0.03501, over 4901.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03212, over 971868.80 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 04:55:12,419 INFO [train.py:715] (6/8) Epoch 11, batch 20900, loss[loss=0.1263, simple_loss=0.2138, pruned_loss=0.01947, over 4804.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2119, pruned_loss=0.03216, over 971613.72 frames.], batch size: 25, lr: 1.95e-04 +2022-05-07 04:55:52,031 INFO [train.py:715] (6/8) Epoch 11, batch 20950, loss[loss=0.145, simple_loss=0.2221, pruned_loss=0.03389, over 4797.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2119, pruned_loss=0.03215, over 970937.10 frames.], batch size: 18, lr: 1.95e-04 +2022-05-07 04:56:30,994 INFO [train.py:715] (6/8) Epoch 11, batch 21000, loss[loss=0.1297, simple_loss=0.2135, pruned_loss=0.02294, over 4952.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2114, pruned_loss=0.03208, over 971408.05 frames.], batch size: 29, lr: 1.95e-04 +2022-05-07 04:56:30,995 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 04:56:40,631 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.106, simple_loss=0.19, pruned_loss=0.01097, over 914524.00 frames. 
+2022-05-07 04:57:20,095 INFO [train.py:715] (6/8) Epoch 11, batch 21050, loss[loss=0.1312, simple_loss=0.2103, pruned_loss=0.02609, over 4843.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03204, over 972040.23 frames.], batch size: 26, lr: 1.95e-04 +2022-05-07 04:57:59,827 INFO [train.py:715] (6/8) Epoch 11, batch 21100, loss[loss=0.1475, simple_loss=0.223, pruned_loss=0.03602, over 4935.00 frames.], tot_loss[loss=0.1372, simple_loss=0.211, pruned_loss=0.03172, over 971880.85 frames.], batch size: 35, lr: 1.95e-04 +2022-05-07 04:58:38,865 INFO [train.py:715] (6/8) Epoch 11, batch 21150, loss[loss=0.1282, simple_loss=0.2121, pruned_loss=0.02213, over 4805.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03202, over 972333.04 frames.], batch size: 25, lr: 1.95e-04 +2022-05-07 04:59:18,201 INFO [train.py:715] (6/8) Epoch 11, batch 21200, loss[loss=0.1587, simple_loss=0.2296, pruned_loss=0.04396, over 4918.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03256, over 971849.59 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 04:59:56,326 INFO [train.py:715] (6/8) Epoch 11, batch 21250, loss[loss=0.139, simple_loss=0.2177, pruned_loss=0.03009, over 4962.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03223, over 971355.83 frames.], batch size: 24, lr: 1.95e-04 +2022-05-07 05:00:35,640 INFO [train.py:715] (6/8) Epoch 11, batch 21300, loss[loss=0.1308, simple_loss=0.2038, pruned_loss=0.02896, over 4788.00 frames.], tot_loss[loss=0.138, simple_loss=0.211, pruned_loss=0.03254, over 971782.06 frames.], batch size: 17, lr: 1.95e-04 +2022-05-07 05:01:15,029 INFO [train.py:715] (6/8) Epoch 11, batch 21350, loss[loss=0.1463, simple_loss=0.2145, pruned_loss=0.03903, over 4756.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2106, pruned_loss=0.03236, over 971132.89 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 05:01:53,535 INFO [train.py:715] (6/8) Epoch 11, batch 21400, loss[loss=0.136, simple_loss=0.2143, pruned_loss=0.02886, over 4807.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.0327, over 972224.43 frames.], batch size: 25, lr: 1.95e-04 +2022-05-07 05:02:32,173 INFO [train.py:715] (6/8) Epoch 11, batch 21450, loss[loss=0.1299, simple_loss=0.1966, pruned_loss=0.03158, over 4916.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03262, over 971955.28 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 05:03:11,025 INFO [train.py:715] (6/8) Epoch 11, batch 21500, loss[loss=0.1425, simple_loss=0.2208, pruned_loss=0.03205, over 4908.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03203, over 972279.16 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 05:03:50,392 INFO [train.py:715] (6/8) Epoch 11, batch 21550, loss[loss=0.1345, simple_loss=0.2058, pruned_loss=0.03167, over 4810.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.0321, over 972474.70 frames.], batch size: 12, lr: 1.95e-04 +2022-05-07 05:04:28,682 INFO [train.py:715] (6/8) Epoch 11, batch 21600, loss[loss=0.1276, simple_loss=0.1993, pruned_loss=0.02798, over 4751.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03211, over 972510.28 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 05:05:07,531 INFO [train.py:715] (6/8) Epoch 11, batch 21650, loss[loss=0.152, simple_loss=0.2169, pruned_loss=0.04351, over 4643.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03336, over 972203.42 frames.], batch size: 13, lr: 1.95e-04 +2022-05-07 
05:05:47,581 INFO [train.py:715] (6/8) Epoch 11, batch 21700, loss[loss=0.16, simple_loss=0.2209, pruned_loss=0.04962, over 4944.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.03287, over 972943.58 frames.], batch size: 35, lr: 1.95e-04 +2022-05-07 05:06:26,874 INFO [train.py:715] (6/8) Epoch 11, batch 21750, loss[loss=0.1523, simple_loss=0.2308, pruned_loss=0.03689, over 4815.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03309, over 972847.85 frames.], batch size: 26, lr: 1.95e-04 +2022-05-07 05:07:07,059 INFO [train.py:715] (6/8) Epoch 11, batch 21800, loss[loss=0.1366, simple_loss=0.2037, pruned_loss=0.03474, over 4976.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03315, over 972683.79 frames.], batch size: 35, lr: 1.95e-04 +2022-05-07 05:07:46,731 INFO [train.py:715] (6/8) Epoch 11, batch 21850, loss[loss=0.1508, simple_loss=0.2265, pruned_loss=0.03752, over 4755.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2128, pruned_loss=0.0329, over 972371.35 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 05:08:27,226 INFO [train.py:715] (6/8) Epoch 11, batch 21900, loss[loss=0.1217, simple_loss=0.1988, pruned_loss=0.02228, over 4760.00 frames.], tot_loss[loss=0.1386, simple_loss=0.212, pruned_loss=0.03265, over 972241.38 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 05:09:06,448 INFO [train.py:715] (6/8) Epoch 11, batch 21950, loss[loss=0.1299, simple_loss=0.2074, pruned_loss=0.02617, over 4756.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2105, pruned_loss=0.03223, over 971915.18 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 05:09:46,767 INFO [train.py:715] (6/8) Epoch 11, batch 22000, loss[loss=0.1402, simple_loss=0.2119, pruned_loss=0.03422, over 4955.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.0329, over 972334.50 frames.], batch size: 29, lr: 1.95e-04 +2022-05-07 05:10:27,230 INFO [train.py:715] (6/8) Epoch 11, batch 22050, loss[loss=0.1079, simple_loss=0.1721, pruned_loss=0.02182, over 4802.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2109, pruned_loss=0.03264, over 971864.33 frames.], batch size: 12, lr: 1.95e-04 +2022-05-07 05:11:05,501 INFO [train.py:715] (6/8) Epoch 11, batch 22100, loss[loss=0.1712, simple_loss=0.2407, pruned_loss=0.05081, over 4913.00 frames.], tot_loss[loss=0.1383, simple_loss=0.211, pruned_loss=0.0328, over 970968.43 frames.], batch size: 39, lr: 1.95e-04 +2022-05-07 05:11:45,099 INFO [train.py:715] (6/8) Epoch 11, batch 22150, loss[loss=0.1816, simple_loss=0.2566, pruned_loss=0.0533, over 4956.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2109, pruned_loss=0.0326, over 971093.55 frames.], batch size: 21, lr: 1.95e-04 +2022-05-07 05:12:24,692 INFO [train.py:715] (6/8) Epoch 11, batch 22200, loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03121, over 4973.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2112, pruned_loss=0.03271, over 970641.35 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 05:13:03,454 INFO [train.py:715] (6/8) Epoch 11, batch 22250, loss[loss=0.09522, simple_loss=0.1596, pruned_loss=0.01541, over 4858.00 frames.], tot_loss[loss=0.1392, simple_loss=0.212, pruned_loss=0.03317, over 970914.37 frames.], batch size: 12, lr: 1.95e-04 +2022-05-07 05:13:41,892 INFO [train.py:715] (6/8) Epoch 11, batch 22300, loss[loss=0.116, simple_loss=0.1841, pruned_loss=0.02388, over 4771.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2126, pruned_loss=0.03333, over 970810.44 frames.], batch size: 12, lr: 1.95e-04 +2022-05-07 05:14:21,102 INFO 
[train.py:715] (6/8) Epoch 11, batch 22350, loss[loss=0.1634, simple_loss=0.2423, pruned_loss=0.04221, over 4908.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03336, over 970723.11 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 05:15:00,558 INFO [train.py:715] (6/8) Epoch 11, batch 22400, loss[loss=0.1504, simple_loss=0.2117, pruned_loss=0.04453, over 4831.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03338, over 971116.42 frames.], batch size: 30, lr: 1.95e-04 +2022-05-07 05:15:38,493 INFO [train.py:715] (6/8) Epoch 11, batch 22450, loss[loss=0.1489, simple_loss=0.2146, pruned_loss=0.04154, over 4781.00 frames.], tot_loss[loss=0.139, simple_loss=0.2122, pruned_loss=0.03294, over 972097.28 frames.], batch size: 18, lr: 1.95e-04 +2022-05-07 05:16:18,409 INFO [train.py:715] (6/8) Epoch 11, batch 22500, loss[loss=0.1554, simple_loss=0.2375, pruned_loss=0.03665, over 4838.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, pruned_loss=0.03244, over 972052.05 frames.], batch size: 25, lr: 1.95e-04 +2022-05-07 05:16:57,483 INFO [train.py:715] (6/8) Epoch 11, batch 22550, loss[loss=0.1115, simple_loss=0.1874, pruned_loss=0.01779, over 4827.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2124, pruned_loss=0.03259, over 972808.25 frames.], batch size: 13, lr: 1.95e-04 +2022-05-07 05:17:36,657 INFO [train.py:715] (6/8) Epoch 11, batch 22600, loss[loss=0.1669, simple_loss=0.2453, pruned_loss=0.04425, over 4872.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2122, pruned_loss=0.03227, over 972139.91 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 05:18:15,030 INFO [train.py:715] (6/8) Epoch 11, batch 22650, loss[loss=0.1153, simple_loss=0.1887, pruned_loss=0.02098, over 4774.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2118, pruned_loss=0.03222, over 972274.99 frames.], batch size: 12, lr: 1.95e-04 +2022-05-07 05:18:54,216 INFO [train.py:715] (6/8) Epoch 11, batch 22700, loss[loss=0.1554, simple_loss=0.2237, pruned_loss=0.04355, over 4702.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2123, pruned_loss=0.03272, over 971978.45 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 05:19:34,075 INFO [train.py:715] (6/8) Epoch 11, batch 22750, loss[loss=0.178, simple_loss=0.2376, pruned_loss=0.05923, over 4858.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03323, over 971490.52 frames.], batch size: 32, lr: 1.95e-04 +2022-05-07 05:20:12,497 INFO [train.py:715] (6/8) Epoch 11, batch 22800, loss[loss=0.1254, simple_loss=0.2058, pruned_loss=0.02247, over 4949.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03311, over 971516.23 frames.], batch size: 21, lr: 1.95e-04 +2022-05-07 05:20:52,297 INFO [train.py:715] (6/8) Epoch 11, batch 22850, loss[loss=0.1269, simple_loss=0.2013, pruned_loss=0.02621, over 4857.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2119, pruned_loss=0.03316, over 971068.99 frames.], batch size: 20, lr: 1.95e-04 +2022-05-07 05:21:31,221 INFO [train.py:715] (6/8) Epoch 11, batch 22900, loss[loss=0.149, simple_loss=0.2366, pruned_loss=0.03074, over 4738.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03319, over 971374.44 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 05:22:10,211 INFO [train.py:715] (6/8) Epoch 11, batch 22950, loss[loss=0.1359, simple_loss=0.2161, pruned_loss=0.02783, over 4931.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03316, over 971406.78 frames.], batch size: 23, lr: 1.95e-04 +2022-05-07 05:22:48,360 INFO 
[train.py:715] (6/8) Epoch 11, batch 23000, loss[loss=0.1294, simple_loss=0.1939, pruned_loss=0.03247, over 4837.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03342, over 971300.06 frames.], batch size: 30, lr: 1.95e-04 +2022-05-07 05:23:27,331 INFO [train.py:715] (6/8) Epoch 11, batch 23050, loss[loss=0.1238, simple_loss=0.197, pruned_loss=0.02532, over 4927.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03275, over 971613.87 frames.], batch size: 29, lr: 1.95e-04 +2022-05-07 05:24:06,661 INFO [train.py:715] (6/8) Epoch 11, batch 23100, loss[loss=0.1663, simple_loss=0.2412, pruned_loss=0.04569, over 4899.00 frames.], tot_loss[loss=0.138, simple_loss=0.2112, pruned_loss=0.03238, over 972418.93 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 05:24:44,407 INFO [train.py:715] (6/8) Epoch 11, batch 23150, loss[loss=0.1528, simple_loss=0.2184, pruned_loss=0.0436, over 4957.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.03269, over 972404.37 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 05:25:23,979 INFO [train.py:715] (6/8) Epoch 11, batch 23200, loss[loss=0.1743, simple_loss=0.2406, pruned_loss=0.05398, over 4742.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.03241, over 971968.39 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 05:26:02,911 INFO [train.py:715] (6/8) Epoch 11, batch 23250, loss[loss=0.1586, simple_loss=0.2275, pruned_loss=0.04483, over 4689.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03236, over 972648.01 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 05:26:41,983 INFO [train.py:715] (6/8) Epoch 11, batch 23300, loss[loss=0.1427, simple_loss=0.2191, pruned_loss=0.03319, over 4917.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03257, over 971708.81 frames.], batch size: 17, lr: 1.95e-04 +2022-05-07 05:27:20,071 INFO [train.py:715] (6/8) Epoch 11, batch 23350, loss[loss=0.1293, simple_loss=0.1968, pruned_loss=0.03092, over 4835.00 frames.], tot_loss[loss=0.1382, simple_loss=0.211, pruned_loss=0.03266, over 971604.82 frames.], batch size: 13, lr: 1.95e-04 +2022-05-07 05:27:59,124 INFO [train.py:715] (6/8) Epoch 11, batch 23400, loss[loss=0.145, simple_loss=0.215, pruned_loss=0.03752, over 4965.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2114, pruned_loss=0.03309, over 970477.46 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 05:28:38,745 INFO [train.py:715] (6/8) Epoch 11, batch 23450, loss[loss=0.1255, simple_loss=0.1929, pruned_loss=0.02908, over 4934.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.03243, over 972435.33 frames.], batch size: 29, lr: 1.95e-04 +2022-05-07 05:29:16,871 INFO [train.py:715] (6/8) Epoch 11, batch 23500, loss[loss=0.1293, simple_loss=0.1983, pruned_loss=0.0301, over 4790.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.03238, over 973123.56 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 05:29:55,783 INFO [train.py:715] (6/8) Epoch 11, batch 23550, loss[loss=0.1508, simple_loss=0.2287, pruned_loss=0.03649, over 4978.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2109, pruned_loss=0.03232, over 972302.05 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 05:30:34,767 INFO [train.py:715] (6/8) Epoch 11, batch 23600, loss[loss=0.1197, simple_loss=0.1907, pruned_loss=0.02437, over 4849.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03268, over 973232.33 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:31:14,119 INFO 
[train.py:715] (6/8) Epoch 11, batch 23650, loss[loss=0.1243, simple_loss=0.1915, pruned_loss=0.02854, over 4978.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2123, pruned_loss=0.03312, over 973658.61 frames.], batch size: 25, lr: 1.94e-04 +2022-05-07 05:31:51,832 INFO [train.py:715] (6/8) Epoch 11, batch 23700, loss[loss=0.1166, simple_loss=0.1921, pruned_loss=0.02055, over 4872.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03277, over 974371.20 frames.], batch size: 32, lr: 1.94e-04 +2022-05-07 05:32:30,814 INFO [train.py:715] (6/8) Epoch 11, batch 23750, loss[loss=0.1371, simple_loss=0.2189, pruned_loss=0.02762, over 4924.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2119, pruned_loss=0.03235, over 974989.77 frames.], batch size: 23, lr: 1.94e-04 +2022-05-07 05:33:09,309 INFO [train.py:715] (6/8) Epoch 11, batch 23800, loss[loss=0.1556, simple_loss=0.2257, pruned_loss=0.04278, over 4968.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03293, over 974608.31 frames.], batch size: 24, lr: 1.94e-04 +2022-05-07 05:33:46,738 INFO [train.py:715] (6/8) Epoch 11, batch 23850, loss[loss=0.1222, simple_loss=0.1976, pruned_loss=0.02345, over 4827.00 frames.], tot_loss[loss=0.139, simple_loss=0.2122, pruned_loss=0.03286, over 973325.47 frames.], batch size: 30, lr: 1.94e-04 +2022-05-07 05:34:24,311 INFO [train.py:715] (6/8) Epoch 11, batch 23900, loss[loss=0.122, simple_loss=0.1972, pruned_loss=0.02338, over 4893.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03247, over 972809.31 frames.], batch size: 22, lr: 1.94e-04 +2022-05-07 05:35:01,657 INFO [train.py:715] (6/8) Epoch 11, batch 23950, loss[loss=0.1586, simple_loss=0.2503, pruned_loss=0.03349, over 4940.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03278, over 972060.75 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 05:35:39,342 INFO [train.py:715] (6/8) Epoch 11, batch 24000, loss[loss=0.1408, simple_loss=0.2048, pruned_loss=0.03842, over 4952.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2107, pruned_loss=0.03223, over 972189.45 frames.], batch size: 24, lr: 1.94e-04 +2022-05-07 05:35:39,342 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 05:35:48,813 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.1059, simple_loss=0.19, pruned_loss=0.01092, over 914524.00 frames. 
+2022-05-07 05:36:27,138 INFO [train.py:715] (6/8) Epoch 11, batch 24050, loss[loss=0.125, simple_loss=0.2055, pruned_loss=0.02222, over 4986.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03239, over 972136.57 frames.], batch size: 28, lr: 1.94e-04 +2022-05-07 05:37:04,267 INFO [train.py:715] (6/8) Epoch 11, batch 24100, loss[loss=0.1253, simple_loss=0.2051, pruned_loss=0.02277, over 4873.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2101, pruned_loss=0.03223, over 972315.95 frames.], batch size: 22, lr: 1.94e-04 +2022-05-07 05:37:42,095 INFO [train.py:715] (6/8) Epoch 11, batch 24150, loss[loss=0.1237, simple_loss=0.1894, pruned_loss=0.02902, over 4925.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.03203, over 972908.93 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 05:38:20,367 INFO [train.py:715] (6/8) Epoch 11, batch 24200, loss[loss=0.1601, simple_loss=0.2327, pruned_loss=0.04375, over 4775.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2113, pruned_loss=0.03225, over 973491.83 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 05:38:57,452 INFO [train.py:715] (6/8) Epoch 11, batch 24250, loss[loss=0.1752, simple_loss=0.2548, pruned_loss=0.04781, over 4785.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.03201, over 972590.96 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 05:39:35,483 INFO [train.py:715] (6/8) Epoch 11, batch 24300, loss[loss=0.1235, simple_loss=0.2004, pruned_loss=0.02326, over 4968.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03236, over 973382.57 frames.], batch size: 25, lr: 1.94e-04 +2022-05-07 05:40:13,072 INFO [train.py:715] (6/8) Epoch 11, batch 24350, loss[loss=0.1553, simple_loss=0.2291, pruned_loss=0.04073, over 4970.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.03232, over 973806.35 frames.], batch size: 35, lr: 1.94e-04 +2022-05-07 05:40:50,679 INFO [train.py:715] (6/8) Epoch 11, batch 24400, loss[loss=0.1493, simple_loss=0.2156, pruned_loss=0.04144, over 4710.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03257, over 973640.54 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:41:28,271 INFO [train.py:715] (6/8) Epoch 11, batch 24450, loss[loss=0.1829, simple_loss=0.2618, pruned_loss=0.05196, over 4783.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2116, pruned_loss=0.03278, over 972902.92 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 05:42:06,402 INFO [train.py:715] (6/8) Epoch 11, batch 24500, loss[loss=0.1515, simple_loss=0.2177, pruned_loss=0.04262, over 4851.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2117, pruned_loss=0.03257, over 972079.85 frames.], batch size: 34, lr: 1.94e-04 +2022-05-07 05:42:45,043 INFO [train.py:715] (6/8) Epoch 11, batch 24550, loss[loss=0.125, simple_loss=0.2006, pruned_loss=0.02471, over 4697.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03295, over 971975.20 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:43:23,067 INFO [train.py:715] (6/8) Epoch 11, batch 24600, loss[loss=0.1344, simple_loss=0.2067, pruned_loss=0.03109, over 4700.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.0329, over 971717.36 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:44:01,543 INFO [train.py:715] (6/8) Epoch 11, batch 24650, loss[loss=0.1287, simple_loss=0.2178, pruned_loss=0.01976, over 4916.00 frames.], tot_loss[loss=0.1384, simple_loss=0.212, pruned_loss=0.03237, over 970739.48 frames.], batch size: 23, lr: 1.94e-04 
+2022-05-07 05:44:39,876 INFO [train.py:715] (6/8) Epoch 11, batch 24700, loss[loss=0.1274, simple_loss=0.2038, pruned_loss=0.0255, over 4812.00 frames.], tot_loss[loss=0.1381, simple_loss=0.212, pruned_loss=0.03214, over 971884.60 frames.], batch size: 26, lr: 1.94e-04 +2022-05-07 05:45:18,504 INFO [train.py:715] (6/8) Epoch 11, batch 24750, loss[loss=0.1295, simple_loss=0.2063, pruned_loss=0.02637, over 4828.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2119, pruned_loss=0.03191, over 972659.28 frames.], batch size: 13, lr: 1.94e-04 +2022-05-07 05:45:56,399 INFO [train.py:715] (6/8) Epoch 11, batch 24800, loss[loss=0.1676, simple_loss=0.2378, pruned_loss=0.0487, over 4988.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.032, over 972644.15 frames.], batch size: 31, lr: 1.94e-04 +2022-05-07 05:46:34,718 INFO [train.py:715] (6/8) Epoch 11, batch 24850, loss[loss=0.106, simple_loss=0.1823, pruned_loss=0.01485, over 4973.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.03236, over 972465.34 frames.], batch size: 24, lr: 1.94e-04 +2022-05-07 05:47:13,645 INFO [train.py:715] (6/8) Epoch 11, batch 24900, loss[loss=0.1542, simple_loss=0.2227, pruned_loss=0.04286, over 4873.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03266, over 972219.77 frames.], batch size: 20, lr: 1.94e-04 +2022-05-07 05:47:51,709 INFO [train.py:715] (6/8) Epoch 11, batch 24950, loss[loss=0.17, simple_loss=0.2337, pruned_loss=0.05317, over 4794.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.03218, over 972634.07 frames.], batch size: 14, lr: 1.94e-04 +2022-05-07 05:48:30,039 INFO [train.py:715] (6/8) Epoch 11, batch 25000, loss[loss=0.1272, simple_loss=0.214, pruned_loss=0.02024, over 4924.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2118, pruned_loss=0.0322, over 971642.82 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 05:49:08,334 INFO [train.py:715] (6/8) Epoch 11, batch 25050, loss[loss=0.1313, simple_loss=0.2008, pruned_loss=0.03088, over 4811.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.03194, over 971080.35 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 05:49:49,696 INFO [train.py:715] (6/8) Epoch 11, batch 25100, loss[loss=0.1235, simple_loss=0.1936, pruned_loss=0.02669, over 4920.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03196, over 970041.70 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 05:50:27,854 INFO [train.py:715] (6/8) Epoch 11, batch 25150, loss[loss=0.1333, simple_loss=0.1901, pruned_loss=0.0382, over 4972.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2105, pruned_loss=0.0322, over 970428.69 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:51:06,447 INFO [train.py:715] (6/8) Epoch 11, batch 25200, loss[loss=0.133, simple_loss=0.2013, pruned_loss=0.03239, over 4967.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2109, pruned_loss=0.03281, over 970615.33 frames.], batch size: 35, lr: 1.94e-04 +2022-05-07 05:51:45,299 INFO [train.py:715] (6/8) Epoch 11, batch 25250, loss[loss=0.1305, simple_loss=0.2124, pruned_loss=0.02429, over 4916.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2111, pruned_loss=0.03286, over 971268.84 frames.], batch size: 29, lr: 1.94e-04 +2022-05-07 05:52:23,576 INFO [train.py:715] (6/8) Epoch 11, batch 25300, loss[loss=0.1251, simple_loss=0.1981, pruned_loss=0.02605, over 4902.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2106, pruned_loss=0.03262, over 971746.89 frames.], batch size: 17, lr: 1.94e-04 +2022-05-07 
05:53:01,977 INFO [train.py:715] (6/8) Epoch 11, batch 25350, loss[loss=0.135, simple_loss=0.2111, pruned_loss=0.02949, over 4964.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2104, pruned_loss=0.03239, over 972266.47 frames.], batch size: 24, lr: 1.94e-04 +2022-05-07 05:53:40,617 INFO [train.py:715] (6/8) Epoch 11, batch 25400, loss[loss=0.1414, simple_loss=0.2209, pruned_loss=0.03098, over 4820.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2104, pruned_loss=0.03226, over 973015.01 frames.], batch size: 27, lr: 1.94e-04 +2022-05-07 05:54:19,434 INFO [train.py:715] (6/8) Epoch 11, batch 25450, loss[loss=0.1284, simple_loss=0.1963, pruned_loss=0.0302, over 4811.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03199, over 972813.18 frames.], batch size: 25, lr: 1.94e-04 +2022-05-07 05:54:57,487 INFO [train.py:715] (6/8) Epoch 11, batch 25500, loss[loss=0.1506, simple_loss=0.2216, pruned_loss=0.03974, over 4918.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2108, pruned_loss=0.0327, over 973221.65 frames.], batch size: 17, lr: 1.94e-04 +2022-05-07 05:55:36,096 INFO [train.py:715] (6/8) Epoch 11, batch 25550, loss[loss=0.1285, simple_loss=0.1939, pruned_loss=0.03154, over 4824.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03218, over 972298.52 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:56:15,330 INFO [train.py:715] (6/8) Epoch 11, batch 25600, loss[loss=0.1384, simple_loss=0.2159, pruned_loss=0.03046, over 4942.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2112, pruned_loss=0.03274, over 971171.45 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 05:56:53,610 INFO [train.py:715] (6/8) Epoch 11, batch 25650, loss[loss=0.1284, simple_loss=0.1926, pruned_loss=0.03215, over 4827.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03219, over 971769.72 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:57:31,766 INFO [train.py:715] (6/8) Epoch 11, batch 25700, loss[loss=0.1312, simple_loss=0.2097, pruned_loss=0.02636, over 4974.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2117, pruned_loss=0.03261, over 972347.42 frames.], batch size: 24, lr: 1.94e-04 +2022-05-07 05:58:10,598 INFO [train.py:715] (6/8) Epoch 11, batch 25750, loss[loss=0.1499, simple_loss=0.222, pruned_loss=0.03886, over 4978.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03301, over 972057.54 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:58:48,916 INFO [train.py:715] (6/8) Epoch 11, batch 25800, loss[loss=0.1515, simple_loss=0.2353, pruned_loss=0.03381, over 4900.00 frames.], tot_loss[loss=0.1393, simple_loss=0.212, pruned_loss=0.03333, over 971685.69 frames.], batch size: 19, lr: 1.94e-04 +2022-05-07 05:59:26,919 INFO [train.py:715] (6/8) Epoch 11, batch 25850, loss[loss=0.1191, simple_loss=0.1911, pruned_loss=0.02354, over 4912.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2106, pruned_loss=0.0328, over 972294.19 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 06:00:05,578 INFO [train.py:715] (6/8) Epoch 11, batch 25900, loss[loss=0.1991, simple_loss=0.2661, pruned_loss=0.06606, over 4893.00 frames.], tot_loss[loss=0.1376, simple_loss=0.21, pruned_loss=0.03263, over 972371.93 frames.], batch size: 19, lr: 1.94e-04 +2022-05-07 06:00:44,285 INFO [train.py:715] (6/8) Epoch 11, batch 25950, loss[loss=0.1497, simple_loss=0.2096, pruned_loss=0.04489, over 4864.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2104, pruned_loss=0.03272, over 972454.68 frames.], batch size: 32, lr: 1.94e-04 +2022-05-07 06:01:22,334 
INFO [train.py:715] (6/8) Epoch 11, batch 26000, loss[loss=0.14, simple_loss=0.2178, pruned_loss=0.03116, over 4892.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2115, pruned_loss=0.0334, over 972550.36 frames.], batch size: 22, lr: 1.94e-04 +2022-05-07 06:02:00,416 INFO [train.py:715] (6/8) Epoch 11, batch 26050, loss[loss=0.1271, simple_loss=0.1923, pruned_loss=0.03098, over 4948.00 frames.], tot_loss[loss=0.1387, simple_loss=0.211, pruned_loss=0.03319, over 972662.33 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 06:02:38,970 INFO [train.py:715] (6/8) Epoch 11, batch 26100, loss[loss=0.1502, simple_loss=0.2209, pruned_loss=0.03973, over 4983.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2113, pruned_loss=0.03321, over 973157.98 frames.], batch size: 25, lr: 1.94e-04 +2022-05-07 06:03:17,361 INFO [train.py:715] (6/8) Epoch 11, batch 26150, loss[loss=0.1293, simple_loss=0.1998, pruned_loss=0.0294, over 4956.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.03326, over 972703.73 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 06:03:55,330 INFO [train.py:715] (6/8) Epoch 11, batch 26200, loss[loss=0.1523, simple_loss=0.2138, pruned_loss=0.04541, over 4913.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.03324, over 972485.11 frames.], batch size: 19, lr: 1.94e-04 +2022-05-07 06:04:32,931 INFO [train.py:715] (6/8) Epoch 11, batch 26250, loss[loss=0.1257, simple_loss=0.2025, pruned_loss=0.02449, over 4990.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2114, pruned_loss=0.03299, over 971654.73 frames.], batch size: 28, lr: 1.94e-04 +2022-05-07 06:05:10,976 INFO [train.py:715] (6/8) Epoch 11, batch 26300, loss[loss=0.1438, simple_loss=0.2138, pruned_loss=0.03689, over 4974.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2114, pruned_loss=0.03296, over 971717.73 frames.], batch size: 24, lr: 1.94e-04 +2022-05-07 06:05:48,408 INFO [train.py:715] (6/8) Epoch 11, batch 26350, loss[loss=0.1618, simple_loss=0.2327, pruned_loss=0.04545, over 4888.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2105, pruned_loss=0.03241, over 971447.42 frames.], batch size: 17, lr: 1.94e-04 +2022-05-07 06:06:25,428 INFO [train.py:715] (6/8) Epoch 11, batch 26400, loss[loss=0.144, simple_loss=0.2273, pruned_loss=0.03034, over 4949.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2103, pruned_loss=0.03234, over 971482.65 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 06:07:03,856 INFO [train.py:715] (6/8) Epoch 11, batch 26450, loss[loss=0.1674, simple_loss=0.2443, pruned_loss=0.04523, over 4957.00 frames.], tot_loss[loss=0.1372, simple_loss=0.21, pruned_loss=0.0322, over 972174.00 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 06:07:41,339 INFO [train.py:715] (6/8) Epoch 11, batch 26500, loss[loss=0.1473, simple_loss=0.2208, pruned_loss=0.03693, over 4896.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2107, pruned_loss=0.03281, over 972159.40 frames.], batch size: 38, lr: 1.94e-04 +2022-05-07 06:08:19,084 INFO [train.py:715] (6/8) Epoch 11, batch 26550, loss[loss=0.1279, simple_loss=0.2053, pruned_loss=0.0252, over 4826.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2112, pruned_loss=0.03266, over 972143.83 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 06:08:56,821 INFO [train.py:715] (6/8) Epoch 11, batch 26600, loss[loss=0.1347, simple_loss=0.2011, pruned_loss=0.03409, over 4771.00 frames.], tot_loss[loss=0.139, simple_loss=0.2118, pruned_loss=0.03308, over 972920.37 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 06:09:34,847 INFO 
[train.py:715] (6/8) Epoch 11, batch 26650, loss[loss=0.1217, simple_loss=0.2018, pruned_loss=0.02082, over 4980.00 frames.], tot_loss[loss=0.1383, simple_loss=0.211, pruned_loss=0.0328, over 972986.27 frames.], batch size: 28, lr: 1.94e-04 +2022-05-07 06:10:12,912 INFO [train.py:715] (6/8) Epoch 11, batch 26700, loss[loss=0.1306, simple_loss=0.2056, pruned_loss=0.02782, over 4865.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03266, over 973076.01 frames.], batch size: 16, lr: 1.94e-04 +2022-05-07 06:10:49,899 INFO [train.py:715] (6/8) Epoch 11, batch 26750, loss[loss=0.1337, simple_loss=0.2006, pruned_loss=0.03344, over 4745.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.03237, over 972450.29 frames.], batch size: 16, lr: 1.94e-04 +2022-05-07 06:11:28,543 INFO [train.py:715] (6/8) Epoch 11, batch 26800, loss[loss=0.1405, simple_loss=0.2205, pruned_loss=0.03023, over 4991.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2105, pruned_loss=0.0319, over 973013.08 frames.], batch size: 26, lr: 1.94e-04 +2022-05-07 06:12:06,145 INFO [train.py:715] (6/8) Epoch 11, batch 26850, loss[loss=0.1252, simple_loss=0.1977, pruned_loss=0.02629, over 4832.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03194, over 972950.15 frames.], batch size: 13, lr: 1.94e-04 +2022-05-07 06:12:43,632 INFO [train.py:715] (6/8) Epoch 11, batch 26900, loss[loss=0.1343, simple_loss=0.2145, pruned_loss=0.02704, over 4749.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2098, pruned_loss=0.03191, over 972168.84 frames.], batch size: 19, lr: 1.94e-04 +2022-05-07 06:13:21,272 INFO [train.py:715] (6/8) Epoch 11, batch 26950, loss[loss=0.149, simple_loss=0.222, pruned_loss=0.03798, over 4831.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03224, over 972947.30 frames.], batch size: 26, lr: 1.94e-04 +2022-05-07 06:13:59,654 INFO [train.py:715] (6/8) Epoch 11, batch 27000, loss[loss=0.1435, simple_loss=0.2036, pruned_loss=0.04173, over 4899.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2117, pruned_loss=0.03272, over 972005.35 frames.], batch size: 17, lr: 1.94e-04 +2022-05-07 06:13:59,655 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 06:14:09,117 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.1059, simple_loss=0.19, pruned_loss=0.01084, over 914524.00 frames. 
+2022-05-07 06:14:47,547 INFO [train.py:715] (6/8) Epoch 11, batch 27050, loss[loss=0.1352, simple_loss=0.2042, pruned_loss=0.0331, over 4900.00 frames.], tot_loss[loss=0.139, simple_loss=0.2119, pruned_loss=0.03306, over 972551.99 frames.], batch size: 17, lr: 1.94e-04 +2022-05-07 06:15:25,150 INFO [train.py:715] (6/8) Epoch 11, batch 27100, loss[loss=0.163, simple_loss=0.2423, pruned_loss=0.04185, over 4919.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03313, over 973067.10 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 06:16:02,382 INFO [train.py:715] (6/8) Epoch 11, batch 27150, loss[loss=0.1252, simple_loss=0.2116, pruned_loss=0.01941, over 4806.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2127, pruned_loss=0.03331, over 972826.26 frames.], batch size: 25, lr: 1.94e-04 +2022-05-07 06:16:41,021 INFO [train.py:715] (6/8) Epoch 11, batch 27200, loss[loss=0.1483, simple_loss=0.2235, pruned_loss=0.03657, over 4746.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03348, over 972718.36 frames.], batch size: 16, lr: 1.94e-04 +2022-05-07 06:17:18,729 INFO [train.py:715] (6/8) Epoch 11, batch 27250, loss[loss=0.1336, simple_loss=0.2048, pruned_loss=0.03119, over 4827.00 frames.], tot_loss[loss=0.139, simple_loss=0.2119, pruned_loss=0.03303, over 972874.94 frames.], batch size: 30, lr: 1.94e-04 +2022-05-07 06:17:56,618 INFO [train.py:715] (6/8) Epoch 11, batch 27300, loss[loss=0.1215, simple_loss=0.2026, pruned_loss=0.02017, over 4902.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.0331, over 973158.16 frames.], batch size: 22, lr: 1.94e-04 +2022-05-07 06:18:34,288 INFO [train.py:715] (6/8) Epoch 11, batch 27350, loss[loss=0.1149, simple_loss=0.1878, pruned_loss=0.02101, over 4907.00 frames.], tot_loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.03235, over 972110.33 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 06:19:13,064 INFO [train.py:715] (6/8) Epoch 11, batch 27400, loss[loss=0.1276, simple_loss=0.1878, pruned_loss=0.03375, over 4700.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03189, over 971749.56 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 06:19:50,855 INFO [train.py:715] (6/8) Epoch 11, batch 27450, loss[loss=0.1296, simple_loss=0.1949, pruned_loss=0.0321, over 4853.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03202, over 971474.74 frames.], batch size: 20, lr: 1.94e-04 +2022-05-07 06:20:28,137 INFO [train.py:715] (6/8) Epoch 11, batch 27500, loss[loss=0.1626, simple_loss=0.231, pruned_loss=0.04715, over 4924.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2125, pruned_loss=0.03233, over 972136.58 frames.], batch size: 39, lr: 1.94e-04 +2022-05-07 06:21:07,302 INFO [train.py:715] (6/8) Epoch 11, batch 27550, loss[loss=0.1424, simple_loss=0.2242, pruned_loss=0.03034, over 4871.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2122, pruned_loss=0.03236, over 972053.80 frames.], batch size: 22, lr: 1.94e-04 +2022-05-07 06:21:45,752 INFO [train.py:715] (6/8) Epoch 11, batch 27600, loss[loss=0.1266, simple_loss=0.2125, pruned_loss=0.0203, over 4978.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2121, pruned_loss=0.03213, over 971760.39 frames.], batch size: 28, lr: 1.94e-04 +2022-05-07 06:22:23,476 INFO [train.py:715] (6/8) Epoch 11, batch 27650, loss[loss=0.1344, simple_loss=0.2095, pruned_loss=0.02967, over 4914.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2115, pruned_loss=0.03167, over 971511.82 frames.], batch size: 39, lr: 1.94e-04 +2022-05-07 
06:23:01,300 INFO [train.py:715] (6/8) Epoch 11, batch 27700, loss[loss=0.1218, simple_loss=0.1903, pruned_loss=0.02666, over 4953.00 frames.], tot_loss[loss=0.1369, simple_loss=0.211, pruned_loss=0.03145, over 972032.53 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 06:23:39,625 INFO [train.py:715] (6/8) Epoch 11, batch 27750, loss[loss=0.1496, simple_loss=0.2304, pruned_loss=0.03439, over 4919.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2114, pruned_loss=0.03152, over 972021.79 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 06:24:17,572 INFO [train.py:715] (6/8) Epoch 11, batch 27800, loss[loss=0.1519, simple_loss=0.2242, pruned_loss=0.03981, over 4950.00 frames.], tot_loss[loss=0.1373, simple_loss=0.211, pruned_loss=0.03179, over 971590.60 frames.], batch size: 21, lr: 1.93e-04 +2022-05-07 06:24:54,556 INFO [train.py:715] (6/8) Epoch 11, batch 27850, loss[loss=0.1558, simple_loss=0.2285, pruned_loss=0.04156, over 4954.00 frames.], tot_loss[loss=0.1373, simple_loss=0.211, pruned_loss=0.03179, over 971790.52 frames.], batch size: 24, lr: 1.93e-04 +2022-05-07 06:25:32,918 INFO [train.py:715] (6/8) Epoch 11, batch 27900, loss[loss=0.1456, simple_loss=0.2227, pruned_loss=0.03426, over 4986.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.03178, over 972163.57 frames.], batch size: 28, lr: 1.93e-04 +2022-05-07 06:26:10,973 INFO [train.py:715] (6/8) Epoch 11, batch 27950, loss[loss=0.1457, simple_loss=0.2178, pruned_loss=0.03677, over 4757.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03186, over 971757.12 frames.], batch size: 16, lr: 1.93e-04 +2022-05-07 06:26:48,582 INFO [train.py:715] (6/8) Epoch 11, batch 28000, loss[loss=0.1285, simple_loss=0.2061, pruned_loss=0.02543, over 4962.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03154, over 971594.40 frames.], batch size: 35, lr: 1.93e-04 +2022-05-07 06:27:26,139 INFO [train.py:715] (6/8) Epoch 11, batch 28050, loss[loss=0.1408, simple_loss=0.2224, pruned_loss=0.02964, over 4976.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03146, over 972387.23 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:28:04,142 INFO [train.py:715] (6/8) Epoch 11, batch 28100, loss[loss=0.1369, simple_loss=0.2162, pruned_loss=0.02885, over 4958.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2109, pruned_loss=0.03165, over 972342.28 frames.], batch size: 21, lr: 1.93e-04 +2022-05-07 06:28:41,422 INFO [train.py:715] (6/8) Epoch 11, batch 28150, loss[loss=0.1302, simple_loss=0.2085, pruned_loss=0.02597, over 4805.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03136, over 972274.32 frames.], batch size: 25, lr: 1.93e-04 +2022-05-07 06:29:18,869 INFO [train.py:715] (6/8) Epoch 11, batch 28200, loss[loss=0.1219, simple_loss=0.1945, pruned_loss=0.02471, over 4814.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03142, over 971032.81 frames.], batch size: 26, lr: 1.93e-04 +2022-05-07 06:29:57,423 INFO [train.py:715] (6/8) Epoch 11, batch 28250, loss[loss=0.1397, simple_loss=0.2205, pruned_loss=0.02947, over 4776.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.03181, over 970923.17 frames.], batch size: 17, lr: 1.93e-04 +2022-05-07 06:30:34,926 INFO [train.py:715] (6/8) Epoch 11, batch 28300, loss[loss=0.1256, simple_loss=0.196, pruned_loss=0.02762, over 4843.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03143, over 970717.59 frames.], batch size: 13, lr: 1.93e-04 +2022-05-07 
06:31:12,831 INFO [train.py:715] (6/8) Epoch 11, batch 28350, loss[loss=0.1183, simple_loss=0.1947, pruned_loss=0.0209, over 4743.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03199, over 971725.25 frames.], batch size: 16, lr: 1.93e-04 +2022-05-07 06:31:50,527 INFO [train.py:715] (6/8) Epoch 11, batch 28400, loss[loss=0.1339, simple_loss=0.214, pruned_loss=0.0269, over 4987.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2127, pruned_loss=0.03252, over 972310.86 frames.], batch size: 25, lr: 1.93e-04 +2022-05-07 06:32:28,901 INFO [train.py:715] (6/8) Epoch 11, batch 28450, loss[loss=0.139, simple_loss=0.2186, pruned_loss=0.02976, over 4946.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2135, pruned_loss=0.03298, over 971704.17 frames.], batch size: 35, lr: 1.93e-04 +2022-05-07 06:33:06,940 INFO [train.py:715] (6/8) Epoch 11, batch 28500, loss[loss=0.1669, simple_loss=0.2524, pruned_loss=0.04066, over 4960.00 frames.], tot_loss[loss=0.14, simple_loss=0.2136, pruned_loss=0.03317, over 971836.18 frames.], batch size: 39, lr: 1.93e-04 +2022-05-07 06:33:44,631 INFO [train.py:715] (6/8) Epoch 11, batch 28550, loss[loss=0.1435, simple_loss=0.2113, pruned_loss=0.03789, over 4863.00 frames.], tot_loss[loss=0.14, simple_loss=0.2133, pruned_loss=0.03329, over 972316.98 frames.], batch size: 32, lr: 1.93e-04 +2022-05-07 06:34:23,472 INFO [train.py:715] (6/8) Epoch 11, batch 28600, loss[loss=0.1215, simple_loss=0.2002, pruned_loss=0.02142, over 4934.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2136, pruned_loss=0.03357, over 971985.41 frames.], batch size: 23, lr: 1.93e-04 +2022-05-07 06:35:01,438 INFO [train.py:715] (6/8) Epoch 11, batch 28650, loss[loss=0.1485, simple_loss=0.2213, pruned_loss=0.03788, over 4866.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2138, pruned_loss=0.03373, over 972831.37 frames.], batch size: 32, lr: 1.93e-04 +2022-05-07 06:35:39,422 INFO [train.py:715] (6/8) Epoch 11, batch 28700, loss[loss=0.1109, simple_loss=0.1858, pruned_loss=0.01805, over 4783.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2136, pruned_loss=0.03364, over 972024.42 frames.], batch size: 18, lr: 1.93e-04 +2022-05-07 06:36:17,177 INFO [train.py:715] (6/8) Epoch 11, batch 28750, loss[loss=0.1686, simple_loss=0.2405, pruned_loss=0.04832, over 4966.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03263, over 972259.82 frames.], batch size: 39, lr: 1.93e-04 +2022-05-07 06:36:55,926 INFO [train.py:715] (6/8) Epoch 11, batch 28800, loss[loss=0.1336, simple_loss=0.2051, pruned_loss=0.03107, over 4981.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03253, over 972166.62 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:37:33,392 INFO [train.py:715] (6/8) Epoch 11, batch 28850, loss[loss=0.165, simple_loss=0.2278, pruned_loss=0.0511, over 4985.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.03232, over 972239.02 frames.], batch size: 31, lr: 1.93e-04 +2022-05-07 06:38:10,805 INFO [train.py:715] (6/8) Epoch 11, batch 28900, loss[loss=0.1236, simple_loss=0.188, pruned_loss=0.02966, over 4822.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03204, over 972639.96 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:38:49,569 INFO [train.py:715] (6/8) Epoch 11, batch 28950, loss[loss=0.1303, simple_loss=0.2053, pruned_loss=0.02768, over 4817.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.03112, over 971753.27 frames.], batch size: 26, lr: 1.93e-04 +2022-05-07 06:39:27,041 
INFO [train.py:715] (6/8) Epoch 11, batch 29000, loss[loss=0.1536, simple_loss=0.2329, pruned_loss=0.03715, over 4774.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03131, over 971446.42 frames.], batch size: 18, lr: 1.93e-04 +2022-05-07 06:40:04,957 INFO [train.py:715] (6/8) Epoch 11, batch 29050, loss[loss=0.1567, simple_loss=0.2344, pruned_loss=0.03944, over 4853.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.03151, over 972133.69 frames.], batch size: 30, lr: 1.93e-04 +2022-05-07 06:40:42,753 INFO [train.py:715] (6/8) Epoch 11, batch 29100, loss[loss=0.1717, simple_loss=0.2492, pruned_loss=0.04714, over 4688.00 frames.], tot_loss[loss=0.138, simple_loss=0.2119, pruned_loss=0.03211, over 972038.95 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:41:21,083 INFO [train.py:715] (6/8) Epoch 11, batch 29150, loss[loss=0.1443, simple_loss=0.2153, pruned_loss=0.03662, over 4778.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2119, pruned_loss=0.03242, over 972211.28 frames.], batch size: 18, lr: 1.93e-04 +2022-05-07 06:41:58,821 INFO [train.py:715] (6/8) Epoch 11, batch 29200, loss[loss=0.1745, simple_loss=0.2497, pruned_loss=0.04967, over 4846.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2118, pruned_loss=0.03275, over 971955.08 frames.], batch size: 32, lr: 1.93e-04 +2022-05-07 06:42:36,369 INFO [train.py:715] (6/8) Epoch 11, batch 29250, loss[loss=0.1441, simple_loss=0.2148, pruned_loss=0.03673, over 4954.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2117, pruned_loss=0.0327, over 972722.74 frames.], batch size: 21, lr: 1.93e-04 +2022-05-07 06:43:15,063 INFO [train.py:715] (6/8) Epoch 11, batch 29300, loss[loss=0.1216, simple_loss=0.1994, pruned_loss=0.02187, over 4815.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03261, over 972175.26 frames.], batch size: 25, lr: 1.93e-04 +2022-05-07 06:43:53,137 INFO [train.py:715] (6/8) Epoch 11, batch 29350, loss[loss=0.1227, simple_loss=0.1836, pruned_loss=0.0309, over 4985.00 frames.], tot_loss[loss=0.138, simple_loss=0.211, pruned_loss=0.03248, over 972161.73 frames.], batch size: 28, lr: 1.93e-04 +2022-05-07 06:44:30,902 INFO [train.py:715] (6/8) Epoch 11, batch 29400, loss[loss=0.1447, simple_loss=0.2125, pruned_loss=0.03847, over 4918.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03223, over 971964.60 frames.], batch size: 18, lr: 1.93e-04 +2022-05-07 06:45:08,811 INFO [train.py:715] (6/8) Epoch 11, batch 29450, loss[loss=0.1412, simple_loss=0.2062, pruned_loss=0.03813, over 4973.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2114, pruned_loss=0.03284, over 972276.46 frames.], batch size: 35, lr: 1.93e-04 +2022-05-07 06:45:46,708 INFO [train.py:715] (6/8) Epoch 11, batch 29500, loss[loss=0.1462, simple_loss=0.2282, pruned_loss=0.03213, over 4811.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2114, pruned_loss=0.03285, over 972367.48 frames.], batch size: 21, lr: 1.93e-04 +2022-05-07 06:46:25,302 INFO [train.py:715] (6/8) Epoch 11, batch 29550, loss[loss=0.1598, simple_loss=0.2242, pruned_loss=0.04776, over 4770.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2112, pruned_loss=0.03293, over 972018.32 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 06:47:02,905 INFO [train.py:715] (6/8) Epoch 11, batch 29600, loss[loss=0.1611, simple_loss=0.2351, pruned_loss=0.04353, over 4731.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2113, pruned_loss=0.03302, over 972370.85 frames.], batch size: 16, lr: 1.93e-04 +2022-05-07 06:47:41,474 INFO 
[train.py:715] (6/8) Epoch 11, batch 29650, loss[loss=0.1569, simple_loss=0.2416, pruned_loss=0.0361, over 4952.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03278, over 971879.14 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:48:19,467 INFO [train.py:715] (6/8) Epoch 11, batch 29700, loss[loss=0.1474, simple_loss=0.2343, pruned_loss=0.03022, over 4881.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2112, pruned_loss=0.03258, over 971916.57 frames.], batch size: 22, lr: 1.93e-04 +2022-05-07 06:48:57,626 INFO [train.py:715] (6/8) Epoch 11, batch 29750, loss[loss=0.1261, simple_loss=0.1983, pruned_loss=0.02693, over 4817.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03248, over 971834.86 frames.], batch size: 21, lr: 1.93e-04 +2022-05-07 06:49:35,437 INFO [train.py:715] (6/8) Epoch 11, batch 29800, loss[loss=0.1155, simple_loss=0.2029, pruned_loss=0.0141, over 4968.00 frames.], tot_loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.03228, over 972326.79 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:50:13,829 INFO [train.py:715] (6/8) Epoch 11, batch 29850, loss[loss=0.1102, simple_loss=0.1835, pruned_loss=0.01846, over 4874.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03214, over 972178.13 frames.], batch size: 22, lr: 1.93e-04 +2022-05-07 06:50:52,365 INFO [train.py:715] (6/8) Epoch 11, batch 29900, loss[loss=0.1364, simple_loss=0.2089, pruned_loss=0.03192, over 4839.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03179, over 972271.53 frames.], batch size: 12, lr: 1.93e-04 +2022-05-07 06:51:29,991 INFO [train.py:715] (6/8) Epoch 11, batch 29950, loss[loss=0.1257, simple_loss=0.2051, pruned_loss=0.02317, over 4755.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03177, over 972148.39 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 06:52:08,180 INFO [train.py:715] (6/8) Epoch 11, batch 30000, loss[loss=0.1323, simple_loss=0.2033, pruned_loss=0.03062, over 4803.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03177, over 972783.47 frames.], batch size: 21, lr: 1.93e-04 +2022-05-07 06:52:08,181 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 06:52:17,626 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.106, simple_loss=0.19, pruned_loss=0.01095, over 914524.00 frames. 
+2022-05-07 06:52:56,516 INFO [train.py:715] (6/8) Epoch 11, batch 30050, loss[loss=0.1712, simple_loss=0.2472, pruned_loss=0.04764, over 4928.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03201, over 972721.67 frames.], batch size: 39, lr: 1.93e-04 +2022-05-07 06:53:34,388 INFO [train.py:715] (6/8) Epoch 11, batch 30100, loss[loss=0.1457, simple_loss=0.2206, pruned_loss=0.03539, over 4904.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2125, pruned_loss=0.0329, over 972930.72 frames.], batch size: 39, lr: 1.93e-04 +2022-05-07 06:54:13,051 INFO [train.py:715] (6/8) Epoch 11, batch 30150, loss[loss=0.1397, simple_loss=0.2189, pruned_loss=0.03026, over 4932.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03273, over 972686.24 frames.], batch size: 29, lr: 1.93e-04 +2022-05-07 06:54:50,400 INFO [train.py:715] (6/8) Epoch 11, batch 30200, loss[loss=0.1307, simple_loss=0.217, pruned_loss=0.02219, over 4971.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2122, pruned_loss=0.03319, over 972610.56 frames.], batch size: 23, lr: 1.93e-04 +2022-05-07 06:55:29,245 INFO [train.py:715] (6/8) Epoch 11, batch 30250, loss[loss=0.1318, simple_loss=0.2101, pruned_loss=0.02672, over 4903.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2127, pruned_loss=0.03286, over 972092.74 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 06:56:07,229 INFO [train.py:715] (6/8) Epoch 11, batch 30300, loss[loss=0.112, simple_loss=0.1832, pruned_loss=0.02042, over 4784.00 frames.], tot_loss[loss=0.1393, simple_loss=0.213, pruned_loss=0.03279, over 972937.20 frames.], batch size: 14, lr: 1.93e-04 +2022-05-07 06:56:45,178 INFO [train.py:715] (6/8) Epoch 11, batch 30350, loss[loss=0.1309, simple_loss=0.1968, pruned_loss=0.03246, over 4966.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2118, pruned_loss=0.03232, over 972579.19 frames.], batch size: 35, lr: 1.93e-04 +2022-05-07 06:57:23,262 INFO [train.py:715] (6/8) Epoch 11, batch 30400, loss[loss=0.1326, simple_loss=0.2117, pruned_loss=0.0267, over 4992.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03193, over 973321.98 frames.], batch size: 20, lr: 1.93e-04 +2022-05-07 06:58:01,503 INFO [train.py:715] (6/8) Epoch 11, batch 30450, loss[loss=0.1618, simple_loss=0.2209, pruned_loss=0.05134, over 4873.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.03227, over 972673.74 frames.], batch size: 32, lr: 1.93e-04 +2022-05-07 06:58:39,333 INFO [train.py:715] (6/8) Epoch 11, batch 30500, loss[loss=0.1449, simple_loss=0.2141, pruned_loss=0.03786, over 4872.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03264, over 973024.68 frames.], batch size: 22, lr: 1.93e-04 +2022-05-07 06:59:17,143 INFO [train.py:715] (6/8) Epoch 11, batch 30550, loss[loss=0.135, simple_loss=0.2166, pruned_loss=0.02671, over 4771.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.03266, over 972450.31 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 06:59:56,405 INFO [train.py:715] (6/8) Epoch 11, batch 30600, loss[loss=0.1443, simple_loss=0.2205, pruned_loss=0.03403, over 4820.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.0325, over 972449.01 frames.], batch size: 25, lr: 1.93e-04 +2022-05-07 07:00:35,034 INFO [train.py:715] (6/8) Epoch 11, batch 30650, loss[loss=0.1509, simple_loss=0.2208, pruned_loss=0.04046, over 4782.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2109, pruned_loss=0.03269, over 972839.79 frames.], batch size: 18, lr: 1.93e-04 +2022-05-07 
07:01:13,828 INFO [train.py:715] (6/8) Epoch 11, batch 30700, loss[loss=0.1246, simple_loss=0.2034, pruned_loss=0.02288, over 4948.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2116, pruned_loss=0.033, over 971978.63 frames.], batch size: 29, lr: 1.93e-04 +2022-05-07 07:01:52,335 INFO [train.py:715] (6/8) Epoch 11, batch 30750, loss[loss=0.1294, simple_loss=0.2016, pruned_loss=0.02855, over 4831.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2116, pruned_loss=0.0328, over 970869.77 frames.], batch size: 13, lr: 1.93e-04 +2022-05-07 07:02:30,948 INFO [train.py:715] (6/8) Epoch 11, batch 30800, loss[loss=0.1195, simple_loss=0.1932, pruned_loss=0.02287, over 4976.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2103, pruned_loss=0.03202, over 970267.56 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 07:03:09,708 INFO [train.py:715] (6/8) Epoch 11, batch 30850, loss[loss=0.1138, simple_loss=0.1932, pruned_loss=0.01716, over 4862.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03196, over 971279.96 frames.], batch size: 20, lr: 1.93e-04 +2022-05-07 07:03:48,267 INFO [train.py:715] (6/8) Epoch 11, batch 30900, loss[loss=0.1428, simple_loss=0.2104, pruned_loss=0.03761, over 4813.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2102, pruned_loss=0.03195, over 971844.63 frames.], batch size: 27, lr: 1.93e-04 +2022-05-07 07:04:27,074 INFO [train.py:715] (6/8) Epoch 11, batch 30950, loss[loss=0.1233, simple_loss=0.1932, pruned_loss=0.02664, over 4804.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.03162, over 972341.00 frames.], batch size: 25, lr: 1.93e-04 +2022-05-07 07:05:06,007 INFO [train.py:715] (6/8) Epoch 11, batch 31000, loss[loss=0.1383, simple_loss=0.2068, pruned_loss=0.03493, over 4967.00 frames.], tot_loss[loss=0.1368, simple_loss=0.21, pruned_loss=0.03177, over 972370.45 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 07:05:44,522 INFO [train.py:715] (6/8) Epoch 11, batch 31050, loss[loss=0.1116, simple_loss=0.1898, pruned_loss=0.01669, over 4898.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.03202, over 972414.46 frames.], batch size: 17, lr: 1.93e-04 +2022-05-07 07:06:23,342 INFO [train.py:715] (6/8) Epoch 11, batch 31100, loss[loss=0.153, simple_loss=0.2314, pruned_loss=0.03735, over 4955.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.0316, over 972243.69 frames.], batch size: 24, lr: 1.93e-04 +2022-05-07 07:07:01,743 INFO [train.py:715] (6/8) Epoch 11, batch 31150, loss[loss=0.1704, simple_loss=0.2277, pruned_loss=0.05658, over 4831.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03209, over 971470.17 frames.], batch size: 27, lr: 1.93e-04 +2022-05-07 07:07:39,380 INFO [train.py:715] (6/8) Epoch 11, batch 31200, loss[loss=0.1356, simple_loss=0.2144, pruned_loss=0.02843, over 4902.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03253, over 972738.23 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 07:08:17,483 INFO [train.py:715] (6/8) Epoch 11, batch 31250, loss[loss=0.1266, simple_loss=0.1992, pruned_loss=0.02698, over 4992.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2118, pruned_loss=0.0327, over 973399.93 frames.], batch size: 14, lr: 1.93e-04 +2022-05-07 07:08:55,765 INFO [train.py:715] (6/8) Epoch 11, batch 31300, loss[loss=0.1338, simple_loss=0.2042, pruned_loss=0.03172, over 4835.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03264, over 972964.66 frames.], batch size: 32, lr: 1.93e-04 +2022-05-07 07:09:33,561 INFO 
[train.py:715] (6/8) Epoch 11, batch 31350, loss[loss=0.1789, simple_loss=0.2517, pruned_loss=0.05303, over 4809.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2119, pruned_loss=0.03312, over 973119.45 frames.], batch size: 26, lr: 1.93e-04 +2022-05-07 07:10:10,909 INFO [train.py:715] (6/8) Epoch 11, batch 31400, loss[loss=0.1383, simple_loss=0.2097, pruned_loss=0.03351, over 4841.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03277, over 972532.14 frames.], batch size: 13, lr: 1.93e-04 +2022-05-07 07:10:48,407 INFO [train.py:715] (6/8) Epoch 11, batch 31450, loss[loss=0.1129, simple_loss=0.1878, pruned_loss=0.01903, over 4953.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2108, pruned_loss=0.03248, over 972556.41 frames.], batch size: 14, lr: 1.93e-04 +2022-05-07 07:11:26,018 INFO [train.py:715] (6/8) Epoch 11, batch 31500, loss[loss=0.1439, simple_loss=0.2274, pruned_loss=0.03015, over 4813.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03228, over 972839.04 frames.], batch size: 27, lr: 1.93e-04 +2022-05-07 07:12:03,668 INFO [train.py:715] (6/8) Epoch 11, batch 31550, loss[loss=0.1405, simple_loss=0.215, pruned_loss=0.03303, over 4883.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03201, over 972342.68 frames.], batch size: 22, lr: 1.93e-04 +2022-05-07 07:12:41,672 INFO [train.py:715] (6/8) Epoch 11, batch 31600, loss[loss=0.1594, simple_loss=0.2295, pruned_loss=0.04467, over 4950.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.0323, over 971883.48 frames.], batch size: 18, lr: 1.93e-04 +2022-05-07 07:13:19,757 INFO [train.py:715] (6/8) Epoch 11, batch 31650, loss[loss=0.1478, simple_loss=0.2105, pruned_loss=0.04257, over 4832.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03271, over 972299.36 frames.], batch size: 30, lr: 1.93e-04 +2022-05-07 07:13:57,689 INFO [train.py:715] (6/8) Epoch 11, batch 31700, loss[loss=0.1489, simple_loss=0.2228, pruned_loss=0.03749, over 4850.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03216, over 972820.41 frames.], batch size: 32, lr: 1.93e-04 +2022-05-07 07:14:35,213 INFO [train.py:715] (6/8) Epoch 11, batch 31750, loss[loss=0.1637, simple_loss=0.2386, pruned_loss=0.04438, over 4867.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03244, over 972565.00 frames.], batch size: 20, lr: 1.93e-04 +2022-05-07 07:15:14,061 INFO [train.py:715] (6/8) Epoch 11, batch 31800, loss[loss=0.1482, simple_loss=0.2236, pruned_loss=0.03646, over 4869.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03246, over 972390.59 frames.], batch size: 16, lr: 1.93e-04 +2022-05-07 07:15:52,638 INFO [train.py:715] (6/8) Epoch 11, batch 31850, loss[loss=0.1331, simple_loss=0.2083, pruned_loss=0.0289, over 4987.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2118, pruned_loss=0.03241, over 972712.75 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 07:16:30,870 INFO [train.py:715] (6/8) Epoch 11, batch 31900, loss[loss=0.1514, simple_loss=0.2175, pruned_loss=0.04269, over 4819.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2124, pruned_loss=0.03266, over 973009.64 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 07:17:09,161 INFO [train.py:715] (6/8) Epoch 11, batch 31950, loss[loss=0.1533, simple_loss=0.2383, pruned_loss=0.03411, over 4891.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2127, pruned_loss=0.03295, over 972894.18 frames.], batch size: 22, lr: 1.93e-04 +2022-05-07 07:17:47,944 INFO 
[train.py:715] (6/8) Epoch 11, batch 32000, loss[loss=0.1659, simple_loss=0.2331, pruned_loss=0.04931, over 4779.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2126, pruned_loss=0.03299, over 973306.92 frames.], batch size: 14, lr: 1.93e-04 +2022-05-07 07:18:26,168 INFO [train.py:715] (6/8) Epoch 11, batch 32050, loss[loss=0.1364, simple_loss=0.2113, pruned_loss=0.03074, over 4812.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.03288, over 973436.39 frames.], batch size: 25, lr: 1.93e-04 +2022-05-07 07:19:04,554 INFO [train.py:715] (6/8) Epoch 11, batch 32100, loss[loss=0.1193, simple_loss=0.1871, pruned_loss=0.02576, over 4780.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03243, over 973081.67 frames.], batch size: 14, lr: 1.92e-04 +2022-05-07 07:19:42,572 INFO [train.py:715] (6/8) Epoch 11, batch 32150, loss[loss=0.1389, simple_loss=0.2061, pruned_loss=0.03586, over 4819.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2103, pruned_loss=0.032, over 973401.23 frames.], batch size: 13, lr: 1.92e-04 +2022-05-07 07:20:19,990 INFO [train.py:715] (6/8) Epoch 11, batch 32200, loss[loss=0.1399, simple_loss=0.2092, pruned_loss=0.03531, over 4976.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03197, over 972618.10 frames.], batch size: 14, lr: 1.92e-04 +2022-05-07 07:20:57,515 INFO [train.py:715] (6/8) Epoch 11, batch 32250, loss[loss=0.1398, simple_loss=0.2084, pruned_loss=0.03558, over 4749.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03185, over 973211.22 frames.], batch size: 16, lr: 1.92e-04 +2022-05-07 07:21:35,351 INFO [train.py:715] (6/8) Epoch 11, batch 32300, loss[loss=0.1195, simple_loss=0.2005, pruned_loss=0.01924, over 4944.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03164, over 973432.50 frames.], batch size: 29, lr: 1.92e-04 +2022-05-07 07:22:13,998 INFO [train.py:715] (6/8) Epoch 11, batch 32350, loss[loss=0.1269, simple_loss=0.1983, pruned_loss=0.02778, over 4832.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2113, pruned_loss=0.03227, over 973016.31 frames.], batch size: 13, lr: 1.92e-04 +2022-05-07 07:22:51,417 INFO [train.py:715] (6/8) Epoch 11, batch 32400, loss[loss=0.1269, simple_loss=0.1975, pruned_loss=0.02812, over 4988.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.0321, over 973031.43 frames.], batch size: 28, lr: 1.92e-04 +2022-05-07 07:23:29,417 INFO [train.py:715] (6/8) Epoch 11, batch 32450, loss[loss=0.1481, simple_loss=0.2221, pruned_loss=0.03704, over 4963.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03213, over 973398.19 frames.], batch size: 25, lr: 1.92e-04 +2022-05-07 07:24:07,457 INFO [train.py:715] (6/8) Epoch 11, batch 32500, loss[loss=0.1184, simple_loss=0.185, pruned_loss=0.02589, over 4744.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.03224, over 973288.82 frames.], batch size: 16, lr: 1.92e-04 +2022-05-07 07:24:45,517 INFO [train.py:715] (6/8) Epoch 11, batch 32550, loss[loss=0.1436, simple_loss=0.2195, pruned_loss=0.03384, over 4869.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.03226, over 972887.95 frames.], batch size: 32, lr: 1.92e-04 +2022-05-07 07:25:23,173 INFO [train.py:715] (6/8) Epoch 11, batch 32600, loss[loss=0.1335, simple_loss=0.2105, pruned_loss=0.02821, over 4815.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.0321, over 972822.83 frames.], batch size: 26, lr: 1.92e-04 +2022-05-07 07:26:01,261 INFO [train.py:715] 
(6/8) Epoch 11, batch 32650, loss[loss=0.1364, simple_loss=0.2109, pruned_loss=0.03089, over 4916.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03199, over 972869.61 frames.], batch size: 19, lr: 1.92e-04 +2022-05-07 07:26:39,442 INFO [train.py:715] (6/8) Epoch 11, batch 32700, loss[loss=0.1378, simple_loss=0.2061, pruned_loss=0.03475, over 4902.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03251, over 971899.03 frames.], batch size: 19, lr: 1.92e-04 +2022-05-07 07:27:16,891 INFO [train.py:715] (6/8) Epoch 11, batch 32750, loss[loss=0.1243, simple_loss=0.2046, pruned_loss=0.02194, over 4848.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2117, pruned_loss=0.03235, over 972917.65 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:27:55,659 INFO [train.py:715] (6/8) Epoch 11, batch 32800, loss[loss=0.1771, simple_loss=0.2413, pruned_loss=0.05646, over 4816.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03254, over 973220.04 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:28:35,368 INFO [train.py:715] (6/8) Epoch 11, batch 32850, loss[loss=0.167, simple_loss=0.235, pruned_loss=0.04947, over 4944.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03254, over 972987.27 frames.], batch size: 39, lr: 1.92e-04 +2022-05-07 07:29:13,920 INFO [train.py:715] (6/8) Epoch 11, batch 32900, loss[loss=0.1188, simple_loss=0.1906, pruned_loss=0.02352, over 4928.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03245, over 973040.95 frames.], batch size: 23, lr: 1.92e-04 +2022-05-07 07:29:52,133 INFO [train.py:715] (6/8) Epoch 11, batch 32950, loss[loss=0.1508, simple_loss=0.2264, pruned_loss=0.03759, over 4918.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03223, over 972690.98 frames.], batch size: 19, lr: 1.92e-04 +2022-05-07 07:30:31,049 INFO [train.py:715] (6/8) Epoch 11, batch 33000, loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02956, over 4696.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03239, over 971513.98 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:30:31,050 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 07:30:40,493 INFO [train.py:742] (6/8) Epoch 11, validation: loss=0.1059, simple_loss=0.1899, pruned_loss=0.0109, over 914524.00 frames. 
+2022-05-07 07:31:19,415 INFO [train.py:715] (6/8) Epoch 11, batch 33050, loss[loss=0.1357, simple_loss=0.2101, pruned_loss=0.03061, over 4886.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03273, over 971984.08 frames.], batch size: 16, lr: 1.92e-04 +2022-05-07 07:32:00,913 INFO [train.py:715] (6/8) Epoch 11, batch 33100, loss[loss=0.1626, simple_loss=0.2282, pruned_loss=0.04849, over 4847.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2113, pruned_loss=0.03309, over 972140.76 frames.], batch size: 32, lr: 1.92e-04 +2022-05-07 07:32:38,877 INFO [train.py:715] (6/8) Epoch 11, batch 33150, loss[loss=0.1418, simple_loss=0.2163, pruned_loss=0.03362, over 4991.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2093, pruned_loss=0.03218, over 971457.57 frames.], batch size: 20, lr: 1.92e-04 +2022-05-07 07:33:17,492 INFO [train.py:715] (6/8) Epoch 11, batch 33200, loss[loss=0.1386, simple_loss=0.2226, pruned_loss=0.02729, over 4876.00 frames.], tot_loss[loss=0.138, simple_loss=0.2107, pruned_loss=0.03265, over 971252.56 frames.], batch size: 22, lr: 1.92e-04 +2022-05-07 07:33:56,617 INFO [train.py:715] (6/8) Epoch 11, batch 33250, loss[loss=0.1328, simple_loss=0.2133, pruned_loss=0.02614, over 4983.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2106, pruned_loss=0.03223, over 972678.82 frames.], batch size: 28, lr: 1.92e-04 +2022-05-07 07:34:35,426 INFO [train.py:715] (6/8) Epoch 11, batch 33300, loss[loss=0.1329, simple_loss=0.2014, pruned_loss=0.03218, over 4924.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2106, pruned_loss=0.03248, over 972841.57 frames.], batch size: 35, lr: 1.92e-04 +2022-05-07 07:35:13,299 INFO [train.py:715] (6/8) Epoch 11, batch 33350, loss[loss=0.1336, simple_loss=0.2128, pruned_loss=0.02724, over 4732.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2103, pruned_loss=0.03224, over 973004.41 frames.], batch size: 16, lr: 1.92e-04 +2022-05-07 07:35:51,721 INFO [train.py:715] (6/8) Epoch 11, batch 33400, loss[loss=0.14, simple_loss=0.2211, pruned_loss=0.02941, over 4960.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2106, pruned_loss=0.03214, over 972367.14 frames.], batch size: 21, lr: 1.92e-04 +2022-05-07 07:36:30,408 INFO [train.py:715] (6/8) Epoch 11, batch 33450, loss[loss=0.1328, simple_loss=0.2091, pruned_loss=0.02829, over 4938.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03213, over 972339.83 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:37:08,747 INFO [train.py:715] (6/8) Epoch 11, batch 33500, loss[loss=0.1303, simple_loss=0.2155, pruned_loss=0.02257, over 4951.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2117, pruned_loss=0.03239, over 973122.75 frames.], batch size: 24, lr: 1.92e-04 +2022-05-07 07:37:47,190 INFO [train.py:715] (6/8) Epoch 11, batch 33550, loss[loss=0.1641, simple_loss=0.2218, pruned_loss=0.05325, over 4755.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2111, pruned_loss=0.03265, over 972820.06 frames.], batch size: 19, lr: 1.92e-04 +2022-05-07 07:38:25,778 INFO [train.py:715] (6/8) Epoch 11, batch 33600, loss[loss=0.1258, simple_loss=0.1985, pruned_loss=0.02656, over 4773.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03275, over 973162.65 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:39:04,185 INFO [train.py:715] (6/8) Epoch 11, batch 33650, loss[loss=0.1415, simple_loss=0.2192, pruned_loss=0.03189, over 4854.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2112, pruned_loss=0.03263, over 973177.80 frames.], batch size: 32, lr: 1.92e-04 
+2022-05-07 07:39:42,306 INFO [train.py:715] (6/8) Epoch 11, batch 33700, loss[loss=0.1443, simple_loss=0.2163, pruned_loss=0.03618, over 4852.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2096, pruned_loss=0.03171, over 971923.01 frames.], batch size: 20, lr: 1.92e-04 +2022-05-07 07:40:20,594 INFO [train.py:715] (6/8) Epoch 11, batch 33750, loss[loss=0.1392, simple_loss=0.2161, pruned_loss=0.03115, over 4842.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03154, over 972469.67 frames.], batch size: 20, lr: 1.92e-04 +2022-05-07 07:40:59,176 INFO [train.py:715] (6/8) Epoch 11, batch 33800, loss[loss=0.1724, simple_loss=0.2476, pruned_loss=0.04855, over 4647.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03219, over 972288.19 frames.], batch size: 13, lr: 1.92e-04 +2022-05-07 07:41:37,162 INFO [train.py:715] (6/8) Epoch 11, batch 33850, loss[loss=0.133, simple_loss=0.2036, pruned_loss=0.03114, over 4779.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03245, over 972179.09 frames.], batch size: 17, lr: 1.92e-04 +2022-05-07 07:42:15,197 INFO [train.py:715] (6/8) Epoch 11, batch 33900, loss[loss=0.1434, simple_loss=0.2102, pruned_loss=0.03828, over 4933.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03243, over 972298.24 frames.], batch size: 23, lr: 1.92e-04 +2022-05-07 07:42:53,951 INFO [train.py:715] (6/8) Epoch 11, batch 33950, loss[loss=0.1199, simple_loss=0.2004, pruned_loss=0.01969, over 4877.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03213, over 973315.75 frames.], batch size: 22, lr: 1.92e-04 +2022-05-07 07:43:32,268 INFO [train.py:715] (6/8) Epoch 11, batch 34000, loss[loss=0.1638, simple_loss=0.2384, pruned_loss=0.04464, over 4946.00 frames.], tot_loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.03225, over 973732.63 frames.], batch size: 23, lr: 1.92e-04 +2022-05-07 07:44:10,351 INFO [train.py:715] (6/8) Epoch 11, batch 34050, loss[loss=0.1472, simple_loss=0.2209, pruned_loss=0.03671, over 4843.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03271, over 973081.15 frames.], batch size: 32, lr: 1.92e-04 +2022-05-07 07:44:48,874 INFO [train.py:715] (6/8) Epoch 11, batch 34100, loss[loss=0.1463, simple_loss=0.2266, pruned_loss=0.033, over 4755.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03268, over 973166.97 frames.], batch size: 19, lr: 1.92e-04 +2022-05-07 07:45:27,613 INFO [train.py:715] (6/8) Epoch 11, batch 34150, loss[loss=0.121, simple_loss=0.1851, pruned_loss=0.02851, over 4894.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03227, over 974074.18 frames.], batch size: 19, lr: 1.92e-04 +2022-05-07 07:46:05,701 INFO [train.py:715] (6/8) Epoch 11, batch 34200, loss[loss=0.1333, simple_loss=0.2128, pruned_loss=0.0269, over 4916.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2114, pruned_loss=0.03211, over 972980.85 frames.], batch size: 29, lr: 1.92e-04 +2022-05-07 07:46:44,126 INFO [train.py:715] (6/8) Epoch 11, batch 34250, loss[loss=0.1052, simple_loss=0.1783, pruned_loss=0.01602, over 4829.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03207, over 971931.97 frames.], batch size: 26, lr: 1.92e-04 +2022-05-07 07:47:23,288 INFO [train.py:715] (6/8) Epoch 11, batch 34300, loss[loss=0.1431, simple_loss=0.2185, pruned_loss=0.03388, over 4761.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03177, over 972203.91 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 
07:48:01,581 INFO [train.py:715] (6/8) Epoch 11, batch 34350, loss[loss=0.1522, simple_loss=0.2273, pruned_loss=0.03856, over 4975.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2103, pruned_loss=0.03194, over 972354.12 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:48:40,023 INFO [train.py:715] (6/8) Epoch 11, batch 34400, loss[loss=0.1416, simple_loss=0.2214, pruned_loss=0.03091, over 4931.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2103, pruned_loss=0.032, over 972286.64 frames.], batch size: 17, lr: 1.92e-04 +2022-05-07 07:49:18,675 INFO [train.py:715] (6/8) Epoch 11, batch 34450, loss[loss=0.1723, simple_loss=0.2353, pruned_loss=0.05463, over 4862.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03245, over 972184.31 frames.], batch size: 20, lr: 1.92e-04 +2022-05-07 07:49:57,850 INFO [train.py:715] (6/8) Epoch 11, batch 34500, loss[loss=0.1424, simple_loss=0.2106, pruned_loss=0.03707, over 4938.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03248, over 972530.65 frames.], batch size: 39, lr: 1.92e-04 +2022-05-07 07:50:35,957 INFO [train.py:715] (6/8) Epoch 11, batch 34550, loss[loss=0.1206, simple_loss=0.1992, pruned_loss=0.02102, over 4975.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03224, over 972949.13 frames.], batch size: 14, lr: 1.92e-04 +2022-05-07 07:51:12,742 INFO [train.py:715] (6/8) Epoch 11, batch 34600, loss[loss=0.1774, simple_loss=0.255, pruned_loss=0.04995, over 4856.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2119, pruned_loss=0.03238, over 972813.35 frames.], batch size: 38, lr: 1.92e-04 +2022-05-07 07:51:50,530 INFO [train.py:715] (6/8) Epoch 11, batch 34650, loss[loss=0.1425, simple_loss=0.2258, pruned_loss=0.0296, over 4825.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, pruned_loss=0.03241, over 972803.70 frames.], batch size: 26, lr: 1.92e-04 +2022-05-07 07:52:27,798 INFO [train.py:715] (6/8) Epoch 11, batch 34700, loss[loss=0.1228, simple_loss=0.1966, pruned_loss=0.02451, over 4990.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2118, pruned_loss=0.03227, over 972737.12 frames.], batch size: 28, lr: 1.92e-04 +2022-05-07 07:53:04,317 INFO [train.py:715] (6/8) Epoch 11, batch 34750, loss[loss=0.135, simple_loss=0.2038, pruned_loss=0.03311, over 4921.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.03219, over 972101.25 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:53:39,313 INFO [train.py:715] (6/8) Epoch 11, batch 34800, loss[loss=0.1528, simple_loss=0.2241, pruned_loss=0.04076, over 4906.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03271, over 972067.01 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:54:26,267 INFO [train.py:715] (6/8) Epoch 12, batch 0, loss[loss=0.1408, simple_loss=0.22, pruned_loss=0.03079, over 4926.00 frames.], tot_loss[loss=0.1408, simple_loss=0.22, pruned_loss=0.03079, over 4926.00 frames.], batch size: 29, lr: 1.85e-04 +2022-05-07 07:55:04,628 INFO [train.py:715] (6/8) Epoch 12, batch 50, loss[loss=0.1417, simple_loss=0.2191, pruned_loss=0.03211, over 4922.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2129, pruned_loss=0.03318, over 220621.91 frames.], batch size: 23, lr: 1.85e-04 +2022-05-07 07:55:42,692 INFO [train.py:715] (6/8) Epoch 12, batch 100, loss[loss=0.1495, simple_loss=0.2148, pruned_loss=0.04213, over 4945.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2136, pruned_loss=0.03331, over 387538.62 frames.], batch size: 29, lr: 1.85e-04 +2022-05-07 07:56:21,320 INFO 
[train.py:715] (6/8) Epoch 12, batch 150, loss[loss=0.1531, simple_loss=0.2236, pruned_loss=0.04127, over 4871.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2117, pruned_loss=0.0324, over 516769.56 frames.], batch size: 32, lr: 1.85e-04 +2022-05-07 07:56:59,067 INFO [train.py:715] (6/8) Epoch 12, batch 200, loss[loss=0.1292, simple_loss=0.2089, pruned_loss=0.02479, over 4829.00 frames.], tot_loss[loss=0.1373, simple_loss=0.211, pruned_loss=0.03177, over 617483.80 frames.], batch size: 27, lr: 1.85e-04 +2022-05-07 07:57:38,280 INFO [train.py:715] (6/8) Epoch 12, batch 250, loss[loss=0.1064, simple_loss=0.175, pruned_loss=0.01888, over 4740.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.03179, over 695244.74 frames.], batch size: 16, lr: 1.85e-04 +2022-05-07 07:58:16,542 INFO [train.py:715] (6/8) Epoch 12, batch 300, loss[loss=0.1121, simple_loss=0.1937, pruned_loss=0.01526, over 4898.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03166, over 756827.66 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 07:58:54,473 INFO [train.py:715] (6/8) Epoch 12, batch 350, loss[loss=0.137, simple_loss=0.2138, pruned_loss=0.0301, over 4985.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03126, over 805210.82 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 07:59:32,949 INFO [train.py:715] (6/8) Epoch 12, batch 400, loss[loss=0.1216, simple_loss=0.1904, pruned_loss=0.02636, over 4872.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2098, pruned_loss=0.03178, over 842421.07 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:00:10,577 INFO [train.py:715] (6/8) Epoch 12, batch 450, loss[loss=0.144, simple_loss=0.222, pruned_loss=0.03297, over 4801.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2091, pruned_loss=0.03133, over 871484.76 frames.], batch size: 25, lr: 1.84e-04 +2022-05-07 08:00:48,786 INFO [train.py:715] (6/8) Epoch 12, batch 500, loss[loss=0.1072, simple_loss=0.1802, pruned_loss=0.0171, over 4810.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2088, pruned_loss=0.03132, over 894239.56 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:01:26,232 INFO [train.py:715] (6/8) Epoch 12, batch 550, loss[loss=0.1152, simple_loss=0.2051, pruned_loss=0.01265, over 4640.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.03152, over 912079.26 frames.], batch size: 13, lr: 1.84e-04 +2022-05-07 08:02:04,573 INFO [train.py:715] (6/8) Epoch 12, batch 600, loss[loss=0.115, simple_loss=0.1923, pruned_loss=0.01882, over 4986.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03182, over 926023.05 frames.], batch size: 28, lr: 1.84e-04 +2022-05-07 08:02:41,617 INFO [train.py:715] (6/8) Epoch 12, batch 650, loss[loss=0.1491, simple_loss=0.2183, pruned_loss=0.04, over 4829.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2099, pruned_loss=0.03175, over 936938.61 frames.], batch size: 30, lr: 1.84e-04 +2022-05-07 08:03:20,186 INFO [train.py:715] (6/8) Epoch 12, batch 700, loss[loss=0.1435, simple_loss=0.2163, pruned_loss=0.03535, over 4749.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2108, pruned_loss=0.03206, over 944324.15 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:03:58,807 INFO [train.py:715] (6/8) Epoch 12, batch 750, loss[loss=0.1077, simple_loss=0.1798, pruned_loss=0.01786, over 4949.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2109, pruned_loss=0.0324, over 951191.81 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 08:04:37,569 INFO [train.py:715] (6/8) Epoch 12, batch 800, 
loss[loss=0.1296, simple_loss=0.1942, pruned_loss=0.03249, over 4982.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.03267, over 956118.67 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:05:16,049 INFO [train.py:715] (6/8) Epoch 12, batch 850, loss[loss=0.1552, simple_loss=0.2206, pruned_loss=0.04488, over 4903.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2111, pruned_loss=0.03285, over 960185.04 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:05:54,151 INFO [train.py:715] (6/8) Epoch 12, batch 900, loss[loss=0.1358, simple_loss=0.2173, pruned_loss=0.02718, over 4697.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03259, over 962836.56 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:06:32,488 INFO [train.py:715] (6/8) Epoch 12, batch 950, loss[loss=0.1636, simple_loss=0.2219, pruned_loss=0.05261, over 4982.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2109, pruned_loss=0.0326, over 965030.19 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:07:09,849 INFO [train.py:715] (6/8) Epoch 12, batch 1000, loss[loss=0.1356, simple_loss=0.2195, pruned_loss=0.02578, over 4878.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2115, pruned_loss=0.033, over 966256.67 frames.], batch size: 22, lr: 1.84e-04 +2022-05-07 08:07:47,343 INFO [train.py:715] (6/8) Epoch 12, batch 1050, loss[loss=0.1435, simple_loss=0.2141, pruned_loss=0.03644, over 4826.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2113, pruned_loss=0.03286, over 966598.45 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:08:25,193 INFO [train.py:715] (6/8) Epoch 12, batch 1100, loss[loss=0.123, simple_loss=0.1946, pruned_loss=0.02569, over 4781.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03273, over 968545.98 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:09:03,088 INFO [train.py:715] (6/8) Epoch 12, batch 1150, loss[loss=0.1109, simple_loss=0.1869, pruned_loss=0.01741, over 4796.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03241, over 969248.43 frames.], batch size: 24, lr: 1.84e-04 +2022-05-07 08:09:41,431 INFO [train.py:715] (6/8) Epoch 12, batch 1200, loss[loss=0.1239, simple_loss=0.197, pruned_loss=0.0254, over 4990.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2109, pruned_loss=0.0323, over 969191.53 frames.], batch size: 20, lr: 1.84e-04 +2022-05-07 08:10:18,739 INFO [train.py:715] (6/8) Epoch 12, batch 1250, loss[loss=0.1435, simple_loss=0.2062, pruned_loss=0.04036, over 4747.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2107, pruned_loss=0.03215, over 970103.49 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:10:56,843 INFO [train.py:715] (6/8) Epoch 12, batch 1300, loss[loss=0.1697, simple_loss=0.252, pruned_loss=0.04374, over 4808.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03233, over 970063.39 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:11:33,987 INFO [train.py:715] (6/8) Epoch 12, batch 1350, loss[loss=0.1361, simple_loss=0.2074, pruned_loss=0.03243, over 4867.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2111, pruned_loss=0.03256, over 970677.76 frames.], batch size: 30, lr: 1.84e-04 +2022-05-07 08:12:12,111 INFO [train.py:715] (6/8) Epoch 12, batch 1400, loss[loss=0.1408, simple_loss=0.221, pruned_loss=0.03027, over 4891.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2115, pruned_loss=0.03252, over 970738.21 frames.], batch size: 22, lr: 1.84e-04 +2022-05-07 08:12:49,763 INFO [train.py:715] (6/8) Epoch 12, batch 1450, loss[loss=0.119, 
simple_loss=0.1875, pruned_loss=0.02527, over 4984.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2103, pruned_loss=0.03196, over 971840.65 frames.], batch size: 25, lr: 1.84e-04 +2022-05-07 08:13:27,679 INFO [train.py:715] (6/8) Epoch 12, batch 1500, loss[loss=0.1366, simple_loss=0.2043, pruned_loss=0.03445, over 4770.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.03227, over 971595.89 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:14:05,277 INFO [train.py:715] (6/8) Epoch 12, batch 1550, loss[loss=0.1283, simple_loss=0.2001, pruned_loss=0.02823, over 4980.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.0325, over 971294.85 frames.], batch size: 28, lr: 1.84e-04 +2022-05-07 08:14:42,471 INFO [train.py:715] (6/8) Epoch 12, batch 1600, loss[loss=0.1168, simple_loss=0.1914, pruned_loss=0.02115, over 4824.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03279, over 971634.87 frames.], batch size: 13, lr: 1.84e-04 +2022-05-07 08:15:20,489 INFO [train.py:715] (6/8) Epoch 12, batch 1650, loss[loss=0.1115, simple_loss=0.1801, pruned_loss=0.02147, over 4782.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.03329, over 971361.72 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:15:57,868 INFO [train.py:715] (6/8) Epoch 12, batch 1700, loss[loss=0.148, simple_loss=0.2126, pruned_loss=0.04176, over 4784.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03306, over 971734.08 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:16:35,314 INFO [train.py:715] (6/8) Epoch 12, batch 1750, loss[loss=0.1241, simple_loss=0.1998, pruned_loss=0.02418, over 4757.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03308, over 970045.53 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:17:12,472 INFO [train.py:715] (6/8) Epoch 12, batch 1800, loss[loss=0.139, simple_loss=0.2174, pruned_loss=0.03029, over 4992.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03304, over 970174.06 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:17:50,168 INFO [train.py:715] (6/8) Epoch 12, batch 1850, loss[loss=0.1344, simple_loss=0.21, pruned_loss=0.02937, over 4858.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2121, pruned_loss=0.03334, over 970625.45 frames.], batch size: 20, lr: 1.84e-04 +2022-05-07 08:18:27,661 INFO [train.py:715] (6/8) Epoch 12, batch 1900, loss[loss=0.1359, simple_loss=0.2185, pruned_loss=0.02667, over 4862.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.0328, over 970517.11 frames.], batch size: 32, lr: 1.84e-04 +2022-05-07 08:19:05,286 INFO [train.py:715] (6/8) Epoch 12, batch 1950, loss[loss=0.1435, simple_loss=0.2049, pruned_loss=0.04106, over 4783.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03262, over 971091.71 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:19:43,104 INFO [train.py:715] (6/8) Epoch 12, batch 2000, loss[loss=0.1594, simple_loss=0.2264, pruned_loss=0.04619, over 4963.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2108, pruned_loss=0.03252, over 972199.85 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:20:21,265 INFO [train.py:715] (6/8) Epoch 12, batch 2050, loss[loss=0.1217, simple_loss=0.1953, pruned_loss=0.02408, over 4760.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2111, pruned_loss=0.03268, over 971500.37 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:20:59,323 INFO [train.py:715] (6/8) Epoch 12, batch 2100, loss[loss=0.1496, simple_loss=0.2259, 
pruned_loss=0.03664, over 4843.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.03293, over 971842.29 frames.], batch size: 13, lr: 1.84e-04 +2022-05-07 08:21:36,625 INFO [train.py:715] (6/8) Epoch 12, batch 2150, loss[loss=0.122, simple_loss=0.2004, pruned_loss=0.02181, over 4783.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2118, pruned_loss=0.03273, over 971208.24 frames.], batch size: 12, lr: 1.84e-04 +2022-05-07 08:22:14,598 INFO [train.py:715] (6/8) Epoch 12, batch 2200, loss[loss=0.1303, simple_loss=0.2019, pruned_loss=0.02941, over 4771.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03323, over 971735.69 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:22:52,523 INFO [train.py:715] (6/8) Epoch 12, batch 2250, loss[loss=0.1315, simple_loss=0.2107, pruned_loss=0.02609, over 4735.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03266, over 971800.55 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:23:30,601 INFO [train.py:715] (6/8) Epoch 12, batch 2300, loss[loss=0.1342, simple_loss=0.2073, pruned_loss=0.03055, over 4873.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03269, over 972129.79 frames.], batch size: 22, lr: 1.84e-04 +2022-05-07 08:24:07,787 INFO [train.py:715] (6/8) Epoch 12, batch 2350, loss[loss=0.128, simple_loss=0.1988, pruned_loss=0.02864, over 4898.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03243, over 972444.12 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:24:45,332 INFO [train.py:715] (6/8) Epoch 12, batch 2400, loss[loss=0.1291, simple_loss=0.2045, pruned_loss=0.02687, over 4854.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2113, pruned_loss=0.03297, over 972756.05 frames.], batch size: 20, lr: 1.84e-04 +2022-05-07 08:25:23,252 INFO [train.py:715] (6/8) Epoch 12, batch 2450, loss[loss=0.1246, simple_loss=0.1985, pruned_loss=0.02537, over 4763.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.03249, over 973196.89 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:26:00,044 INFO [train.py:715] (6/8) Epoch 12, batch 2500, loss[loss=0.1313, simple_loss=0.2128, pruned_loss=0.02491, over 4870.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03281, over 973746.95 frames.], batch size: 20, lr: 1.84e-04 +2022-05-07 08:26:38,142 INFO [train.py:715] (6/8) Epoch 12, batch 2550, loss[loss=0.1305, simple_loss=0.203, pruned_loss=0.02905, over 4879.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2107, pruned_loss=0.03246, over 973195.66 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:27:15,554 INFO [train.py:715] (6/8) Epoch 12, batch 2600, loss[loss=0.1272, simple_loss=0.1999, pruned_loss=0.02728, over 4788.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2112, pruned_loss=0.03279, over 973403.58 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:27:54,373 INFO [train.py:715] (6/8) Epoch 12, batch 2650, loss[loss=0.1199, simple_loss=0.195, pruned_loss=0.02243, over 4947.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03258, over 973355.66 frames.], batch size: 29, lr: 1.84e-04 +2022-05-07 08:28:32,745 INFO [train.py:715] (6/8) Epoch 12, batch 2700, loss[loss=0.1259, simple_loss=0.2105, pruned_loss=0.02064, over 4952.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03222, over 972120.60 frames.], batch size: 24, lr: 1.84e-04 +2022-05-07 08:29:11,534 INFO [train.py:715] (6/8) Epoch 12, batch 2750, loss[loss=0.1296, simple_loss=0.2007, pruned_loss=0.02924, 
over 4939.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03183, over 973226.10 frames.], batch size: 29, lr: 1.84e-04 +2022-05-07 08:29:50,411 INFO [train.py:715] (6/8) Epoch 12, batch 2800, loss[loss=0.1526, simple_loss=0.2324, pruned_loss=0.03642, over 4982.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2111, pruned_loss=0.03176, over 973020.24 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:30:28,421 INFO [train.py:715] (6/8) Epoch 12, batch 2850, loss[loss=0.1213, simple_loss=0.2054, pruned_loss=0.01861, over 4808.00 frames.], tot_loss[loss=0.1371, simple_loss=0.211, pruned_loss=0.03158, over 972415.75 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 08:31:07,089 INFO [train.py:715] (6/8) Epoch 12, batch 2900, loss[loss=0.1208, simple_loss=0.1965, pruned_loss=0.02249, over 4981.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03149, over 971383.11 frames.], batch size: 33, lr: 1.84e-04 +2022-05-07 08:31:45,563 INFO [train.py:715] (6/8) Epoch 12, batch 2950, loss[loss=0.1169, simple_loss=0.1936, pruned_loss=0.02013, over 4868.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2093, pruned_loss=0.03124, over 972136.13 frames.], batch size: 20, lr: 1.84e-04 +2022-05-07 08:32:24,276 INFO [train.py:715] (6/8) Epoch 12, batch 3000, loss[loss=0.1588, simple_loss=0.2265, pruned_loss=0.0455, over 4813.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2084, pruned_loss=0.03089, over 971848.74 frames.], batch size: 26, lr: 1.84e-04 +2022-05-07 08:32:24,276 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 08:32:33,757 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.1056, simple_loss=0.1896, pruned_loss=0.01082, over 914524.00 frames. +2022-05-07 08:33:11,808 INFO [train.py:715] (6/8) Epoch 12, batch 3050, loss[loss=0.1518, simple_loss=0.2311, pruned_loss=0.03626, over 4861.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03131, over 972645.79 frames.], batch size: 20, lr: 1.84e-04 +2022-05-07 08:33:49,494 INFO [train.py:715] (6/8) Epoch 12, batch 3100, loss[loss=0.1121, simple_loss=0.1916, pruned_loss=0.01626, over 4823.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.0316, over 972992.60 frames.], batch size: 25, lr: 1.84e-04 +2022-05-07 08:34:27,406 INFO [train.py:715] (6/8) Epoch 12, batch 3150, loss[loss=0.1611, simple_loss=0.2491, pruned_loss=0.0365, over 4948.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2113, pruned_loss=0.0318, over 973053.08 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 08:35:05,544 INFO [train.py:715] (6/8) Epoch 12, batch 3200, loss[loss=0.1391, simple_loss=0.2137, pruned_loss=0.03224, over 4924.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2115, pruned_loss=0.03199, over 973453.76 frames.], batch size: 29, lr: 1.84e-04 +2022-05-07 08:35:43,248 INFO [train.py:715] (6/8) Epoch 12, batch 3250, loss[loss=0.1626, simple_loss=0.2309, pruned_loss=0.0471, over 4891.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2123, pruned_loss=0.03245, over 973252.19 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:36:21,485 INFO [train.py:715] (6/8) Epoch 12, batch 3300, loss[loss=0.1141, simple_loss=0.1902, pruned_loss=0.01904, over 4901.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.03217, over 972966.21 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:36:59,235 INFO [train.py:715] (6/8) Epoch 12, batch 3350, loss[loss=0.1416, simple_loss=0.2112, pruned_loss=0.036, over 4687.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, 
pruned_loss=0.03246, over 972656.26 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:37:37,375 INFO [train.py:715] (6/8) Epoch 12, batch 3400, loss[loss=0.1479, simple_loss=0.2315, pruned_loss=0.03215, over 4907.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2112, pruned_loss=0.03255, over 972052.68 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:38:14,960 INFO [train.py:715] (6/8) Epoch 12, batch 3450, loss[loss=0.1415, simple_loss=0.2242, pruned_loss=0.02945, over 4967.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2115, pruned_loss=0.03256, over 971923.08 frames.], batch size: 24, lr: 1.84e-04 +2022-05-07 08:38:52,883 INFO [train.py:715] (6/8) Epoch 12, batch 3500, loss[loss=0.137, simple_loss=0.2177, pruned_loss=0.02819, over 4823.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03204, over 972511.06 frames.], batch size: 27, lr: 1.84e-04 +2022-05-07 08:39:31,087 INFO [train.py:715] (6/8) Epoch 12, batch 3550, loss[loss=0.1235, simple_loss=0.1964, pruned_loss=0.02528, over 4781.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2118, pruned_loss=0.03275, over 973172.79 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:40:08,794 INFO [train.py:715] (6/8) Epoch 12, batch 3600, loss[loss=0.1373, simple_loss=0.2059, pruned_loss=0.03439, over 4879.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03196, over 973326.19 frames.], batch size: 22, lr: 1.84e-04 +2022-05-07 08:40:46,533 INFO [train.py:715] (6/8) Epoch 12, batch 3650, loss[loss=0.1221, simple_loss=0.1837, pruned_loss=0.03032, over 4885.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03186, over 973738.37 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:41:24,469 INFO [train.py:715] (6/8) Epoch 12, batch 3700, loss[loss=0.1438, simple_loss=0.2192, pruned_loss=0.03424, over 4807.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03176, over 972943.33 frames.], batch size: 25, lr: 1.84e-04 +2022-05-07 08:42:02,372 INFO [train.py:715] (6/8) Epoch 12, batch 3750, loss[loss=0.1261, simple_loss=0.1862, pruned_loss=0.03297, over 4800.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03188, over 972459.09 frames.], batch size: 12, lr: 1.84e-04 +2022-05-07 08:42:40,468 INFO [train.py:715] (6/8) Epoch 12, batch 3800, loss[loss=0.142, simple_loss=0.2065, pruned_loss=0.03871, over 4971.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03187, over 971261.46 frames.], batch size: 25, lr: 1.84e-04 +2022-05-07 08:43:18,089 INFO [train.py:715] (6/8) Epoch 12, batch 3850, loss[loss=0.1289, simple_loss=0.2035, pruned_loss=0.02721, over 4785.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03172, over 971509.59 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:43:55,565 INFO [train.py:715] (6/8) Epoch 12, batch 3900, loss[loss=0.1329, simple_loss=0.2059, pruned_loss=0.02997, over 4797.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03161, over 971682.81 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 08:44:33,441 INFO [train.py:715] (6/8) Epoch 12, batch 3950, loss[loss=0.1418, simple_loss=0.2125, pruned_loss=0.03554, over 4684.00 frames.], tot_loss[loss=0.1368, simple_loss=0.21, pruned_loss=0.03184, over 971212.08 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:45:11,213 INFO [train.py:715] (6/8) Epoch 12, batch 4000, loss[loss=0.1349, simple_loss=0.2104, pruned_loss=0.02971, over 4948.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2106, pruned_loss=0.03225, 
over 971339.90 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 08:45:49,155 INFO [train.py:715] (6/8) Epoch 12, batch 4050, loss[loss=0.1357, simple_loss=0.2051, pruned_loss=0.03313, over 4850.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2114, pruned_loss=0.03315, over 970692.25 frames.], batch size: 13, lr: 1.84e-04 +2022-05-07 08:46:27,045 INFO [train.py:715] (6/8) Epoch 12, batch 4100, loss[loss=0.1194, simple_loss=0.1986, pruned_loss=0.02008, over 4909.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2117, pruned_loss=0.03343, over 970690.60 frames.], batch size: 18, lr: 1.84e-04 +2022-05-07 08:47:05,070 INFO [train.py:715] (6/8) Epoch 12, batch 4150, loss[loss=0.1203, simple_loss=0.1927, pruned_loss=0.02397, over 4768.00 frames.], tot_loss[loss=0.1381, simple_loss=0.211, pruned_loss=0.03263, over 970821.09 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:47:43,032 INFO [train.py:715] (6/8) Epoch 12, batch 4200, loss[loss=0.1256, simple_loss=0.1972, pruned_loss=0.02706, over 4906.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2103, pruned_loss=0.03221, over 971332.46 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:48:20,656 INFO [train.py:715] (6/8) Epoch 12, batch 4250, loss[loss=0.1241, simple_loss=0.19, pruned_loss=0.0291, over 4890.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2098, pruned_loss=0.03198, over 972137.72 frames.], batch size: 18, lr: 1.84e-04 +2022-05-07 08:48:58,346 INFO [train.py:715] (6/8) Epoch 12, batch 4300, loss[loss=0.1404, simple_loss=0.2107, pruned_loss=0.03507, over 4756.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2096, pruned_loss=0.03208, over 972313.15 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:49:37,514 INFO [train.py:715] (6/8) Epoch 12, batch 4350, loss[loss=0.149, simple_loss=0.2245, pruned_loss=0.03675, over 4909.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2107, pruned_loss=0.03269, over 972645.61 frames.], batch size: 18, lr: 1.84e-04 +2022-05-07 08:50:16,263 INFO [train.py:715] (6/8) Epoch 12, batch 4400, loss[loss=0.135, simple_loss=0.2184, pruned_loss=0.02576, over 4930.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2111, pruned_loss=0.03288, over 973088.15 frames.], batch size: 29, lr: 1.84e-04 +2022-05-07 08:50:54,764 INFO [train.py:715] (6/8) Epoch 12, batch 4450, loss[loss=0.1193, simple_loss=0.1969, pruned_loss=0.02083, over 4817.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2103, pruned_loss=0.03221, over 973357.00 frames.], batch size: 26, lr: 1.84e-04 +2022-05-07 08:51:33,202 INFO [train.py:715] (6/8) Epoch 12, batch 4500, loss[loss=0.1512, simple_loss=0.2223, pruned_loss=0.04, over 4826.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03226, over 973458.87 frames.], batch size: 27, lr: 1.84e-04 +2022-05-07 08:52:12,281 INFO [train.py:715] (6/8) Epoch 12, batch 4550, loss[loss=0.1367, simple_loss=0.2031, pruned_loss=0.03515, over 4645.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03219, over 973351.98 frames.], batch size: 13, lr: 1.84e-04 +2022-05-07 08:52:50,490 INFO [train.py:715] (6/8) Epoch 12, batch 4600, loss[loss=0.1282, simple_loss=0.2043, pruned_loss=0.02601, over 4809.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2108, pruned_loss=0.03213, over 972781.09 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:53:29,038 INFO [train.py:715] (6/8) Epoch 12, batch 4650, loss[loss=0.1288, simple_loss=0.1981, pruned_loss=0.02973, over 4951.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2105, pruned_loss=0.03204, over 973097.32 frames.], 
batch size: 21, lr: 1.84e-04 +2022-05-07 08:54:07,729 INFO [train.py:715] (6/8) Epoch 12, batch 4700, loss[loss=0.1286, simple_loss=0.1951, pruned_loss=0.03109, over 4838.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2095, pruned_loss=0.03166, over 972535.89 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:54:46,297 INFO [train.py:715] (6/8) Epoch 12, batch 4750, loss[loss=0.1343, simple_loss=0.2075, pruned_loss=0.03055, over 4790.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2096, pruned_loss=0.03182, over 972422.01 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:55:24,998 INFO [train.py:715] (6/8) Epoch 12, batch 4800, loss[loss=0.1521, simple_loss=0.2086, pruned_loss=0.04781, over 4761.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2094, pruned_loss=0.03204, over 972706.91 frames.], batch size: 18, lr: 1.84e-04 +2022-05-07 08:56:03,560 INFO [train.py:715] (6/8) Epoch 12, batch 4850, loss[loss=0.1386, simple_loss=0.2029, pruned_loss=0.03713, over 4931.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2093, pruned_loss=0.03219, over 973046.93 frames.], batch size: 18, lr: 1.84e-04 +2022-05-07 08:56:42,613 INFO [train.py:715] (6/8) Epoch 12, batch 4900, loss[loss=0.1286, simple_loss=0.2015, pruned_loss=0.02787, over 4961.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2099, pruned_loss=0.03218, over 972985.34 frames.], batch size: 21, lr: 1.83e-04 +2022-05-07 08:57:20,603 INFO [train.py:715] (6/8) Epoch 12, batch 4950, loss[loss=0.1672, simple_loss=0.2511, pruned_loss=0.04165, over 4745.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2095, pruned_loss=0.03177, over 973575.75 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 08:57:58,208 INFO [train.py:715] (6/8) Epoch 12, batch 5000, loss[loss=0.1325, simple_loss=0.208, pruned_loss=0.02849, over 4976.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2097, pruned_loss=0.03175, over 973867.08 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 08:58:36,393 INFO [train.py:715] (6/8) Epoch 12, batch 5050, loss[loss=0.1693, simple_loss=0.2373, pruned_loss=0.05063, over 4987.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03265, over 973814.88 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 08:59:13,985 INFO [train.py:715] (6/8) Epoch 12, batch 5100, loss[loss=0.1284, simple_loss=0.2096, pruned_loss=0.02364, over 4817.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03247, over 973659.89 frames.], batch size: 26, lr: 1.83e-04 +2022-05-07 08:59:52,115 INFO [train.py:715] (6/8) Epoch 12, batch 5150, loss[loss=0.09644, simple_loss=0.1686, pruned_loss=0.01212, over 4744.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03252, over 973566.37 frames.], batch size: 12, lr: 1.83e-04 +2022-05-07 09:00:30,013 INFO [train.py:715] (6/8) Epoch 12, batch 5200, loss[loss=0.1254, simple_loss=0.2016, pruned_loss=0.02459, over 4825.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2113, pruned_loss=0.03221, over 973450.29 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 09:01:08,126 INFO [train.py:715] (6/8) Epoch 12, batch 5250, loss[loss=0.1251, simple_loss=0.1971, pruned_loss=0.02657, over 4907.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.03158, over 973175.13 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:01:45,994 INFO [train.py:715] (6/8) Epoch 12, batch 5300, loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02854, over 4848.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03187, over 974003.55 frames.], batch size: 15, lr: 
1.83e-04 +2022-05-07 09:02:24,111 INFO [train.py:715] (6/8) Epoch 12, batch 5350, loss[loss=0.1214, simple_loss=0.1991, pruned_loss=0.0218, over 4703.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.0322, over 973004.77 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:03:02,672 INFO [train.py:715] (6/8) Epoch 12, batch 5400, loss[loss=0.1419, simple_loss=0.2139, pruned_loss=0.03496, over 4750.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.03224, over 972660.96 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:03:40,514 INFO [train.py:715] (6/8) Epoch 12, batch 5450, loss[loss=0.1629, simple_loss=0.2413, pruned_loss=0.04222, over 4784.00 frames.], tot_loss[loss=0.138, simple_loss=0.2115, pruned_loss=0.03223, over 972039.81 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:04:18,718 INFO [train.py:715] (6/8) Epoch 12, batch 5500, loss[loss=0.1258, simple_loss=0.2047, pruned_loss=0.02345, over 4815.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2121, pruned_loss=0.0325, over 972289.97 frames.], batch size: 26, lr: 1.83e-04 +2022-05-07 09:04:56,509 INFO [train.py:715] (6/8) Epoch 12, batch 5550, loss[loss=0.1401, simple_loss=0.2133, pruned_loss=0.03346, over 4957.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03241, over 973262.91 frames.], batch size: 21, lr: 1.83e-04 +2022-05-07 09:05:35,155 INFO [train.py:715] (6/8) Epoch 12, batch 5600, loss[loss=0.1339, simple_loss=0.2065, pruned_loss=0.03061, over 4905.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.0322, over 973350.00 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:06:12,950 INFO [train.py:715] (6/8) Epoch 12, batch 5650, loss[loss=0.1342, simple_loss=0.2113, pruned_loss=0.02858, over 4911.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03266, over 972956.22 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:06:50,902 INFO [train.py:715] (6/8) Epoch 12, batch 5700, loss[loss=0.1316, simple_loss=0.2013, pruned_loss=0.03097, over 4935.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03236, over 972688.67 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:07:29,824 INFO [train.py:715] (6/8) Epoch 12, batch 5750, loss[loss=0.1251, simple_loss=0.1883, pruned_loss=0.03101, over 4839.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2123, pruned_loss=0.0328, over 973324.28 frames.], batch size: 30, lr: 1.83e-04 +2022-05-07 09:08:07,980 INFO [train.py:715] (6/8) Epoch 12, batch 5800, loss[loss=0.1521, simple_loss=0.2146, pruned_loss=0.04475, over 4921.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2123, pruned_loss=0.0323, over 973574.11 frames.], batch size: 29, lr: 1.83e-04 +2022-05-07 09:08:46,180 INFO [train.py:715] (6/8) Epoch 12, batch 5850, loss[loss=0.1041, simple_loss=0.17, pruned_loss=0.01913, over 4829.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2124, pruned_loss=0.03256, over 973032.13 frames.], batch size: 12, lr: 1.83e-04 +2022-05-07 09:09:24,395 INFO [train.py:715] (6/8) Epoch 12, batch 5900, loss[loss=0.1362, simple_loss=0.1975, pruned_loss=0.03748, over 4830.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2125, pruned_loss=0.0327, over 972183.51 frames.], batch size: 30, lr: 1.83e-04 +2022-05-07 09:10:02,493 INFO [train.py:715] (6/8) Epoch 12, batch 5950, loss[loss=0.1203, simple_loss=0.2047, pruned_loss=0.01796, over 4736.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2111, pruned_loss=0.03214, over 973144.76 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 
09:10:40,375 INFO [train.py:715] (6/8) Epoch 12, batch 6000, loss[loss=0.1146, simple_loss=0.1935, pruned_loss=0.01788, over 4777.00 frames.], tot_loss[loss=0.137, simple_loss=0.2101, pruned_loss=0.03194, over 972683.13 frames.], batch size: 14, lr: 1.83e-04 +2022-05-07 09:10:40,376 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 09:10:49,852 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.1057, simple_loss=0.1897, pruned_loss=0.01086, over 914524.00 frames. +2022-05-07 09:11:28,465 INFO [train.py:715] (6/8) Epoch 12, batch 6050, loss[loss=0.1453, simple_loss=0.2202, pruned_loss=0.03515, over 4920.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03217, over 972986.73 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:12:07,171 INFO [train.py:715] (6/8) Epoch 12, batch 6100, loss[loss=0.1419, simple_loss=0.2147, pruned_loss=0.03455, over 4989.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03146, over 972564.74 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:12:46,247 INFO [train.py:715] (6/8) Epoch 12, batch 6150, loss[loss=0.1415, simple_loss=0.221, pruned_loss=0.031, over 4796.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.0317, over 973035.99 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:13:24,045 INFO [train.py:715] (6/8) Epoch 12, batch 6200, loss[loss=0.1177, simple_loss=0.2044, pruned_loss=0.01546, over 4964.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03187, over 973005.54 frames.], batch size: 24, lr: 1.83e-04 +2022-05-07 09:14:02,109 INFO [train.py:715] (6/8) Epoch 12, batch 6250, loss[loss=0.1356, simple_loss=0.2083, pruned_loss=0.03147, over 4758.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03174, over 972204.66 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:14:42,633 INFO [train.py:715] (6/8) Epoch 12, batch 6300, loss[loss=0.1409, simple_loss=0.2185, pruned_loss=0.03165, over 4772.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03201, over 972628.56 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:15:20,411 INFO [train.py:715] (6/8) Epoch 12, batch 6350, loss[loss=0.1118, simple_loss=0.186, pruned_loss=0.0188, over 4818.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03182, over 973081.89 frames.], batch size: 26, lr: 1.83e-04 +2022-05-07 09:15:58,260 INFO [train.py:715] (6/8) Epoch 12, batch 6400, loss[loss=0.1366, simple_loss=0.2137, pruned_loss=0.02972, over 4903.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03146, over 972274.75 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:16:36,188 INFO [train.py:715] (6/8) Epoch 12, batch 6450, loss[loss=0.1288, simple_loss=0.1993, pruned_loss=0.02915, over 4990.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.03124, over 971431.08 frames.], batch size: 26, lr: 1.83e-04 +2022-05-07 09:17:14,182 INFO [train.py:715] (6/8) Epoch 12, batch 6500, loss[loss=0.1364, simple_loss=0.2088, pruned_loss=0.03202, over 4782.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03082, over 971883.64 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:17:51,824 INFO [train.py:715] (6/8) Epoch 12, batch 6550, loss[loss=0.131, simple_loss=0.2082, pruned_loss=0.0269, over 4985.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03105, over 971876.86 frames.], batch size: 28, lr: 1.83e-04 +2022-05-07 09:18:29,933 INFO [train.py:715] (6/8) Epoch 12, batch 6600, 
loss[loss=0.117, simple_loss=0.1858, pruned_loss=0.02407, over 4761.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.0311, over 971850.57 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:19:08,080 INFO [train.py:715] (6/8) Epoch 12, batch 6650, loss[loss=0.133, simple_loss=0.2059, pruned_loss=0.03005, over 4960.00 frames.], tot_loss[loss=0.1375, simple_loss=0.211, pruned_loss=0.03195, over 972151.42 frames.], batch size: 24, lr: 1.83e-04 +2022-05-07 09:19:46,571 INFO [train.py:715] (6/8) Epoch 12, batch 6700, loss[loss=0.1298, simple_loss=0.2049, pruned_loss=0.0273, over 4963.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03162, over 972621.14 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:20:24,047 INFO [train.py:715] (6/8) Epoch 12, batch 6750, loss[loss=0.1331, simple_loss=0.2113, pruned_loss=0.02742, over 4792.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03156, over 973004.00 frames.], batch size: 24, lr: 1.83e-04 +2022-05-07 09:21:02,175 INFO [train.py:715] (6/8) Epoch 12, batch 6800, loss[loss=0.1396, simple_loss=0.2211, pruned_loss=0.029, over 4922.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2107, pruned_loss=0.03148, over 972876.63 frames.], batch size: 23, lr: 1.83e-04 +2022-05-07 09:21:40,240 INFO [train.py:715] (6/8) Epoch 12, batch 6850, loss[loss=0.1283, simple_loss=0.1973, pruned_loss=0.02964, over 4923.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03166, over 973258.88 frames.], batch size: 39, lr: 1.83e-04 +2022-05-07 09:22:18,036 INFO [train.py:715] (6/8) Epoch 12, batch 6900, loss[loss=0.1187, simple_loss=0.1916, pruned_loss=0.0229, over 4949.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03152, over 973840.36 frames.], batch size: 21, lr: 1.83e-04 +2022-05-07 09:22:56,144 INFO [train.py:715] (6/8) Epoch 12, batch 6950, loss[loss=0.1388, simple_loss=0.2057, pruned_loss=0.03592, over 4733.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2093, pruned_loss=0.03122, over 972182.10 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:23:34,138 INFO [train.py:715] (6/8) Epoch 12, batch 7000, loss[loss=0.1273, simple_loss=0.2076, pruned_loss=0.02347, over 4820.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.0312, over 971785.96 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 09:24:12,557 INFO [train.py:715] (6/8) Epoch 12, batch 7050, loss[loss=0.1554, simple_loss=0.2298, pruned_loss=0.04049, over 4805.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2103, pruned_loss=0.03115, over 972332.02 frames.], batch size: 14, lr: 1.83e-04 +2022-05-07 09:24:50,036 INFO [train.py:715] (6/8) Epoch 12, batch 7100, loss[loss=0.1498, simple_loss=0.2323, pruned_loss=0.03364, over 4835.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2104, pruned_loss=0.03106, over 972225.52 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:25:28,612 INFO [train.py:715] (6/8) Epoch 12, batch 7150, loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03429, over 4801.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03103, over 972601.69 frames.], batch size: 14, lr: 1.83e-04 +2022-05-07 09:26:06,440 INFO [train.py:715] (6/8) Epoch 12, batch 7200, loss[loss=0.1403, simple_loss=0.2212, pruned_loss=0.02965, over 4816.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03167, over 972315.75 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:26:44,291 INFO [train.py:715] (6/8) Epoch 12, batch 7250, loss[loss=0.1373, 
simple_loss=0.2134, pruned_loss=0.03062, over 4978.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.03219, over 972366.82 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 09:27:22,552 INFO [train.py:715] (6/8) Epoch 12, batch 7300, loss[loss=0.1377, simple_loss=0.2156, pruned_loss=0.02989, over 4906.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2122, pruned_loss=0.03237, over 972211.69 frames.], batch size: 39, lr: 1.83e-04 +2022-05-07 09:28:00,292 INFO [train.py:715] (6/8) Epoch 12, batch 7350, loss[loss=0.1424, simple_loss=0.2187, pruned_loss=0.033, over 4935.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2115, pruned_loss=0.03195, over 972796.45 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:28:38,316 INFO [train.py:715] (6/8) Epoch 12, batch 7400, loss[loss=0.1249, simple_loss=0.2003, pruned_loss=0.02476, over 4910.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.03182, over 972156.79 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:29:16,071 INFO [train.py:715] (6/8) Epoch 12, batch 7450, loss[loss=0.138, simple_loss=0.2135, pruned_loss=0.03122, over 4881.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03161, over 971806.51 frames.], batch size: 39, lr: 1.83e-04 +2022-05-07 09:29:54,162 INFO [train.py:715] (6/8) Epoch 12, batch 7500, loss[loss=0.1424, simple_loss=0.2123, pruned_loss=0.03623, over 4924.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2107, pruned_loss=0.03137, over 972519.97 frames.], batch size: 39, lr: 1.83e-04 +2022-05-07 09:30:32,180 INFO [train.py:715] (6/8) Epoch 12, batch 7550, loss[loss=0.1429, simple_loss=0.2144, pruned_loss=0.03568, over 4865.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.03153, over 972326.31 frames.], batch size: 30, lr: 1.83e-04 +2022-05-07 09:31:10,031 INFO [train.py:715] (6/8) Epoch 12, batch 7600, loss[loss=0.1493, simple_loss=0.2267, pruned_loss=0.03595, over 4846.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.0314, over 972759.98 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:31:48,261 INFO [train.py:715] (6/8) Epoch 12, batch 7650, loss[loss=0.1327, simple_loss=0.1958, pruned_loss=0.0348, over 4737.00 frames.], tot_loss[loss=0.1362, simple_loss=0.21, pruned_loss=0.03125, over 972735.96 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:32:26,439 INFO [train.py:715] (6/8) Epoch 12, batch 7700, loss[loss=0.1158, simple_loss=0.1907, pruned_loss=0.02043, over 4805.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03123, over 973387.80 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:33:04,636 INFO [train.py:715] (6/8) Epoch 12, batch 7750, loss[loss=0.1289, simple_loss=0.1928, pruned_loss=0.03253, over 4842.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2092, pruned_loss=0.03117, over 973337.68 frames.], batch size: 32, lr: 1.83e-04 +2022-05-07 09:33:42,403 INFO [train.py:715] (6/8) Epoch 12, batch 7800, loss[loss=0.1365, simple_loss=0.2043, pruned_loss=0.03438, over 4864.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03119, over 973761.70 frames.], batch size: 38, lr: 1.83e-04 +2022-05-07 09:34:20,595 INFO [train.py:715] (6/8) Epoch 12, batch 7850, loss[loss=0.1524, simple_loss=0.2248, pruned_loss=0.04, over 4790.00 frames.], tot_loss[loss=0.136, simple_loss=0.2093, pruned_loss=0.03135, over 973095.01 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:34:58,402 INFO [train.py:715] (6/8) Epoch 12, batch 7900, loss[loss=0.1359, simple_loss=0.1942, 
pruned_loss=0.03882, over 4798.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2096, pruned_loss=0.03166, over 972591.67 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:35:36,648 INFO [train.py:715] (6/8) Epoch 12, batch 7950, loss[loss=0.1536, simple_loss=0.2216, pruned_loss=0.04282, over 4874.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2095, pruned_loss=0.03168, over 972252.55 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:36:14,621 INFO [train.py:715] (6/8) Epoch 12, batch 8000, loss[loss=0.1282, simple_loss=0.1976, pruned_loss=0.02937, over 4974.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2096, pruned_loss=0.03169, over 972283.94 frames.], batch size: 35, lr: 1.83e-04 +2022-05-07 09:36:53,082 INFO [train.py:715] (6/8) Epoch 12, batch 8050, loss[loss=0.1564, simple_loss=0.233, pruned_loss=0.03987, over 4975.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2107, pruned_loss=0.03229, over 972476.52 frames.], batch size: 28, lr: 1.83e-04 +2022-05-07 09:37:31,432 INFO [train.py:715] (6/8) Epoch 12, batch 8100, loss[loss=0.1298, simple_loss=0.1935, pruned_loss=0.0331, over 4717.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03225, over 972421.12 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:38:09,022 INFO [train.py:715] (6/8) Epoch 12, batch 8150, loss[loss=0.1607, simple_loss=0.224, pruned_loss=0.04874, over 4857.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.032, over 972519.65 frames.], batch size: 32, lr: 1.83e-04 +2022-05-07 09:38:47,285 INFO [train.py:715] (6/8) Epoch 12, batch 8200, loss[loss=0.1192, simple_loss=0.1927, pruned_loss=0.02287, over 4921.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03134, over 973251.26 frames.], batch size: 23, lr: 1.83e-04 +2022-05-07 09:39:25,281 INFO [train.py:715] (6/8) Epoch 12, batch 8250, loss[loss=0.1228, simple_loss=0.188, pruned_loss=0.02876, over 4871.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.0314, over 973276.33 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:40:03,013 INFO [train.py:715] (6/8) Epoch 12, batch 8300, loss[loss=0.1506, simple_loss=0.2152, pruned_loss=0.04306, over 4855.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.0312, over 972967.83 frames.], batch size: 34, lr: 1.83e-04 +2022-05-07 09:40:41,114 INFO [train.py:715] (6/8) Epoch 12, batch 8350, loss[loss=0.1305, simple_loss=0.1985, pruned_loss=0.03128, over 4764.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03149, over 973431.53 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:41:19,301 INFO [train.py:715] (6/8) Epoch 12, batch 8400, loss[loss=0.1204, simple_loss=0.2009, pruned_loss=0.01997, over 4886.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2111, pruned_loss=0.03196, over 973390.62 frames.], batch size: 22, lr: 1.83e-04 +2022-05-07 09:41:57,373 INFO [train.py:715] (6/8) Epoch 12, batch 8450, loss[loss=0.1381, simple_loss=0.2232, pruned_loss=0.0265, over 4958.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03184, over 973245.01 frames.], batch size: 39, lr: 1.83e-04 +2022-05-07 09:42:34,902 INFO [train.py:715] (6/8) Epoch 12, batch 8500, loss[loss=0.1189, simple_loss=0.1925, pruned_loss=0.02262, over 4857.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03115, over 972777.84 frames.], batch size: 20, lr: 1.83e-04 +2022-05-07 09:43:13,187 INFO [train.py:715] (6/8) Epoch 12, batch 8550, loss[loss=0.1569, simple_loss=0.2261, pruned_loss=0.04385, over 
4757.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2118, pruned_loss=0.0316, over 973228.23 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:43:51,188 INFO [train.py:715] (6/8) Epoch 12, batch 8600, loss[loss=0.1369, simple_loss=0.2152, pruned_loss=0.02934, over 4850.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2116, pruned_loss=0.03153, over 973442.23 frames.], batch size: 20, lr: 1.83e-04 +2022-05-07 09:44:28,881 INFO [train.py:715] (6/8) Epoch 12, batch 8650, loss[loss=0.1451, simple_loss=0.2301, pruned_loss=0.03001, over 4900.00 frames.], tot_loss[loss=0.138, simple_loss=0.2125, pruned_loss=0.03176, over 972624.41 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:45:07,106 INFO [train.py:715] (6/8) Epoch 12, batch 8700, loss[loss=0.1596, simple_loss=0.2446, pruned_loss=0.03732, over 4941.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2115, pruned_loss=0.03141, over 971468.80 frames.], batch size: 21, lr: 1.83e-04 +2022-05-07 09:45:45,272 INFO [train.py:715] (6/8) Epoch 12, batch 8750, loss[loss=0.1386, simple_loss=0.2164, pruned_loss=0.03039, over 4839.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2111, pruned_loss=0.03169, over 972047.20 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:46:23,701 INFO [train.py:715] (6/8) Epoch 12, batch 8800, loss[loss=0.1209, simple_loss=0.182, pruned_loss=0.02995, over 4779.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03246, over 972074.90 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:47:01,615 INFO [train.py:715] (6/8) Epoch 12, batch 8850, loss[loss=0.1401, simple_loss=0.2081, pruned_loss=0.03606, over 4917.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2118, pruned_loss=0.03261, over 972347.68 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:47:40,605 INFO [train.py:715] (6/8) Epoch 12, batch 8900, loss[loss=0.1298, simple_loss=0.1985, pruned_loss=0.0305, over 4889.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2111, pruned_loss=0.03237, over 972479.08 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:48:20,143 INFO [train.py:715] (6/8) Epoch 12, batch 8950, loss[loss=0.1323, simple_loss=0.2041, pruned_loss=0.03026, over 4812.00 frames.], tot_loss[loss=0.138, simple_loss=0.211, pruned_loss=0.03246, over 972930.10 frames.], batch size: 27, lr: 1.83e-04 +2022-05-07 09:48:58,105 INFO [train.py:715] (6/8) Epoch 12, batch 9000, loss[loss=0.1184, simple_loss=0.1902, pruned_loss=0.02332, over 4960.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2111, pruned_loss=0.03211, over 973213.36 frames.], batch size: 24, lr: 1.83e-04 +2022-05-07 09:48:58,105 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 09:49:07,571 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.1057, simple_loss=0.1898, pruned_loss=0.01084, over 914524.00 frames. 
+2022-05-07 09:49:45,345 INFO [train.py:715] (6/8) Epoch 12, batch 9050, loss[loss=0.1383, simple_loss=0.2214, pruned_loss=0.02764, over 4784.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03151, over 973864.04 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:50:23,564 INFO [train.py:715] (6/8) Epoch 12, batch 9100, loss[loss=0.1381, simple_loss=0.2145, pruned_loss=0.03079, over 4876.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03192, over 972755.16 frames.], batch size: 20, lr: 1.83e-04 +2022-05-07 09:51:01,822 INFO [train.py:715] (6/8) Epoch 12, batch 9150, loss[loss=0.1519, simple_loss=0.2183, pruned_loss=0.04276, over 4765.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03151, over 972665.95 frames.], batch size: 12, lr: 1.83e-04 +2022-05-07 09:51:39,540 INFO [train.py:715] (6/8) Epoch 12, batch 9200, loss[loss=0.1336, simple_loss=0.204, pruned_loss=0.03159, over 4908.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03183, over 972563.92 frames.], batch size: 23, lr: 1.83e-04 +2022-05-07 09:52:17,394 INFO [train.py:715] (6/8) Epoch 12, batch 9250, loss[loss=0.1296, simple_loss=0.2075, pruned_loss=0.02579, over 4871.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03199, over 972443.67 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:52:55,473 INFO [train.py:715] (6/8) Epoch 12, batch 9300, loss[loss=0.1419, simple_loss=0.2046, pruned_loss=0.0396, over 4877.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03231, over 972575.04 frames.], batch size: 32, lr: 1.83e-04 +2022-05-07 09:53:33,064 INFO [train.py:715] (6/8) Epoch 12, batch 9350, loss[loss=0.1128, simple_loss=0.1796, pruned_loss=0.02298, over 4842.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03214, over 973018.44 frames.], batch size: 12, lr: 1.83e-04 +2022-05-07 09:54:10,843 INFO [train.py:715] (6/8) Epoch 12, batch 9400, loss[loss=0.1125, simple_loss=0.1866, pruned_loss=0.01917, over 4983.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03266, over 973335.01 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:54:48,554 INFO [train.py:715] (6/8) Epoch 12, batch 9450, loss[loss=0.1322, simple_loss=0.1958, pruned_loss=0.03432, over 4865.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03258, over 973340.20 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:55:26,596 INFO [train.py:715] (6/8) Epoch 12, batch 9500, loss[loss=0.1218, simple_loss=0.1916, pruned_loss=0.02603, over 4802.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.03267, over 973477.44 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 09:56:04,149 INFO [train.py:715] (6/8) Epoch 12, batch 9550, loss[loss=0.1398, simple_loss=0.2166, pruned_loss=0.03148, over 4964.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03224, over 973436.15 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 09:56:41,641 INFO [train.py:715] (6/8) Epoch 12, batch 9600, loss[loss=0.124, simple_loss=0.1952, pruned_loss=0.02643, over 4904.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03208, over 973379.59 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 09:57:19,886 INFO [train.py:715] (6/8) Epoch 12, batch 9650, loss[loss=0.1483, simple_loss=0.2134, pruned_loss=0.04158, over 4802.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2111, pruned_loss=0.03258, over 973657.36 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 
09:57:57,756 INFO [train.py:715] (6/8) Epoch 12, batch 9700, loss[loss=0.1124, simple_loss=0.1833, pruned_loss=0.02074, over 4792.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2107, pruned_loss=0.03225, over 973725.69 frames.], batch size: 24, lr: 1.82e-04 +2022-05-07 09:58:35,536 INFO [train.py:715] (6/8) Epoch 12, batch 9750, loss[loss=0.1471, simple_loss=0.2146, pruned_loss=0.03981, over 4917.00 frames.], tot_loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.0323, over 973879.08 frames.], batch size: 23, lr: 1.82e-04 +2022-05-07 09:59:13,483 INFO [train.py:715] (6/8) Epoch 12, batch 9800, loss[loss=0.1474, simple_loss=0.2209, pruned_loss=0.03696, over 4940.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03243, over 974411.33 frames.], batch size: 39, lr: 1.82e-04 +2022-05-07 09:59:52,001 INFO [train.py:715] (6/8) Epoch 12, batch 9850, loss[loss=0.1785, simple_loss=0.2542, pruned_loss=0.05143, over 4780.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03243, over 973608.25 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:00:29,632 INFO [train.py:715] (6/8) Epoch 12, batch 9900, loss[loss=0.1427, simple_loss=0.2018, pruned_loss=0.04177, over 4908.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.03227, over 973987.15 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:01:07,863 INFO [train.py:715] (6/8) Epoch 12, batch 9950, loss[loss=0.1391, simple_loss=0.2161, pruned_loss=0.03108, over 4988.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03253, over 973940.59 frames.], batch size: 25, lr: 1.82e-04 +2022-05-07 10:01:46,618 INFO [train.py:715] (6/8) Epoch 12, batch 10000, loss[loss=0.1408, simple_loss=0.2019, pruned_loss=0.03989, over 4882.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2118, pruned_loss=0.03281, over 973906.77 frames.], batch size: 32, lr: 1.82e-04 +2022-05-07 10:02:25,149 INFO [train.py:715] (6/8) Epoch 12, batch 10050, loss[loss=0.1607, simple_loss=0.2407, pruned_loss=0.04029, over 4899.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03271, over 973934.89 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:03:03,487 INFO [train.py:715] (6/8) Epoch 12, batch 10100, loss[loss=0.141, simple_loss=0.2069, pruned_loss=0.03755, over 4814.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03335, over 973435.58 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 10:03:41,895 INFO [train.py:715] (6/8) Epoch 12, batch 10150, loss[loss=0.1576, simple_loss=0.2328, pruned_loss=0.04121, over 4742.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03248, over 973314.47 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:04:20,550 INFO [train.py:715] (6/8) Epoch 12, batch 10200, loss[loss=0.1322, simple_loss=0.214, pruned_loss=0.02524, over 4875.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03207, over 973522.20 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:04:57,864 INFO [train.py:715] (6/8) Epoch 12, batch 10250, loss[loss=0.1091, simple_loss=0.1703, pruned_loss=0.02401, over 4772.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2118, pruned_loss=0.03218, over 973281.29 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 10:05:36,038 INFO [train.py:715] (6/8) Epoch 12, batch 10300, loss[loss=0.1236, simple_loss=0.2055, pruned_loss=0.02086, over 4845.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2117, pruned_loss=0.03222, over 973693.03 frames.], batch size: 25, lr: 1.82e-04 +2022-05-07 10:06:14,197 
INFO [train.py:715] (6/8) Epoch 12, batch 10350, loss[loss=0.1215, simple_loss=0.1798, pruned_loss=0.03158, over 4750.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03228, over 972471.78 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 10:06:52,242 INFO [train.py:715] (6/8) Epoch 12, batch 10400, loss[loss=0.1443, simple_loss=0.2237, pruned_loss=0.03239, over 4892.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2112, pruned_loss=0.03184, over 973086.00 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:07:29,800 INFO [train.py:715] (6/8) Epoch 12, batch 10450, loss[loss=0.1132, simple_loss=0.185, pruned_loss=0.02074, over 4959.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03179, over 973257.80 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:08:07,729 INFO [train.py:715] (6/8) Epoch 12, batch 10500, loss[loss=0.1253, simple_loss=0.2003, pruned_loss=0.02517, over 4841.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03196, over 972833.86 frames.], batch size: 30, lr: 1.82e-04 +2022-05-07 10:08:46,136 INFO [train.py:715] (6/8) Epoch 12, batch 10550, loss[loss=0.1462, simple_loss=0.2211, pruned_loss=0.03567, over 4824.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03184, over 972626.10 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:09:23,509 INFO [train.py:715] (6/8) Epoch 12, batch 10600, loss[loss=0.1454, simple_loss=0.226, pruned_loss=0.03246, over 4880.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2118, pruned_loss=0.03219, over 972540.29 frames.], batch size: 39, lr: 1.82e-04 +2022-05-07 10:10:01,493 INFO [train.py:715] (6/8) Epoch 12, batch 10650, loss[loss=0.1268, simple_loss=0.1964, pruned_loss=0.02862, over 4752.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2118, pruned_loss=0.03199, over 972837.52 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:10:39,355 INFO [train.py:715] (6/8) Epoch 12, batch 10700, loss[loss=0.1653, simple_loss=0.2374, pruned_loss=0.04661, over 4803.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03194, over 973073.38 frames.], batch size: 24, lr: 1.82e-04 +2022-05-07 10:11:16,857 INFO [train.py:715] (6/8) Epoch 12, batch 10750, loss[loss=0.1333, simple_loss=0.1957, pruned_loss=0.03541, over 4795.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03145, over 972419.62 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:11:54,745 INFO [train.py:715] (6/8) Epoch 12, batch 10800, loss[loss=0.1247, simple_loss=0.2038, pruned_loss=0.02275, over 4806.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03151, over 972488.94 frames.], batch size: 25, lr: 1.82e-04 +2022-05-07 10:12:32,735 INFO [train.py:715] (6/8) Epoch 12, batch 10850, loss[loss=0.122, simple_loss=0.2039, pruned_loss=0.02005, over 4827.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03142, over 972185.07 frames.], batch size: 13, lr: 1.82e-04 +2022-05-07 10:13:11,526 INFO [train.py:715] (6/8) Epoch 12, batch 10900, loss[loss=0.1335, simple_loss=0.2171, pruned_loss=0.02494, over 4965.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03124, over 972269.90 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:13:48,733 INFO [train.py:715] (6/8) Epoch 12, batch 10950, loss[loss=0.1597, simple_loss=0.2262, pruned_loss=0.04666, over 4904.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03143, over 972293.98 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:14:26,877 INFO 
[train.py:715] (6/8) Epoch 12, batch 11000, loss[loss=0.1307, simple_loss=0.2082, pruned_loss=0.02656, over 4990.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03105, over 971442.91 frames.], batch size: 28, lr: 1.82e-04 +2022-05-07 10:15:05,148 INFO [train.py:715] (6/8) Epoch 12, batch 11050, loss[loss=0.122, simple_loss=0.2045, pruned_loss=0.0197, over 4794.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03138, over 971671.19 frames.], batch size: 24, lr: 1.82e-04 +2022-05-07 10:15:42,771 INFO [train.py:715] (6/8) Epoch 12, batch 11100, loss[loss=0.1183, simple_loss=0.1973, pruned_loss=0.01963, over 4985.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03139, over 972287.04 frames.], batch size: 31, lr: 1.82e-04 +2022-05-07 10:16:21,272 INFO [train.py:715] (6/8) Epoch 12, batch 11150, loss[loss=0.1286, simple_loss=0.204, pruned_loss=0.02662, over 4848.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2105, pruned_loss=0.03183, over 972534.72 frames.], batch size: 32, lr: 1.82e-04 +2022-05-07 10:16:58,890 INFO [train.py:715] (6/8) Epoch 12, batch 11200, loss[loss=0.1148, simple_loss=0.1868, pruned_loss=0.02142, over 4965.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03202, over 972855.82 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:17:36,988 INFO [train.py:715] (6/8) Epoch 12, batch 11250, loss[loss=0.1342, simple_loss=0.2182, pruned_loss=0.02516, over 4939.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03193, over 972854.47 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:18:14,708 INFO [train.py:715] (6/8) Epoch 12, batch 11300, loss[loss=0.1041, simple_loss=0.1833, pruned_loss=0.01248, over 4969.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2094, pruned_loss=0.03185, over 972427.77 frames.], batch size: 28, lr: 1.82e-04 +2022-05-07 10:18:51,980 INFO [train.py:715] (6/8) Epoch 12, batch 11350, loss[loss=0.1548, simple_loss=0.2247, pruned_loss=0.04247, over 4762.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2099, pruned_loss=0.03186, over 972880.80 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:19:30,200 INFO [train.py:715] (6/8) Epoch 12, batch 11400, loss[loss=0.1327, simple_loss=0.2029, pruned_loss=0.03127, over 4974.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03192, over 973745.73 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:20:07,741 INFO [train.py:715] (6/8) Epoch 12, batch 11450, loss[loss=0.1313, simple_loss=0.2186, pruned_loss=0.022, over 4988.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03166, over 972808.85 frames.], batch size: 28, lr: 1.82e-04 +2022-05-07 10:20:45,256 INFO [train.py:715] (6/8) Epoch 12, batch 11500, loss[loss=0.1221, simple_loss=0.1976, pruned_loss=0.02328, over 4892.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.0316, over 972829.44 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:21:23,002 INFO [train.py:715] (6/8) Epoch 12, batch 11550, loss[loss=0.128, simple_loss=0.2156, pruned_loss=0.02024, over 4954.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03123, over 972747.05 frames.], batch size: 29, lr: 1.82e-04 +2022-05-07 10:22:01,394 INFO [train.py:715] (6/8) Epoch 12, batch 11600, loss[loss=0.1212, simple_loss=0.1985, pruned_loss=0.02202, over 4758.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.0311, over 972433.91 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:22:38,879 INFO [train.py:715] 
(6/8) Epoch 12, batch 11650, loss[loss=0.1239, simple_loss=0.1968, pruned_loss=0.02549, over 4783.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03106, over 972549.35 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:23:16,091 INFO [train.py:715] (6/8) Epoch 12, batch 11700, loss[loss=0.1523, simple_loss=0.2241, pruned_loss=0.04029, over 4982.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.0312, over 972055.57 frames.], batch size: 31, lr: 1.82e-04 +2022-05-07 10:23:53,748 INFO [train.py:715] (6/8) Epoch 12, batch 11750, loss[loss=0.1355, simple_loss=0.2216, pruned_loss=0.02465, over 4992.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03167, over 972563.42 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:24:31,083 INFO [train.py:715] (6/8) Epoch 12, batch 11800, loss[loss=0.1232, simple_loss=0.1991, pruned_loss=0.02371, over 4850.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.03169, over 971814.74 frames.], batch size: 30, lr: 1.82e-04 +2022-05-07 10:25:08,776 INFO [train.py:715] (6/8) Epoch 12, batch 11850, loss[loss=0.1211, simple_loss=0.19, pruned_loss=0.02606, over 4771.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2092, pruned_loss=0.03127, over 972474.07 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:25:46,627 INFO [train.py:715] (6/8) Epoch 12, batch 11900, loss[loss=0.1193, simple_loss=0.1948, pruned_loss=0.02187, over 4943.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03104, over 972527.25 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:26:24,515 INFO [train.py:715] (6/8) Epoch 12, batch 11950, loss[loss=0.151, simple_loss=0.2296, pruned_loss=0.03618, over 4879.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03112, over 971656.19 frames.], batch size: 22, lr: 1.82e-04 +2022-05-07 10:27:01,977 INFO [train.py:715] (6/8) Epoch 12, batch 12000, loss[loss=0.1459, simple_loss=0.2254, pruned_loss=0.03319, over 4923.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03124, over 971955.51 frames.], batch size: 18, lr: 1.82e-04 +2022-05-07 10:27:01,977 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 10:27:11,325 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.1058, simple_loss=0.1897, pruned_loss=0.01095, over 914524.00 frames. 
+2022-05-07 10:27:50,015 INFO [train.py:715] (6/8) Epoch 12, batch 12050, loss[loss=0.1601, simple_loss=0.2199, pruned_loss=0.05015, over 4973.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.0317, over 972142.07 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:28:29,092 INFO [train.py:715] (6/8) Epoch 12, batch 12100, loss[loss=0.1209, simple_loss=0.2001, pruned_loss=0.02085, over 4819.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03196, over 971520.09 frames.], batch size: 26, lr: 1.82e-04 +2022-05-07 10:29:08,848 INFO [train.py:715] (6/8) Epoch 12, batch 12150, loss[loss=0.1331, simple_loss=0.2062, pruned_loss=0.02994, over 4907.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03164, over 971167.63 frames.], batch size: 29, lr: 1.82e-04 +2022-05-07 10:29:47,129 INFO [train.py:715] (6/8) Epoch 12, batch 12200, loss[loss=0.1406, simple_loss=0.2127, pruned_loss=0.03422, over 4792.00 frames.], tot_loss[loss=0.1362, simple_loss=0.21, pruned_loss=0.03118, over 971068.56 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:30:25,384 INFO [train.py:715] (6/8) Epoch 12, batch 12250, loss[loss=0.1345, simple_loss=0.2054, pruned_loss=0.03178, over 4972.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03164, over 971368.60 frames.], batch size: 35, lr: 1.82e-04 +2022-05-07 10:31:04,237 INFO [train.py:715] (6/8) Epoch 12, batch 12300, loss[loss=0.1763, simple_loss=0.2557, pruned_loss=0.04844, over 4789.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03222, over 971562.62 frames.], batch size: 18, lr: 1.82e-04 +2022-05-07 10:31:42,815 INFO [train.py:715] (6/8) Epoch 12, batch 12350, loss[loss=0.1597, simple_loss=0.2248, pruned_loss=0.04729, over 4824.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2107, pruned_loss=0.03212, over 972282.08 frames.], batch size: 30, lr: 1.82e-04 +2022-05-07 10:32:20,260 INFO [train.py:715] (6/8) Epoch 12, batch 12400, loss[loss=0.1151, simple_loss=0.1894, pruned_loss=0.02042, over 4902.00 frames.], tot_loss[loss=0.137, simple_loss=0.2102, pruned_loss=0.03187, over 972388.51 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:32:57,988 INFO [train.py:715] (6/8) Epoch 12, batch 12450, loss[loss=0.1267, simple_loss=0.1991, pruned_loss=0.02719, over 4751.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2102, pruned_loss=0.032, over 972417.42 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:33:36,210 INFO [train.py:715] (6/8) Epoch 12, batch 12500, loss[loss=0.1265, simple_loss=0.1967, pruned_loss=0.02811, over 4825.00 frames.], tot_loss[loss=0.1369, simple_loss=0.21, pruned_loss=0.03191, over 972561.26 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:34:13,317 INFO [train.py:715] (6/8) Epoch 12, batch 12550, loss[loss=0.1606, simple_loss=0.2365, pruned_loss=0.04235, over 4748.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03113, over 971704.98 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:34:51,155 INFO [train.py:715] (6/8) Epoch 12, batch 12600, loss[loss=0.1263, simple_loss=0.1982, pruned_loss=0.02718, over 4768.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2093, pruned_loss=0.03152, over 972234.53 frames.], batch size: 18, lr: 1.82e-04 +2022-05-07 10:35:28,921 INFO [train.py:715] (6/8) Epoch 12, batch 12650, loss[loss=0.1316, simple_loss=0.1965, pruned_loss=0.03336, over 4883.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2098, pruned_loss=0.03183, over 971706.14 frames.], batch size: 32, lr: 1.82e-04 +2022-05-07 
10:36:06,674 INFO [train.py:715] (6/8) Epoch 12, batch 12700, loss[loss=0.1285, simple_loss=0.1998, pruned_loss=0.02856, over 4785.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03153, over 972423.67 frames.], batch size: 18, lr: 1.82e-04 +2022-05-07 10:36:44,125 INFO [train.py:715] (6/8) Epoch 12, batch 12750, loss[loss=0.1578, simple_loss=0.2268, pruned_loss=0.04435, over 4769.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2095, pruned_loss=0.03153, over 972933.15 frames.], batch size: 18, lr: 1.82e-04 +2022-05-07 10:37:22,154 INFO [train.py:715] (6/8) Epoch 12, batch 12800, loss[loss=0.1174, simple_loss=0.1892, pruned_loss=0.02278, over 4915.00 frames.], tot_loss[loss=0.137, simple_loss=0.2099, pruned_loss=0.03204, over 972987.41 frames.], batch size: 18, lr: 1.82e-04 +2022-05-07 10:38:00,582 INFO [train.py:715] (6/8) Epoch 12, batch 12850, loss[loss=0.1321, simple_loss=0.2058, pruned_loss=0.02921, over 4863.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03199, over 973271.32 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 10:38:37,910 INFO [train.py:715] (6/8) Epoch 12, batch 12900, loss[loss=0.1319, simple_loss=0.2128, pruned_loss=0.02555, over 4852.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03224, over 973423.00 frames.], batch size: 20, lr: 1.82e-04 +2022-05-07 10:39:15,002 INFO [train.py:715] (6/8) Epoch 12, batch 12950, loss[loss=0.1605, simple_loss=0.2398, pruned_loss=0.04062, over 4879.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2105, pruned_loss=0.03199, over 972446.88 frames.], batch size: 22, lr: 1.82e-04 +2022-05-07 10:39:52,999 INFO [train.py:715] (6/8) Epoch 12, batch 13000, loss[loss=0.1471, simple_loss=0.2106, pruned_loss=0.0418, over 4758.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.03188, over 972307.69 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:40:30,780 INFO [train.py:715] (6/8) Epoch 12, batch 13050, loss[loss=0.1332, simple_loss=0.2134, pruned_loss=0.02647, over 4770.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2107, pruned_loss=0.0315, over 971564.33 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:41:08,532 INFO [train.py:715] (6/8) Epoch 12, batch 13100, loss[loss=0.1392, simple_loss=0.2311, pruned_loss=0.02366, over 4862.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03115, over 971880.62 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:41:46,121 INFO [train.py:715] (6/8) Epoch 12, batch 13150, loss[loss=0.1412, simple_loss=0.2088, pruned_loss=0.03682, over 4773.00 frames.], tot_loss[loss=0.1362, simple_loss=0.21, pruned_loss=0.03118, over 971959.36 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:42:23,789 INFO [train.py:715] (6/8) Epoch 12, batch 13200, loss[loss=0.1158, simple_loss=0.1945, pruned_loss=0.01852, over 4823.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03137, over 972213.72 frames.], batch size: 27, lr: 1.82e-04 +2022-05-07 10:43:01,013 INFO [train.py:715] (6/8) Epoch 12, batch 13250, loss[loss=0.1436, simple_loss=0.2105, pruned_loss=0.03836, over 4657.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03079, over 972347.81 frames.], batch size: 13, lr: 1.82e-04 +2022-05-07 10:43:38,187 INFO [train.py:715] (6/8) Epoch 12, batch 13300, loss[loss=0.1131, simple_loss=0.192, pruned_loss=0.01705, over 4888.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2084, pruned_loss=0.03096, over 973113.40 frames.], batch size: 22, lr: 1.82e-04 +2022-05-07 10:44:16,077 
INFO [train.py:715] (6/8) Epoch 12, batch 13350, loss[loss=0.1287, simple_loss=0.2089, pruned_loss=0.02421, over 4875.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03162, over 972829.19 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:44:54,316 INFO [train.py:715] (6/8) Epoch 12, batch 13400, loss[loss=0.1294, simple_loss=0.2088, pruned_loss=0.02501, over 4871.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03163, over 973307.96 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:45:31,692 INFO [train.py:715] (6/8) Epoch 12, batch 13450, loss[loss=0.1447, simple_loss=0.2169, pruned_loss=0.03625, over 4785.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03155, over 973623.95 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:46:09,030 INFO [train.py:715] (6/8) Epoch 12, batch 13500, loss[loss=0.1438, simple_loss=0.2089, pruned_loss=0.0394, over 4995.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03173, over 973035.00 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:46:47,475 INFO [train.py:715] (6/8) Epoch 12, batch 13550, loss[loss=0.1353, simple_loss=0.2157, pruned_loss=0.02747, over 4958.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03175, over 973519.24 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:47:24,687 INFO [train.py:715] (6/8) Epoch 12, batch 13600, loss[loss=0.1674, simple_loss=0.2413, pruned_loss=0.04678, over 4923.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2109, pruned_loss=0.03165, over 973259.39 frames.], batch size: 39, lr: 1.82e-04 +2022-05-07 10:48:02,573 INFO [train.py:715] (6/8) Epoch 12, batch 13650, loss[loss=0.1354, simple_loss=0.2045, pruned_loss=0.03314, over 4739.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2111, pruned_loss=0.03167, over 973597.56 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:48:40,711 INFO [train.py:715] (6/8) Epoch 12, batch 13700, loss[loss=0.1278, simple_loss=0.1992, pruned_loss=0.02817, over 4945.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.0317, over 973691.36 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:49:18,441 INFO [train.py:715] (6/8) Epoch 12, batch 13750, loss[loss=0.113, simple_loss=0.191, pruned_loss=0.01751, over 4835.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03214, over 973254.57 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:49:56,509 INFO [train.py:715] (6/8) Epoch 12, batch 13800, loss[loss=0.1412, simple_loss=0.2108, pruned_loss=0.0358, over 4858.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03195, over 973788.17 frames.], batch size: 20, lr: 1.82e-04 +2022-05-07 10:50:34,449 INFO [train.py:715] (6/8) Epoch 12, batch 13850, loss[loss=0.1675, simple_loss=0.2341, pruned_loss=0.05041, over 4813.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2105, pruned_loss=0.03182, over 973537.02 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:51:12,972 INFO [train.py:715] (6/8) Epoch 12, batch 13900, loss[loss=0.135, simple_loss=0.2017, pruned_loss=0.03414, over 4851.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03152, over 973601.93 frames.], batch size: 30, lr: 1.82e-04 +2022-05-07 10:51:50,185 INFO [train.py:715] (6/8) Epoch 12, batch 13950, loss[loss=0.1338, simple_loss=0.2125, pruned_loss=0.02751, over 4748.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03152, over 973522.29 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:52:28,377 INFO 
[train.py:715] (6/8) Epoch 12, batch 14000, loss[loss=0.15, simple_loss=0.2206, pruned_loss=0.03969, over 4978.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.0316, over 973604.51 frames.], batch size: 24, lr: 1.82e-04 +2022-05-07 10:53:06,889 INFO [train.py:715] (6/8) Epoch 12, batch 14050, loss[loss=0.1352, simple_loss=0.2073, pruned_loss=0.03158, over 4890.00 frames.], tot_loss[loss=0.1369, simple_loss=0.211, pruned_loss=0.03139, over 973472.52 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:53:44,259 INFO [train.py:715] (6/8) Epoch 12, batch 14100, loss[loss=0.1261, simple_loss=0.191, pruned_loss=0.0306, over 4963.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2115, pruned_loss=0.03177, over 973169.58 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:54:21,695 INFO [train.py:715] (6/8) Epoch 12, batch 14150, loss[loss=0.1308, simple_loss=0.1941, pruned_loss=0.0338, over 4839.00 frames.], tot_loss[loss=0.1373, simple_loss=0.211, pruned_loss=0.03176, over 972823.03 frames.], batch size: 13, lr: 1.82e-04 +2022-05-07 10:55:00,102 INFO [train.py:715] (6/8) Epoch 12, batch 14200, loss[loss=0.1386, simple_loss=0.2205, pruned_loss=0.02837, over 4937.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03162, over 972546.59 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:55:38,424 INFO [train.py:715] (6/8) Epoch 12, batch 14250, loss[loss=0.1347, simple_loss=0.2141, pruned_loss=0.02765, over 4814.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03131, over 971695.79 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 10:56:18,082 INFO [train.py:715] (6/8) Epoch 12, batch 14300, loss[loss=0.1263, simple_loss=0.2038, pruned_loss=0.0244, over 4818.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03166, over 971316.68 frames.], batch size: 27, lr: 1.81e-04 +2022-05-07 10:56:56,580 INFO [train.py:715] (6/8) Epoch 12, batch 14350, loss[loss=0.1198, simple_loss=0.1978, pruned_loss=0.02085, over 4775.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03165, over 972150.51 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 10:57:35,965 INFO [train.py:715] (6/8) Epoch 12, batch 14400, loss[loss=0.1321, simple_loss=0.2141, pruned_loss=0.02501, over 4930.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03191, over 971688.70 frames.], batch size: 29, lr: 1.81e-04 +2022-05-07 10:58:14,110 INFO [train.py:715] (6/8) Epoch 12, batch 14450, loss[loss=0.1463, simple_loss=0.2178, pruned_loss=0.03742, over 4838.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2116, pruned_loss=0.03205, over 972277.68 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 10:58:53,033 INFO [train.py:715] (6/8) Epoch 12, batch 14500, loss[loss=0.147, simple_loss=0.2046, pruned_loss=0.04467, over 4747.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2127, pruned_loss=0.0328, over 972340.48 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 10:59:32,145 INFO [train.py:715] (6/8) Epoch 12, batch 14550, loss[loss=0.1362, simple_loss=0.208, pruned_loss=0.03219, over 4756.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03224, over 973001.37 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:00:11,029 INFO [train.py:715] (6/8) Epoch 12, batch 14600, loss[loss=0.1347, simple_loss=0.2116, pruned_loss=0.02893, over 4819.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2124, pruned_loss=0.03265, over 973458.66 frames.], batch size: 25, lr: 1.81e-04 +2022-05-07 11:00:49,646 INFO [train.py:715] 
(6/8) Epoch 12, batch 14650, loss[loss=0.107, simple_loss=0.182, pruned_loss=0.01597, over 4808.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.03211, over 973955.52 frames.], batch size: 25, lr: 1.81e-04 +2022-05-07 11:01:27,543 INFO [train.py:715] (6/8) Epoch 12, batch 14700, loss[loss=0.1273, simple_loss=0.1979, pruned_loss=0.02833, over 4802.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2122, pruned_loss=0.03254, over 974087.96 frames.], batch size: 25, lr: 1.81e-04 +2022-05-07 11:02:06,066 INFO [train.py:715] (6/8) Epoch 12, batch 14750, loss[loss=0.1149, simple_loss=0.1872, pruned_loss=0.02132, over 4761.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2115, pruned_loss=0.03201, over 973508.67 frames.], batch size: 12, lr: 1.81e-04 +2022-05-07 11:02:43,581 INFO [train.py:715] (6/8) Epoch 12, batch 14800, loss[loss=0.1353, simple_loss=0.217, pruned_loss=0.02685, over 4957.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2118, pruned_loss=0.03245, over 973080.34 frames.], batch size: 23, lr: 1.81e-04 +2022-05-07 11:03:21,326 INFO [train.py:715] (6/8) Epoch 12, batch 14850, loss[loss=0.1307, simple_loss=0.2104, pruned_loss=0.02552, over 4978.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.03239, over 972504.22 frames.], batch size: 24, lr: 1.81e-04 +2022-05-07 11:03:59,687 INFO [train.py:715] (6/8) Epoch 12, batch 14900, loss[loss=0.1116, simple_loss=0.1829, pruned_loss=0.02018, over 4761.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.0324, over 972431.28 frames.], batch size: 12, lr: 1.81e-04 +2022-05-07 11:04:38,247 INFO [train.py:715] (6/8) Epoch 12, batch 14950, loss[loss=0.1253, simple_loss=0.2014, pruned_loss=0.02464, over 4965.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03197, over 972238.72 frames.], batch size: 24, lr: 1.81e-04 +2022-05-07 11:05:15,440 INFO [train.py:715] (6/8) Epoch 12, batch 15000, loss[loss=0.132, simple_loss=0.2014, pruned_loss=0.0313, over 4828.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.03222, over 971347.59 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:05:15,440 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 11:05:25,070 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.1057, simple_loss=0.1897, pruned_loss=0.01083, over 914524.00 frames. 
+2022-05-07 11:06:02,934 INFO [train.py:715] (6/8) Epoch 12, batch 15050, loss[loss=0.1346, simple_loss=0.2064, pruned_loss=0.0314, over 4913.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03177, over 970833.97 frames.], batch size: 17, lr: 1.81e-04 +2022-05-07 11:06:41,224 INFO [train.py:715] (6/8) Epoch 12, batch 15100, loss[loss=0.1361, simple_loss=0.2087, pruned_loss=0.03177, over 4944.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03173, over 970749.06 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:07:20,403 INFO [train.py:715] (6/8) Epoch 12, batch 15150, loss[loss=0.1458, simple_loss=0.2157, pruned_loss=0.03796, over 4965.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2119, pruned_loss=0.03228, over 970294.99 frames.], batch size: 39, lr: 1.81e-04 +2022-05-07 11:07:58,878 INFO [train.py:715] (6/8) Epoch 12, batch 15200, loss[loss=0.1534, simple_loss=0.2245, pruned_loss=0.04111, over 4795.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.0326, over 970899.90 frames.], batch size: 24, lr: 1.81e-04 +2022-05-07 11:08:37,664 INFO [train.py:715] (6/8) Epoch 12, batch 15250, loss[loss=0.1364, simple_loss=0.2128, pruned_loss=0.02994, over 4754.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03215, over 970864.80 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 11:09:16,357 INFO [train.py:715] (6/8) Epoch 12, batch 15300, loss[loss=0.1126, simple_loss=0.186, pruned_loss=0.01957, over 4991.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03182, over 970900.35 frames.], batch size: 26, lr: 1.81e-04 +2022-05-07 11:09:54,568 INFO [train.py:715] (6/8) Epoch 12, batch 15350, loss[loss=0.1247, simple_loss=0.2034, pruned_loss=0.02298, over 4935.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03175, over 971523.30 frames.], batch size: 23, lr: 1.81e-04 +2022-05-07 11:10:31,953 INFO [train.py:715] (6/8) Epoch 12, batch 15400, loss[loss=0.1389, simple_loss=0.2046, pruned_loss=0.0366, over 4830.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03102, over 971752.26 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:11:09,690 INFO [train.py:715] (6/8) Epoch 12, batch 15450, loss[loss=0.131, simple_loss=0.197, pruned_loss=0.0325, over 4750.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03133, over 971722.59 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 11:11:48,440 INFO [train.py:715] (6/8) Epoch 12, batch 15500, loss[loss=0.1263, simple_loss=0.2032, pruned_loss=0.02471, over 4797.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03102, over 971395.39 frames.], batch size: 13, lr: 1.81e-04 +2022-05-07 11:12:26,567 INFO [train.py:715] (6/8) Epoch 12, batch 15550, loss[loss=0.1376, simple_loss=0.2079, pruned_loss=0.03366, over 4973.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2107, pruned_loss=0.03139, over 971594.49 frames.], batch size: 25, lr: 1.81e-04 +2022-05-07 11:13:04,460 INFO [train.py:715] (6/8) Epoch 12, batch 15600, loss[loss=0.1482, simple_loss=0.2164, pruned_loss=0.04005, over 4811.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2114, pruned_loss=0.03165, over 970946.20 frames.], batch size: 12, lr: 1.81e-04 +2022-05-07 11:13:42,240 INFO [train.py:715] (6/8) Epoch 12, batch 15650, loss[loss=0.1249, simple_loss=0.1925, pruned_loss=0.02861, over 4840.00 frames.], tot_loss[loss=0.137, simple_loss=0.2109, pruned_loss=0.03155, over 970603.00 frames.], batch size: 26, lr: 1.81e-04 +2022-05-07 
11:14:20,676 INFO [train.py:715] (6/8) Epoch 12, batch 15700, loss[loss=0.1227, simple_loss=0.1982, pruned_loss=0.02363, over 4811.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03145, over 970184.69 frames.], batch size: 25, lr: 1.81e-04 +2022-05-07 11:14:58,369 INFO [train.py:715] (6/8) Epoch 12, batch 15750, loss[loss=0.1505, simple_loss=0.2169, pruned_loss=0.04206, over 4986.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03189, over 971476.05 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:15:36,106 INFO [train.py:715] (6/8) Epoch 12, batch 15800, loss[loss=0.1422, simple_loss=0.2211, pruned_loss=0.03163, over 4904.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03187, over 971894.13 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 11:16:14,196 INFO [train.py:715] (6/8) Epoch 12, batch 15850, loss[loss=0.1486, simple_loss=0.2225, pruned_loss=0.03731, over 4830.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03169, over 972023.13 frames.], batch size: 30, lr: 1.81e-04 +2022-05-07 11:16:51,695 INFO [train.py:715] (6/8) Epoch 12, batch 15900, loss[loss=0.1482, simple_loss=0.2099, pruned_loss=0.04331, over 4966.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2101, pruned_loss=0.03186, over 972720.83 frames.], batch size: 29, lr: 1.81e-04 +2022-05-07 11:17:29,508 INFO [train.py:715] (6/8) Epoch 12, batch 15950, loss[loss=0.1446, simple_loss=0.2225, pruned_loss=0.03334, over 4924.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03204, over 972024.55 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:18:07,570 INFO [train.py:715] (6/8) Epoch 12, batch 16000, loss[loss=0.1453, simple_loss=0.2176, pruned_loss=0.03648, over 4792.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03181, over 971752.71 frames.], batch size: 17, lr: 1.81e-04 +2022-05-07 11:18:47,328 INFO [train.py:715] (6/8) Epoch 12, batch 16050, loss[loss=0.1768, simple_loss=0.2393, pruned_loss=0.0571, over 4946.00 frames.], tot_loss[loss=0.138, simple_loss=0.2107, pruned_loss=0.03265, over 971807.26 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:19:25,280 INFO [train.py:715] (6/8) Epoch 12, batch 16100, loss[loss=0.1455, simple_loss=0.2212, pruned_loss=0.0349, over 4799.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03279, over 970962.79 frames.], batch size: 24, lr: 1.81e-04 +2022-05-07 11:20:04,190 INFO [train.py:715] (6/8) Epoch 12, batch 16150, loss[loss=0.1506, simple_loss=0.2316, pruned_loss=0.03479, over 4922.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.0324, over 971707.96 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:20:43,070 INFO [train.py:715] (6/8) Epoch 12, batch 16200, loss[loss=0.1442, simple_loss=0.211, pruned_loss=0.03874, over 4697.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2118, pruned_loss=0.03231, over 972036.47 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:21:21,843 INFO [train.py:715] (6/8) Epoch 12, batch 16250, loss[loss=0.1418, simple_loss=0.2161, pruned_loss=0.03372, over 4843.00 frames.], tot_loss[loss=0.1383, simple_loss=0.212, pruned_loss=0.03228, over 971491.37 frames.], batch size: 30, lr: 1.81e-04 +2022-05-07 11:21:59,696 INFO [train.py:715] (6/8) Epoch 12, batch 16300, loss[loss=0.1236, simple_loss=0.2035, pruned_loss=0.02188, over 4876.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2115, pruned_loss=0.03185, over 971549.22 frames.], batch size: 22, lr: 1.81e-04 +2022-05-07 11:22:37,475 
INFO [train.py:715] (6/8) Epoch 12, batch 16350, loss[loss=0.1196, simple_loss=0.1978, pruned_loss=0.02069, over 4961.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03219, over 971518.22 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:23:16,252 INFO [train.py:715] (6/8) Epoch 12, batch 16400, loss[loss=0.125, simple_loss=0.1942, pruned_loss=0.02794, over 4707.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03148, over 972175.69 frames.], batch size: 12, lr: 1.81e-04 +2022-05-07 11:23:54,216 INFO [train.py:715] (6/8) Epoch 12, batch 16450, loss[loss=0.1256, simple_loss=0.2087, pruned_loss=0.02125, over 4834.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03171, over 971047.56 frames.], batch size: 26, lr: 1.81e-04 +2022-05-07 11:24:33,097 INFO [train.py:715] (6/8) Epoch 12, batch 16500, loss[loss=0.1363, simple_loss=0.2111, pruned_loss=0.03077, over 4746.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03114, over 971315.80 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:25:12,171 INFO [train.py:715] (6/8) Epoch 12, batch 16550, loss[loss=0.1243, simple_loss=0.1932, pruned_loss=0.02771, over 4886.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03112, over 971530.06 frames.], batch size: 22, lr: 1.81e-04 +2022-05-07 11:25:51,308 INFO [train.py:715] (6/8) Epoch 12, batch 16600, loss[loss=0.1275, simple_loss=0.2022, pruned_loss=0.02646, over 4911.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03119, over 972421.25 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:26:29,864 INFO [train.py:715] (6/8) Epoch 12, batch 16650, loss[loss=0.1213, simple_loss=0.195, pruned_loss=0.02377, over 4817.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03146, over 972598.02 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:27:08,923 INFO [train.py:715] (6/8) Epoch 12, batch 16700, loss[loss=0.1455, simple_loss=0.222, pruned_loss=0.03452, over 4764.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03176, over 972002.19 frames.], batch size: 17, lr: 1.81e-04 +2022-05-07 11:27:48,110 INFO [train.py:715] (6/8) Epoch 12, batch 16750, loss[loss=0.1082, simple_loss=0.1736, pruned_loss=0.02142, over 4836.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03127, over 972179.39 frames.], batch size: 13, lr: 1.81e-04 +2022-05-07 11:28:26,500 INFO [train.py:715] (6/8) Epoch 12, batch 16800, loss[loss=0.1375, simple_loss=0.2146, pruned_loss=0.03014, over 4924.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.0314, over 972024.57 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:29:05,271 INFO [train.py:715] (6/8) Epoch 12, batch 16850, loss[loss=0.1433, simple_loss=0.2274, pruned_loss=0.02957, over 4914.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.0314, over 971982.71 frames.], batch size: 39, lr: 1.81e-04 +2022-05-07 11:29:44,426 INFO [train.py:715] (6/8) Epoch 12, batch 16900, loss[loss=0.1177, simple_loss=0.2005, pruned_loss=0.01744, over 4906.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.03175, over 972899.27 frames.], batch size: 23, lr: 1.81e-04 +2022-05-07 11:30:24,181 INFO [train.py:715] (6/8) Epoch 12, batch 16950, loss[loss=0.1655, simple_loss=0.231, pruned_loss=0.05, over 4910.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.0316, over 972718.64 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:31:02,695 INFO [train.py:715] 
(6/8) Epoch 12, batch 17000, loss[loss=0.1178, simple_loss=0.2077, pruned_loss=0.01397, over 4921.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.0316, over 973501.82 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:31:40,883 INFO [train.py:715] (6/8) Epoch 12, batch 17050, loss[loss=0.1377, simple_loss=0.2121, pruned_loss=0.03167, over 4922.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03187, over 972845.85 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:32:19,764 INFO [train.py:715] (6/8) Epoch 12, batch 17100, loss[loss=0.1114, simple_loss=0.1914, pruned_loss=0.01572, over 4758.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03176, over 972085.37 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 11:32:58,567 INFO [train.py:715] (6/8) Epoch 12, batch 17150, loss[loss=0.1208, simple_loss=0.1974, pruned_loss=0.02207, over 4813.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2118, pruned_loss=0.0322, over 971903.73 frames.], batch size: 13, lr: 1.81e-04 +2022-05-07 11:33:37,597 INFO [train.py:715] (6/8) Epoch 12, batch 17200, loss[loss=0.1359, simple_loss=0.2115, pruned_loss=0.03015, over 4842.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03228, over 973393.30 frames.], batch size: 30, lr: 1.81e-04 +2022-05-07 11:34:16,027 INFO [train.py:715] (6/8) Epoch 12, batch 17250, loss[loss=0.1292, simple_loss=0.2051, pruned_loss=0.02669, over 4887.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.03264, over 972875.74 frames.], batch size: 22, lr: 1.81e-04 +2022-05-07 11:34:54,493 INFO [train.py:715] (6/8) Epoch 12, batch 17300, loss[loss=0.1275, simple_loss=0.1997, pruned_loss=0.02762, over 4861.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03264, over 972448.56 frames.], batch size: 20, lr: 1.81e-04 +2022-05-07 11:35:32,126 INFO [train.py:715] (6/8) Epoch 12, batch 17350, loss[loss=0.1212, simple_loss=0.1971, pruned_loss=0.02268, over 4929.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.0319, over 972433.35 frames.], batch size: 23, lr: 1.81e-04 +2022-05-07 11:36:10,077 INFO [train.py:715] (6/8) Epoch 12, batch 17400, loss[loss=0.1391, simple_loss=0.2058, pruned_loss=0.03615, over 4908.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03199, over 973168.40 frames.], batch size: 17, lr: 1.81e-04 +2022-05-07 11:36:47,845 INFO [train.py:715] (6/8) Epoch 12, batch 17450, loss[loss=0.1328, simple_loss=0.1988, pruned_loss=0.03337, over 4853.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03205, over 973808.16 frames.], batch size: 32, lr: 1.81e-04 +2022-05-07 11:37:26,178 INFO [train.py:715] (6/8) Epoch 12, batch 17500, loss[loss=0.1135, simple_loss=0.1926, pruned_loss=0.01717, over 4968.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.032, over 973783.10 frames.], batch size: 24, lr: 1.81e-04 +2022-05-07 11:38:04,040 INFO [train.py:715] (6/8) Epoch 12, batch 17550, loss[loss=0.1363, simple_loss=0.2059, pruned_loss=0.03338, over 4805.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2117, pruned_loss=0.03219, over 972972.53 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:38:42,238 INFO [train.py:715] (6/8) Epoch 12, batch 17600, loss[loss=0.1407, simple_loss=0.2231, pruned_loss=0.02918, over 4964.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03203, over 972833.60 frames.], batch size: 35, lr: 1.81e-04 +2022-05-07 11:39:19,885 INFO [train.py:715] (6/8) 
Epoch 12, batch 17650, loss[loss=0.1413, simple_loss=0.2137, pruned_loss=0.03443, over 4830.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.0323, over 972877.47 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:39:57,990 INFO [train.py:715] (6/8) Epoch 12, batch 17700, loss[loss=0.1468, simple_loss=0.2213, pruned_loss=0.03617, over 4688.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2129, pruned_loss=0.03312, over 973382.47 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:40:36,848 INFO [train.py:715] (6/8) Epoch 12, batch 17750, loss[loss=0.1025, simple_loss=0.1713, pruned_loss=0.01683, over 4823.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03299, over 972996.82 frames.], batch size: 12, lr: 1.81e-04 +2022-05-07 11:41:15,688 INFO [train.py:715] (6/8) Epoch 12, batch 17800, loss[loss=0.1354, simple_loss=0.2066, pruned_loss=0.03208, over 4888.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2123, pruned_loss=0.03315, over 973047.76 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:41:54,189 INFO [train.py:715] (6/8) Epoch 12, batch 17850, loss[loss=0.1137, simple_loss=0.182, pruned_loss=0.02274, over 4893.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2118, pruned_loss=0.03279, over 973018.93 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:42:32,957 INFO [train.py:715] (6/8) Epoch 12, batch 17900, loss[loss=0.147, simple_loss=0.227, pruned_loss=0.03352, over 4833.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03249, over 973177.93 frames.], batch size: 26, lr: 1.81e-04 +2022-05-07 11:43:10,438 INFO [train.py:715] (6/8) Epoch 12, batch 17950, loss[loss=0.1496, simple_loss=0.2177, pruned_loss=0.04078, over 4757.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2125, pruned_loss=0.03258, over 973092.82 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 11:43:48,629 INFO [train.py:715] (6/8) Epoch 12, batch 18000, loss[loss=0.1459, simple_loss=0.2282, pruned_loss=0.03183, over 4800.00 frames.], tot_loss[loss=0.1382, simple_loss=0.212, pruned_loss=0.03221, over 973561.37 frames.], batch size: 24, lr: 1.81e-04 +2022-05-07 11:43:48,629 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 11:43:58,184 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.106, simple_loss=0.19, pruned_loss=0.011, over 914524.00 frames. 
+2022-05-07 11:44:36,606 INFO [train.py:715] (6/8) Epoch 12, batch 18050, loss[loss=0.1245, simple_loss=0.2004, pruned_loss=0.02428, over 4848.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2118, pruned_loss=0.03195, over 973655.83 frames.], batch size: 32, lr: 1.81e-04 +2022-05-07 11:45:14,476 INFO [train.py:715] (6/8) Epoch 12, batch 18100, loss[loss=0.1626, simple_loss=0.23, pruned_loss=0.04762, over 4925.00 frames.], tot_loss[loss=0.138, simple_loss=0.2119, pruned_loss=0.03204, over 973242.10 frames.], batch size: 39, lr: 1.81e-04 +2022-05-07 11:45:52,625 INFO [train.py:715] (6/8) Epoch 12, batch 18150, loss[loss=0.1374, simple_loss=0.2081, pruned_loss=0.03337, over 4884.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2115, pruned_loss=0.03193, over 972880.89 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:46:30,450 INFO [train.py:715] (6/8) Epoch 12, batch 18200, loss[loss=0.1506, simple_loss=0.2258, pruned_loss=0.03765, over 4899.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2121, pruned_loss=0.03227, over 973014.23 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 11:47:08,252 INFO [train.py:715] (6/8) Epoch 12, batch 18250, loss[loss=0.1249, simple_loss=0.2054, pruned_loss=0.02219, over 4942.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2122, pruned_loss=0.03229, over 973482.60 frames.], batch size: 29, lr: 1.81e-04 +2022-05-07 11:47:46,408 INFO [train.py:715] (6/8) Epoch 12, batch 18300, loss[loss=0.1188, simple_loss=0.2012, pruned_loss=0.0182, over 4758.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03268, over 972952.94 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 11:48:24,293 INFO [train.py:715] (6/8) Epoch 12, batch 18350, loss[loss=0.1493, simple_loss=0.2279, pruned_loss=0.03533, over 4920.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2124, pruned_loss=0.0325, over 972867.12 frames.], batch size: 39, lr: 1.81e-04 +2022-05-07 11:49:02,248 INFO [train.py:715] (6/8) Epoch 12, batch 18400, loss[loss=0.1273, simple_loss=0.212, pruned_loss=0.02131, over 4717.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2124, pruned_loss=0.03218, over 972497.39 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:49:39,749 INFO [train.py:715] (6/8) Epoch 12, batch 18450, loss[loss=0.11, simple_loss=0.183, pruned_loss=0.01846, over 4830.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2113, pruned_loss=0.03168, over 972007.51 frames.], batch size: 26, lr: 1.81e-04 +2022-05-07 11:50:17,837 INFO [train.py:715] (6/8) Epoch 12, batch 18500, loss[loss=0.1419, simple_loss=0.2183, pruned_loss=0.03269, over 4935.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2115, pruned_loss=0.03191, over 972590.79 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:50:55,666 INFO [train.py:715] (6/8) Epoch 12, batch 18550, loss[loss=0.1178, simple_loss=0.1929, pruned_loss=0.02134, over 4856.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2117, pruned_loss=0.03176, over 972289.82 frames.], batch size: 30, lr: 1.81e-04 +2022-05-07 11:51:33,501 INFO [train.py:715] (6/8) Epoch 12, batch 18600, loss[loss=0.1553, simple_loss=0.2296, pruned_loss=0.0405, over 4960.00 frames.], tot_loss[loss=0.1379, simple_loss=0.212, pruned_loss=0.03189, over 972485.10 frames.], batch size: 39, lr: 1.81e-04 +2022-05-07 11:52:11,113 INFO [train.py:715] (6/8) Epoch 12, batch 18650, loss[loss=0.1285, simple_loss=0.2055, pruned_loss=0.02572, over 4868.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2123, pruned_loss=0.03194, over 973029.12 frames.], batch size: 22, lr: 1.81e-04 +2022-05-07 
11:52:48,673 INFO [train.py:715] (6/8) Epoch 12, batch 18700, loss[loss=0.1676, simple_loss=0.2418, pruned_loss=0.04667, over 4793.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2128, pruned_loss=0.03286, over 972229.83 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:53:26,073 INFO [train.py:715] (6/8) Epoch 12, batch 18750, loss[loss=0.1454, simple_loss=0.2262, pruned_loss=0.03228, over 4689.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2116, pruned_loss=0.03213, over 972178.95 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:54:04,014 INFO [train.py:715] (6/8) Epoch 12, batch 18800, loss[loss=0.1325, simple_loss=0.1994, pruned_loss=0.03276, over 4928.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2107, pruned_loss=0.03212, over 973169.95 frames.], batch size: 23, lr: 1.81e-04 +2022-05-07 11:54:41,889 INFO [train.py:715] (6/8) Epoch 12, batch 18850, loss[loss=0.1387, simple_loss=0.1994, pruned_loss=0.03901, over 4971.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03224, over 972770.75 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:55:19,706 INFO [train.py:715] (6/8) Epoch 12, batch 18900, loss[loss=0.1522, simple_loss=0.2237, pruned_loss=0.04034, over 4920.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2108, pruned_loss=0.0321, over 972844.17 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:55:57,997 INFO [train.py:715] (6/8) Epoch 12, batch 18950, loss[loss=0.1269, simple_loss=0.2096, pruned_loss=0.02213, over 4938.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.032, over 973637.38 frames.], batch size: 23, lr: 1.81e-04 +2022-05-07 11:56:35,808 INFO [train.py:715] (6/8) Epoch 12, batch 19000, loss[loss=0.1336, simple_loss=0.2112, pruned_loss=0.02796, over 4758.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03207, over 972415.50 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 11:57:13,289 INFO [train.py:715] (6/8) Epoch 12, batch 19050, loss[loss=0.136, simple_loss=0.2041, pruned_loss=0.03395, over 4810.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03171, over 972470.96 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 11:57:50,567 INFO [train.py:715] (6/8) Epoch 12, batch 19100, loss[loss=0.145, simple_loss=0.2112, pruned_loss=0.03943, over 4865.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2109, pruned_loss=0.03146, over 972732.12 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 11:58:28,557 INFO [train.py:715] (6/8) Epoch 12, batch 19150, loss[loss=0.1421, simple_loss=0.2047, pruned_loss=0.0397, over 4874.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03204, over 973066.98 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 11:59:07,195 INFO [train.py:715] (6/8) Epoch 12, batch 19200, loss[loss=0.1227, simple_loss=0.1909, pruned_loss=0.02725, over 4929.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2122, pruned_loss=0.0326, over 972629.63 frames.], batch size: 23, lr: 1.80e-04 +2022-05-07 11:59:45,242 INFO [train.py:715] (6/8) Epoch 12, batch 19250, loss[loss=0.1427, simple_loss=0.2159, pruned_loss=0.03473, over 4868.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2123, pruned_loss=0.03247, over 972662.93 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:00:23,723 INFO [train.py:715] (6/8) Epoch 12, batch 19300, loss[loss=0.1462, simple_loss=0.2187, pruned_loss=0.03687, over 4925.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2124, pruned_loss=0.03231, over 972455.29 frames.], batch size: 23, lr: 1.80e-04 +2022-05-07 12:01:01,907 
INFO [train.py:715] (6/8) Epoch 12, batch 19350, loss[loss=0.1177, simple_loss=0.188, pruned_loss=0.02368, over 4894.00 frames.], tot_loss[loss=0.139, simple_loss=0.2131, pruned_loss=0.03242, over 971920.02 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:01:39,913 INFO [train.py:715] (6/8) Epoch 12, batch 19400, loss[loss=0.1151, simple_loss=0.1912, pruned_loss=0.01945, over 4973.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2125, pruned_loss=0.03211, over 972844.63 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:02:17,952 INFO [train.py:715] (6/8) Epoch 12, batch 19450, loss[loss=0.118, simple_loss=0.2067, pruned_loss=0.01461, over 4786.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2123, pruned_loss=0.03212, over 973508.95 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:02:56,773 INFO [train.py:715] (6/8) Epoch 12, batch 19500, loss[loss=0.129, simple_loss=0.202, pruned_loss=0.028, over 4784.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2118, pruned_loss=0.03202, over 973157.87 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:03:35,600 INFO [train.py:715] (6/8) Epoch 12, batch 19550, loss[loss=0.1518, simple_loss=0.2247, pruned_loss=0.03947, over 4903.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03184, over 972212.94 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:04:14,319 INFO [train.py:715] (6/8) Epoch 12, batch 19600, loss[loss=0.1386, simple_loss=0.2228, pruned_loss=0.02719, over 4779.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2115, pruned_loss=0.03205, over 971494.74 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:04:53,464 INFO [train.py:715] (6/8) Epoch 12, batch 19650, loss[loss=0.1254, simple_loss=0.1945, pruned_loss=0.02812, over 4959.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03209, over 972027.53 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:05:32,629 INFO [train.py:715] (6/8) Epoch 12, batch 19700, loss[loss=0.1404, simple_loss=0.2044, pruned_loss=0.0382, over 4776.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.03216, over 971809.66 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:06:12,008 INFO [train.py:715] (6/8) Epoch 12, batch 19750, loss[loss=0.1326, simple_loss=0.2091, pruned_loss=0.02804, over 4977.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.03287, over 971442.18 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:06:52,644 INFO [train.py:715] (6/8) Epoch 12, batch 19800, loss[loss=0.1697, simple_loss=0.2486, pruned_loss=0.04544, over 4795.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.03238, over 972254.14 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:07:33,086 INFO [train.py:715] (6/8) Epoch 12, batch 19850, loss[loss=0.159, simple_loss=0.2264, pruned_loss=0.04579, over 4977.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2118, pruned_loss=0.03271, over 972133.40 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:08:14,254 INFO [train.py:715] (6/8) Epoch 12, batch 19900, loss[loss=0.1123, simple_loss=0.1891, pruned_loss=0.01777, over 4948.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03217, over 972246.98 frames.], batch size: 35, lr: 1.80e-04 +2022-05-07 12:08:54,593 INFO [train.py:715] (6/8) Epoch 12, batch 19950, loss[loss=0.1334, simple_loss=0.2125, pruned_loss=0.02718, over 4914.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.0324, over 972505.36 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:09:35,211 INFO 
[train.py:715] (6/8) Epoch 12, batch 20000, loss[loss=0.1597, simple_loss=0.2285, pruned_loss=0.04547, over 4836.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2117, pruned_loss=0.03232, over 972691.85 frames.], batch size: 30, lr: 1.80e-04 +2022-05-07 12:10:15,433 INFO [train.py:715] (6/8) Epoch 12, batch 20050, loss[loss=0.1246, simple_loss=0.1989, pruned_loss=0.02516, over 4937.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.03217, over 973525.41 frames.], batch size: 29, lr: 1.80e-04 +2022-05-07 12:10:55,690 INFO [train.py:715] (6/8) Epoch 12, batch 20100, loss[loss=0.1339, simple_loss=0.206, pruned_loss=0.03091, over 4840.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.03165, over 974674.24 frames.], batch size: 13, lr: 1.80e-04 +2022-05-07 12:11:35,656 INFO [train.py:715] (6/8) Epoch 12, batch 20150, loss[loss=0.154, simple_loss=0.2211, pruned_loss=0.04342, over 4893.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03163, over 973969.44 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:12:16,046 INFO [train.py:715] (6/8) Epoch 12, batch 20200, loss[loss=0.1464, simple_loss=0.2216, pruned_loss=0.0356, over 4909.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03204, over 973990.91 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:12:56,155 INFO [train.py:715] (6/8) Epoch 12, batch 20250, loss[loss=0.1609, simple_loss=0.2334, pruned_loss=0.04418, over 4748.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.0314, over 973222.15 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:13:36,212 INFO [train.py:715] (6/8) Epoch 12, batch 20300, loss[loss=0.1243, simple_loss=0.188, pruned_loss=0.03031, over 4831.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03124, over 973238.14 frames.], batch size: 13, lr: 1.80e-04 +2022-05-07 12:14:16,799 INFO [train.py:715] (6/8) Epoch 12, batch 20350, loss[loss=0.1302, simple_loss=0.2053, pruned_loss=0.02754, over 4963.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.0312, over 972493.00 frames.], batch size: 35, lr: 1.80e-04 +2022-05-07 12:14:56,468 INFO [train.py:715] (6/8) Epoch 12, batch 20400, loss[loss=0.17, simple_loss=0.239, pruned_loss=0.05053, over 4937.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03172, over 972230.76 frames.], batch size: 35, lr: 1.80e-04 +2022-05-07 12:15:36,232 INFO [train.py:715] (6/8) Epoch 12, batch 20450, loss[loss=0.1464, simple_loss=0.2247, pruned_loss=0.03408, over 4971.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03155, over 972991.65 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:16:15,878 INFO [train.py:715] (6/8) Epoch 12, batch 20500, loss[loss=0.1821, simple_loss=0.2428, pruned_loss=0.06069, over 4876.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03178, over 974012.11 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:16:56,323 INFO [train.py:715] (6/8) Epoch 12, batch 20550, loss[loss=0.1082, simple_loss=0.1821, pruned_loss=0.01712, over 4911.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03174, over 974752.65 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:17:36,241 INFO [train.py:715] (6/8) Epoch 12, batch 20600, loss[loss=0.1255, simple_loss=0.2026, pruned_loss=0.02422, over 4810.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03175, over 974696.83 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:18:15,186 INFO [train.py:715] 
(6/8) Epoch 12, batch 20650, loss[loss=0.1152, simple_loss=0.1835, pruned_loss=0.0234, over 4985.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03164, over 974151.19 frames.], batch size: 28, lr: 1.80e-04 +2022-05-07 12:18:54,297 INFO [train.py:715] (6/8) Epoch 12, batch 20700, loss[loss=0.1258, simple_loss=0.2052, pruned_loss=0.02318, over 4800.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03124, over 973917.39 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:19:32,272 INFO [train.py:715] (6/8) Epoch 12, batch 20750, loss[loss=0.139, simple_loss=0.2052, pruned_loss=0.03642, over 4874.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03121, over 973714.25 frames.], batch size: 32, lr: 1.80e-04 +2022-05-07 12:20:10,580 INFO [train.py:715] (6/8) Epoch 12, batch 20800, loss[loss=0.132, simple_loss=0.2083, pruned_loss=0.02791, over 4782.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2107, pruned_loss=0.03128, over 973445.64 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:20:48,328 INFO [train.py:715] (6/8) Epoch 12, batch 20850, loss[loss=0.1501, simple_loss=0.2264, pruned_loss=0.03689, over 4772.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2107, pruned_loss=0.03144, over 972367.74 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:21:26,467 INFO [train.py:715] (6/8) Epoch 12, batch 20900, loss[loss=0.1415, simple_loss=0.2111, pruned_loss=0.03599, over 4965.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2111, pruned_loss=0.03161, over 972633.66 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:22:04,011 INFO [train.py:715] (6/8) Epoch 12, batch 20950, loss[loss=0.1143, simple_loss=0.1891, pruned_loss=0.01981, over 4804.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2106, pruned_loss=0.0313, over 973312.56 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:22:41,374 INFO [train.py:715] (6/8) Epoch 12, batch 21000, loss[loss=0.12, simple_loss=0.2044, pruned_loss=0.01782, over 4798.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03147, over 974249.22 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:22:41,375 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 12:22:50,900 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.1056, simple_loss=0.1896, pruned_loss=0.01081, over 914524.00 frames. 
+2022-05-07 12:23:28,721 INFO [train.py:715] (6/8) Epoch 12, batch 21050, loss[loss=0.1174, simple_loss=0.1855, pruned_loss=0.02469, over 4874.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.03155, over 974126.90 frames.], batch size: 34, lr: 1.80e-04 +2022-05-07 12:24:06,827 INFO [train.py:715] (6/8) Epoch 12, batch 21100, loss[loss=0.1459, simple_loss=0.2129, pruned_loss=0.03944, over 4887.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03171, over 974016.21 frames.], batch size: 22, lr: 1.80e-04 +2022-05-07 12:24:44,627 INFO [train.py:715] (6/8) Epoch 12, batch 21150, loss[loss=0.1258, simple_loss=0.2024, pruned_loss=0.02453, over 4906.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03185, over 973947.93 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:25:22,419 INFO [train.py:715] (6/8) Epoch 12, batch 21200, loss[loss=0.1514, simple_loss=0.2189, pruned_loss=0.04197, over 4843.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.03167, over 973906.41 frames.], batch size: 32, lr: 1.80e-04 +2022-05-07 12:26:00,700 INFO [train.py:715] (6/8) Epoch 12, batch 21250, loss[loss=0.1198, simple_loss=0.1986, pruned_loss=0.02051, over 4944.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03157, over 973580.41 frames.], batch size: 35, lr: 1.80e-04 +2022-05-07 12:26:39,505 INFO [train.py:715] (6/8) Epoch 12, batch 21300, loss[loss=0.1424, simple_loss=0.2122, pruned_loss=0.03628, over 4788.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03128, over 973165.26 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:27:17,266 INFO [train.py:715] (6/8) Epoch 12, batch 21350, loss[loss=0.1739, simple_loss=0.2339, pruned_loss=0.0569, over 4943.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03167, over 973253.24 frames.], batch size: 39, lr: 1.80e-04 +2022-05-07 12:27:56,379 INFO [train.py:715] (6/8) Epoch 12, batch 21400, loss[loss=0.1167, simple_loss=0.1992, pruned_loss=0.01713, over 4795.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.03165, over 974169.29 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:28:35,915 INFO [train.py:715] (6/8) Epoch 12, batch 21450, loss[loss=0.1554, simple_loss=0.2331, pruned_loss=0.03885, over 4699.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03158, over 973670.32 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:29:14,512 INFO [train.py:715] (6/8) Epoch 12, batch 21500, loss[loss=0.1424, simple_loss=0.2139, pruned_loss=0.03543, over 4977.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.0317, over 973460.06 frames.], batch size: 28, lr: 1.80e-04 +2022-05-07 12:29:53,102 INFO [train.py:715] (6/8) Epoch 12, batch 21550, loss[loss=0.191, simple_loss=0.2718, pruned_loss=0.0551, over 4901.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03182, over 973258.62 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:30:31,274 INFO [train.py:715] (6/8) Epoch 12, batch 21600, loss[loss=0.1287, simple_loss=0.1971, pruned_loss=0.03017, over 4820.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03205, over 974172.11 frames.], batch size: 13, lr: 1.80e-04 +2022-05-07 12:31:09,737 INFO [train.py:715] (6/8) Epoch 12, batch 21650, loss[loss=0.1377, simple_loss=0.215, pruned_loss=0.03021, over 4803.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03246, over 973665.96 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 
12:31:46,936 INFO [train.py:715] (6/8) Epoch 12, batch 21700, loss[loss=0.1088, simple_loss=0.1789, pruned_loss=0.01933, over 4831.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03212, over 973154.26 frames.], batch size: 13, lr: 1.80e-04 +2022-05-07 12:32:25,497 INFO [train.py:715] (6/8) Epoch 12, batch 21750, loss[loss=0.1421, simple_loss=0.2294, pruned_loss=0.02738, over 4914.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.03149, over 972735.34 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:33:04,220 INFO [train.py:715] (6/8) Epoch 12, batch 21800, loss[loss=0.1275, simple_loss=0.1962, pruned_loss=0.02935, over 4786.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2107, pruned_loss=0.03122, over 972637.74 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:33:42,113 INFO [train.py:715] (6/8) Epoch 12, batch 21850, loss[loss=0.1395, simple_loss=0.2207, pruned_loss=0.02918, over 4898.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2111, pruned_loss=0.03139, over 972843.10 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:34:19,728 INFO [train.py:715] (6/8) Epoch 12, batch 21900, loss[loss=0.1464, simple_loss=0.2231, pruned_loss=0.03482, over 4713.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03171, over 972455.82 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:34:58,476 INFO [train.py:715] (6/8) Epoch 12, batch 21950, loss[loss=0.1477, simple_loss=0.2098, pruned_loss=0.04281, over 4776.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03192, over 972648.84 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:35:37,481 INFO [train.py:715] (6/8) Epoch 12, batch 22000, loss[loss=0.1152, simple_loss=0.1955, pruned_loss=0.01741, over 4802.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03219, over 972253.32 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:36:15,713 INFO [train.py:715] (6/8) Epoch 12, batch 22050, loss[loss=0.1314, simple_loss=0.2106, pruned_loss=0.02608, over 4848.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03212, over 972360.50 frames.], batch size: 13, lr: 1.80e-04 +2022-05-07 12:36:54,702 INFO [train.py:715] (6/8) Epoch 12, batch 22100, loss[loss=0.1106, simple_loss=0.1831, pruned_loss=0.01905, over 4976.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2109, pruned_loss=0.03203, over 971829.03 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:37:33,660 INFO [train.py:715] (6/8) Epoch 12, batch 22150, loss[loss=0.1246, simple_loss=0.198, pruned_loss=0.02564, over 4805.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03176, over 971596.45 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:38:11,934 INFO [train.py:715] (6/8) Epoch 12, batch 22200, loss[loss=0.1307, simple_loss=0.2045, pruned_loss=0.02845, over 4843.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03155, over 971959.97 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:38:49,700 INFO [train.py:715] (6/8) Epoch 12, batch 22250, loss[loss=0.1259, simple_loss=0.204, pruned_loss=0.02386, over 4688.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03193, over 972093.74 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:39:30,401 INFO [train.py:715] (6/8) Epoch 12, batch 22300, loss[loss=0.1817, simple_loss=0.2341, pruned_loss=0.06463, over 4849.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2113, pruned_loss=0.03222, over 971387.91 frames.], batch size: 30, lr: 1.80e-04 +2022-05-07 
12:40:08,653 INFO [train.py:715] (6/8) Epoch 12, batch 22350, loss[loss=0.1161, simple_loss=0.1991, pruned_loss=0.01658, over 4864.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.03215, over 972201.50 frames.], batch size: 20, lr: 1.80e-04 +2022-05-07 12:40:46,766 INFO [train.py:715] (6/8) Epoch 12, batch 22400, loss[loss=0.1625, simple_loss=0.2285, pruned_loss=0.04825, over 4841.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.0318, over 971967.04 frames.], batch size: 34, lr: 1.80e-04 +2022-05-07 12:41:25,345 INFO [train.py:715] (6/8) Epoch 12, batch 22450, loss[loss=0.1207, simple_loss=0.1954, pruned_loss=0.02295, over 4788.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03171, over 972766.70 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:42:03,784 INFO [train.py:715] (6/8) Epoch 12, batch 22500, loss[loss=0.1215, simple_loss=0.1978, pruned_loss=0.02253, over 4799.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.03195, over 972257.23 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:42:42,489 INFO [train.py:715] (6/8) Epoch 12, batch 22550, loss[loss=0.17, simple_loss=0.2472, pruned_loss=0.04643, over 4907.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03217, over 971653.19 frames.], batch size: 39, lr: 1.80e-04 +2022-05-07 12:43:20,634 INFO [train.py:715] (6/8) Epoch 12, batch 22600, loss[loss=0.1233, simple_loss=0.2089, pruned_loss=0.01889, over 4810.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03193, over 972942.01 frames.], batch size: 26, lr: 1.80e-04 +2022-05-07 12:43:58,688 INFO [train.py:715] (6/8) Epoch 12, batch 22650, loss[loss=0.1722, simple_loss=0.2406, pruned_loss=0.05195, over 4844.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2118, pruned_loss=0.03235, over 972933.20 frames.], batch size: 30, lr: 1.80e-04 +2022-05-07 12:44:36,596 INFO [train.py:715] (6/8) Epoch 12, batch 22700, loss[loss=0.1279, simple_loss=0.2007, pruned_loss=0.02754, over 4973.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03276, over 972551.41 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:45:14,814 INFO [train.py:715] (6/8) Epoch 12, batch 22750, loss[loss=0.1255, simple_loss=0.2077, pruned_loss=0.02166, over 4757.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.0331, over 972277.54 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:45:53,322 INFO [train.py:715] (6/8) Epoch 12, batch 22800, loss[loss=0.1423, simple_loss=0.207, pruned_loss=0.03875, over 4937.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03292, over 971436.91 frames.], batch size: 39, lr: 1.80e-04 +2022-05-07 12:46:32,315 INFO [train.py:715] (6/8) Epoch 12, batch 22850, loss[loss=0.1242, simple_loss=0.1957, pruned_loss=0.02639, over 4932.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03292, over 971208.30 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:47:10,523 INFO [train.py:715] (6/8) Epoch 12, batch 22900, loss[loss=0.1562, simple_loss=0.2201, pruned_loss=0.04613, over 4935.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2117, pruned_loss=0.03283, over 972312.27 frames.], batch size: 39, lr: 1.80e-04 +2022-05-07 12:47:48,404 INFO [train.py:715] (6/8) Epoch 12, batch 22950, loss[loss=0.1206, simple_loss=0.2029, pruned_loss=0.01913, over 4833.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03233, over 972504.97 frames.], batch size: 26, lr: 1.80e-04 +2022-05-07 
12:48:26,679 INFO [train.py:715] (6/8) Epoch 12, batch 23000, loss[loss=0.1156, simple_loss=0.2009, pruned_loss=0.0151, over 4818.00 frames.], tot_loss[loss=0.1375, simple_loss=0.211, pruned_loss=0.03205, over 972814.85 frames.], batch size: 27, lr: 1.80e-04 +2022-05-07 12:49:04,950 INFO [train.py:715] (6/8) Epoch 12, batch 23050, loss[loss=0.1223, simple_loss=0.1917, pruned_loss=0.02649, over 4978.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2109, pruned_loss=0.03204, over 973151.04 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:49:43,053 INFO [train.py:715] (6/8) Epoch 12, batch 23100, loss[loss=0.1336, simple_loss=0.2072, pruned_loss=0.03003, over 4876.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03148, over 972301.90 frames.], batch size: 22, lr: 1.80e-04 +2022-05-07 12:50:21,954 INFO [train.py:715] (6/8) Epoch 12, batch 23150, loss[loss=0.1306, simple_loss=0.2103, pruned_loss=0.0255, over 4884.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03149, over 972186.43 frames.], batch size: 39, lr: 1.80e-04 +2022-05-07 12:51:01,025 INFO [train.py:715] (6/8) Epoch 12, batch 23200, loss[loss=0.1269, simple_loss=0.2005, pruned_loss=0.02666, over 4794.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03186, over 971925.70 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:51:39,416 INFO [train.py:715] (6/8) Epoch 12, batch 23250, loss[loss=0.1445, simple_loss=0.2202, pruned_loss=0.03442, over 4736.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2112, pruned_loss=0.03193, over 971850.83 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:52:17,111 INFO [train.py:715] (6/8) Epoch 12, batch 23300, loss[loss=0.1548, simple_loss=0.2265, pruned_loss=0.04151, over 4861.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03192, over 971219.31 frames.], batch size: 32, lr: 1.80e-04 +2022-05-07 12:52:55,811 INFO [train.py:715] (6/8) Epoch 12, batch 23350, loss[loss=0.1237, simple_loss=0.2071, pruned_loss=0.02021, over 4984.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2108, pruned_loss=0.03144, over 971491.82 frames.], batch size: 28, lr: 1.80e-04 +2022-05-07 12:53:33,839 INFO [train.py:715] (6/8) Epoch 12, batch 23400, loss[loss=0.108, simple_loss=0.1771, pruned_loss=0.01943, over 4740.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03204, over 971549.34 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:54:11,389 INFO [train.py:715] (6/8) Epoch 12, batch 23450, loss[loss=0.1119, simple_loss=0.1837, pruned_loss=0.02006, over 4869.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03216, over 971433.18 frames.], batch size: 30, lr: 1.80e-04 +2022-05-07 12:54:49,573 INFO [train.py:715] (6/8) Epoch 12, batch 23500, loss[loss=0.1526, simple_loss=0.2221, pruned_loss=0.04149, over 4872.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2111, pruned_loss=0.03178, over 971284.06 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:55:28,412 INFO [train.py:715] (6/8) Epoch 12, batch 23550, loss[loss=0.1376, simple_loss=0.2105, pruned_loss=0.03234, over 4902.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03181, over 971907.23 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:56:07,102 INFO [train.py:715] (6/8) Epoch 12, batch 23600, loss[loss=0.1454, simple_loss=0.2174, pruned_loss=0.03674, over 4921.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03128, over 971720.37 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:56:45,801 
INFO [train.py:715] (6/8) Epoch 12, batch 23650, loss[loss=0.1138, simple_loss=0.1961, pruned_loss=0.01572, over 4832.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2098, pruned_loss=0.03174, over 971584.56 frames.], batch size: 26, lr: 1.80e-04 +2022-05-07 12:57:24,209 INFO [train.py:715] (6/8) Epoch 12, batch 23700, loss[loss=0.152, simple_loss=0.219, pruned_loss=0.0425, over 4754.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2094, pruned_loss=0.03184, over 971340.99 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:58:02,491 INFO [train.py:715] (6/8) Epoch 12, batch 23750, loss[loss=0.1173, simple_loss=0.1938, pruned_loss=0.02045, over 4912.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2101, pruned_loss=0.03207, over 971493.09 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:58:41,209 INFO [train.py:715] (6/8) Epoch 12, batch 23800, loss[loss=0.1291, simple_loss=0.2066, pruned_loss=0.02585, over 4765.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.0324, over 971826.71 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:59:20,123 INFO [train.py:715] (6/8) Epoch 12, batch 23850, loss[loss=0.1685, simple_loss=0.2385, pruned_loss=0.04923, over 4778.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2111, pruned_loss=0.03251, over 971314.21 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:59:59,715 INFO [train.py:715] (6/8) Epoch 12, batch 23900, loss[loss=0.1478, simple_loss=0.2284, pruned_loss=0.03359, over 4879.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03284, over 971775.18 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 13:00:39,435 INFO [train.py:715] (6/8) Epoch 12, batch 23950, loss[loss=0.1405, simple_loss=0.2111, pruned_loss=0.03489, over 4903.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03284, over 972851.08 frames.], batch size: 29, lr: 1.79e-04 +2022-05-07 13:01:18,255 INFO [train.py:715] (6/8) Epoch 12, batch 24000, loss[loss=0.1407, simple_loss=0.2107, pruned_loss=0.0354, over 4843.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2112, pruned_loss=0.03244, over 973148.86 frames.], batch size: 32, lr: 1.79e-04 +2022-05-07 13:01:18,256 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 13:01:27,803 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.1054, simple_loss=0.1895, pruned_loss=0.01071, over 914524.00 frames. 
+2022-05-07 13:02:06,831 INFO [train.py:715] (6/8) Epoch 12, batch 24050, loss[loss=0.1274, simple_loss=0.2119, pruned_loss=0.0215, over 4984.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.03235, over 973070.13 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 13:02:47,354 INFO [train.py:715] (6/8) Epoch 12, batch 24100, loss[loss=0.1704, simple_loss=0.2288, pruned_loss=0.05596, over 4963.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03199, over 972224.11 frames.], batch size: 35, lr: 1.79e-04 +2022-05-07 13:03:27,830 INFO [train.py:715] (6/8) Epoch 12, batch 24150, loss[loss=0.1478, simple_loss=0.2169, pruned_loss=0.0393, over 4755.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03131, over 972267.39 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:04:07,867 INFO [train.py:715] (6/8) Epoch 12, batch 24200, loss[loss=0.1355, simple_loss=0.2038, pruned_loss=0.03364, over 4755.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03143, over 972651.35 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:04:47,996 INFO [train.py:715] (6/8) Epoch 12, batch 24250, loss[loss=0.1507, simple_loss=0.2182, pruned_loss=0.0416, over 4785.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2098, pruned_loss=0.03177, over 972476.24 frames.], batch size: 14, lr: 1.79e-04 +2022-05-07 13:05:28,030 INFO [train.py:715] (6/8) Epoch 12, batch 24300, loss[loss=0.1256, simple_loss=0.2089, pruned_loss=0.0212, over 4827.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03178, over 971573.14 frames.], batch size: 27, lr: 1.79e-04 +2022-05-07 13:06:07,763 INFO [train.py:715] (6/8) Epoch 12, batch 24350, loss[loss=0.1283, simple_loss=0.2018, pruned_loss=0.02737, over 4758.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03195, over 972260.63 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:06:47,582 INFO [train.py:715] (6/8) Epoch 12, batch 24400, loss[loss=0.1376, simple_loss=0.214, pruned_loss=0.0306, over 4940.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03185, over 972095.44 frames.], batch size: 29, lr: 1.79e-04 +2022-05-07 13:07:27,541 INFO [train.py:715] (6/8) Epoch 12, batch 24450, loss[loss=0.1768, simple_loss=0.2563, pruned_loss=0.04863, over 4972.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03169, over 971640.76 frames.], batch size: 24, lr: 1.79e-04 +2022-05-07 13:08:07,315 INFO [train.py:715] (6/8) Epoch 12, batch 24500, loss[loss=0.1465, simple_loss=0.227, pruned_loss=0.03303, over 4810.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03214, over 971748.55 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:08:46,561 INFO [train.py:715] (6/8) Epoch 12, batch 24550, loss[loss=0.1394, simple_loss=0.2151, pruned_loss=0.03181, over 4928.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03214, over 972162.67 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:09:26,206 INFO [train.py:715] (6/8) Epoch 12, batch 24600, loss[loss=0.1459, simple_loss=0.2286, pruned_loss=0.03157, over 4827.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03193, over 972519.35 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:10:05,929 INFO [train.py:715] (6/8) Epoch 12, batch 24650, loss[loss=0.1411, simple_loss=0.209, pruned_loss=0.0366, over 4985.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2104, pruned_loss=0.03207, over 972146.76 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 
13:10:45,628 INFO [train.py:715] (6/8) Epoch 12, batch 24700, loss[loss=0.1189, simple_loss=0.1858, pruned_loss=0.02598, over 4962.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03245, over 972005.65 frames.], batch size: 35, lr: 1.79e-04 +2022-05-07 13:11:24,799 INFO [train.py:715] (6/8) Epoch 12, batch 24750, loss[loss=0.1339, simple_loss=0.2003, pruned_loss=0.03378, over 4789.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.0324, over 971788.94 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:12:05,001 INFO [train.py:715] (6/8) Epoch 12, batch 24800, loss[loss=0.1687, simple_loss=0.2492, pruned_loss=0.04405, over 4902.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03218, over 970639.78 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:12:44,866 INFO [train.py:715] (6/8) Epoch 12, batch 24850, loss[loss=0.1298, simple_loss=0.2036, pruned_loss=0.02801, over 4891.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03208, over 970704.36 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:13:24,111 INFO [train.py:715] (6/8) Epoch 12, batch 24900, loss[loss=0.2108, simple_loss=0.2646, pruned_loss=0.07855, over 4981.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03249, over 971669.22 frames.], batch size: 39, lr: 1.79e-04 +2022-05-07 13:14:03,442 INFO [train.py:715] (6/8) Epoch 12, batch 24950, loss[loss=0.1561, simple_loss=0.2366, pruned_loss=0.03781, over 4969.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03206, over 972423.00 frames.], batch size: 35, lr: 1.79e-04 +2022-05-07 13:14:42,356 INFO [train.py:715] (6/8) Epoch 12, batch 25000, loss[loss=0.1539, simple_loss=0.2361, pruned_loss=0.03585, over 4746.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.0318, over 972034.97 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:15:20,412 INFO [train.py:715] (6/8) Epoch 12, batch 25050, loss[loss=0.1587, simple_loss=0.2283, pruned_loss=0.04455, over 4891.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.03175, over 971764.75 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:15:58,460 INFO [train.py:715] (6/8) Epoch 12, batch 25100, loss[loss=0.1444, simple_loss=0.2322, pruned_loss=0.0283, over 4892.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.0316, over 971329.23 frames.], batch size: 39, lr: 1.79e-04 +2022-05-07 13:16:36,852 INFO [train.py:715] (6/8) Epoch 12, batch 25150, loss[loss=0.1297, simple_loss=0.2024, pruned_loss=0.02851, over 4782.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03158, over 971576.97 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:17:15,106 INFO [train.py:715] (6/8) Epoch 12, batch 25200, loss[loss=0.1304, simple_loss=0.1968, pruned_loss=0.03203, over 4874.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2104, pruned_loss=0.03147, over 971646.57 frames.], batch size: 30, lr: 1.79e-04 +2022-05-07 13:17:52,726 INFO [train.py:715] (6/8) Epoch 12, batch 25250, loss[loss=0.1361, simple_loss=0.2028, pruned_loss=0.03467, over 4966.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03156, over 971857.82 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:18:30,734 INFO [train.py:715] (6/8) Epoch 12, batch 25300, loss[loss=0.1222, simple_loss=0.1948, pruned_loss=0.02483, over 4907.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03173, over 971893.06 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:19:08,813 
INFO [train.py:715] (6/8) Epoch 12, batch 25350, loss[loss=0.1426, simple_loss=0.2007, pruned_loss=0.04228, over 4699.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03151, over 971341.73 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:19:47,793 INFO [train.py:715] (6/8) Epoch 12, batch 25400, loss[loss=0.1098, simple_loss=0.1727, pruned_loss=0.02345, over 4705.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2095, pruned_loss=0.03182, over 971717.90 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:20:26,683 INFO [train.py:715] (6/8) Epoch 12, batch 25450, loss[loss=0.1461, simple_loss=0.222, pruned_loss=0.03513, over 4921.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2097, pruned_loss=0.03156, over 971897.82 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:21:06,566 INFO [train.py:715] (6/8) Epoch 12, batch 25500, loss[loss=0.1127, simple_loss=0.1952, pruned_loss=0.01506, over 4805.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03169, over 972034.67 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:21:45,633 INFO [train.py:715] (6/8) Epoch 12, batch 25550, loss[loss=0.1326, simple_loss=0.2077, pruned_loss=0.02874, over 4789.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2098, pruned_loss=0.03166, over 971400.14 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:22:23,736 INFO [train.py:715] (6/8) Epoch 12, batch 25600, loss[loss=0.1318, simple_loss=0.1989, pruned_loss=0.03234, over 4770.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03192, over 971257.17 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:23:02,066 INFO [train.py:715] (6/8) Epoch 12, batch 25650, loss[loss=0.1437, simple_loss=0.2215, pruned_loss=0.03294, over 4889.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03182, over 971177.74 frames.], batch size: 22, lr: 1.79e-04 +2022-05-07 13:23:40,757 INFO [train.py:715] (6/8) Epoch 12, batch 25700, loss[loss=0.1123, simple_loss=0.18, pruned_loss=0.02234, over 4789.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03271, over 970693.78 frames.], batch size: 12, lr: 1.79e-04 +2022-05-07 13:24:19,544 INFO [train.py:715] (6/8) Epoch 12, batch 25750, loss[loss=0.1534, simple_loss=0.2326, pruned_loss=0.03707, over 4962.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03289, over 971843.81 frames.], batch size: 35, lr: 1.79e-04 +2022-05-07 13:24:58,012 INFO [train.py:715] (6/8) Epoch 12, batch 25800, loss[loss=0.1585, simple_loss=0.2381, pruned_loss=0.03948, over 4792.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2122, pruned_loss=0.03271, over 971568.40 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:25:36,924 INFO [train.py:715] (6/8) Epoch 12, batch 25850, loss[loss=0.1022, simple_loss=0.1702, pruned_loss=0.01707, over 4968.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2115, pruned_loss=0.03252, over 972106.46 frames.], batch size: 24, lr: 1.79e-04 +2022-05-07 13:26:15,479 INFO [train.py:715] (6/8) Epoch 12, batch 25900, loss[loss=0.1745, simple_loss=0.2332, pruned_loss=0.05792, over 4936.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03203, over 971981.89 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:26:53,772 INFO [train.py:715] (6/8) Epoch 12, batch 25950, loss[loss=0.1208, simple_loss=0.1909, pruned_loss=0.0254, over 4851.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2101, pruned_loss=0.03184, over 971067.00 frames.], batch size: 13, lr: 1.79e-04 +2022-05-07 13:27:31,254 INFO 
[train.py:715] (6/8) Epoch 12, batch 26000, loss[loss=0.1711, simple_loss=0.2358, pruned_loss=0.05325, over 4709.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2099, pruned_loss=0.03176, over 971377.00 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:28:09,532 INFO [train.py:715] (6/8) Epoch 12, batch 26050, loss[loss=0.1093, simple_loss=0.1936, pruned_loss=0.01245, over 4984.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.03164, over 970598.80 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 13:28:48,388 INFO [train.py:715] (6/8) Epoch 12, batch 26100, loss[loss=0.1414, simple_loss=0.2259, pruned_loss=0.02842, over 4763.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03207, over 971299.15 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:29:27,204 INFO [train.py:715] (6/8) Epoch 12, batch 26150, loss[loss=0.1593, simple_loss=0.2356, pruned_loss=0.04149, over 4742.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03196, over 971886.86 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:30:06,156 INFO [train.py:715] (6/8) Epoch 12, batch 26200, loss[loss=0.1452, simple_loss=0.2224, pruned_loss=0.03402, over 4898.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03198, over 971864.02 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:30:44,509 INFO [train.py:715] (6/8) Epoch 12, batch 26250, loss[loss=0.1294, simple_loss=0.206, pruned_loss=0.02642, over 4886.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03142, over 971840.48 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:31:23,015 INFO [train.py:715] (6/8) Epoch 12, batch 26300, loss[loss=0.1481, simple_loss=0.2326, pruned_loss=0.03183, over 4867.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.0317, over 972305.11 frames.], batch size: 22, lr: 1.79e-04 +2022-05-07 13:32:02,158 INFO [train.py:715] (6/8) Epoch 12, batch 26350, loss[loss=0.1464, simple_loss=0.2252, pruned_loss=0.03378, over 4938.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.03186, over 972121.31 frames.], batch size: 29, lr: 1.79e-04 +2022-05-07 13:32:40,220 INFO [train.py:715] (6/8) Epoch 12, batch 26400, loss[loss=0.1315, simple_loss=0.2099, pruned_loss=0.02658, over 4808.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03155, over 972251.61 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 13:33:18,355 INFO [train.py:715] (6/8) Epoch 12, batch 26450, loss[loss=0.1547, simple_loss=0.2451, pruned_loss=0.03214, over 4791.00 frames.], tot_loss[loss=0.1371, simple_loss=0.211, pruned_loss=0.03161, over 972542.32 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:33:56,293 INFO [train.py:715] (6/8) Epoch 12, batch 26500, loss[loss=0.1242, simple_loss=0.1899, pruned_loss=0.0293, over 4749.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2114, pruned_loss=0.0316, over 973031.95 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:34:34,592 INFO [train.py:715] (6/8) Epoch 12, batch 26550, loss[loss=0.136, simple_loss=0.2045, pruned_loss=0.03372, over 4881.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03115, over 972740.35 frames.], batch size: 22, lr: 1.79e-04 +2022-05-07 13:35:12,933 INFO [train.py:715] (6/8) Epoch 12, batch 26600, loss[loss=0.1384, simple_loss=0.2173, pruned_loss=0.02973, over 4982.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2109, pruned_loss=0.03145, over 973192.66 frames.], batch size: 24, lr: 1.79e-04 +2022-05-07 13:35:51,455 INFO 
[train.py:715] (6/8) Epoch 12, batch 26650, loss[loss=0.1183, simple_loss=0.1901, pruned_loss=0.02329, over 4800.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03136, over 972745.37 frames.], batch size: 24, lr: 1.79e-04 +2022-05-07 13:36:30,040 INFO [train.py:715] (6/8) Epoch 12, batch 26700, loss[loss=0.1548, simple_loss=0.2323, pruned_loss=0.03868, over 4813.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2104, pruned_loss=0.03116, over 972330.51 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:37:08,438 INFO [train.py:715] (6/8) Epoch 12, batch 26750, loss[loss=0.1284, simple_loss=0.2009, pruned_loss=0.028, over 4888.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03139, over 972063.20 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:37:47,890 INFO [train.py:715] (6/8) Epoch 12, batch 26800, loss[loss=0.1196, simple_loss=0.2063, pruned_loss=0.01644, over 4874.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03199, over 973208.47 frames.], batch size: 20, lr: 1.79e-04 +2022-05-07 13:38:27,720 INFO [train.py:715] (6/8) Epoch 12, batch 26850, loss[loss=0.1251, simple_loss=0.2073, pruned_loss=0.02146, over 4774.00 frames.], tot_loss[loss=0.1373, simple_loss=0.211, pruned_loss=0.03185, over 973149.02 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:39:07,110 INFO [train.py:715] (6/8) Epoch 12, batch 26900, loss[loss=0.1025, simple_loss=0.1769, pruned_loss=0.014, over 4849.00 frames.], tot_loss[loss=0.1362, simple_loss=0.21, pruned_loss=0.03122, over 972203.96 frames.], batch size: 12, lr: 1.79e-04 +2022-05-07 13:39:45,936 INFO [train.py:715] (6/8) Epoch 12, batch 26950, loss[loss=0.1596, simple_loss=0.225, pruned_loss=0.04711, over 4750.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.03162, over 973100.07 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:40:25,468 INFO [train.py:715] (6/8) Epoch 12, batch 27000, loss[loss=0.1143, simple_loss=0.1862, pruned_loss=0.02118, over 4830.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03129, over 972653.89 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:40:25,469 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 13:40:37,912 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.1054, simple_loss=0.1894, pruned_loss=0.01072, over 914524.00 frames. 
+2022-05-07 13:41:17,233 INFO [train.py:715] (6/8) Epoch 12, batch 27050, loss[loss=0.1447, simple_loss=0.2171, pruned_loss=0.03614, over 4825.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2094, pruned_loss=0.03163, over 972960.74 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:41:55,463 INFO [train.py:715] (6/8) Epoch 12, batch 27100, loss[loss=0.14, simple_loss=0.2042, pruned_loss=0.03789, over 4970.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2099, pruned_loss=0.03175, over 972788.64 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:42:33,820 INFO [train.py:715] (6/8) Epoch 12, batch 27150, loss[loss=0.1551, simple_loss=0.2146, pruned_loss=0.04781, over 4696.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2092, pruned_loss=0.03123, over 972471.57 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:43:12,676 INFO [train.py:715] (6/8) Epoch 12, batch 27200, loss[loss=0.1029, simple_loss=0.1832, pruned_loss=0.01133, over 4944.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03125, over 972528.36 frames.], batch size: 23, lr: 1.79e-04 +2022-05-07 13:43:50,977 INFO [train.py:715] (6/8) Epoch 12, batch 27250, loss[loss=0.1627, simple_loss=0.2155, pruned_loss=0.05491, over 4688.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.03164, over 972645.42 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:44:29,601 INFO [train.py:715] (6/8) Epoch 12, batch 27300, loss[loss=0.1332, simple_loss=0.2115, pruned_loss=0.02744, over 4809.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2097, pruned_loss=0.0316, over 971606.52 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:45:08,185 INFO [train.py:715] (6/8) Epoch 12, batch 27350, loss[loss=0.163, simple_loss=0.2394, pruned_loss=0.04333, over 4754.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.03105, over 971829.28 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:45:47,170 INFO [train.py:715] (6/8) Epoch 12, batch 27400, loss[loss=0.146, simple_loss=0.2167, pruned_loss=0.03766, over 4814.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.0313, over 971979.39 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 13:46:25,850 INFO [train.py:715] (6/8) Epoch 12, batch 27450, loss[loss=0.1459, simple_loss=0.2328, pruned_loss=0.02947, over 4946.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03184, over 972249.21 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:47:04,343 INFO [train.py:715] (6/8) Epoch 12, batch 27500, loss[loss=0.1416, simple_loss=0.2161, pruned_loss=0.03357, over 4931.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2111, pruned_loss=0.0317, over 972123.58 frames.], batch size: 23, lr: 1.79e-04 +2022-05-07 13:47:43,117 INFO [train.py:715] (6/8) Epoch 12, batch 27550, loss[loss=0.1469, simple_loss=0.2224, pruned_loss=0.03568, over 4822.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2121, pruned_loss=0.03222, over 971927.19 frames.], batch size: 26, lr: 1.79e-04 +2022-05-07 13:48:21,798 INFO [train.py:715] (6/8) Epoch 12, batch 27600, loss[loss=0.115, simple_loss=0.1908, pruned_loss=0.01956, over 4873.00 frames.], tot_loss[loss=0.138, simple_loss=0.2118, pruned_loss=0.0321, over 972261.44 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:49:00,948 INFO [train.py:715] (6/8) Epoch 12, batch 27650, loss[loss=0.1141, simple_loss=0.1782, pruned_loss=0.02499, over 4960.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2124, pruned_loss=0.03241, over 972823.38 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 
13:49:39,559 INFO [train.py:715] (6/8) Epoch 12, batch 27700, loss[loss=0.1447, simple_loss=0.2289, pruned_loss=0.03025, over 4959.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.03237, over 973667.10 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:50:18,423 INFO [train.py:715] (6/8) Epoch 12, batch 27750, loss[loss=0.1362, simple_loss=0.2197, pruned_loss=0.02629, over 4865.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2113, pruned_loss=0.03215, over 973205.71 frames.], batch size: 20, lr: 1.79e-04 +2022-05-07 13:50:56,328 INFO [train.py:715] (6/8) Epoch 12, batch 27800, loss[loss=0.1541, simple_loss=0.2266, pruned_loss=0.04083, over 4791.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2111, pruned_loss=0.03233, over 972077.40 frames.], batch size: 24, lr: 1.79e-04 +2022-05-07 13:51:34,005 INFO [train.py:715] (6/8) Epoch 12, batch 27850, loss[loss=0.114, simple_loss=0.1943, pruned_loss=0.01682, over 4958.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03197, over 972206.10 frames.], batch size: 29, lr: 1.79e-04 +2022-05-07 13:52:12,422 INFO [train.py:715] (6/8) Epoch 12, batch 27900, loss[loss=0.1269, simple_loss=0.2076, pruned_loss=0.0231, over 4822.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2111, pruned_loss=0.03214, over 972567.54 frames.], batch size: 26, lr: 1.79e-04 +2022-05-07 13:52:50,385 INFO [train.py:715] (6/8) Epoch 12, batch 27950, loss[loss=0.1439, simple_loss=0.2337, pruned_loss=0.02699, over 4831.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03199, over 972009.33 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:53:28,677 INFO [train.py:715] (6/8) Epoch 12, batch 28000, loss[loss=0.141, simple_loss=0.2122, pruned_loss=0.0349, over 4867.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.03191, over 971618.26 frames.], batch size: 32, lr: 1.79e-04 +2022-05-07 13:54:06,333 INFO [train.py:715] (6/8) Epoch 12, batch 28050, loss[loss=0.1258, simple_loss=0.2086, pruned_loss=0.02147, over 4820.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03224, over 971838.52 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 13:54:44,517 INFO [train.py:715] (6/8) Epoch 12, batch 28100, loss[loss=0.1698, simple_loss=0.2459, pruned_loss=0.04687, over 4789.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03219, over 972221.64 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:55:22,260 INFO [train.py:715] (6/8) Epoch 12, batch 28150, loss[loss=0.1668, simple_loss=0.2419, pruned_loss=0.04586, over 4910.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03233, over 973205.95 frames.], batch size: 23, lr: 1.79e-04 +2022-05-07 13:56:00,667 INFO [train.py:715] (6/8) Epoch 12, batch 28200, loss[loss=0.1311, simple_loss=0.2103, pruned_loss=0.02593, over 4984.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2104, pruned_loss=0.03207, over 972078.57 frames.], batch size: 28, lr: 1.79e-04 +2022-05-07 13:56:39,078 INFO [train.py:715] (6/8) Epoch 12, batch 28250, loss[loss=0.1756, simple_loss=0.2396, pruned_loss=0.05575, over 4987.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.03234, over 973154.87 frames.], batch size: 31, lr: 1.79e-04 +2022-05-07 13:57:17,044 INFO [train.py:715] (6/8) Epoch 12, batch 28300, loss[loss=0.1443, simple_loss=0.2183, pruned_loss=0.0351, over 4962.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03257, over 972749.81 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 13:57:55,847 
INFO [train.py:715] (6/8) Epoch 12, batch 28350, loss[loss=0.1608, simple_loss=0.2344, pruned_loss=0.04356, over 4860.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2109, pruned_loss=0.03276, over 973314.98 frames.], batch size: 32, lr: 1.79e-04 +2022-05-07 13:58:33,886 INFO [train.py:715] (6/8) Epoch 12, batch 28400, loss[loss=0.1735, simple_loss=0.245, pruned_loss=0.05103, over 4850.00 frames.], tot_loss[loss=0.1382, simple_loss=0.211, pruned_loss=0.03272, over 972552.06 frames.], batch size: 30, lr: 1.79e-04 +2022-05-07 13:59:12,073 INFO [train.py:715] (6/8) Epoch 12, batch 28450, loss[loss=0.1289, simple_loss=0.2005, pruned_loss=0.02863, over 4867.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2105, pruned_loss=0.03237, over 972493.59 frames.], batch size: 20, lr: 1.79e-04 +2022-05-07 13:59:49,933 INFO [train.py:715] (6/8) Epoch 12, batch 28500, loss[loss=0.1439, simple_loss=0.2163, pruned_loss=0.0357, over 4817.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2099, pruned_loss=0.03196, over 972673.40 frames.], batch size: 27, lr: 1.79e-04 +2022-05-07 14:00:27,901 INFO [train.py:715] (6/8) Epoch 12, batch 28550, loss[loss=0.1206, simple_loss=0.1939, pruned_loss=0.02361, over 4773.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03186, over 973123.60 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 14:01:06,319 INFO [train.py:715] (6/8) Epoch 12, batch 28600, loss[loss=0.116, simple_loss=0.2012, pruned_loss=0.01535, over 4813.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2098, pruned_loss=0.03167, over 973100.74 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 14:01:44,217 INFO [train.py:715] (6/8) Epoch 12, batch 28650, loss[loss=0.1557, simple_loss=0.2205, pruned_loss=0.04543, over 4922.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03176, over 974222.24 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 14:02:23,315 INFO [train.py:715] (6/8) Epoch 12, batch 28700, loss[loss=0.136, simple_loss=0.2061, pruned_loss=0.03294, over 4895.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.0318, over 974297.16 frames.], batch size: 32, lr: 1.79e-04 +2022-05-07 14:03:01,820 INFO [train.py:715] (6/8) Epoch 12, batch 28750, loss[loss=0.1344, simple_loss=0.2022, pruned_loss=0.03327, over 4976.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.03169, over 974373.38 frames.], batch size: 35, lr: 1.79e-04 +2022-05-07 14:03:40,787 INFO [train.py:715] (6/8) Epoch 12, batch 28800, loss[loss=0.1099, simple_loss=0.1877, pruned_loss=0.01599, over 4930.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.032, over 974221.42 frames.], batch size: 23, lr: 1.79e-04 +2022-05-07 14:04:18,692 INFO [train.py:715] (6/8) Epoch 12, batch 28850, loss[loss=0.1317, simple_loss=0.2047, pruned_loss=0.02929, over 4853.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03174, over 973991.53 frames.], batch size: 20, lr: 1.79e-04 +2022-05-07 14:04:57,049 INFO [train.py:715] (6/8) Epoch 12, batch 28900, loss[loss=0.1731, simple_loss=0.2455, pruned_loss=0.05029, over 4967.00 frames.], tot_loss[loss=0.138, simple_loss=0.2118, pruned_loss=0.03212, over 972216.82 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:05:35,806 INFO [train.py:715] (6/8) Epoch 12, batch 28950, loss[loss=0.1601, simple_loss=0.2325, pruned_loss=0.04381, over 4824.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03185, over 971713.98 frames.], batch size: 26, lr: 1.78e-04 +2022-05-07 14:06:14,161 INFO [train.py:715] 
(6/8) Epoch 12, batch 29000, loss[loss=0.117, simple_loss=0.1976, pruned_loss=0.01819, over 4683.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03134, over 970739.85 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:06:53,434 INFO [train.py:715] (6/8) Epoch 12, batch 29050, loss[loss=0.136, simple_loss=0.2188, pruned_loss=0.02654, over 4815.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2109, pruned_loss=0.03135, over 970522.20 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:07:31,901 INFO [train.py:715] (6/8) Epoch 12, batch 29100, loss[loss=0.1337, simple_loss=0.2122, pruned_loss=0.02758, over 4798.00 frames.], tot_loss[loss=0.137, simple_loss=0.211, pruned_loss=0.03147, over 970632.04 frames.], batch size: 24, lr: 1.78e-04 +2022-05-07 14:08:10,570 INFO [train.py:715] (6/8) Epoch 12, batch 29150, loss[loss=0.1537, simple_loss=0.2248, pruned_loss=0.04134, over 4811.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2117, pruned_loss=0.03194, over 970996.18 frames.], batch size: 26, lr: 1.78e-04 +2022-05-07 14:08:48,998 INFO [train.py:715] (6/8) Epoch 12, batch 29200, loss[loss=0.1093, simple_loss=0.1745, pruned_loss=0.02206, over 4816.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03191, over 971463.81 frames.], batch size: 12, lr: 1.78e-04 +2022-05-07 14:09:27,675 INFO [train.py:715] (6/8) Epoch 12, batch 29250, loss[loss=0.1207, simple_loss=0.1961, pruned_loss=0.02263, over 4867.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03169, over 971355.23 frames.], batch size: 13, lr: 1.78e-04 +2022-05-07 14:10:05,809 INFO [train.py:715] (6/8) Epoch 12, batch 29300, loss[loss=0.1555, simple_loss=0.2304, pruned_loss=0.04032, over 4842.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2111, pruned_loss=0.03193, over 971648.11 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:10:43,234 INFO [train.py:715] (6/8) Epoch 12, batch 29350, loss[loss=0.173, simple_loss=0.2339, pruned_loss=0.05602, over 4977.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.03198, over 971742.77 frames.], batch size: 39, lr: 1.78e-04 +2022-05-07 14:11:22,340 INFO [train.py:715] (6/8) Epoch 12, batch 29400, loss[loss=0.15, simple_loss=0.2174, pruned_loss=0.04126, over 4826.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2111, pruned_loss=0.03211, over 970852.62 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:12:00,594 INFO [train.py:715] (6/8) Epoch 12, batch 29450, loss[loss=0.1905, simple_loss=0.2561, pruned_loss=0.06244, over 4783.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03198, over 972220.83 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:12:38,754 INFO [train.py:715] (6/8) Epoch 12, batch 29500, loss[loss=0.147, simple_loss=0.2186, pruned_loss=0.03764, over 4798.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03184, over 971909.30 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:13:16,880 INFO [train.py:715] (6/8) Epoch 12, batch 29550, loss[loss=0.1471, simple_loss=0.2183, pruned_loss=0.03797, over 4981.00 frames.], tot_loss[loss=0.137, simple_loss=0.2102, pruned_loss=0.03191, over 972770.97 frames.], batch size: 28, lr: 1.78e-04 +2022-05-07 14:13:55,811 INFO [train.py:715] (6/8) Epoch 12, batch 29600, loss[loss=0.128, simple_loss=0.2055, pruned_loss=0.02521, over 4921.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2109, pruned_loss=0.03239, over 972854.94 frames.], batch size: 23, lr: 1.78e-04 +2022-05-07 14:14:34,035 INFO [train.py:715] (6/8) Epoch 12, 
batch 29650, loss[loss=0.1316, simple_loss=0.2014, pruned_loss=0.03087, over 4966.00 frames.], tot_loss[loss=0.1369, simple_loss=0.21, pruned_loss=0.0319, over 973284.66 frames.], batch size: 35, lr: 1.78e-04 +2022-05-07 14:15:11,742 INFO [train.py:715] (6/8) Epoch 12, batch 29700, loss[loss=0.1429, simple_loss=0.2178, pruned_loss=0.03399, over 4988.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2093, pruned_loss=0.03176, over 972147.56 frames.], batch size: 26, lr: 1.78e-04 +2022-05-07 14:15:51,282 INFO [train.py:715] (6/8) Epoch 12, batch 29750, loss[loss=0.119, simple_loss=0.1855, pruned_loss=0.0263, over 4863.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2092, pruned_loss=0.03147, over 971948.77 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:16:30,393 INFO [train.py:715] (6/8) Epoch 12, batch 29800, loss[loss=0.1342, simple_loss=0.199, pruned_loss=0.03469, over 4837.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03155, over 971675.82 frames.], batch size: 30, lr: 1.78e-04 +2022-05-07 14:17:09,205 INFO [train.py:715] (6/8) Epoch 12, batch 29850, loss[loss=0.1394, simple_loss=0.2154, pruned_loss=0.03167, over 4931.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2098, pruned_loss=0.03187, over 971229.10 frames.], batch size: 35, lr: 1.78e-04 +2022-05-07 14:17:47,534 INFO [train.py:715] (6/8) Epoch 12, batch 29900, loss[loss=0.113, simple_loss=0.1911, pruned_loss=0.01742, over 4768.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03196, over 970710.64 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 14:18:26,386 INFO [train.py:715] (6/8) Epoch 12, batch 29950, loss[loss=0.1742, simple_loss=0.2416, pruned_loss=0.05336, over 4755.00 frames.], tot_loss[loss=0.138, simple_loss=0.2112, pruned_loss=0.03234, over 971364.90 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:19:04,509 INFO [train.py:715] (6/8) Epoch 12, batch 30000, loss[loss=0.1254, simple_loss=0.2039, pruned_loss=0.02345, over 4967.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03153, over 972431.35 frames.], batch size: 24, lr: 1.78e-04 +2022-05-07 14:19:04,509 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 14:19:14,012 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.1054, simple_loss=0.1894, pruned_loss=0.01072, over 914524.00 frames. 
+2022-05-07 14:19:52,927 INFO [train.py:715] (6/8) Epoch 12, batch 30050, loss[loss=0.1253, simple_loss=0.2098, pruned_loss=0.02044, over 4830.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.03159, over 972273.95 frames.], batch size: 27, lr: 1.78e-04 +2022-05-07 14:20:31,330 INFO [train.py:715] (6/8) Epoch 12, batch 30100, loss[loss=0.1392, simple_loss=0.2051, pruned_loss=0.03668, over 4944.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03215, over 973638.69 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 14:21:10,496 INFO [train.py:715] (6/8) Epoch 12, batch 30150, loss[loss=0.1117, simple_loss=0.1904, pruned_loss=0.01648, over 4968.00 frames.], tot_loss[loss=0.1373, simple_loss=0.211, pruned_loss=0.03178, over 973554.82 frames.], batch size: 28, lr: 1.78e-04 +2022-05-07 14:21:48,959 INFO [train.py:715] (6/8) Epoch 12, batch 30200, loss[loss=0.137, simple_loss=0.2092, pruned_loss=0.03242, over 4850.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2105, pruned_loss=0.03139, over 973942.60 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 14:22:28,439 INFO [train.py:715] (6/8) Epoch 12, batch 30250, loss[loss=0.1598, simple_loss=0.2318, pruned_loss=0.04387, over 4881.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03142, over 974297.30 frames.], batch size: 22, lr: 1.78e-04 +2022-05-07 14:23:07,603 INFO [train.py:715] (6/8) Epoch 12, batch 30300, loss[loss=0.1411, simple_loss=0.2107, pruned_loss=0.03579, over 4899.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03128, over 974888.30 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:23:45,573 INFO [train.py:715] (6/8) Epoch 12, batch 30350, loss[loss=0.1578, simple_loss=0.2279, pruned_loss=0.04383, over 4839.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.0317, over 974975.05 frames.], batch size: 26, lr: 1.78e-04 +2022-05-07 14:24:23,562 INFO [train.py:715] (6/8) Epoch 12, batch 30400, loss[loss=0.1556, simple_loss=0.2323, pruned_loss=0.03947, over 4865.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2115, pruned_loss=0.03194, over 974668.03 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 14:25:01,297 INFO [train.py:715] (6/8) Epoch 12, batch 30450, loss[loss=0.1427, simple_loss=0.2034, pruned_loss=0.04098, over 4843.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.03195, over 974057.65 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 14:25:39,283 INFO [train.py:715] (6/8) Epoch 12, batch 30500, loss[loss=0.1421, simple_loss=0.2123, pruned_loss=0.03589, over 4924.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03205, over 974161.01 frames.], batch size: 29, lr: 1.78e-04 +2022-05-07 14:26:17,288 INFO [train.py:715] (6/8) Epoch 12, batch 30550, loss[loss=0.1311, simple_loss=0.2051, pruned_loss=0.02852, over 4837.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2114, pruned_loss=0.03181, over 973620.01 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:26:55,237 INFO [train.py:715] (6/8) Epoch 12, batch 30600, loss[loss=0.1421, simple_loss=0.221, pruned_loss=0.0316, over 4859.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03152, over 973825.63 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 14:27:32,191 INFO [train.py:715] (6/8) Epoch 12, batch 30650, loss[loss=0.1422, simple_loss=0.2076, pruned_loss=0.03845, over 4968.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03147, over 973703.44 frames.], batch size: 35, lr: 1.78e-04 
+2022-05-07 14:28:10,734 INFO [train.py:715] (6/8) Epoch 12, batch 30700, loss[loss=0.1307, simple_loss=0.1933, pruned_loss=0.03402, over 4800.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03145, over 973128.98 frames.], batch size: 12, lr: 1.78e-04 +2022-05-07 14:28:48,656 INFO [train.py:715] (6/8) Epoch 12, batch 30750, loss[loss=0.1203, simple_loss=0.1993, pruned_loss=0.02067, over 4829.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03105, over 973442.64 frames.], batch size: 13, lr: 1.78e-04 +2022-05-07 14:29:27,167 INFO [train.py:715] (6/8) Epoch 12, batch 30800, loss[loss=0.1398, simple_loss=0.2179, pruned_loss=0.03085, over 4825.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2105, pruned_loss=0.03133, over 973238.10 frames.], batch size: 26, lr: 1.78e-04 +2022-05-07 14:30:05,815 INFO [train.py:715] (6/8) Epoch 12, batch 30850, loss[loss=0.1429, simple_loss=0.2288, pruned_loss=0.02853, over 4965.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2106, pruned_loss=0.03124, over 973362.42 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:30:45,016 INFO [train.py:715] (6/8) Epoch 12, batch 30900, loss[loss=0.148, simple_loss=0.2194, pruned_loss=0.03832, over 4989.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03116, over 973561.17 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:31:23,359 INFO [train.py:715] (6/8) Epoch 12, batch 30950, loss[loss=0.1512, simple_loss=0.2329, pruned_loss=0.03474, over 4847.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03158, over 973079.36 frames.], batch size: 13, lr: 1.78e-04 +2022-05-07 14:32:02,065 INFO [train.py:715] (6/8) Epoch 12, batch 31000, loss[loss=0.1105, simple_loss=0.1874, pruned_loss=0.01678, over 4925.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2097, pruned_loss=0.03122, over 973256.86 frames.], batch size: 29, lr: 1.78e-04 +2022-05-07 14:32:41,210 INFO [train.py:715] (6/8) Epoch 12, batch 31050, loss[loss=0.1376, simple_loss=0.2037, pruned_loss=0.0357, over 4946.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03128, over 973299.07 frames.], batch size: 39, lr: 1.78e-04 +2022-05-07 14:33:19,691 INFO [train.py:715] (6/8) Epoch 12, batch 31100, loss[loss=0.1364, simple_loss=0.2111, pruned_loss=0.03084, over 4857.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03163, over 973264.53 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:33:57,507 INFO [train.py:715] (6/8) Epoch 12, batch 31150, loss[loss=0.1503, simple_loss=0.2364, pruned_loss=0.03215, over 4787.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03212, over 973223.63 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:34:36,504 INFO [train.py:715] (6/8) Epoch 12, batch 31200, loss[loss=0.1372, simple_loss=0.2058, pruned_loss=0.0343, over 4900.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2101, pruned_loss=0.03183, over 973023.60 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:35:15,344 INFO [train.py:715] (6/8) Epoch 12, batch 31250, loss[loss=0.1207, simple_loss=0.1906, pruned_loss=0.02543, over 4818.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03175, over 972606.45 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:35:54,048 INFO [train.py:715] (6/8) Epoch 12, batch 31300, loss[loss=0.1428, simple_loss=0.2157, pruned_loss=0.03495, over 4872.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03167, over 973321.39 frames.], batch size: 34, lr: 1.78e-04 +2022-05-07 
14:36:32,568 INFO [train.py:715] (6/8) Epoch 12, batch 31350, loss[loss=0.1222, simple_loss=0.1975, pruned_loss=0.02347, over 4880.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03185, over 973315.77 frames.], batch size: 22, lr: 1.78e-04 +2022-05-07 14:37:11,736 INFO [train.py:715] (6/8) Epoch 12, batch 31400, loss[loss=0.1521, simple_loss=0.2157, pruned_loss=0.04427, over 4988.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03202, over 973267.90 frames.], batch size: 25, lr: 1.78e-04 +2022-05-07 14:37:50,139 INFO [train.py:715] (6/8) Epoch 12, batch 31450, loss[loss=0.1572, simple_loss=0.2251, pruned_loss=0.0447, over 4878.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2106, pruned_loss=0.03217, over 974106.77 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:38:28,380 INFO [train.py:715] (6/8) Epoch 12, batch 31500, loss[loss=0.132, simple_loss=0.1988, pruned_loss=0.03256, over 4804.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03211, over 973303.58 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 14:39:06,659 INFO [train.py:715] (6/8) Epoch 12, batch 31550, loss[loss=0.1363, simple_loss=0.2081, pruned_loss=0.03224, over 4817.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03175, over 973415.70 frames.], batch size: 25, lr: 1.78e-04 +2022-05-07 14:39:45,222 INFO [train.py:715] (6/8) Epoch 12, batch 31600, loss[loss=0.1299, simple_loss=0.2054, pruned_loss=0.02723, over 4910.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03158, over 972534.37 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:40:22,892 INFO [train.py:715] (6/8) Epoch 12, batch 31650, loss[loss=0.128, simple_loss=0.1925, pruned_loss=0.0318, over 4812.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03152, over 972352.05 frames.], batch size: 12, lr: 1.78e-04 +2022-05-07 14:41:00,518 INFO [train.py:715] (6/8) Epoch 12, batch 31700, loss[loss=0.1462, simple_loss=0.2332, pruned_loss=0.02963, over 4813.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03171, over 972370.65 frames.], batch size: 25, lr: 1.78e-04 +2022-05-07 14:41:38,633 INFO [train.py:715] (6/8) Epoch 12, batch 31750, loss[loss=0.1239, simple_loss=0.2015, pruned_loss=0.02316, over 4826.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03165, over 973462.59 frames.], batch size: 30, lr: 1.78e-04 +2022-05-07 14:42:16,747 INFO [train.py:715] (6/8) Epoch 12, batch 31800, loss[loss=0.1267, simple_loss=0.2106, pruned_loss=0.02138, over 4932.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2104, pruned_loss=0.03211, over 973549.35 frames.], batch size: 39, lr: 1.78e-04 +2022-05-07 14:42:54,699 INFO [train.py:715] (6/8) Epoch 12, batch 31850, loss[loss=0.1283, simple_loss=0.1953, pruned_loss=0.0307, over 4866.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03184, over 973437.55 frames.], batch size: 32, lr: 1.78e-04 +2022-05-07 14:43:32,404 INFO [train.py:715] (6/8) Epoch 12, batch 31900, loss[loss=0.1251, simple_loss=0.2047, pruned_loss=0.02276, over 4942.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03155, over 974140.73 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 14:44:10,697 INFO [train.py:715] (6/8) Epoch 12, batch 31950, loss[loss=0.1259, simple_loss=0.1862, pruned_loss=0.03281, over 4816.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2099, pruned_loss=0.03174, over 973322.64 frames.], batch size: 13, lr: 1.78e-04 +2022-05-07 14:44:48,297 
INFO [train.py:715] (6/8) Epoch 12, batch 32000, loss[loss=0.1169, simple_loss=0.1806, pruned_loss=0.02658, over 4762.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03124, over 973089.85 frames.], batch size: 12, lr: 1.78e-04 +2022-05-07 14:45:26,163 INFO [train.py:715] (6/8) Epoch 12, batch 32050, loss[loss=0.1313, simple_loss=0.2083, pruned_loss=0.02714, over 4896.00 frames.], tot_loss[loss=0.1359, simple_loss=0.209, pruned_loss=0.03144, over 973556.42 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 14:46:04,021 INFO [train.py:715] (6/8) Epoch 12, batch 32100, loss[loss=0.1473, simple_loss=0.2135, pruned_loss=0.04057, over 4846.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03105, over 973067.27 frames.], batch size: 30, lr: 1.78e-04 +2022-05-07 14:46:42,430 INFO [train.py:715] (6/8) Epoch 12, batch 32150, loss[loss=0.1483, simple_loss=0.2218, pruned_loss=0.03736, over 4787.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2093, pruned_loss=0.03123, over 972557.98 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:47:20,025 INFO [train.py:715] (6/8) Epoch 12, batch 32200, loss[loss=0.1485, simple_loss=0.2288, pruned_loss=0.03406, over 4888.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03151, over 973128.51 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 14:47:58,174 INFO [train.py:715] (6/8) Epoch 12, batch 32250, loss[loss=0.1558, simple_loss=0.2235, pruned_loss=0.04408, over 4869.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03145, over 973017.04 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:48:36,824 INFO [train.py:715] (6/8) Epoch 12, batch 32300, loss[loss=0.1499, simple_loss=0.2214, pruned_loss=0.03921, over 4985.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03164, over 973124.97 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 14:49:14,383 INFO [train.py:715] (6/8) Epoch 12, batch 32350, loss[loss=0.1425, simple_loss=0.2132, pruned_loss=0.03589, over 4850.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03137, over 973140.34 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 14:49:52,727 INFO [train.py:715] (6/8) Epoch 12, batch 32400, loss[loss=0.129, simple_loss=0.1979, pruned_loss=0.03003, over 4915.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03136, over 973189.82 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:50:30,828 INFO [train.py:715] (6/8) Epoch 12, batch 32450, loss[loss=0.1202, simple_loss=0.1995, pruned_loss=0.02045, over 4858.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03192, over 972862.85 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 14:51:09,334 INFO [train.py:715] (6/8) Epoch 12, batch 32500, loss[loss=0.115, simple_loss=0.1859, pruned_loss=0.02204, over 4960.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03195, over 972258.36 frames.], batch size: 35, lr: 1.78e-04 +2022-05-07 14:51:46,831 INFO [train.py:715] (6/8) Epoch 12, batch 32550, loss[loss=0.1499, simple_loss=0.2284, pruned_loss=0.03571, over 4776.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2102, pruned_loss=0.0321, over 971883.43 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:52:25,071 INFO [train.py:715] (6/8) Epoch 12, batch 32600, loss[loss=0.1358, simple_loss=0.2057, pruned_loss=0.03302, over 4750.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2105, pruned_loss=0.03212, over 970675.86 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:53:03,208 INFO 
[train.py:715] (6/8) Epoch 12, batch 32650, loss[loss=0.1305, simple_loss=0.204, pruned_loss=0.02848, over 4796.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03214, over 971015.11 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 14:53:40,738 INFO [train.py:715] (6/8) Epoch 12, batch 32700, loss[loss=0.1201, simple_loss=0.1964, pruned_loss=0.02186, over 4892.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2101, pruned_loss=0.03219, over 971284.91 frames.], batch size: 22, lr: 1.78e-04 +2022-05-07 14:54:18,462 INFO [train.py:715] (6/8) Epoch 12, batch 32750, loss[loss=0.1389, simple_loss=0.2114, pruned_loss=0.03324, over 4800.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03221, over 970833.75 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:54:56,866 INFO [train.py:715] (6/8) Epoch 12, batch 32800, loss[loss=0.1324, simple_loss=0.2011, pruned_loss=0.03187, over 4769.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2107, pruned_loss=0.03214, over 971271.07 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:55:35,236 INFO [train.py:715] (6/8) Epoch 12, batch 32850, loss[loss=0.1657, simple_loss=0.232, pruned_loss=0.04973, over 4698.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.0324, over 971876.48 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:56:12,924 INFO [train.py:715] (6/8) Epoch 12, batch 32900, loss[loss=0.1254, simple_loss=0.2044, pruned_loss=0.02314, over 4947.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2103, pruned_loss=0.03215, over 971707.57 frames.], batch size: 29, lr: 1.78e-04 +2022-05-07 14:56:51,023 INFO [train.py:715] (6/8) Epoch 12, batch 32950, loss[loss=0.1186, simple_loss=0.1947, pruned_loss=0.0212, over 4859.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2107, pruned_loss=0.0325, over 972701.10 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 14:57:29,167 INFO [train.py:715] (6/8) Epoch 12, batch 33000, loss[loss=0.1186, simple_loss=0.187, pruned_loss=0.0251, over 4971.00 frames.], tot_loss[loss=0.137, simple_loss=0.21, pruned_loss=0.03196, over 972950.13 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 14:57:29,168 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 14:57:38,689 INFO [train.py:742] (6/8) Epoch 12, validation: loss=0.1057, simple_loss=0.1896, pruned_loss=0.01085, over 914524.00 frames. 
+2022-05-07 14:58:18,194 INFO [train.py:715] (6/8) Epoch 12, batch 33050, loss[loss=0.112, simple_loss=0.1872, pruned_loss=0.01839, over 4950.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2101, pruned_loss=0.03187, over 972709.13 frames.], batch size: 23, lr: 1.78e-04 +2022-05-07 14:58:56,558 INFO [train.py:715] (6/8) Epoch 12, batch 33100, loss[loss=0.1228, simple_loss=0.196, pruned_loss=0.02481, over 4922.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03147, over 972301.14 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:59:34,844 INFO [train.py:715] (6/8) Epoch 12, batch 33150, loss[loss=0.1193, simple_loss=0.2051, pruned_loss=0.01679, over 4764.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03138, over 972107.25 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 15:00:12,870 INFO [train.py:715] (6/8) Epoch 12, batch 33200, loss[loss=0.1449, simple_loss=0.2271, pruned_loss=0.03129, over 4883.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2097, pruned_loss=0.03127, over 971860.31 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 15:00:51,451 INFO [train.py:715] (6/8) Epoch 12, batch 33250, loss[loss=0.1314, simple_loss=0.2153, pruned_loss=0.02381, over 4826.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03167, over 972107.35 frames.], batch size: 25, lr: 1.78e-04 +2022-05-07 15:01:29,592 INFO [train.py:715] (6/8) Epoch 12, batch 33300, loss[loss=0.1363, simple_loss=0.2043, pruned_loss=0.03411, over 4859.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2111, pruned_loss=0.03177, over 973032.82 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 15:02:07,723 INFO [train.py:715] (6/8) Epoch 12, batch 33350, loss[loss=0.1086, simple_loss=0.1828, pruned_loss=0.01715, over 4881.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2111, pruned_loss=0.03199, over 973159.70 frames.], batch size: 22, lr: 1.78e-04 +2022-05-07 15:02:46,400 INFO [train.py:715] (6/8) Epoch 12, batch 33400, loss[loss=0.1456, simple_loss=0.2148, pruned_loss=0.03822, over 4901.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2111, pruned_loss=0.03175, over 973485.41 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 15:03:25,053 INFO [train.py:715] (6/8) Epoch 12, batch 33450, loss[loss=0.1461, simple_loss=0.2216, pruned_loss=0.03529, over 4862.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2105, pruned_loss=0.03141, over 973174.55 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 15:04:03,411 INFO [train.py:715] (6/8) Epoch 12, batch 33500, loss[loss=0.1309, simple_loss=0.2116, pruned_loss=0.0251, over 4920.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.03111, over 972212.17 frames.], batch size: 23, lr: 1.78e-04 +2022-05-07 15:04:42,510 INFO [train.py:715] (6/8) Epoch 12, batch 33550, loss[loss=0.1051, simple_loss=0.1796, pruned_loss=0.01529, over 4803.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2107, pruned_loss=0.03122, over 971777.91 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 15:05:21,144 INFO [train.py:715] (6/8) Epoch 12, batch 33600, loss[loss=0.1858, simple_loss=0.2607, pruned_loss=0.05543, over 4898.00 frames.], tot_loss[loss=0.1371, simple_loss=0.211, pruned_loss=0.03158, over 972432.41 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 15:05:59,985 INFO [train.py:715] (6/8) Epoch 12, batch 33650, loss[loss=0.124, simple_loss=0.2109, pruned_loss=0.01859, over 4791.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.0315, over 971783.20 frames.], batch size: 24, lr: 1.78e-04 +2022-05-07 
15:06:38,064 INFO [train.py:715] (6/8) Epoch 12, batch 33700, loss[loss=0.1341, simple_loss=0.2089, pruned_loss=0.02966, over 4935.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03137, over 972158.66 frames.], batch size: 29, lr: 1.78e-04 +2022-05-07 15:07:16,846 INFO [train.py:715] (6/8) Epoch 12, batch 33750, loss[loss=0.1526, simple_loss=0.221, pruned_loss=0.04209, over 4904.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.03153, over 972320.59 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 15:07:55,113 INFO [train.py:715] (6/8) Epoch 12, batch 33800, loss[loss=0.1248, simple_loss=0.2013, pruned_loss=0.02413, over 4811.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03193, over 971951.50 frames.], batch size: 27, lr: 1.78e-04 +2022-05-07 15:08:32,477 INFO [train.py:715] (6/8) Epoch 12, batch 33850, loss[loss=0.1245, simple_loss=0.1976, pruned_loss=0.02571, over 4990.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03166, over 972477.14 frames.], batch size: 25, lr: 1.78e-04 +2022-05-07 15:09:10,655 INFO [train.py:715] (6/8) Epoch 12, batch 33900, loss[loss=0.1442, simple_loss=0.2133, pruned_loss=0.03752, over 4822.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03175, over 973639.23 frames.], batch size: 25, lr: 1.78e-04 +2022-05-07 15:09:47,908 INFO [train.py:715] (6/8) Epoch 12, batch 33950, loss[loss=0.1422, simple_loss=0.2171, pruned_loss=0.0337, over 4828.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2118, pruned_loss=0.03236, over 973479.72 frames.], batch size: 27, lr: 1.77e-04 +2022-05-07 15:10:26,070 INFO [train.py:715] (6/8) Epoch 12, batch 34000, loss[loss=0.1388, simple_loss=0.2251, pruned_loss=0.02629, over 4989.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2125, pruned_loss=0.03245, over 972650.60 frames.], batch size: 25, lr: 1.77e-04 +2022-05-07 15:11:03,702 INFO [train.py:715] (6/8) Epoch 12, batch 34050, loss[loss=0.1456, simple_loss=0.2241, pruned_loss=0.0336, over 4759.00 frames.], tot_loss[loss=0.138, simple_loss=0.2121, pruned_loss=0.03194, over 972601.75 frames.], batch size: 16, lr: 1.77e-04 +2022-05-07 15:11:41,637 INFO [train.py:715] (6/8) Epoch 12, batch 34100, loss[loss=0.1488, simple_loss=0.2035, pruned_loss=0.04703, over 4857.00 frames.], tot_loss[loss=0.138, simple_loss=0.2119, pruned_loss=0.0321, over 972766.74 frames.], batch size: 32, lr: 1.77e-04 +2022-05-07 15:12:19,673 INFO [train.py:715] (6/8) Epoch 12, batch 34150, loss[loss=0.1285, simple_loss=0.2071, pruned_loss=0.02497, over 4836.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2114, pruned_loss=0.03163, over 973762.37 frames.], batch size: 26, lr: 1.77e-04 +2022-05-07 15:12:57,180 INFO [train.py:715] (6/8) Epoch 12, batch 34200, loss[loss=0.1151, simple_loss=0.1937, pruned_loss=0.01824, over 4934.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2113, pruned_loss=0.03164, over 973667.94 frames.], batch size: 21, lr: 1.77e-04 +2022-05-07 15:13:35,449 INFO [train.py:715] (6/8) Epoch 12, batch 34250, loss[loss=0.1164, simple_loss=0.182, pruned_loss=0.02543, over 4754.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03111, over 972941.66 frames.], batch size: 16, lr: 1.77e-04 +2022-05-07 15:14:12,818 INFO [train.py:715] (6/8) Epoch 12, batch 34300, loss[loss=0.1143, simple_loss=0.1927, pruned_loss=0.0179, over 4892.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03098, over 973355.35 frames.], batch size: 22, lr: 1.77e-04 +2022-05-07 15:14:51,107 
INFO [train.py:715] (6/8) Epoch 12, batch 34350, loss[loss=0.1141, simple_loss=0.1779, pruned_loss=0.02514, over 4804.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03096, over 972364.17 frames.], batch size: 12, lr: 1.77e-04 +2022-05-07 15:15:28,881 INFO [train.py:715] (6/8) Epoch 12, batch 34400, loss[loss=0.14, simple_loss=0.215, pruned_loss=0.03253, over 4902.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03054, over 972470.80 frames.], batch size: 17, lr: 1.77e-04 +2022-05-07 15:16:07,247 INFO [train.py:715] (6/8) Epoch 12, batch 34450, loss[loss=0.1269, simple_loss=0.2042, pruned_loss=0.02479, over 4921.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2092, pruned_loss=0.03049, over 972827.71 frames.], batch size: 18, lr: 1.77e-04 +2022-05-07 15:16:45,351 INFO [train.py:715] (6/8) Epoch 12, batch 34500, loss[loss=0.1568, simple_loss=0.223, pruned_loss=0.04526, over 4828.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03109, over 971984.43 frames.], batch size: 26, lr: 1.77e-04 +2022-05-07 15:17:23,597 INFO [train.py:715] (6/8) Epoch 12, batch 34550, loss[loss=0.1387, simple_loss=0.2151, pruned_loss=0.0311, over 4778.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03155, over 971776.99 frames.], batch size: 18, lr: 1.77e-04 +2022-05-07 15:18:02,263 INFO [train.py:715] (6/8) Epoch 12, batch 34600, loss[loss=0.1744, simple_loss=0.2385, pruned_loss=0.05514, over 4964.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03215, over 971525.20 frames.], batch size: 14, lr: 1.77e-04 +2022-05-07 15:18:41,661 INFO [train.py:715] (6/8) Epoch 12, batch 34650, loss[loss=0.1586, simple_loss=0.2346, pruned_loss=0.04128, over 4806.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03149, over 971527.29 frames.], batch size: 12, lr: 1.77e-04 +2022-05-07 15:19:21,059 INFO [train.py:715] (6/8) Epoch 12, batch 34700, loss[loss=0.128, simple_loss=0.1944, pruned_loss=0.03081, over 4833.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03201, over 971580.27 frames.], batch size: 32, lr: 1.77e-04 +2022-05-07 15:19:58,683 INFO [train.py:715] (6/8) Epoch 12, batch 34750, loss[loss=0.1344, simple_loss=0.2105, pruned_loss=0.02921, over 4774.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2098, pruned_loss=0.03185, over 972138.90 frames.], batch size: 14, lr: 1.77e-04 +2022-05-07 15:20:34,682 INFO [train.py:715] (6/8) Epoch 12, batch 34800, loss[loss=0.1468, simple_loss=0.2193, pruned_loss=0.03711, over 4920.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2096, pruned_loss=0.03194, over 971502.98 frames.], batch size: 18, lr: 1.77e-04 +2022-05-07 15:21:23,130 INFO [train.py:715] (6/8) Epoch 13, batch 0, loss[loss=0.1475, simple_loss=0.2215, pruned_loss=0.0368, over 4984.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2215, pruned_loss=0.0368, over 4984.00 frames.], batch size: 14, lr: 1.71e-04 +2022-05-07 15:22:01,157 INFO [train.py:715] (6/8) Epoch 13, batch 50, loss[loss=0.1353, simple_loss=0.2158, pruned_loss=0.02737, over 4796.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2094, pruned_loss=0.03244, over 219303.53 frames.], batch size: 21, lr: 1.71e-04 +2022-05-07 15:22:39,464 INFO [train.py:715] (6/8) Epoch 13, batch 100, loss[loss=0.1222, simple_loss=0.1972, pruned_loss=0.02354, over 4988.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2076, pruned_loss=0.0321, over 386247.75 frames.], batch size: 20, lr: 1.71e-04 +2022-05-07 15:23:17,857 INFO [train.py:715] (6/8) 
Epoch 13, batch 150, loss[loss=0.1262, simple_loss=0.1973, pruned_loss=0.02759, over 4775.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2082, pruned_loss=0.03235, over 515181.40 frames.], batch size: 18, lr: 1.71e-04 +2022-05-07 15:23:57,326 INFO [train.py:715] (6/8) Epoch 13, batch 200, loss[loss=0.1486, simple_loss=0.2261, pruned_loss=0.03558, over 4797.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2089, pruned_loss=0.03172, over 617507.69 frames.], batch size: 24, lr: 1.71e-04 +2022-05-07 15:24:35,737 INFO [train.py:715] (6/8) Epoch 13, batch 250, loss[loss=0.1602, simple_loss=0.2325, pruned_loss=0.04394, over 4882.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.0309, over 696001.79 frames.], batch size: 22, lr: 1.71e-04 +2022-05-07 15:25:15,230 INFO [train.py:715] (6/8) Epoch 13, batch 300, loss[loss=0.1616, simple_loss=0.2377, pruned_loss=0.0428, over 4775.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03083, over 757376.88 frames.], batch size: 18, lr: 1.71e-04 +2022-05-07 15:25:53,996 INFO [train.py:715] (6/8) Epoch 13, batch 350, loss[loss=0.1308, simple_loss=0.2049, pruned_loss=0.02837, over 4865.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03113, over 805575.26 frames.], batch size: 20, lr: 1.71e-04 +2022-05-07 15:26:33,532 INFO [train.py:715] (6/8) Epoch 13, batch 400, loss[loss=0.1203, simple_loss=0.1917, pruned_loss=0.02448, over 4853.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2108, pruned_loss=0.0311, over 842129.49 frames.], batch size: 13, lr: 1.71e-04 +2022-05-07 15:27:13,032 INFO [train.py:715] (6/8) Epoch 13, batch 450, loss[loss=0.1499, simple_loss=0.2182, pruned_loss=0.04076, over 4865.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2109, pruned_loss=0.03129, over 871064.71 frames.], batch size: 16, lr: 1.71e-04 +2022-05-07 15:27:53,173 INFO [train.py:715] (6/8) Epoch 13, batch 500, loss[loss=0.1236, simple_loss=0.2008, pruned_loss=0.02319, over 4829.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03144, over 893336.70 frames.], batch size: 26, lr: 1.71e-04 +2022-05-07 15:28:33,648 INFO [train.py:715] (6/8) Epoch 13, batch 550, loss[loss=0.1422, simple_loss=0.2182, pruned_loss=0.03316, over 4959.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2113, pruned_loss=0.03171, over 910960.94 frames.], batch size: 39, lr: 1.71e-04 +2022-05-07 15:29:12,912 INFO [train.py:715] (6/8) Epoch 13, batch 600, loss[loss=0.1257, simple_loss=0.206, pruned_loss=0.02274, over 4919.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03127, over 924794.58 frames.], batch size: 29, lr: 1.71e-04 +2022-05-07 15:29:53,388 INFO [train.py:715] (6/8) Epoch 13, batch 650, loss[loss=0.1414, simple_loss=0.2138, pruned_loss=0.03445, over 4801.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.03158, over 935202.63 frames.], batch size: 24, lr: 1.71e-04 +2022-05-07 15:30:33,369 INFO [train.py:715] (6/8) Epoch 13, batch 700, loss[loss=0.1209, simple_loss=0.1849, pruned_loss=0.02847, over 4779.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03129, over 942957.60 frames.], batch size: 12, lr: 1.71e-04 +2022-05-07 15:31:13,981 INFO [train.py:715] (6/8) Epoch 13, batch 750, loss[loss=0.1302, simple_loss=0.2056, pruned_loss=0.02744, over 4954.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03077, over 949413.53 frames.], batch size: 15, lr: 1.71e-04 +2022-05-07 15:31:53,301 INFO [train.py:715] (6/8) Epoch 13, batch 800, loss[loss=0.1362, 
simple_loss=0.2195, pruned_loss=0.02643, over 4888.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2094, pruned_loss=0.03049, over 954811.83 frames.], batch size: 17, lr: 1.71e-04 +2022-05-07 15:32:32,565 INFO [train.py:715] (6/8) Epoch 13, batch 850, loss[loss=0.1558, simple_loss=0.2167, pruned_loss=0.04743, over 4984.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.0312, over 959196.13 frames.], batch size: 35, lr: 1.71e-04 +2022-05-07 15:33:12,800 INFO [train.py:715] (6/8) Epoch 13, batch 900, loss[loss=0.1253, simple_loss=0.2045, pruned_loss=0.02305, over 4730.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2108, pruned_loss=0.03152, over 961720.53 frames.], batch size: 16, lr: 1.71e-04 +2022-05-07 15:33:52,201 INFO [train.py:715] (6/8) Epoch 13, batch 950, loss[loss=0.1362, simple_loss=0.2137, pruned_loss=0.02935, over 4843.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03132, over 963612.06 frames.], batch size: 30, lr: 1.71e-04 +2022-05-07 15:34:32,779 INFO [train.py:715] (6/8) Epoch 13, batch 1000, loss[loss=0.177, simple_loss=0.2509, pruned_loss=0.05154, over 4960.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2114, pruned_loss=0.03194, over 965474.45 frames.], batch size: 21, lr: 1.71e-04 +2022-05-07 15:35:12,239 INFO [train.py:715] (6/8) Epoch 13, batch 1050, loss[loss=0.1297, simple_loss=0.2072, pruned_loss=0.02604, over 4819.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2117, pruned_loss=0.03209, over 967085.83 frames.], batch size: 26, lr: 1.71e-04 +2022-05-07 15:35:52,555 INFO [train.py:715] (6/8) Epoch 13, batch 1100, loss[loss=0.1292, simple_loss=0.2111, pruned_loss=0.02369, over 4919.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03218, over 968641.28 frames.], batch size: 23, lr: 1.71e-04 +2022-05-07 15:36:32,014 INFO [train.py:715] (6/8) Epoch 13, batch 1150, loss[loss=0.1189, simple_loss=0.1962, pruned_loss=0.02083, over 4814.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2109, pruned_loss=0.03169, over 969472.94 frames.], batch size: 26, lr: 1.71e-04 +2022-05-07 15:37:11,796 INFO [train.py:715] (6/8) Epoch 13, batch 1200, loss[loss=0.1354, simple_loss=0.2081, pruned_loss=0.03132, over 4871.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03114, over 968670.31 frames.], batch size: 16, lr: 1.71e-04 +2022-05-07 15:37:52,137 INFO [train.py:715] (6/8) Epoch 13, batch 1250, loss[loss=0.1243, simple_loss=0.19, pruned_loss=0.02926, over 4783.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03095, over 969876.76 frames.], batch size: 12, lr: 1.71e-04 +2022-05-07 15:38:31,096 INFO [train.py:715] (6/8) Epoch 13, batch 1300, loss[loss=0.1445, simple_loss=0.2195, pruned_loss=0.03479, over 4746.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03149, over 971355.54 frames.], batch size: 16, lr: 1.71e-04 +2022-05-07 15:39:11,012 INFO [train.py:715] (6/8) Epoch 13, batch 1350, loss[loss=0.1463, simple_loss=0.2095, pruned_loss=0.04158, over 4698.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03165, over 971453.32 frames.], batch size: 15, lr: 1.71e-04 +2022-05-07 15:39:49,773 INFO [train.py:715] (6/8) Epoch 13, batch 1400, loss[loss=0.1424, simple_loss=0.2169, pruned_loss=0.03395, over 4881.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03133, over 971961.85 frames.], batch size: 22, lr: 1.71e-04 +2022-05-07 15:40:28,860 INFO [train.py:715] (6/8) Epoch 13, batch 1450, loss[loss=0.14, simple_loss=0.2278, 
pruned_loss=0.02607, over 4931.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.0314, over 971256.11 frames.], batch size: 39, lr: 1.71e-04 +2022-05-07 15:41:06,537 INFO [train.py:715] (6/8) Epoch 13, batch 1500, loss[loss=0.1203, simple_loss=0.1896, pruned_loss=0.02552, over 4823.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03185, over 971783.55 frames.], batch size: 26, lr: 1.71e-04 +2022-05-07 15:41:44,153 INFO [train.py:715] (6/8) Epoch 13, batch 1550, loss[loss=0.1207, simple_loss=0.1963, pruned_loss=0.02258, over 4935.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03187, over 971428.77 frames.], batch size: 21, lr: 1.71e-04 +2022-05-07 15:42:22,724 INFO [train.py:715] (6/8) Epoch 13, batch 1600, loss[loss=0.1605, simple_loss=0.2324, pruned_loss=0.04424, over 4772.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2104, pruned_loss=0.03207, over 971756.41 frames.], batch size: 18, lr: 1.71e-04 +2022-05-07 15:43:00,656 INFO [train.py:715] (6/8) Epoch 13, batch 1650, loss[loss=0.1216, simple_loss=0.1953, pruned_loss=0.02395, over 4953.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2098, pruned_loss=0.03201, over 972426.58 frames.], batch size: 29, lr: 1.71e-04 +2022-05-07 15:43:39,394 INFO [train.py:715] (6/8) Epoch 13, batch 1700, loss[loss=0.1489, simple_loss=0.2207, pruned_loss=0.03853, over 4772.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2102, pruned_loss=0.03208, over 972259.13 frames.], batch size: 18, lr: 1.71e-04 +2022-05-07 15:44:17,678 INFO [train.py:715] (6/8) Epoch 13, batch 1750, loss[loss=0.1376, simple_loss=0.2046, pruned_loss=0.03529, over 4961.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2094, pruned_loss=0.03164, over 972983.87 frames.], batch size: 24, lr: 1.71e-04 +2022-05-07 15:44:57,105 INFO [train.py:715] (6/8) Epoch 13, batch 1800, loss[loss=0.1359, simple_loss=0.199, pruned_loss=0.03638, over 4845.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2088, pruned_loss=0.03193, over 972895.36 frames.], batch size: 13, lr: 1.71e-04 +2022-05-07 15:45:35,185 INFO [train.py:715] (6/8) Epoch 13, batch 1850, loss[loss=0.1604, simple_loss=0.2275, pruned_loss=0.04663, over 4864.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2089, pruned_loss=0.03195, over 972909.11 frames.], batch size: 20, lr: 1.71e-04 +2022-05-07 15:46:13,446 INFO [train.py:715] (6/8) Epoch 13, batch 1900, loss[loss=0.1244, simple_loss=0.2123, pruned_loss=0.01819, over 4951.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2094, pruned_loss=0.0317, over 972930.42 frames.], batch size: 21, lr: 1.71e-04 +2022-05-07 15:46:52,105 INFO [train.py:715] (6/8) Epoch 13, batch 1950, loss[loss=0.1284, simple_loss=0.2044, pruned_loss=0.02624, over 4980.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2094, pruned_loss=0.03154, over 972685.10 frames.], batch size: 28, lr: 1.71e-04 +2022-05-07 15:47:30,480 INFO [train.py:715] (6/8) Epoch 13, batch 2000, loss[loss=0.09672, simple_loss=0.1624, pruned_loss=0.01553, over 4856.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2093, pruned_loss=0.0315, over 973005.61 frames.], batch size: 12, lr: 1.71e-04 +2022-05-07 15:48:09,018 INFO [train.py:715] (6/8) Epoch 13, batch 2050, loss[loss=0.1359, simple_loss=0.2081, pruned_loss=0.03184, over 4767.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03128, over 973053.05 frames.], batch size: 14, lr: 1.71e-04 +2022-05-07 15:48:47,023 INFO [train.py:715] (6/8) Epoch 13, batch 2100, loss[loss=0.1331, simple_loss=0.2041, pruned_loss=0.03103, 
over 4829.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03159, over 972823.09 frames.], batch size: 26, lr: 1.71e-04 +2022-05-07 15:49:26,190 INFO [train.py:715] (6/8) Epoch 13, batch 2150, loss[loss=0.1439, simple_loss=0.2218, pruned_loss=0.03305, over 4778.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03132, over 972090.47 frames.], batch size: 14, lr: 1.71e-04 +2022-05-07 15:50:04,033 INFO [train.py:715] (6/8) Epoch 13, batch 2200, loss[loss=0.1206, simple_loss=0.2019, pruned_loss=0.01964, over 4802.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03112, over 972246.31 frames.], batch size: 21, lr: 1.71e-04 +2022-05-07 15:50:42,243 INFO [train.py:715] (6/8) Epoch 13, batch 2250, loss[loss=0.1318, simple_loss=0.2149, pruned_loss=0.02432, over 4790.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2101, pruned_loss=0.03087, over 971981.41 frames.], batch size: 24, lr: 1.71e-04 +2022-05-07 15:51:20,496 INFO [train.py:715] (6/8) Epoch 13, batch 2300, loss[loss=0.1506, simple_loss=0.2374, pruned_loss=0.03187, over 4811.00 frames.], tot_loss[loss=0.136, simple_loss=0.2102, pruned_loss=0.03088, over 972689.10 frames.], batch size: 25, lr: 1.71e-04 +2022-05-07 15:51:59,647 INFO [train.py:715] (6/8) Epoch 13, batch 2350, loss[loss=0.1597, simple_loss=0.2322, pruned_loss=0.04362, over 4861.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03079, over 972489.24 frames.], batch size: 20, lr: 1.71e-04 +2022-05-07 15:52:38,011 INFO [train.py:715] (6/8) Epoch 13, batch 2400, loss[loss=0.1264, simple_loss=0.2057, pruned_loss=0.02353, over 4918.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.0312, over 972067.38 frames.], batch size: 18, lr: 1.71e-04 +2022-05-07 15:53:16,747 INFO [train.py:715] (6/8) Epoch 13, batch 2450, loss[loss=0.1435, simple_loss=0.2195, pruned_loss=0.03375, over 4823.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03103, over 972078.15 frames.], batch size: 27, lr: 1.71e-04 +2022-05-07 15:53:55,654 INFO [train.py:715] (6/8) Epoch 13, batch 2500, loss[loss=0.1203, simple_loss=0.1892, pruned_loss=0.0257, over 4888.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.0311, over 971268.09 frames.], batch size: 19, lr: 1.71e-04 +2022-05-07 15:54:34,063 INFO [train.py:715] (6/8) Epoch 13, batch 2550, loss[loss=0.1207, simple_loss=0.1836, pruned_loss=0.02887, over 4990.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03149, over 972614.75 frames.], batch size: 15, lr: 1.71e-04 +2022-05-07 15:55:12,160 INFO [train.py:715] (6/8) Epoch 13, batch 2600, loss[loss=0.1306, simple_loss=0.2023, pruned_loss=0.02938, over 4987.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03189, over 972190.92 frames.], batch size: 31, lr: 1.71e-04 +2022-05-07 15:55:50,582 INFO [train.py:715] (6/8) Epoch 13, batch 2650, loss[loss=0.1228, simple_loss=0.1955, pruned_loss=0.02508, over 4828.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03193, over 972037.42 frames.], batch size: 26, lr: 1.71e-04 +2022-05-07 15:56:28,664 INFO [train.py:715] (6/8) Epoch 13, batch 2700, loss[loss=0.1135, simple_loss=0.1884, pruned_loss=0.01926, over 4787.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.03194, over 972920.21 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 15:57:06,439 INFO [train.py:715] (6/8) Epoch 13, batch 2750, loss[loss=0.1212, simple_loss=0.2003, pruned_loss=0.02101, over 4806.00 frames.], 
tot_loss[loss=0.1379, simple_loss=0.2116, pruned_loss=0.03211, over 972322.46 frames.], batch size: 24, lr: 1.70e-04 +2022-05-07 15:57:43,967 INFO [train.py:715] (6/8) Epoch 13, batch 2800, loss[loss=0.1574, simple_loss=0.2273, pruned_loss=0.04372, over 4974.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2126, pruned_loss=0.03246, over 972926.42 frames.], batch size: 31, lr: 1.70e-04 +2022-05-07 15:58:22,562 INFO [train.py:715] (6/8) Epoch 13, batch 2850, loss[loss=0.1262, simple_loss=0.207, pruned_loss=0.02274, over 4831.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2114, pruned_loss=0.03198, over 973154.16 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 15:59:00,069 INFO [train.py:715] (6/8) Epoch 13, batch 2900, loss[loss=0.1605, simple_loss=0.2386, pruned_loss=0.04113, over 4977.00 frames.], tot_loss[loss=0.1371, simple_loss=0.211, pruned_loss=0.03162, over 973564.19 frames.], batch size: 40, lr: 1.70e-04 +2022-05-07 15:59:37,967 INFO [train.py:715] (6/8) Epoch 13, batch 2950, loss[loss=0.1524, simple_loss=0.2134, pruned_loss=0.0457, over 4840.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03161, over 972895.54 frames.], batch size: 30, lr: 1.70e-04 +2022-05-07 16:00:15,988 INFO [train.py:715] (6/8) Epoch 13, batch 3000, loss[loss=0.1184, simple_loss=0.1899, pruned_loss=0.0235, over 4852.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03133, over 973207.12 frames.], batch size: 13, lr: 1.70e-04 +2022-05-07 16:00:15,988 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 16:00:25,446 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1052, simple_loss=0.1893, pruned_loss=0.01058, over 914524.00 frames. +2022-05-07 16:01:03,675 INFO [train.py:715] (6/8) Epoch 13, batch 3050, loss[loss=0.1201, simple_loss=0.1977, pruned_loss=0.02123, over 4794.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.0316, over 972386.41 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:01:42,202 INFO [train.py:715] (6/8) Epoch 13, batch 3100, loss[loss=0.1644, simple_loss=0.2337, pruned_loss=0.04759, over 4907.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03141, over 972650.08 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:02:19,745 INFO [train.py:715] (6/8) Epoch 13, batch 3150, loss[loss=0.1416, simple_loss=0.2127, pruned_loss=0.03525, over 4983.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03147, over 972258.37 frames.], batch size: 33, lr: 1.70e-04 +2022-05-07 16:02:57,075 INFO [train.py:715] (6/8) Epoch 13, batch 3200, loss[loss=0.1385, simple_loss=0.2127, pruned_loss=0.03215, over 4921.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2091, pruned_loss=0.03127, over 972695.15 frames.], batch size: 23, lr: 1.70e-04 +2022-05-07 16:03:35,540 INFO [train.py:715] (6/8) Epoch 13, batch 3250, loss[loss=0.1704, simple_loss=0.2359, pruned_loss=0.05249, over 4904.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.03167, over 972689.74 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:04:13,563 INFO [train.py:715] (6/8) Epoch 13, batch 3300, loss[loss=0.1782, simple_loss=0.2444, pruned_loss=0.05602, over 4916.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03175, over 973389.67 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:04:51,381 INFO [train.py:715] (6/8) Epoch 13, batch 3350, loss[loss=0.1472, simple_loss=0.2253, pruned_loss=0.03457, over 4841.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03117, over 
973005.58 frames.], batch size: 13, lr: 1.70e-04 +2022-05-07 16:05:29,076 INFO [train.py:715] (6/8) Epoch 13, batch 3400, loss[loss=0.1617, simple_loss=0.2289, pruned_loss=0.04732, over 4954.00 frames.], tot_loss[loss=0.1362, simple_loss=0.21, pruned_loss=0.03113, over 972589.49 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:06:07,372 INFO [train.py:715] (6/8) Epoch 13, batch 3450, loss[loss=0.1275, simple_loss=0.2016, pruned_loss=0.02665, over 4868.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.0311, over 972635.67 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:06:47,671 INFO [train.py:715] (6/8) Epoch 13, batch 3500, loss[loss=0.1325, simple_loss=0.2106, pruned_loss=0.02722, over 4944.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03134, over 972739.49 frames.], batch size: 29, lr: 1.70e-04 +2022-05-07 16:07:25,032 INFO [train.py:715] (6/8) Epoch 13, batch 3550, loss[loss=0.1299, simple_loss=0.2068, pruned_loss=0.02651, over 4778.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2099, pruned_loss=0.03085, over 972899.31 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:08:03,489 INFO [train.py:715] (6/8) Epoch 13, batch 3600, loss[loss=0.1239, simple_loss=0.2026, pruned_loss=0.02264, over 4753.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03116, over 972570.79 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:08:41,279 INFO [train.py:715] (6/8) Epoch 13, batch 3650, loss[loss=0.1322, simple_loss=0.2119, pruned_loss=0.02626, over 4886.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03139, over 972264.86 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:09:18,851 INFO [train.py:715] (6/8) Epoch 13, batch 3700, loss[loss=0.1373, simple_loss=0.224, pruned_loss=0.02529, over 4858.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.03109, over 971934.45 frames.], batch size: 20, lr: 1.70e-04 +2022-05-07 16:09:56,569 INFO [train.py:715] (6/8) Epoch 13, batch 3750, loss[loss=0.1565, simple_loss=0.2296, pruned_loss=0.04168, over 4768.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03141, over 971384.79 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:10:34,803 INFO [train.py:715] (6/8) Epoch 13, batch 3800, loss[loss=0.1346, simple_loss=0.2005, pruned_loss=0.03436, over 4978.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03121, over 971309.24 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:11:11,946 INFO [train.py:715] (6/8) Epoch 13, batch 3850, loss[loss=0.1223, simple_loss=0.2061, pruned_loss=0.01923, over 4970.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.0308, over 971116.71 frames.], batch size: 24, lr: 1.70e-04 +2022-05-07 16:11:49,244 INFO [train.py:715] (6/8) Epoch 13, batch 3900, loss[loss=0.1513, simple_loss=0.2318, pruned_loss=0.03544, over 4947.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2097, pruned_loss=0.03063, over 971516.13 frames.], batch size: 23, lr: 1.70e-04 +2022-05-07 16:12:27,131 INFO [train.py:715] (6/8) Epoch 13, batch 3950, loss[loss=0.1602, simple_loss=0.2251, pruned_loss=0.04762, over 4742.00 frames.], tot_loss[loss=0.137, simple_loss=0.2109, pruned_loss=0.0316, over 971526.19 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:13:05,299 INFO [train.py:715] (6/8) Epoch 13, batch 4000, loss[loss=0.1168, simple_loss=0.196, pruned_loss=0.01884, over 4886.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03139, over 971745.04 frames.], batch 
size: 19, lr: 1.70e-04 +2022-05-07 16:13:42,997 INFO [train.py:715] (6/8) Epoch 13, batch 4050, loss[loss=0.1401, simple_loss=0.2069, pruned_loss=0.03664, over 4850.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03141, over 971431.71 frames.], batch size: 13, lr: 1.70e-04 +2022-05-07 16:14:20,643 INFO [train.py:715] (6/8) Epoch 13, batch 4100, loss[loss=0.1349, simple_loss=0.2146, pruned_loss=0.0276, over 4932.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03153, over 971726.02 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:14:59,186 INFO [train.py:715] (6/8) Epoch 13, batch 4150, loss[loss=0.1452, simple_loss=0.2101, pruned_loss=0.04015, over 4916.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03157, over 971464.51 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:15:36,529 INFO [train.py:715] (6/8) Epoch 13, batch 4200, loss[loss=0.1395, simple_loss=0.2249, pruned_loss=0.02703, over 4928.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03109, over 972696.06 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:16:14,501 INFO [train.py:715] (6/8) Epoch 13, batch 4250, loss[loss=0.1377, simple_loss=0.2168, pruned_loss=0.02935, over 4952.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03087, over 972991.41 frames.], batch size: 35, lr: 1.70e-04 +2022-05-07 16:16:52,600 INFO [train.py:715] (6/8) Epoch 13, batch 4300, loss[loss=0.1321, simple_loss=0.2008, pruned_loss=0.03168, over 4697.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03131, over 972491.36 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:17:30,605 INFO [train.py:715] (6/8) Epoch 13, batch 4350, loss[loss=0.1471, simple_loss=0.233, pruned_loss=0.03064, over 4946.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03178, over 973280.37 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:18:08,279 INFO [train.py:715] (6/8) Epoch 13, batch 4400, loss[loss=0.1567, simple_loss=0.2212, pruned_loss=0.04605, over 4758.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2097, pruned_loss=0.03176, over 972553.95 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:18:46,446 INFO [train.py:715] (6/8) Epoch 13, batch 4450, loss[loss=0.1314, simple_loss=0.1957, pruned_loss=0.0336, over 4829.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2095, pruned_loss=0.03162, over 973097.18 frames.], batch size: 13, lr: 1.70e-04 +2022-05-07 16:19:25,667 INFO [train.py:715] (6/8) Epoch 13, batch 4500, loss[loss=0.1072, simple_loss=0.1792, pruned_loss=0.01763, over 4688.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2095, pruned_loss=0.0317, over 972918.20 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:20:03,831 INFO [train.py:715] (6/8) Epoch 13, batch 4550, loss[loss=0.1074, simple_loss=0.1803, pruned_loss=0.01725, over 4987.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.0317, over 973576.38 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:20:40,817 INFO [train.py:715] (6/8) Epoch 13, batch 4600, loss[loss=0.1286, simple_loss=0.2022, pruned_loss=0.02745, over 4842.00 frames.], tot_loss[loss=0.137, simple_loss=0.2101, pruned_loss=0.03196, over 973274.97 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:21:19,533 INFO [train.py:715] (6/8) Epoch 13, batch 4650, loss[loss=0.1627, simple_loss=0.234, pruned_loss=0.04575, over 4960.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.03152, over 972972.06 frames.], batch size: 35, lr: 1.70e-04 
+2022-05-07 16:21:57,440 INFO [train.py:715] (6/8) Epoch 13, batch 4700, loss[loss=0.1248, simple_loss=0.2028, pruned_loss=0.02335, over 4948.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03157, over 972969.83 frames.], batch size: 29, lr: 1.70e-04 +2022-05-07 16:22:35,614 INFO [train.py:715] (6/8) Epoch 13, batch 4750, loss[loss=0.152, simple_loss=0.2187, pruned_loss=0.04262, over 4916.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.03118, over 973172.16 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:23:13,887 INFO [train.py:715] (6/8) Epoch 13, batch 4800, loss[loss=0.148, simple_loss=0.2155, pruned_loss=0.04029, over 4813.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2088, pruned_loss=0.03098, over 973304.19 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:23:53,169 INFO [train.py:715] (6/8) Epoch 13, batch 4850, loss[loss=0.1695, simple_loss=0.244, pruned_loss=0.04748, over 4755.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03172, over 972832.33 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:24:31,292 INFO [train.py:715] (6/8) Epoch 13, batch 4900, loss[loss=0.1138, simple_loss=0.1976, pruned_loss=0.01494, over 4854.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03203, over 972831.68 frames.], batch size: 20, lr: 1.70e-04 +2022-05-07 16:25:10,150 INFO [train.py:715] (6/8) Epoch 13, batch 4950, loss[loss=0.1645, simple_loss=0.2276, pruned_loss=0.05075, over 4957.00 frames.], tot_loss[loss=0.1375, simple_loss=0.211, pruned_loss=0.03199, over 972868.37 frames.], batch size: 35, lr: 1.70e-04 +2022-05-07 16:25:49,562 INFO [train.py:715] (6/8) Epoch 13, batch 5000, loss[loss=0.1404, simple_loss=0.2164, pruned_loss=0.03221, over 4833.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03159, over 972858.47 frames.], batch size: 30, lr: 1.70e-04 +2022-05-07 16:26:28,903 INFO [train.py:715] (6/8) Epoch 13, batch 5050, loss[loss=0.1441, simple_loss=0.2181, pruned_loss=0.0351, over 4938.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03108, over 972671.73 frames.], batch size: 39, lr: 1.70e-04 +2022-05-07 16:27:07,534 INFO [train.py:715] (6/8) Epoch 13, batch 5100, loss[loss=0.1266, simple_loss=0.2004, pruned_loss=0.02639, over 4808.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2109, pruned_loss=0.03143, over 973082.29 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 16:27:46,969 INFO [train.py:715] (6/8) Epoch 13, batch 5150, loss[loss=0.1407, simple_loss=0.2116, pruned_loss=0.03489, over 4820.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2111, pruned_loss=0.03149, over 972594.45 frames.], batch size: 13, lr: 1.70e-04 +2022-05-07 16:28:26,706 INFO [train.py:715] (6/8) Epoch 13, batch 5200, loss[loss=0.1556, simple_loss=0.2332, pruned_loss=0.03896, over 4937.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03148, over 972322.23 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:29:06,551 INFO [train.py:715] (6/8) Epoch 13, batch 5250, loss[loss=0.1188, simple_loss=0.1908, pruned_loss=0.02342, over 4811.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03141, over 972452.09 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:29:45,232 INFO [train.py:715] (6/8) Epoch 13, batch 5300, loss[loss=0.1554, simple_loss=0.2174, pruned_loss=0.04673, over 4966.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2097, pruned_loss=0.0312, over 972225.26 frames.], batch size: 35, lr: 1.70e-04 +2022-05-07 16:30:25,373 
INFO [train.py:715] (6/8) Epoch 13, batch 5350, loss[loss=0.1249, simple_loss=0.194, pruned_loss=0.02795, over 4705.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.03099, over 971953.05 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:31:05,476 INFO [train.py:715] (6/8) Epoch 13, batch 5400, loss[loss=0.1356, simple_loss=0.1898, pruned_loss=0.04072, over 4800.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03094, over 971747.44 frames.], batch size: 12, lr: 1.70e-04 +2022-05-07 16:31:45,408 INFO [train.py:715] (6/8) Epoch 13, batch 5450, loss[loss=0.1241, simple_loss=0.1917, pruned_loss=0.02824, over 4869.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03137, over 972336.38 frames.], batch size: 32, lr: 1.70e-04 +2022-05-07 16:32:24,984 INFO [train.py:715] (6/8) Epoch 13, batch 5500, loss[loss=0.1172, simple_loss=0.1919, pruned_loss=0.02125, over 4905.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03151, over 972620.91 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:33:04,815 INFO [train.py:715] (6/8) Epoch 13, batch 5550, loss[loss=0.1471, simple_loss=0.2206, pruned_loss=0.03678, over 4943.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03135, over 972466.67 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:33:44,069 INFO [train.py:715] (6/8) Epoch 13, batch 5600, loss[loss=0.1436, simple_loss=0.213, pruned_loss=0.03711, over 4871.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03139, over 972454.60 frames.], batch size: 20, lr: 1.70e-04 +2022-05-07 16:34:23,514 INFO [train.py:715] (6/8) Epoch 13, batch 5650, loss[loss=0.154, simple_loss=0.2243, pruned_loss=0.0418, over 4886.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03126, over 972765.85 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:35:03,786 INFO [train.py:715] (6/8) Epoch 13, batch 5700, loss[loss=0.1149, simple_loss=0.1883, pruned_loss=0.02073, over 4973.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03163, over 972784.92 frames.], batch size: 24, lr: 1.70e-04 +2022-05-07 16:35:43,900 INFO [train.py:715] (6/8) Epoch 13, batch 5750, loss[loss=0.09851, simple_loss=0.1609, pruned_loss=0.01806, over 4793.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03177, over 972743.69 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:36:22,748 INFO [train.py:715] (6/8) Epoch 13, batch 5800, loss[loss=0.1393, simple_loss=0.2223, pruned_loss=0.02815, over 4938.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03198, over 972437.08 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:37:02,238 INFO [train.py:715] (6/8) Epoch 13, batch 5850, loss[loss=0.1459, simple_loss=0.2083, pruned_loss=0.04175, over 4820.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2113, pruned_loss=0.03212, over 971596.00 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:37:42,376 INFO [train.py:715] (6/8) Epoch 13, batch 5900, loss[loss=0.134, simple_loss=0.2179, pruned_loss=0.02503, over 4827.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2111, pruned_loss=0.03181, over 970974.86 frames.], batch size: 27, lr: 1.70e-04 +2022-05-07 16:38:21,735 INFO [train.py:715] (6/8) Epoch 13, batch 5950, loss[loss=0.1198, simple_loss=0.1886, pruned_loss=0.02548, over 4959.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2111, pruned_loss=0.03192, over 971684.50 frames.], batch size: 35, lr: 1.70e-04 +2022-05-07 16:39:01,215 INFO [train.py:715] 
(6/8) Epoch 13, batch 6000, loss[loss=0.1161, simple_loss=0.1959, pruned_loss=0.01816, over 4866.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.0313, over 971468.05 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:39:01,216 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 16:39:10,779 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1054, simple_loss=0.1893, pruned_loss=0.01078, over 914524.00 frames. +2022-05-07 16:39:50,261 INFO [train.py:715] (6/8) Epoch 13, batch 6050, loss[loss=0.1065, simple_loss=0.1784, pruned_loss=0.01729, over 4800.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03081, over 972195.55 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:40:29,776 INFO [train.py:715] (6/8) Epoch 13, batch 6100, loss[loss=0.1447, simple_loss=0.2265, pruned_loss=0.03141, over 4799.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03081, over 972745.76 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:41:09,344 INFO [train.py:715] (6/8) Epoch 13, batch 6150, loss[loss=0.1294, simple_loss=0.2004, pruned_loss=0.02919, over 4916.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03086, over 973237.20 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:41:47,239 INFO [train.py:715] (6/8) Epoch 13, batch 6200, loss[loss=0.1155, simple_loss=0.1967, pruned_loss=0.01712, over 4811.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03055, over 972366.64 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:42:26,293 INFO [train.py:715] (6/8) Epoch 13, batch 6250, loss[loss=0.1427, simple_loss=0.2171, pruned_loss=0.03414, over 4749.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03081, over 971038.87 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:43:05,825 INFO [train.py:715] (6/8) Epoch 13, batch 6300, loss[loss=0.125, simple_loss=0.2006, pruned_loss=0.02472, over 4977.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03092, over 971339.53 frames.], batch size: 26, lr: 1.70e-04 +2022-05-07 16:43:44,413 INFO [train.py:715] (6/8) Epoch 13, batch 6350, loss[loss=0.1196, simple_loss=0.1963, pruned_loss=0.02149, over 4957.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03117, over 971977.79 frames.], batch size: 24, lr: 1.70e-04 +2022-05-07 16:44:24,230 INFO [train.py:715] (6/8) Epoch 13, batch 6400, loss[loss=0.1493, simple_loss=0.2236, pruned_loss=0.03756, over 4785.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2105, pruned_loss=0.03148, over 971771.85 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:45:04,051 INFO [train.py:715] (6/8) Epoch 13, batch 6450, loss[loss=0.1474, simple_loss=0.2283, pruned_loss=0.03331, over 4981.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2107, pruned_loss=0.03138, over 972430.10 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 16:45:44,144 INFO [train.py:715] (6/8) Epoch 13, batch 6500, loss[loss=0.1249, simple_loss=0.194, pruned_loss=0.0279, over 4867.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2099, pruned_loss=0.031, over 972904.37 frames.], batch size: 30, lr: 1.70e-04 +2022-05-07 16:46:23,314 INFO [train.py:715] (6/8) Epoch 13, batch 6550, loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03148, over 4895.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2099, pruned_loss=0.03073, over 972626.53 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:47:02,653 INFO [train.py:715] (6/8) Epoch 13, batch 6600, loss[loss=0.1544, simple_loss=0.2161, 
pruned_loss=0.04641, over 4883.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03118, over 972231.76 frames.], batch size: 30, lr: 1.70e-04 +2022-05-07 16:47:42,043 INFO [train.py:715] (6/8) Epoch 13, batch 6650, loss[loss=0.1302, simple_loss=0.2061, pruned_loss=0.02718, over 4902.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03113, over 972512.32 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:48:20,277 INFO [train.py:715] (6/8) Epoch 13, batch 6700, loss[loss=0.1492, simple_loss=0.217, pruned_loss=0.0407, over 4760.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03125, over 972585.78 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:48:58,717 INFO [train.py:715] (6/8) Epoch 13, batch 6750, loss[loss=0.1265, simple_loss=0.1847, pruned_loss=0.0342, over 4774.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03129, over 972575.81 frames.], batch size: 12, lr: 1.70e-04 +2022-05-07 16:49:37,984 INFO [train.py:715] (6/8) Epoch 13, batch 6800, loss[loss=0.1143, simple_loss=0.1943, pruned_loss=0.0172, over 4936.00 frames.], tot_loss[loss=0.136, simple_loss=0.2093, pruned_loss=0.03141, over 972994.32 frames.], batch size: 23, lr: 1.70e-04 +2022-05-07 16:50:17,432 INFO [train.py:715] (6/8) Epoch 13, batch 6850, loss[loss=0.1282, simple_loss=0.1918, pruned_loss=0.03228, over 4894.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2084, pruned_loss=0.03133, over 972857.29 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:50:55,365 INFO [train.py:715] (6/8) Epoch 13, batch 6900, loss[loss=0.1261, simple_loss=0.1997, pruned_loss=0.02623, over 4863.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2087, pruned_loss=0.03155, over 972380.68 frames.], batch size: 32, lr: 1.70e-04 +2022-05-07 16:51:33,404 INFO [train.py:715] (6/8) Epoch 13, batch 6950, loss[loss=0.1366, simple_loss=0.2139, pruned_loss=0.02962, over 4896.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2091, pruned_loss=0.03156, over 972384.07 frames.], batch size: 22, lr: 1.70e-04 +2022-05-07 16:52:12,658 INFO [train.py:715] (6/8) Epoch 13, batch 7000, loss[loss=0.1344, simple_loss=0.2103, pruned_loss=0.02921, over 4706.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2095, pruned_loss=0.03156, over 972266.86 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:52:51,301 INFO [train.py:715] (6/8) Epoch 13, batch 7050, loss[loss=0.1148, simple_loss=0.1864, pruned_loss=0.02161, over 4852.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2088, pruned_loss=0.03144, over 972220.14 frames.], batch size: 13, lr: 1.70e-04 +2022-05-07 16:53:30,263 INFO [train.py:715] (6/8) Epoch 13, batch 7100, loss[loss=0.1358, simple_loss=0.2138, pruned_loss=0.02896, over 4823.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.03115, over 972329.51 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:54:09,724 INFO [train.py:715] (6/8) Epoch 13, batch 7150, loss[loss=0.1457, simple_loss=0.22, pruned_loss=0.03571, over 4778.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03096, over 972568.51 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:54:49,426 INFO [train.py:715] (6/8) Epoch 13, batch 7200, loss[loss=0.1127, simple_loss=0.1827, pruned_loss=0.02129, over 4988.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03118, over 972597.28 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:55:27,557 INFO [train.py:715] (6/8) Epoch 13, batch 7250, loss[loss=0.1439, simple_loss=0.2129, pruned_loss=0.03745, over 
4971.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03096, over 972355.04 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:56:05,828 INFO [train.py:715] (6/8) Epoch 13, batch 7300, loss[loss=0.1343, simple_loss=0.2103, pruned_loss=0.02918, over 4915.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03107, over 973255.87 frames.], batch size: 29, lr: 1.70e-04 +2022-05-07 16:56:45,078 INFO [train.py:715] (6/8) Epoch 13, batch 7350, loss[loss=0.1343, simple_loss=0.1915, pruned_loss=0.03855, over 4788.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03095, over 973095.70 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:57:23,719 INFO [train.py:715] (6/8) Epoch 13, batch 7400, loss[loss=0.1099, simple_loss=0.186, pruned_loss=0.01692, over 4811.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03092, over 972876.89 frames.], batch size: 26, lr: 1.70e-04 +2022-05-07 16:58:01,547 INFO [train.py:715] (6/8) Epoch 13, batch 7450, loss[loss=0.1442, simple_loss=0.2157, pruned_loss=0.03632, over 4914.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03111, over 972423.75 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:58:40,996 INFO [train.py:715] (6/8) Epoch 13, batch 7500, loss[loss=0.1132, simple_loss=0.182, pruned_loss=0.02216, over 4914.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2094, pruned_loss=0.03167, over 972598.15 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:59:20,238 INFO [train.py:715] (6/8) Epoch 13, batch 7550, loss[loss=0.1317, simple_loss=0.1982, pruned_loss=0.03262, over 4788.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.0316, over 971764.46 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:59:57,840 INFO [train.py:715] (6/8) Epoch 13, batch 7600, loss[loss=0.1462, simple_loss=0.2246, pruned_loss=0.03389, over 4851.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2097, pruned_loss=0.03175, over 971831.08 frames.], batch size: 20, lr: 1.70e-04 +2022-05-07 17:00:36,717 INFO [train.py:715] (6/8) Epoch 13, batch 7650, loss[loss=0.1326, simple_loss=0.2133, pruned_loss=0.02595, over 4933.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2092, pruned_loss=0.03121, over 972224.96 frames.], batch size: 29, lr: 1.70e-04 +2022-05-07 17:01:15,689 INFO [train.py:715] (6/8) Epoch 13, batch 7700, loss[loss=0.1496, simple_loss=0.2117, pruned_loss=0.04376, over 4942.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2091, pruned_loss=0.03132, over 972764.17 frames.], batch size: 35, lr: 1.70e-04 +2022-05-07 17:01:54,653 INFO [train.py:715] (6/8) Epoch 13, batch 7750, loss[loss=0.1118, simple_loss=0.1864, pruned_loss=0.01857, over 4872.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03138, over 972726.75 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 17:02:32,581 INFO [train.py:715] (6/8) Epoch 13, batch 7800, loss[loss=0.1238, simple_loss=0.1974, pruned_loss=0.02508, over 4834.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2094, pruned_loss=0.0314, over 972719.47 frames.], batch size: 13, lr: 1.70e-04 +2022-05-07 17:03:11,066 INFO [train.py:715] (6/8) Epoch 13, batch 7850, loss[loss=0.1164, simple_loss=0.194, pruned_loss=0.01942, over 4792.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.0309, over 972218.41 frames.], batch size: 24, lr: 1.70e-04 +2022-05-07 17:03:50,706 INFO [train.py:715] (6/8) Epoch 13, batch 7900, loss[loss=0.1176, simple_loss=0.19, pruned_loss=0.02261, over 4811.00 frames.], 
tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03111, over 971511.32 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 17:04:28,755 INFO [train.py:715] (6/8) Epoch 13, batch 7950, loss[loss=0.1603, simple_loss=0.2297, pruned_loss=0.04542, over 4882.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03144, over 971647.83 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 17:05:07,217 INFO [train.py:715] (6/8) Epoch 13, batch 8000, loss[loss=0.1668, simple_loss=0.2211, pruned_loss=0.05624, over 4858.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03159, over 972043.99 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 17:05:45,984 INFO [train.py:715] (6/8) Epoch 13, batch 8050, loss[loss=0.1486, simple_loss=0.2204, pruned_loss=0.03837, over 4879.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03171, over 971146.89 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 17:06:24,534 INFO [train.py:715] (6/8) Epoch 13, batch 8100, loss[loss=0.1642, simple_loss=0.2322, pruned_loss=0.04805, over 4881.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2097, pruned_loss=0.03172, over 971667.30 frames.], batch size: 16, lr: 1.69e-04 +2022-05-07 17:07:02,517 INFO [train.py:715] (6/8) Epoch 13, batch 8150, loss[loss=0.1299, simple_loss=0.2058, pruned_loss=0.02698, over 4815.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03156, over 972026.62 frames.], batch size: 27, lr: 1.69e-04 +2022-05-07 17:07:40,995 INFO [train.py:715] (6/8) Epoch 13, batch 8200, loss[loss=0.1395, simple_loss=0.2131, pruned_loss=0.0329, over 4981.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03205, over 972986.18 frames.], batch size: 35, lr: 1.69e-04 +2022-05-07 17:08:20,205 INFO [train.py:715] (6/8) Epoch 13, batch 8250, loss[loss=0.1207, simple_loss=0.2046, pruned_loss=0.01838, over 4852.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03198, over 973027.92 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:08:58,112 INFO [train.py:715] (6/8) Epoch 13, batch 8300, loss[loss=0.1202, simple_loss=0.1975, pruned_loss=0.02145, over 4790.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.032, over 973289.63 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:09:36,543 INFO [train.py:715] (6/8) Epoch 13, batch 8350, loss[loss=0.1025, simple_loss=0.176, pruned_loss=0.01444, over 4956.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03186, over 973344.59 frames.], batch size: 29, lr: 1.69e-04 +2022-05-07 17:10:15,705 INFO [train.py:715] (6/8) Epoch 13, batch 8400, loss[loss=0.1163, simple_loss=0.1961, pruned_loss=0.0183, over 4804.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03157, over 972652.94 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:10:54,564 INFO [train.py:715] (6/8) Epoch 13, batch 8450, loss[loss=0.1286, simple_loss=0.2143, pruned_loss=0.02142, over 4855.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03149, over 972998.05 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:11:32,557 INFO [train.py:715] (6/8) Epoch 13, batch 8500, loss[loss=0.1185, simple_loss=0.1948, pruned_loss=0.02115, over 4830.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03176, over 973213.87 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:12:11,698 INFO [train.py:715] (6/8) Epoch 13, batch 8550, loss[loss=0.1408, simple_loss=0.2174, pruned_loss=0.03216, over 4941.00 frames.], tot_loss[loss=0.1362, 
simple_loss=0.2095, pruned_loss=0.03148, over 972929.66 frames.], batch size: 35, lr: 1.69e-04 +2022-05-07 17:12:50,649 INFO [train.py:715] (6/8) Epoch 13, batch 8600, loss[loss=0.1483, simple_loss=0.2187, pruned_loss=0.03901, over 4982.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03116, over 972870.76 frames.], batch size: 35, lr: 1.69e-04 +2022-05-07 17:13:28,886 INFO [train.py:715] (6/8) Epoch 13, batch 8650, loss[loss=0.1073, simple_loss=0.1821, pruned_loss=0.01623, over 4948.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.0313, over 972872.41 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:14:07,228 INFO [train.py:715] (6/8) Epoch 13, batch 8700, loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.03121, over 4819.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03134, over 972828.29 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 17:14:45,870 INFO [train.py:715] (6/8) Epoch 13, batch 8750, loss[loss=0.1218, simple_loss=0.1954, pruned_loss=0.02408, over 4645.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03187, over 973266.31 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 17:15:24,588 INFO [train.py:715] (6/8) Epoch 13, batch 8800, loss[loss=0.1423, simple_loss=0.2216, pruned_loss=0.03148, over 4886.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03239, over 972811.13 frames.], batch size: 39, lr: 1.69e-04 +2022-05-07 17:16:02,888 INFO [train.py:715] (6/8) Epoch 13, batch 8850, loss[loss=0.1492, simple_loss=0.22, pruned_loss=0.0392, over 4873.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2101, pruned_loss=0.03218, over 971813.35 frames.], batch size: 32, lr: 1.69e-04 +2022-05-07 17:16:40,976 INFO [train.py:715] (6/8) Epoch 13, batch 8900, loss[loss=0.1299, simple_loss=0.2077, pruned_loss=0.02602, over 4859.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2098, pruned_loss=0.03196, over 971242.13 frames.], batch size: 16, lr: 1.69e-04 +2022-05-07 17:17:19,701 INFO [train.py:715] (6/8) Epoch 13, batch 8950, loss[loss=0.1734, simple_loss=0.2282, pruned_loss=0.05927, over 4770.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2089, pruned_loss=0.03132, over 971950.51 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 17:17:57,829 INFO [train.py:715] (6/8) Epoch 13, batch 9000, loss[loss=0.1265, simple_loss=0.2017, pruned_loss=0.02563, over 4782.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2089, pruned_loss=0.03127, over 970762.15 frames.], batch size: 12, lr: 1.69e-04 +2022-05-07 17:17:57,830 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 17:18:07,452 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1055, simple_loss=0.1893, pruned_loss=0.01084, over 914524.00 frames. 
+2022-05-07 17:18:45,502 INFO [train.py:715] (6/8) Epoch 13, batch 9050, loss[loss=0.1462, simple_loss=0.2275, pruned_loss=0.03243, over 4813.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.0311, over 971970.14 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:19:23,913 INFO [train.py:715] (6/8) Epoch 13, batch 9100, loss[loss=0.1339, simple_loss=0.2113, pruned_loss=0.02827, over 4974.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.0309, over 972024.99 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:20:03,102 INFO [train.py:715] (6/8) Epoch 13, batch 9150, loss[loss=0.1795, simple_loss=0.2528, pruned_loss=0.05305, over 4778.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.0317, over 972461.98 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:20:42,100 INFO [train.py:715] (6/8) Epoch 13, batch 9200, loss[loss=0.1371, simple_loss=0.212, pruned_loss=0.03106, over 4839.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03159, over 973373.25 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:21:20,014 INFO [train.py:715] (6/8) Epoch 13, batch 9250, loss[loss=0.1513, simple_loss=0.2259, pruned_loss=0.0383, over 4800.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03142, over 971919.84 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:21:58,917 INFO [train.py:715] (6/8) Epoch 13, batch 9300, loss[loss=0.1201, simple_loss=0.1912, pruned_loss=0.02447, over 4785.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03126, over 971623.18 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:22:37,769 INFO [train.py:715] (6/8) Epoch 13, batch 9350, loss[loss=0.155, simple_loss=0.2481, pruned_loss=0.03098, over 4857.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03121, over 971996.84 frames.], batch size: 20, lr: 1.69e-04 +2022-05-07 17:23:15,583 INFO [train.py:715] (6/8) Epoch 13, batch 9400, loss[loss=0.1271, simple_loss=0.2043, pruned_loss=0.0249, over 4945.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03144, over 971901.64 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:23:54,035 INFO [train.py:715] (6/8) Epoch 13, batch 9450, loss[loss=0.1285, simple_loss=0.2069, pruned_loss=0.025, over 4911.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03141, over 971823.00 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:24:32,855 INFO [train.py:715] (6/8) Epoch 13, batch 9500, loss[loss=0.1242, simple_loss=0.2033, pruned_loss=0.02254, over 4961.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03138, over 972796.64 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:25:11,111 INFO [train.py:715] (6/8) Epoch 13, batch 9550, loss[loss=0.1685, simple_loss=0.2388, pruned_loss=0.04912, over 4832.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03168, over 972380.73 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:25:49,082 INFO [train.py:715] (6/8) Epoch 13, batch 9600, loss[loss=0.123, simple_loss=0.1991, pruned_loss=0.02349, over 4889.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03158, over 972162.89 frames.], batch size: 22, lr: 1.69e-04 +2022-05-07 17:26:28,024 INFO [train.py:715] (6/8) Epoch 13, batch 9650, loss[loss=0.1234, simple_loss=0.2039, pruned_loss=0.02147, over 4892.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03165, over 971933.16 frames.], batch size: 16, lr: 1.69e-04 +2022-05-07 17:27:06,438 INFO 
[train.py:715] (6/8) Epoch 13, batch 9700, loss[loss=0.125, simple_loss=0.1981, pruned_loss=0.02593, over 4987.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03183, over 972377.90 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:27:44,981 INFO [train.py:715] (6/8) Epoch 13, batch 9750, loss[loss=0.1287, simple_loss=0.2074, pruned_loss=0.02496, over 4941.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03136, over 971769.73 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:28:23,844 INFO [train.py:715] (6/8) Epoch 13, batch 9800, loss[loss=0.1277, simple_loss=0.212, pruned_loss=0.02169, over 4943.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03088, over 971634.78 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:29:03,036 INFO [train.py:715] (6/8) Epoch 13, batch 9850, loss[loss=0.1542, simple_loss=0.2232, pruned_loss=0.04256, over 4989.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03137, over 972477.66 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 17:29:41,587 INFO [train.py:715] (6/8) Epoch 13, batch 9900, loss[loss=0.1255, simple_loss=0.1966, pruned_loss=0.02724, over 4827.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03135, over 972352.56 frames.], batch size: 26, lr: 1.69e-04 +2022-05-07 17:30:19,824 INFO [train.py:715] (6/8) Epoch 13, batch 9950, loss[loss=0.1395, simple_loss=0.209, pruned_loss=0.03498, over 4888.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.0317, over 972482.84 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:30:58,622 INFO [train.py:715] (6/8) Epoch 13, batch 10000, loss[loss=0.1538, simple_loss=0.2235, pruned_loss=0.04204, over 4691.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03204, over 972572.55 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:31:37,788 INFO [train.py:715] (6/8) Epoch 13, batch 10050, loss[loss=0.1381, simple_loss=0.2109, pruned_loss=0.03265, over 4821.00 frames.], tot_loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.03229, over 972457.79 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:32:16,725 INFO [train.py:715] (6/8) Epoch 13, batch 10100, loss[loss=0.1531, simple_loss=0.2331, pruned_loss=0.03659, over 4976.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.03219, over 972244.88 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 17:32:54,966 INFO [train.py:715] (6/8) Epoch 13, batch 10150, loss[loss=0.1232, simple_loss=0.1899, pruned_loss=0.02823, over 4814.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03199, over 972558.48 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 17:33:34,002 INFO [train.py:715] (6/8) Epoch 13, batch 10200, loss[loss=0.1363, simple_loss=0.2038, pruned_loss=0.03442, over 4915.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2103, pruned_loss=0.03227, over 971994.22 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:34:13,398 INFO [train.py:715] (6/8) Epoch 13, batch 10250, loss[loss=0.1139, simple_loss=0.192, pruned_loss=0.01787, over 4816.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2102, pruned_loss=0.03201, over 971804.56 frames.], batch size: 26, lr: 1.69e-04 +2022-05-07 17:34:52,085 INFO [train.py:715] (6/8) Epoch 13, batch 10300, loss[loss=0.145, simple_loss=0.2141, pruned_loss=0.03793, over 4994.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03145, over 972164.09 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 17:35:31,128 INFO [train.py:715] 
(6/8) Epoch 13, batch 10350, loss[loss=0.1344, simple_loss=0.1995, pruned_loss=0.03463, over 4905.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03146, over 972388.02 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:36:10,307 INFO [train.py:715] (6/8) Epoch 13, batch 10400, loss[loss=0.1535, simple_loss=0.2202, pruned_loss=0.04337, over 4877.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03152, over 971977.24 frames.], batch size: 39, lr: 1.69e-04 +2022-05-07 17:36:49,252 INFO [train.py:715] (6/8) Epoch 13, batch 10450, loss[loss=0.1587, simple_loss=0.226, pruned_loss=0.04574, over 4986.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2096, pruned_loss=0.03154, over 971582.09 frames.], batch size: 25, lr: 1.69e-04 +2022-05-07 17:37:26,679 INFO [train.py:715] (6/8) Epoch 13, batch 10500, loss[loss=0.1444, simple_loss=0.2175, pruned_loss=0.03567, over 4796.00 frames.], tot_loss[loss=0.1358, simple_loss=0.209, pruned_loss=0.03126, over 971551.36 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:38:05,567 INFO [train.py:715] (6/8) Epoch 13, batch 10550, loss[loss=0.1416, simple_loss=0.2202, pruned_loss=0.03149, over 4862.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.03113, over 971547.43 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 17:38:44,487 INFO [train.py:715] (6/8) Epoch 13, batch 10600, loss[loss=0.1402, simple_loss=0.2165, pruned_loss=0.03195, over 4783.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.031, over 972201.02 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:39:22,593 INFO [train.py:715] (6/8) Epoch 13, batch 10650, loss[loss=0.12, simple_loss=0.1958, pruned_loss=0.02214, over 4851.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2093, pruned_loss=0.03067, over 972087.84 frames.], batch size: 20, lr: 1.69e-04 +2022-05-07 17:40:01,764 INFO [train.py:715] (6/8) Epoch 13, batch 10700, loss[loss=0.1084, simple_loss=0.1775, pruned_loss=0.01968, over 4792.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2098, pruned_loss=0.03085, over 972010.23 frames.], batch size: 12, lr: 1.69e-04 +2022-05-07 17:40:41,063 INFO [train.py:715] (6/8) Epoch 13, batch 10750, loss[loss=0.1265, simple_loss=0.1965, pruned_loss=0.02821, over 4795.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03069, over 971642.39 frames.], batch size: 12, lr: 1.69e-04 +2022-05-07 17:41:19,860 INFO [train.py:715] (6/8) Epoch 13, batch 10800, loss[loss=0.1528, simple_loss=0.2158, pruned_loss=0.04494, over 4845.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03114, over 971805.84 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 17:41:57,896 INFO [train.py:715] (6/8) Epoch 13, batch 10850, loss[loss=0.1348, simple_loss=0.2098, pruned_loss=0.02988, over 4902.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03082, over 972186.55 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:42:37,035 INFO [train.py:715] (6/8) Epoch 13, batch 10900, loss[loss=0.1205, simple_loss=0.197, pruned_loss=0.02199, over 4913.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03117, over 972303.84 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:43:16,910 INFO [train.py:715] (6/8) Epoch 13, batch 10950, loss[loss=0.103, simple_loss=0.1726, pruned_loss=0.01671, over 4682.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03117, over 973059.45 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:43:56,335 INFO [train.py:715] (6/8) Epoch 
13, batch 11000, loss[loss=0.1325, simple_loss=0.2027, pruned_loss=0.03109, over 4974.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03092, over 972822.88 frames.], batch size: 40, lr: 1.69e-04 +2022-05-07 17:44:34,971 INFO [train.py:715] (6/8) Epoch 13, batch 11050, loss[loss=0.1442, simple_loss=0.2169, pruned_loss=0.03575, over 4776.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03129, over 973117.65 frames.], batch size: 12, lr: 1.69e-04 +2022-05-07 17:45:14,267 INFO [train.py:715] (6/8) Epoch 13, batch 11100, loss[loss=0.126, simple_loss=0.1995, pruned_loss=0.02624, over 4895.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.03117, over 973505.09 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 17:45:53,236 INFO [train.py:715] (6/8) Epoch 13, batch 11150, loss[loss=0.1141, simple_loss=0.1769, pruned_loss=0.02567, over 4706.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03083, over 971737.22 frames.], batch size: 12, lr: 1.69e-04 +2022-05-07 17:46:30,990 INFO [train.py:715] (6/8) Epoch 13, batch 11200, loss[loss=0.1393, simple_loss=0.2191, pruned_loss=0.02971, over 4701.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03155, over 972202.59 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:47:09,192 INFO [train.py:715] (6/8) Epoch 13, batch 11250, loss[loss=0.152, simple_loss=0.2231, pruned_loss=0.0404, over 4934.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03143, over 971822.12 frames.], batch size: 35, lr: 1.69e-04 +2022-05-07 17:47:48,139 INFO [train.py:715] (6/8) Epoch 13, batch 11300, loss[loss=0.15, simple_loss=0.2215, pruned_loss=0.03928, over 4972.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.031, over 971661.21 frames.], batch size: 35, lr: 1.69e-04 +2022-05-07 17:48:27,089 INFO [train.py:715] (6/8) Epoch 13, batch 11350, loss[loss=0.1246, simple_loss=0.1966, pruned_loss=0.02627, over 4936.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03105, over 971797.22 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:49:05,292 INFO [train.py:715] (6/8) Epoch 13, batch 11400, loss[loss=0.1345, simple_loss=0.2091, pruned_loss=0.02995, over 4867.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03098, over 972696.58 frames.], batch size: 32, lr: 1.69e-04 +2022-05-07 17:49:44,158 INFO [train.py:715] (6/8) Epoch 13, batch 11450, loss[loss=0.1586, simple_loss=0.2208, pruned_loss=0.04824, over 4845.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03087, over 972066.13 frames.], batch size: 32, lr: 1.69e-04 +2022-05-07 17:50:25,718 INFO [train.py:715] (6/8) Epoch 13, batch 11500, loss[loss=0.1142, simple_loss=0.1969, pruned_loss=0.0158, over 4930.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03072, over 972464.21 frames.], batch size: 23, lr: 1.69e-04 +2022-05-07 17:51:03,649 INFO [train.py:715] (6/8) Epoch 13, batch 11550, loss[loss=0.1368, simple_loss=0.2067, pruned_loss=0.0334, over 4982.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03075, over 972193.84 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:51:42,316 INFO [train.py:715] (6/8) Epoch 13, batch 11600, loss[loss=0.1238, simple_loss=0.192, pruned_loss=0.02784, over 4893.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03074, over 973062.45 frames.], batch size: 22, lr: 1.69e-04 +2022-05-07 17:52:21,599 INFO [train.py:715] (6/8) Epoch 13, batch 11650, 
loss[loss=0.1335, simple_loss=0.2025, pruned_loss=0.03218, over 4913.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2087, pruned_loss=0.03132, over 973632.07 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:53:00,310 INFO [train.py:715] (6/8) Epoch 13, batch 11700, loss[loss=0.1319, simple_loss=0.2001, pruned_loss=0.0319, over 4819.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2085, pruned_loss=0.03134, over 972941.86 frames.], batch size: 27, lr: 1.69e-04 +2022-05-07 17:53:38,277 INFO [train.py:715] (6/8) Epoch 13, batch 11750, loss[loss=0.1205, simple_loss=0.1934, pruned_loss=0.02376, over 4942.00 frames.], tot_loss[loss=0.1359, simple_loss=0.209, pruned_loss=0.03133, over 972208.44 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 17:54:16,754 INFO [train.py:715] (6/8) Epoch 13, batch 11800, loss[loss=0.1303, simple_loss=0.2039, pruned_loss=0.02836, over 4922.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2092, pruned_loss=0.03164, over 972479.68 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:54:55,479 INFO [train.py:715] (6/8) Epoch 13, batch 11850, loss[loss=0.1357, simple_loss=0.2034, pruned_loss=0.03399, over 4767.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2097, pruned_loss=0.03206, over 972400.63 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:55:32,887 INFO [train.py:715] (6/8) Epoch 13, batch 11900, loss[loss=0.1216, simple_loss=0.2009, pruned_loss=0.02118, over 4650.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2092, pruned_loss=0.03175, over 972208.47 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 17:56:11,545 INFO [train.py:715] (6/8) Epoch 13, batch 11950, loss[loss=0.1446, simple_loss=0.2162, pruned_loss=0.03649, over 4754.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2098, pruned_loss=0.0318, over 972432.08 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 17:56:50,610 INFO [train.py:715] (6/8) Epoch 13, batch 12000, loss[loss=0.1465, simple_loss=0.2166, pruned_loss=0.03822, over 4931.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03144, over 973082.78 frames.], batch size: 23, lr: 1.69e-04 +2022-05-07 17:56:50,611 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 17:57:00,357 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1055, simple_loss=0.1893, pruned_loss=0.01081, over 914524.00 frames. 
+2022-05-07 17:57:40,026 INFO [train.py:715] (6/8) Epoch 13, batch 12050, loss[loss=0.1519, simple_loss=0.2178, pruned_loss=0.04297, over 4869.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03125, over 973609.16 frames.], batch size: 30, lr: 1.69e-04 +2022-05-07 17:58:18,318 INFO [train.py:715] (6/8) Epoch 13, batch 12100, loss[loss=0.1424, simple_loss=0.2179, pruned_loss=0.03351, over 4779.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03187, over 973006.64 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:58:56,072 INFO [train.py:715] (6/8) Epoch 13, batch 12150, loss[loss=0.1176, simple_loss=0.1964, pruned_loss=0.0194, over 4853.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03149, over 972465.88 frames.], batch size: 20, lr: 1.69e-04 +2022-05-07 17:59:34,974 INFO [train.py:715] (6/8) Epoch 13, batch 12200, loss[loss=0.1432, simple_loss=0.2177, pruned_loss=0.03432, over 4886.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2106, pruned_loss=0.03126, over 973416.40 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 18:00:13,890 INFO [train.py:715] (6/8) Epoch 13, batch 12250, loss[loss=0.146, simple_loss=0.2198, pruned_loss=0.03604, over 4910.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2108, pruned_loss=0.03116, over 973267.55 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 18:00:52,460 INFO [train.py:715] (6/8) Epoch 13, batch 12300, loss[loss=0.1323, simple_loss=0.1959, pruned_loss=0.03431, over 4785.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2104, pruned_loss=0.03117, over 973176.39 frames.], batch size: 12, lr: 1.69e-04 +2022-05-07 18:01:30,136 INFO [train.py:715] (6/8) Epoch 13, batch 12350, loss[loss=0.1397, simple_loss=0.2059, pruned_loss=0.03677, over 4872.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2098, pruned_loss=0.03082, over 972652.98 frames.], batch size: 32, lr: 1.69e-04 +2022-05-07 18:02:09,067 INFO [train.py:715] (6/8) Epoch 13, batch 12400, loss[loss=0.1781, simple_loss=0.2488, pruned_loss=0.05376, over 4946.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03081, over 971939.27 frames.], batch size: 39, lr: 1.69e-04 +2022-05-07 18:02:47,458 INFO [train.py:715] (6/8) Epoch 13, batch 12450, loss[loss=0.1313, simple_loss=0.207, pruned_loss=0.02774, over 4984.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03101, over 971537.90 frames.], batch size: 28, lr: 1.69e-04 +2022-05-07 18:03:24,473 INFO [train.py:715] (6/8) Epoch 13, batch 12500, loss[loss=0.1212, simple_loss=0.2011, pruned_loss=0.02062, over 4752.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03147, over 971976.10 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 18:04:03,260 INFO [train.py:715] (6/8) Epoch 13, batch 12550, loss[loss=0.13, simple_loss=0.2032, pruned_loss=0.02847, over 4753.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03178, over 971799.08 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 18:04:41,882 INFO [train.py:715] (6/8) Epoch 13, batch 12600, loss[loss=0.1157, simple_loss=0.185, pruned_loss=0.02326, over 4840.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03139, over 971897.84 frames.], batch size: 30, lr: 1.69e-04 +2022-05-07 18:05:20,410 INFO [train.py:715] (6/8) Epoch 13, batch 12650, loss[loss=0.1305, simple_loss=0.2036, pruned_loss=0.02867, over 4963.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03115, over 971785.76 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 
18:05:58,205 INFO [train.py:715] (6/8) Epoch 13, batch 12700, loss[loss=0.1397, simple_loss=0.2051, pruned_loss=0.03718, over 4983.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03147, over 971182.56 frames.], batch size: 35, lr: 1.69e-04 +2022-05-07 18:06:37,494 INFO [train.py:715] (6/8) Epoch 13, batch 12750, loss[loss=0.1592, simple_loss=0.2206, pruned_loss=0.04888, over 4828.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03174, over 970874.32 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 18:07:16,117 INFO [train.py:715] (6/8) Epoch 13, batch 12800, loss[loss=0.1087, simple_loss=0.1935, pruned_loss=0.01191, over 4830.00 frames.], tot_loss[loss=0.137, simple_loss=0.2102, pruned_loss=0.03187, over 971089.26 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 18:07:53,801 INFO [train.py:715] (6/8) Epoch 13, batch 12850, loss[loss=0.1145, simple_loss=0.1996, pruned_loss=0.01473, over 4782.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2101, pruned_loss=0.0319, over 971716.23 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 18:08:32,303 INFO [train.py:715] (6/8) Epoch 13, batch 12900, loss[loss=0.1641, simple_loss=0.228, pruned_loss=0.05006, over 4945.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2096, pruned_loss=0.0316, over 972262.17 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 18:09:10,902 INFO [train.py:715] (6/8) Epoch 13, batch 12950, loss[loss=0.1254, simple_loss=0.2107, pruned_loss=0.02002, over 4911.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2097, pruned_loss=0.03127, over 972303.33 frames.], batch size: 29, lr: 1.69e-04 +2022-05-07 18:09:48,856 INFO [train.py:715] (6/8) Epoch 13, batch 13000, loss[loss=0.1161, simple_loss=0.1869, pruned_loss=0.02266, over 4981.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03148, over 971718.08 frames.], batch size: 28, lr: 1.69e-04 +2022-05-07 18:10:26,257 INFO [train.py:715] (6/8) Epoch 13, batch 13050, loss[loss=0.1276, simple_loss=0.1974, pruned_loss=0.02885, over 4791.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03145, over 971998.51 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 18:11:05,302 INFO [train.py:715] (6/8) Epoch 13, batch 13100, loss[loss=0.1096, simple_loss=0.1758, pruned_loss=0.02176, over 4924.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.0318, over 972224.73 frames.], batch size: 29, lr: 1.69e-04 +2022-05-07 18:11:43,996 INFO [train.py:715] (6/8) Epoch 13, batch 13150, loss[loss=0.1533, simple_loss=0.2314, pruned_loss=0.03764, over 4993.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03147, over 972662.82 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 18:12:21,745 INFO [train.py:715] (6/8) Epoch 13, batch 13200, loss[loss=0.1534, simple_loss=0.2155, pruned_loss=0.04564, over 4990.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03131, over 972006.23 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 18:13:00,178 INFO [train.py:715] (6/8) Epoch 13, batch 13250, loss[loss=0.1492, simple_loss=0.2248, pruned_loss=0.03677, over 4795.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2095, pruned_loss=0.03155, over 971807.55 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 18:13:38,868 INFO [train.py:715] (6/8) Epoch 13, batch 13300, loss[loss=0.1261, simple_loss=0.1968, pruned_loss=0.02766, over 4915.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2089, pruned_loss=0.03131, over 972738.71 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 18:14:17,621 
INFO [train.py:715] (6/8) Epoch 13, batch 13350, loss[loss=0.1496, simple_loss=0.2249, pruned_loss=0.03712, over 4841.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2088, pruned_loss=0.03127, over 972020.62 frames.], batch size: 30, lr: 1.69e-04 +2022-05-07 18:14:55,909 INFO [train.py:715] (6/8) Epoch 13, batch 13400, loss[loss=0.1472, simple_loss=0.2151, pruned_loss=0.03965, over 4968.00 frames.], tot_loss[loss=0.1357, simple_loss=0.209, pruned_loss=0.03122, over 971151.45 frames.], batch size: 39, lr: 1.69e-04 +2022-05-07 18:15:35,681 INFO [train.py:715] (6/8) Epoch 13, batch 13450, loss[loss=0.1209, simple_loss=0.1957, pruned_loss=0.02301, over 4995.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.03165, over 971578.07 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 18:16:14,409 INFO [train.py:715] (6/8) Epoch 13, batch 13500, loss[loss=0.1347, simple_loss=0.2109, pruned_loss=0.02927, over 4916.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2097, pruned_loss=0.03155, over 971849.99 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 18:16:52,062 INFO [train.py:715] (6/8) Epoch 13, batch 13550, loss[loss=0.116, simple_loss=0.1955, pruned_loss=0.01822, over 4932.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03158, over 972314.66 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 18:17:29,850 INFO [train.py:715] (6/8) Epoch 13, batch 13600, loss[loss=0.1437, simple_loss=0.222, pruned_loss=0.03271, over 4888.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03186, over 972644.60 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 18:18:08,971 INFO [train.py:715] (6/8) Epoch 13, batch 13650, loss[loss=0.1407, simple_loss=0.2236, pruned_loss=0.02891, over 4769.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.03188, over 973037.82 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 18:18:47,087 INFO [train.py:715] (6/8) Epoch 13, batch 13700, loss[loss=0.145, simple_loss=0.2232, pruned_loss=0.03344, over 4879.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03163, over 973091.71 frames.], batch size: 32, lr: 1.68e-04 +2022-05-07 18:19:24,726 INFO [train.py:715] (6/8) Epoch 13, batch 13750, loss[loss=0.138, simple_loss=0.2093, pruned_loss=0.03337, over 4808.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03137, over 972317.88 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 18:20:03,321 INFO [train.py:715] (6/8) Epoch 13, batch 13800, loss[loss=0.1573, simple_loss=0.2332, pruned_loss=0.04076, over 4885.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03139, over 972149.74 frames.], batch size: 39, lr: 1.68e-04 +2022-05-07 18:20:41,460 INFO [train.py:715] (6/8) Epoch 13, batch 13850, loss[loss=0.1284, simple_loss=0.2023, pruned_loss=0.02721, over 4696.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03128, over 971927.21 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:21:19,872 INFO [train.py:715] (6/8) Epoch 13, batch 13900, loss[loss=0.126, simple_loss=0.1939, pruned_loss=0.02908, over 4757.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03127, over 972266.28 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 18:21:58,633 INFO [train.py:715] (6/8) Epoch 13, batch 13950, loss[loss=0.1508, simple_loss=0.2253, pruned_loss=0.03817, over 4896.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03151, over 972594.53 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 18:22:37,441 INFO 
[train.py:715] (6/8) Epoch 13, batch 14000, loss[loss=0.1468, simple_loss=0.2158, pruned_loss=0.03892, over 4825.00 frames.], tot_loss[loss=0.137, simple_loss=0.2107, pruned_loss=0.03168, over 973276.66 frames.], batch size: 26, lr: 1.68e-04 +2022-05-07 18:23:15,660 INFO [train.py:715] (6/8) Epoch 13, batch 14050, loss[loss=0.1156, simple_loss=0.2009, pruned_loss=0.01516, over 4978.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03182, over 973177.66 frames.], batch size: 25, lr: 1.68e-04 +2022-05-07 18:23:53,253 INFO [train.py:715] (6/8) Epoch 13, batch 14100, loss[loss=0.1582, simple_loss=0.2267, pruned_loss=0.04489, over 4823.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2116, pruned_loss=0.03199, over 973822.69 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:24:32,482 INFO [train.py:715] (6/8) Epoch 13, batch 14150, loss[loss=0.1403, simple_loss=0.2201, pruned_loss=0.03019, over 4950.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2113, pruned_loss=0.03183, over 973512.10 frames.], batch size: 29, lr: 1.68e-04 +2022-05-07 18:25:10,633 INFO [train.py:715] (6/8) Epoch 13, batch 14200, loss[loss=0.1824, simple_loss=0.2581, pruned_loss=0.05336, over 4933.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03206, over 973241.20 frames.], batch size: 23, lr: 1.68e-04 +2022-05-07 18:25:48,510 INFO [train.py:715] (6/8) Epoch 13, batch 14250, loss[loss=0.136, simple_loss=0.2131, pruned_loss=0.02939, over 4804.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03172, over 973065.81 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 18:26:26,774 INFO [train.py:715] (6/8) Epoch 13, batch 14300, loss[loss=0.1412, simple_loss=0.2124, pruned_loss=0.03502, over 4808.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2098, pruned_loss=0.03189, over 972908.37 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 18:27:06,170 INFO [train.py:715] (6/8) Epoch 13, batch 14350, loss[loss=0.1614, simple_loss=0.2281, pruned_loss=0.04742, over 4855.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2099, pruned_loss=0.03188, over 971495.79 frames.], batch size: 20, lr: 1.68e-04 +2022-05-07 18:27:44,509 INFO [train.py:715] (6/8) Epoch 13, batch 14400, loss[loss=0.1486, simple_loss=0.2062, pruned_loss=0.04545, over 4876.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2107, pruned_loss=0.03213, over 971723.36 frames.], batch size: 32, lr: 1.68e-04 +2022-05-07 18:28:22,437 INFO [train.py:715] (6/8) Epoch 13, batch 14450, loss[loss=0.1379, simple_loss=0.2264, pruned_loss=0.02472, over 4942.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2107, pruned_loss=0.03218, over 972478.41 frames.], batch size: 29, lr: 1.68e-04 +2022-05-07 18:29:01,541 INFO [train.py:715] (6/8) Epoch 13, batch 14500, loss[loss=0.1406, simple_loss=0.2106, pruned_loss=0.03536, over 4904.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2105, pruned_loss=0.03218, over 972534.37 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 18:29:40,347 INFO [train.py:715] (6/8) Epoch 13, batch 14550, loss[loss=0.107, simple_loss=0.1708, pruned_loss=0.02162, over 4838.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03185, over 972597.83 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 18:30:18,693 INFO [train.py:715] (6/8) Epoch 13, batch 14600, loss[loss=0.1262, simple_loss=0.204, pruned_loss=0.02424, over 4707.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.03174, over 972086.56 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:30:57,058 INFO 
[train.py:715] (6/8) Epoch 13, batch 14650, loss[loss=0.1449, simple_loss=0.2181, pruned_loss=0.03584, over 4986.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03185, over 972532.70 frames.], batch size: 14, lr: 1.68e-04 +2022-05-07 18:31:35,708 INFO [train.py:715] (6/8) Epoch 13, batch 14700, loss[loss=0.1217, simple_loss=0.1924, pruned_loss=0.02552, over 4802.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03221, over 971624.80 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 18:32:13,644 INFO [train.py:715] (6/8) Epoch 13, batch 14750, loss[loss=0.1344, simple_loss=0.2037, pruned_loss=0.03251, over 4898.00 frames.], tot_loss[loss=0.1369, simple_loss=0.21, pruned_loss=0.03187, over 970755.61 frames.], batch size: 22, lr: 1.68e-04 +2022-05-07 18:32:50,800 INFO [train.py:715] (6/8) Epoch 13, batch 14800, loss[loss=0.114, simple_loss=0.1817, pruned_loss=0.02319, over 4798.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2094, pruned_loss=0.03139, over 970828.77 frames.], batch size: 14, lr: 1.68e-04 +2022-05-07 18:33:29,889 INFO [train.py:715] (6/8) Epoch 13, batch 14850, loss[loss=0.133, simple_loss=0.2103, pruned_loss=0.02779, over 4753.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2089, pruned_loss=0.03075, over 970734.86 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 18:34:08,569 INFO [train.py:715] (6/8) Epoch 13, batch 14900, loss[loss=0.1338, simple_loss=0.2099, pruned_loss=0.02883, over 4950.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03111, over 972485.22 frames.], batch size: 29, lr: 1.68e-04 +2022-05-07 18:34:46,493 INFO [train.py:715] (6/8) Epoch 13, batch 14950, loss[loss=0.1485, simple_loss=0.2115, pruned_loss=0.04272, over 4737.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03134, over 972108.49 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 18:35:24,994 INFO [train.py:715] (6/8) Epoch 13, batch 15000, loss[loss=0.1259, simple_loss=0.193, pruned_loss=0.02943, over 4795.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03218, over 972866.21 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 18:35:24,994 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 18:35:34,567 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1052, simple_loss=0.189, pruned_loss=0.01074, over 914524.00 frames. 
+2022-05-07 18:36:13,160 INFO [train.py:715] (6/8) Epoch 13, batch 15050, loss[loss=0.1224, simple_loss=0.1941, pruned_loss=0.02537, over 4654.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03185, over 971883.84 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 18:36:52,715 INFO [train.py:715] (6/8) Epoch 13, batch 15100, loss[loss=0.1274, simple_loss=0.197, pruned_loss=0.02885, over 4800.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2103, pruned_loss=0.03203, over 972976.95 frames.], batch size: 25, lr: 1.68e-04 +2022-05-07 18:37:31,194 INFO [train.py:715] (6/8) Epoch 13, batch 15150, loss[loss=0.1549, simple_loss=0.2294, pruned_loss=0.04026, over 4714.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.03185, over 972545.88 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:38:09,447 INFO [train.py:715] (6/8) Epoch 13, batch 15200, loss[loss=0.1308, simple_loss=0.2146, pruned_loss=0.02349, over 4915.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.0316, over 973330.39 frames.], batch size: 39, lr: 1.68e-04 +2022-05-07 18:38:49,233 INFO [train.py:715] (6/8) Epoch 13, batch 15250, loss[loss=0.1313, simple_loss=0.203, pruned_loss=0.02979, over 4986.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.0314, over 973660.45 frames.], batch size: 28, lr: 1.68e-04 +2022-05-07 18:39:27,977 INFO [train.py:715] (6/8) Epoch 13, batch 15300, loss[loss=0.1514, simple_loss=0.2265, pruned_loss=0.03814, over 4820.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03119, over 972615.63 frames.], batch size: 26, lr: 1.68e-04 +2022-05-07 18:40:06,015 INFO [train.py:715] (6/8) Epoch 13, batch 15350, loss[loss=0.1154, simple_loss=0.1924, pruned_loss=0.01923, over 4690.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03167, over 972143.50 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:40:45,013 INFO [train.py:715] (6/8) Epoch 13, batch 15400, loss[loss=0.12, simple_loss=0.2022, pruned_loss=0.01889, over 4750.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03153, over 971604.91 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 18:41:23,908 INFO [train.py:715] (6/8) Epoch 13, batch 15450, loss[loss=0.1348, simple_loss=0.2048, pruned_loss=0.03241, over 4982.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03122, over 972111.85 frames.], batch size: 31, lr: 1.68e-04 +2022-05-07 18:42:03,717 INFO [train.py:715] (6/8) Epoch 13, batch 15500, loss[loss=0.1667, simple_loss=0.2442, pruned_loss=0.0446, over 4898.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03136, over 972205.32 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 18:42:41,967 INFO [train.py:715] (6/8) Epoch 13, batch 15550, loss[loss=0.1082, simple_loss=0.1827, pruned_loss=0.01682, over 4837.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03082, over 971756.70 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 18:43:21,717 INFO [train.py:715] (6/8) Epoch 13, batch 15600, loss[loss=0.1348, simple_loss=0.2048, pruned_loss=0.03245, over 4796.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03076, over 972544.60 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 18:44:01,141 INFO [train.py:715] (6/8) Epoch 13, batch 15650, loss[loss=0.1393, simple_loss=0.2074, pruned_loss=0.03563, over 4750.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03137, over 972084.29 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 
18:44:39,624 INFO [train.py:715] (6/8) Epoch 13, batch 15700, loss[loss=0.1949, simple_loss=0.2637, pruned_loss=0.063, over 4739.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03164, over 971323.12 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 18:45:18,634 INFO [train.py:715] (6/8) Epoch 13, batch 15750, loss[loss=0.1528, simple_loss=0.2239, pruned_loss=0.04083, over 4862.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2096, pruned_loss=0.03179, over 970847.90 frames.], batch size: 32, lr: 1.68e-04 +2022-05-07 18:45:57,412 INFO [train.py:715] (6/8) Epoch 13, batch 15800, loss[loss=0.1515, simple_loss=0.226, pruned_loss=0.0385, over 4798.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2096, pruned_loss=0.03159, over 971230.32 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 18:46:35,696 INFO [train.py:715] (6/8) Epoch 13, batch 15850, loss[loss=0.1094, simple_loss=0.179, pruned_loss=0.01991, over 4820.00 frames.], tot_loss[loss=0.1358, simple_loss=0.209, pruned_loss=0.03133, over 971733.23 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 18:47:13,601 INFO [train.py:715] (6/8) Epoch 13, batch 15900, loss[loss=0.1643, simple_loss=0.2343, pruned_loss=0.04715, over 4719.00 frames.], tot_loss[loss=0.136, simple_loss=0.2092, pruned_loss=0.03142, over 971145.90 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:47:52,838 INFO [train.py:715] (6/8) Epoch 13, batch 15950, loss[loss=0.1364, simple_loss=0.2145, pruned_loss=0.02908, over 4835.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03143, over 972113.74 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 18:48:31,350 INFO [train.py:715] (6/8) Epoch 13, batch 16000, loss[loss=0.1107, simple_loss=0.1851, pruned_loss=0.01814, over 4988.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.03108, over 971662.81 frames.], batch size: 14, lr: 1.68e-04 +2022-05-07 18:49:09,603 INFO [train.py:715] (6/8) Epoch 13, batch 16050, loss[loss=0.1531, simple_loss=0.2243, pruned_loss=0.04095, over 4931.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2103, pruned_loss=0.03096, over 971943.13 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 18:49:48,097 INFO [train.py:715] (6/8) Epoch 13, batch 16100, loss[loss=0.1231, simple_loss=0.2114, pruned_loss=0.01744, over 4983.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2105, pruned_loss=0.03104, over 971867.61 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 18:50:27,336 INFO [train.py:715] (6/8) Epoch 13, batch 16150, loss[loss=0.1302, simple_loss=0.2111, pruned_loss=0.0247, over 4802.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2099, pruned_loss=0.03075, over 970556.59 frames.], batch size: 25, lr: 1.68e-04 +2022-05-07 18:51:05,994 INFO [train.py:715] (6/8) Epoch 13, batch 16200, loss[loss=0.1363, simple_loss=0.22, pruned_loss=0.02632, over 4885.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03055, over 970885.81 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 18:51:42,925 INFO [train.py:715] (6/8) Epoch 13, batch 16250, loss[loss=0.1566, simple_loss=0.2364, pruned_loss=0.03838, over 4843.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03111, over 970863.45 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:52:22,101 INFO [train.py:715] (6/8) Epoch 13, batch 16300, loss[loss=0.1539, simple_loss=0.2243, pruned_loss=0.0417, over 4985.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03135, over 970292.89 frames.], batch size: 31, lr: 1.68e-04 +2022-05-07 18:53:00,700 
INFO [train.py:715] (6/8) Epoch 13, batch 16350, loss[loss=0.1198, simple_loss=0.1985, pruned_loss=0.02055, over 4764.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03113, over 971040.72 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 18:53:39,045 INFO [train.py:715] (6/8) Epoch 13, batch 16400, loss[loss=0.1336, simple_loss=0.2026, pruned_loss=0.03232, over 4914.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03137, over 971655.28 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 18:54:18,189 INFO [train.py:715] (6/8) Epoch 13, batch 16450, loss[loss=0.1416, simple_loss=0.2077, pruned_loss=0.03775, over 4739.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03135, over 971437.52 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 18:54:57,403 INFO [train.py:715] (6/8) Epoch 13, batch 16500, loss[loss=0.1144, simple_loss=0.1901, pruned_loss=0.01936, over 4933.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03119, over 971508.56 frames.], batch size: 29, lr: 1.68e-04 +2022-05-07 18:55:36,543 INFO [train.py:715] (6/8) Epoch 13, batch 16550, loss[loss=0.1449, simple_loss=0.2234, pruned_loss=0.03317, over 4845.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03113, over 971194.16 frames.], batch size: 30, lr: 1.68e-04 +2022-05-07 18:56:13,926 INFO [train.py:715] (6/8) Epoch 13, batch 16600, loss[loss=0.1187, simple_loss=0.2024, pruned_loss=0.01748, over 4986.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03113, over 971897.54 frames.], batch size: 28, lr: 1.68e-04 +2022-05-07 18:56:53,166 INFO [train.py:715] (6/8) Epoch 13, batch 16650, loss[loss=0.1428, simple_loss=0.2149, pruned_loss=0.03536, over 4980.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2085, pruned_loss=0.03096, over 970985.26 frames.], batch size: 28, lr: 1.68e-04 +2022-05-07 18:57:31,701 INFO [train.py:715] (6/8) Epoch 13, batch 16700, loss[loss=0.1198, simple_loss=0.2098, pruned_loss=0.01489, over 4814.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03114, over 971436.86 frames.], batch size: 25, lr: 1.68e-04 +2022-05-07 18:58:09,691 INFO [train.py:715] (6/8) Epoch 13, batch 16750, loss[loss=0.1436, simple_loss=0.204, pruned_loss=0.04162, over 4966.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.0313, over 971987.56 frames.], batch size: 35, lr: 1.68e-04 +2022-05-07 18:58:48,289 INFO [train.py:715] (6/8) Epoch 13, batch 16800, loss[loss=0.1543, simple_loss=0.2375, pruned_loss=0.03556, over 4801.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03159, over 970887.30 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 18:59:27,938 INFO [train.py:715] (6/8) Epoch 13, batch 16850, loss[loss=0.1201, simple_loss=0.191, pruned_loss=0.02454, over 4980.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03192, over 970772.20 frames.], batch size: 35, lr: 1.68e-04 +2022-05-07 19:00:06,314 INFO [train.py:715] (6/8) Epoch 13, batch 16900, loss[loss=0.1569, simple_loss=0.23, pruned_loss=0.0419, over 4700.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2112, pruned_loss=0.03184, over 971280.88 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:00:44,803 INFO [train.py:715] (6/8) Epoch 13, batch 16950, loss[loss=0.1609, simple_loss=0.2241, pruned_loss=0.04891, over 4939.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03113, over 970932.64 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 19:01:23,717 INFO 
[train.py:715] (6/8) Epoch 13, batch 17000, loss[loss=0.1441, simple_loss=0.221, pruned_loss=0.03361, over 4968.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03105, over 971510.77 frames.], batch size: 35, lr: 1.68e-04 +2022-05-07 19:02:02,416 INFO [train.py:715] (6/8) Epoch 13, batch 17050, loss[loss=0.1408, simple_loss=0.2049, pruned_loss=0.03831, over 4979.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03124, over 971196.34 frames.], batch size: 28, lr: 1.68e-04 +2022-05-07 19:02:40,531 INFO [train.py:715] (6/8) Epoch 13, batch 17100, loss[loss=0.1359, simple_loss=0.2217, pruned_loss=0.02509, over 4837.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03102, over 971553.68 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:03:19,261 INFO [train.py:715] (6/8) Epoch 13, batch 17150, loss[loss=0.1352, simple_loss=0.2006, pruned_loss=0.03489, over 4640.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03177, over 971832.49 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 19:03:58,102 INFO [train.py:715] (6/8) Epoch 13, batch 17200, loss[loss=0.1751, simple_loss=0.2463, pruned_loss=0.05197, over 4862.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2108, pruned_loss=0.03213, over 972237.68 frames.], batch size: 32, lr: 1.68e-04 +2022-05-07 19:04:36,808 INFO [train.py:715] (6/8) Epoch 13, batch 17250, loss[loss=0.136, simple_loss=0.2073, pruned_loss=0.03237, over 4907.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03172, over 972177.07 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 19:05:14,780 INFO [train.py:715] (6/8) Epoch 13, batch 17300, loss[loss=0.1075, simple_loss=0.1829, pruned_loss=0.01605, over 4793.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2104, pruned_loss=0.03128, over 971755.61 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 19:05:53,539 INFO [train.py:715] (6/8) Epoch 13, batch 17350, loss[loss=0.1215, simple_loss=0.1942, pruned_loss=0.02441, over 4983.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2108, pruned_loss=0.03147, over 972282.02 frames.], batch size: 14, lr: 1.68e-04 +2022-05-07 19:06:32,453 INFO [train.py:715] (6/8) Epoch 13, batch 17400, loss[loss=0.1516, simple_loss=0.2215, pruned_loss=0.04088, over 4839.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2105, pruned_loss=0.03144, over 971894.46 frames.], batch size: 30, lr: 1.68e-04 +2022-05-07 19:07:10,068 INFO [train.py:715] (6/8) Epoch 13, batch 17450, loss[loss=0.1552, simple_loss=0.2312, pruned_loss=0.03962, over 4885.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2099, pruned_loss=0.03089, over 971649.00 frames.], batch size: 39, lr: 1.68e-04 +2022-05-07 19:07:48,570 INFO [train.py:715] (6/8) Epoch 13, batch 17500, loss[loss=0.1201, simple_loss=0.1907, pruned_loss=0.0247, over 4789.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2093, pruned_loss=0.0304, over 972181.02 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 19:08:27,656 INFO [train.py:715] (6/8) Epoch 13, batch 17550, loss[loss=0.1478, simple_loss=0.2248, pruned_loss=0.03546, over 4911.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03068, over 971606.71 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 19:09:06,327 INFO [train.py:715] (6/8) Epoch 13, batch 17600, loss[loss=0.1508, simple_loss=0.2115, pruned_loss=0.04503, over 4935.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.0305, over 972288.30 frames.], batch size: 35, lr: 1.68e-04 +2022-05-07 19:09:43,948 INFO 
[train.py:715] (6/8) Epoch 13, batch 17650, loss[loss=0.128, simple_loss=0.2059, pruned_loss=0.02507, over 4941.00 frames.], tot_loss[loss=0.135, simple_loss=0.2091, pruned_loss=0.0305, over 971989.39 frames.], batch size: 29, lr: 1.68e-04 +2022-05-07 19:10:23,206 INFO [train.py:715] (6/8) Epoch 13, batch 17700, loss[loss=0.1337, simple_loss=0.1982, pruned_loss=0.03457, over 4860.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03033, over 972707.63 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 19:11:02,063 INFO [train.py:715] (6/8) Epoch 13, batch 17750, loss[loss=0.1433, simple_loss=0.2063, pruned_loss=0.04016, over 4692.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03098, over 971811.49 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:11:39,681 INFO [train.py:715] (6/8) Epoch 13, batch 17800, loss[loss=0.1133, simple_loss=0.1859, pruned_loss=0.02032, over 4807.00 frames.], tot_loss[loss=0.135, simple_loss=0.2083, pruned_loss=0.03088, over 971215.54 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 19:12:18,454 INFO [train.py:715] (6/8) Epoch 13, batch 17850, loss[loss=0.1349, simple_loss=0.2107, pruned_loss=0.02954, over 4926.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03078, over 971969.38 frames.], batch size: 29, lr: 1.68e-04 +2022-05-07 19:12:57,285 INFO [train.py:715] (6/8) Epoch 13, batch 17900, loss[loss=0.1578, simple_loss=0.2279, pruned_loss=0.04385, over 4918.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2088, pruned_loss=0.03101, over 972710.99 frames.], batch size: 39, lr: 1.68e-04 +2022-05-07 19:13:35,476 INFO [train.py:715] (6/8) Epoch 13, batch 17950, loss[loss=0.1251, simple_loss=0.1946, pruned_loss=0.02786, over 4690.00 frames.], tot_loss[loss=0.1357, simple_loss=0.209, pruned_loss=0.03115, over 972548.66 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:14:13,539 INFO [train.py:715] (6/8) Epoch 13, batch 18000, loss[loss=0.1403, simple_loss=0.2114, pruned_loss=0.0346, over 4876.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2091, pruned_loss=0.03126, over 972899.74 frames.], batch size: 20, lr: 1.68e-04 +2022-05-07 19:14:13,539 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 19:14:23,028 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1055, simple_loss=0.1892, pruned_loss=0.01083, over 914524.00 frames. 
+2022-05-07 19:15:00,699 INFO [train.py:715] (6/8) Epoch 13, batch 18050, loss[loss=0.1148, simple_loss=0.1899, pruned_loss=0.01989, over 4856.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.03096, over 972799.27 frames.], batch size: 20, lr: 1.68e-04 +2022-05-07 19:15:39,774 INFO [train.py:715] (6/8) Epoch 13, batch 18100, loss[loss=0.1473, simple_loss=0.2225, pruned_loss=0.03607, over 4990.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.0308, over 972603.70 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:16:18,124 INFO [train.py:715] (6/8) Epoch 13, batch 18150, loss[loss=0.1208, simple_loss=0.1904, pruned_loss=0.02555, over 4763.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03115, over 971966.71 frames.], batch size: 14, lr: 1.68e-04 +2022-05-07 19:16:55,371 INFO [train.py:715] (6/8) Epoch 13, batch 18200, loss[loss=0.1405, simple_loss=0.2081, pruned_loss=0.03649, over 4864.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03128, over 971684.96 frames.], batch size: 22, lr: 1.68e-04 +2022-05-07 19:17:33,692 INFO [train.py:715] (6/8) Epoch 13, batch 18250, loss[loss=0.1302, simple_loss=0.2086, pruned_loss=0.02589, over 4930.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03148, over 972176.69 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 19:18:12,483 INFO [train.py:715] (6/8) Epoch 13, batch 18300, loss[loss=0.1496, simple_loss=0.2216, pruned_loss=0.03877, over 4810.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03126, over 971781.66 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 19:18:51,122 INFO [train.py:715] (6/8) Epoch 13, batch 18350, loss[loss=0.1122, simple_loss=0.1867, pruned_loss=0.01884, over 4747.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2085, pruned_loss=0.03101, over 970906.25 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 19:19:29,014 INFO [train.py:715] (6/8) Epoch 13, batch 18400, loss[loss=0.123, simple_loss=0.1962, pruned_loss=0.02484, over 4827.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03165, over 971251.11 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 19:20:07,826 INFO [train.py:715] (6/8) Epoch 13, batch 18450, loss[loss=0.1415, simple_loss=0.209, pruned_loss=0.03701, over 4885.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.03236, over 971419.11 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 19:20:46,497 INFO [train.py:715] (6/8) Epoch 13, batch 18500, loss[loss=0.1493, simple_loss=0.2373, pruned_loss=0.03065, over 4797.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2105, pruned_loss=0.0322, over 971646.87 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 19:21:23,939 INFO [train.py:715] (6/8) Epoch 13, batch 18550, loss[loss=0.1361, simple_loss=0.2162, pruned_loss=0.02802, over 4782.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03193, over 971356.86 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 19:22:01,961 INFO [train.py:715] (6/8) Epoch 13, batch 18600, loss[loss=0.1166, simple_loss=0.1922, pruned_loss=0.0205, over 4892.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.0314, over 971096.97 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 19:22:40,557 INFO [train.py:715] (6/8) Epoch 13, batch 18650, loss[loss=0.1282, simple_loss=0.2064, pruned_loss=0.02502, over 4750.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2094, pruned_loss=0.03168, over 970661.73 frames.], batch size: 14, lr: 1.68e-04 +2022-05-07 
19:23:18,503 INFO [train.py:715] (6/8) Epoch 13, batch 18700, loss[loss=0.1498, simple_loss=0.2243, pruned_loss=0.03767, over 4871.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2097, pruned_loss=0.03187, over 971264.54 frames.], batch size: 22, lr: 1.68e-04 +2022-05-07 19:23:56,289 INFO [train.py:715] (6/8) Epoch 13, batch 18750, loss[loss=0.1369, simple_loss=0.2067, pruned_loss=0.03354, over 4829.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2095, pruned_loss=0.03175, over 971189.03 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 19:24:35,599 INFO [train.py:715] (6/8) Epoch 13, batch 18800, loss[loss=0.1281, simple_loss=0.2086, pruned_loss=0.02381, over 4824.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03135, over 970868.99 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:25:14,016 INFO [train.py:715] (6/8) Epoch 13, batch 18850, loss[loss=0.159, simple_loss=0.2351, pruned_loss=0.04145, over 4977.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03117, over 972913.16 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:25:52,021 INFO [train.py:715] (6/8) Epoch 13, batch 18900, loss[loss=0.1225, simple_loss=0.1852, pruned_loss=0.02994, over 4779.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03126, over 972801.90 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 19:26:30,883 INFO [train.py:715] (6/8) Epoch 13, batch 18950, loss[loss=0.1178, simple_loss=0.1927, pruned_loss=0.0214, over 4856.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03075, over 973032.71 frames.], batch size: 20, lr: 1.68e-04 +2022-05-07 19:27:09,768 INFO [train.py:715] (6/8) Epoch 13, batch 19000, loss[loss=0.1431, simple_loss=0.2077, pruned_loss=0.03928, over 4946.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03094, over 973683.23 frames.], batch size: 29, lr: 1.68e-04 +2022-05-07 19:27:48,114 INFO [train.py:715] (6/8) Epoch 13, batch 19050, loss[loss=0.1459, simple_loss=0.214, pruned_loss=0.03894, over 4856.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03125, over 972862.23 frames.], batch size: 32, lr: 1.68e-04 +2022-05-07 19:28:26,438 INFO [train.py:715] (6/8) Epoch 13, batch 19100, loss[loss=0.1253, simple_loss=0.2015, pruned_loss=0.02455, over 4695.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03161, over 972650.49 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:29:05,442 INFO [train.py:715] (6/8) Epoch 13, batch 19150, loss[loss=0.1268, simple_loss=0.2031, pruned_loss=0.02525, over 4699.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.0316, over 972849.85 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 19:29:44,106 INFO [train.py:715] (6/8) Epoch 13, batch 19200, loss[loss=0.129, simple_loss=0.1983, pruned_loss=0.0299, over 4972.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03103, over 973007.43 frames.], batch size: 35, lr: 1.67e-04 +2022-05-07 19:30:21,502 INFO [train.py:715] (6/8) Epoch 13, batch 19250, loss[loss=0.1245, simple_loss=0.1977, pruned_loss=0.02566, over 4938.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03034, over 973044.50 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:31:00,078 INFO [train.py:715] (6/8) Epoch 13, batch 19300, loss[loss=0.1503, simple_loss=0.2163, pruned_loss=0.04217, over 4915.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2101, pruned_loss=0.03065, over 972610.23 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 19:31:39,542 
INFO [train.py:715] (6/8) Epoch 13, batch 19350, loss[loss=0.144, simple_loss=0.225, pruned_loss=0.03153, over 4928.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03108, over 972056.14 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:32:18,082 INFO [train.py:715] (6/8) Epoch 13, batch 19400, loss[loss=0.1557, simple_loss=0.2309, pruned_loss=0.04024, over 4974.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03073, over 972365.01 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 19:32:56,516 INFO [train.py:715] (6/8) Epoch 13, batch 19450, loss[loss=0.1193, simple_loss=0.187, pruned_loss=0.02578, over 4792.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03076, over 972870.63 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 19:33:37,809 INFO [train.py:715] (6/8) Epoch 13, batch 19500, loss[loss=0.1478, simple_loss=0.2151, pruned_loss=0.04025, over 4782.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03084, over 972447.08 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 19:34:16,750 INFO [train.py:715] (6/8) Epoch 13, batch 19550, loss[loss=0.1181, simple_loss=0.1843, pruned_loss=0.02596, over 4753.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03142, over 972533.66 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 19:34:54,320 INFO [train.py:715] (6/8) Epoch 13, batch 19600, loss[loss=0.111, simple_loss=0.1746, pruned_loss=0.02368, over 4813.00 frames.], tot_loss[loss=0.1357, simple_loss=0.209, pruned_loss=0.03116, over 972223.50 frames.], batch size: 13, lr: 1.67e-04 +2022-05-07 19:35:32,449 INFO [train.py:715] (6/8) Epoch 13, batch 19650, loss[loss=0.1435, simple_loss=0.223, pruned_loss=0.03204, over 4751.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2102, pruned_loss=0.03218, over 972180.67 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 19:36:11,256 INFO [train.py:715] (6/8) Epoch 13, batch 19700, loss[loss=0.1682, simple_loss=0.2325, pruned_loss=0.05189, over 4875.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03191, over 971866.81 frames.], batch size: 32, lr: 1.67e-04 +2022-05-07 19:36:49,084 INFO [train.py:715] (6/8) Epoch 13, batch 19750, loss[loss=0.1374, simple_loss=0.2123, pruned_loss=0.0313, over 4756.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03175, over 971824.72 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 19:37:26,939 INFO [train.py:715] (6/8) Epoch 13, batch 19800, loss[loss=0.1353, simple_loss=0.2057, pruned_loss=0.03247, over 4863.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03132, over 972832.74 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 19:38:05,611 INFO [train.py:715] (6/8) Epoch 13, batch 19850, loss[loss=0.1464, simple_loss=0.2239, pruned_loss=0.03442, over 4968.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03121, over 972600.94 frames.], batch size: 35, lr: 1.67e-04 +2022-05-07 19:38:44,224 INFO [train.py:715] (6/8) Epoch 13, batch 19900, loss[loss=0.1282, simple_loss=0.2043, pruned_loss=0.02601, over 4941.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03171, over 972490.21 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:39:22,424 INFO [train.py:715] (6/8) Epoch 13, batch 19950, loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.0289, over 4694.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03149, over 972469.27 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 19:40:01,313 INFO 
[train.py:715] (6/8) Epoch 13, batch 20000, loss[loss=0.1206, simple_loss=0.1985, pruned_loss=0.02132, over 4847.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2092, pruned_loss=0.03123, over 972452.47 frames.], batch size: 27, lr: 1.67e-04 +2022-05-07 19:40:39,756 INFO [train.py:715] (6/8) Epoch 13, batch 20050, loss[loss=0.1765, simple_loss=0.2403, pruned_loss=0.05636, over 4871.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.0313, over 972037.06 frames.], batch size: 32, lr: 1.67e-04 +2022-05-07 19:41:16,930 INFO [train.py:715] (6/8) Epoch 13, batch 20100, loss[loss=0.1373, simple_loss=0.2081, pruned_loss=0.03321, over 4785.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03136, over 971358.07 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 19:41:54,399 INFO [train.py:715] (6/8) Epoch 13, batch 20150, loss[loss=0.1324, simple_loss=0.2089, pruned_loss=0.02794, over 4975.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03201, over 971545.50 frames.], batch size: 28, lr: 1.67e-04 +2022-05-07 19:42:33,107 INFO [train.py:715] (6/8) Epoch 13, batch 20200, loss[loss=0.1671, simple_loss=0.2381, pruned_loss=0.04808, over 4954.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03168, over 971818.02 frames.], batch size: 29, lr: 1.67e-04 +2022-05-07 19:43:11,185 INFO [train.py:715] (6/8) Epoch 13, batch 20250, loss[loss=0.1127, simple_loss=0.1896, pruned_loss=0.01794, over 4984.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.0317, over 972256.44 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 19:43:48,890 INFO [train.py:715] (6/8) Epoch 13, batch 20300, loss[loss=0.1298, simple_loss=0.205, pruned_loss=0.02731, over 4948.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03146, over 972421.01 frames.], batch size: 23, lr: 1.67e-04 +2022-05-07 19:44:26,993 INFO [train.py:715] (6/8) Epoch 13, batch 20350, loss[loss=0.1383, simple_loss=0.2172, pruned_loss=0.02967, over 4890.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2089, pruned_loss=0.03078, over 971948.44 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 19:45:05,762 INFO [train.py:715] (6/8) Epoch 13, batch 20400, loss[loss=0.1333, simple_loss=0.202, pruned_loss=0.03227, over 4884.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03125, over 972199.71 frames.], batch size: 34, lr: 1.67e-04 +2022-05-07 19:45:43,491 INFO [train.py:715] (6/8) Epoch 13, batch 20450, loss[loss=0.11, simple_loss=0.1857, pruned_loss=0.01718, over 4901.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03154, over 972747.96 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 19:46:21,266 INFO [train.py:715] (6/8) Epoch 13, batch 20500, loss[loss=0.1189, simple_loss=0.1939, pruned_loss=0.02193, over 4950.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03164, over 972663.14 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:46:59,824 INFO [train.py:715] (6/8) Epoch 13, batch 20550, loss[loss=0.1419, simple_loss=0.2076, pruned_loss=0.03807, over 4896.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03165, over 972509.24 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 19:47:37,480 INFO [train.py:715] (6/8) Epoch 13, batch 20600, loss[loss=0.1195, simple_loss=0.1972, pruned_loss=0.02087, over 4893.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2111, pruned_loss=0.03164, over 973098.71 frames.], batch size: 22, lr: 1.67e-04 +2022-05-07 19:48:15,106 INFO 
[train.py:715] (6/8) Epoch 13, batch 20650, loss[loss=0.1591, simple_loss=0.2146, pruned_loss=0.0518, over 4945.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2109, pruned_loss=0.03165, over 973261.03 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:48:52,913 INFO [train.py:715] (6/8) Epoch 13, batch 20700, loss[loss=0.1661, simple_loss=0.2444, pruned_loss=0.04386, over 4961.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03144, over 973213.02 frames.], batch size: 24, lr: 1.67e-04 +2022-05-07 19:49:31,350 INFO [train.py:715] (6/8) Epoch 13, batch 20750, loss[loss=0.1692, simple_loss=0.2486, pruned_loss=0.04488, over 4903.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03106, over 973073.98 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 19:50:08,696 INFO [train.py:715] (6/8) Epoch 13, batch 20800, loss[loss=0.1497, simple_loss=0.2216, pruned_loss=0.03893, over 4878.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03078, over 973158.02 frames.], batch size: 22, lr: 1.67e-04 +2022-05-07 19:50:46,280 INFO [train.py:715] (6/8) Epoch 13, batch 20850, loss[loss=0.1637, simple_loss=0.2409, pruned_loss=0.04327, over 4913.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2099, pruned_loss=0.0309, over 971712.37 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 19:51:24,970 INFO [train.py:715] (6/8) Epoch 13, batch 20900, loss[loss=0.1135, simple_loss=0.1884, pruned_loss=0.01926, over 4968.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03158, over 971725.13 frames.], batch size: 35, lr: 1.67e-04 +2022-05-07 19:52:03,243 INFO [train.py:715] (6/8) Epoch 13, batch 20950, loss[loss=0.1283, simple_loss=0.196, pruned_loss=0.03035, over 4939.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.0316, over 971960.65 frames.], batch size: 35, lr: 1.67e-04 +2022-05-07 19:52:40,748 INFO [train.py:715] (6/8) Epoch 13, batch 21000, loss[loss=0.1626, simple_loss=0.2268, pruned_loss=0.0492, over 4859.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03153, over 972481.58 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 19:52:40,749 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 19:52:50,264 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1054, simple_loss=0.1891, pruned_loss=0.01084, over 914524.00 frames. 
+2022-05-07 19:53:28,440 INFO [train.py:715] (6/8) Epoch 13, batch 21050, loss[loss=0.1458, simple_loss=0.2255, pruned_loss=0.03303, over 4957.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03162, over 971703.68 frames.], batch size: 24, lr: 1.67e-04 +2022-05-07 19:54:06,969 INFO [train.py:715] (6/8) Epoch 13, batch 21100, loss[loss=0.1538, simple_loss=0.229, pruned_loss=0.03928, over 4933.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03167, over 971654.27 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:54:46,058 INFO [train.py:715] (6/8) Epoch 13, batch 21150, loss[loss=0.1218, simple_loss=0.1902, pruned_loss=0.02672, over 4972.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03147, over 971486.63 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 19:55:23,880 INFO [train.py:715] (6/8) Epoch 13, batch 21200, loss[loss=0.1462, simple_loss=0.2114, pruned_loss=0.0405, over 4986.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03141, over 971842.48 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 19:56:02,467 INFO [train.py:715] (6/8) Epoch 13, batch 21250, loss[loss=0.1638, simple_loss=0.2241, pruned_loss=0.05181, over 4766.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.03102, over 971528.82 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 19:56:41,277 INFO [train.py:715] (6/8) Epoch 13, batch 21300, loss[loss=0.1504, simple_loss=0.2231, pruned_loss=0.03889, over 4800.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03066, over 970983.54 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:57:19,155 INFO [train.py:715] (6/8) Epoch 13, batch 21350, loss[loss=0.1355, simple_loss=0.2135, pruned_loss=0.02869, over 4885.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03108, over 971990.21 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 19:57:57,084 INFO [train.py:715] (6/8) Epoch 13, batch 21400, loss[loss=0.1576, simple_loss=0.2289, pruned_loss=0.04315, over 4934.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03121, over 971868.78 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 19:58:35,344 INFO [train.py:715] (6/8) Epoch 13, batch 21450, loss[loss=0.1183, simple_loss=0.1926, pruned_loss=0.02197, over 4920.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03133, over 971999.18 frames.], batch size: 23, lr: 1.67e-04 +2022-05-07 19:59:14,500 INFO [train.py:715] (6/8) Epoch 13, batch 21500, loss[loss=0.1401, simple_loss=0.22, pruned_loss=0.03009, over 4965.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.0312, over 971735.73 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 19:59:52,263 INFO [train.py:715] (6/8) Epoch 13, batch 21550, loss[loss=0.1322, simple_loss=0.2061, pruned_loss=0.02916, over 4876.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03134, over 971853.59 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 20:00:30,898 INFO [train.py:715] (6/8) Epoch 13, batch 21600, loss[loss=0.1276, simple_loss=0.2064, pruned_loss=0.02438, over 4836.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03143, over 971931.92 frames.], batch size: 27, lr: 1.67e-04 +2022-05-07 20:01:09,862 INFO [train.py:715] (6/8) Epoch 13, batch 21650, loss[loss=0.1069, simple_loss=0.1805, pruned_loss=0.01663, over 4790.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03117, over 971895.12 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 
20:01:48,621 INFO [train.py:715] (6/8) Epoch 13, batch 21700, loss[loss=0.09983, simple_loss=0.1726, pruned_loss=0.01355, over 4849.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03066, over 971672.33 frames.], batch size: 13, lr: 1.67e-04 +2022-05-07 20:02:27,465 INFO [train.py:715] (6/8) Epoch 13, batch 21750, loss[loss=0.1319, simple_loss=0.2057, pruned_loss=0.02907, over 4933.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03072, over 972112.66 frames.], batch size: 29, lr: 1.67e-04 +2022-05-07 20:03:06,114 INFO [train.py:715] (6/8) Epoch 13, batch 21800, loss[loss=0.1425, simple_loss=0.2145, pruned_loss=0.03525, over 4982.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03034, over 972507.88 frames.], batch size: 25, lr: 1.67e-04 +2022-05-07 20:03:45,413 INFO [train.py:715] (6/8) Epoch 13, batch 21850, loss[loss=0.1276, simple_loss=0.1906, pruned_loss=0.03231, over 4987.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03038, over 972058.79 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:04:23,520 INFO [train.py:715] (6/8) Epoch 13, batch 21900, loss[loss=0.1306, simple_loss=0.2001, pruned_loss=0.03055, over 4909.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03034, over 971041.62 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 20:05:01,702 INFO [train.py:715] (6/8) Epoch 13, batch 21950, loss[loss=0.1202, simple_loss=0.1958, pruned_loss=0.02232, over 4790.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2094, pruned_loss=0.03061, over 971305.58 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 20:05:40,176 INFO [train.py:715] (6/8) Epoch 13, batch 22000, loss[loss=0.1176, simple_loss=0.1956, pruned_loss=0.01982, over 4987.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03045, over 971498.00 frames.], batch size: 25, lr: 1.67e-04 +2022-05-07 20:06:17,893 INFO [train.py:715] (6/8) Epoch 13, batch 22050, loss[loss=0.1443, simple_loss=0.2135, pruned_loss=0.03753, over 4968.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03034, over 971860.40 frames.], batch size: 35, lr: 1.67e-04 +2022-05-07 20:06:55,941 INFO [train.py:715] (6/8) Epoch 13, batch 22100, loss[loss=0.1334, simple_loss=0.2068, pruned_loss=0.03005, over 4870.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03056, over 971735.59 frames.], batch size: 32, lr: 1.67e-04 +2022-05-07 20:07:33,695 INFO [train.py:715] (6/8) Epoch 13, batch 22150, loss[loss=0.1479, simple_loss=0.2171, pruned_loss=0.03932, over 4967.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03145, over 971155.97 frames.], batch size: 35, lr: 1.67e-04 +2022-05-07 20:08:12,649 INFO [train.py:715] (6/8) Epoch 13, batch 22200, loss[loss=0.1423, simple_loss=0.2092, pruned_loss=0.03765, over 4979.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03158, over 971488.29 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 20:08:50,195 INFO [train.py:715] (6/8) Epoch 13, batch 22250, loss[loss=0.1435, simple_loss=0.2059, pruned_loss=0.04055, over 4733.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.0314, over 971182.25 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 20:09:28,956 INFO [train.py:715] (6/8) Epoch 13, batch 22300, loss[loss=0.1281, simple_loss=0.2103, pruned_loss=0.02295, over 4779.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2103, pruned_loss=0.03121, over 971117.13 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 
20:10:07,703 INFO [train.py:715] (6/8) Epoch 13, batch 22350, loss[loss=0.1598, simple_loss=0.2295, pruned_loss=0.04504, over 4693.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2104, pruned_loss=0.03144, over 971086.48 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:10:45,730 INFO [train.py:715] (6/8) Epoch 13, batch 22400, loss[loss=0.1274, simple_loss=0.2008, pruned_loss=0.02703, over 4955.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03158, over 971727.95 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 20:11:23,412 INFO [train.py:715] (6/8) Epoch 13, batch 22450, loss[loss=0.1331, simple_loss=0.2125, pruned_loss=0.02688, over 4823.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2104, pruned_loss=0.03123, over 971066.99 frames.], batch size: 13, lr: 1.67e-04 +2022-05-07 20:12:01,254 INFO [train.py:715] (6/8) Epoch 13, batch 22500, loss[loss=0.1384, simple_loss=0.2141, pruned_loss=0.0314, over 4806.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.03156, over 970578.40 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:12:39,613 INFO [train.py:715] (6/8) Epoch 13, batch 22550, loss[loss=0.1145, simple_loss=0.1875, pruned_loss=0.02074, over 4834.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03081, over 971054.57 frames.], batch size: 26, lr: 1.67e-04 +2022-05-07 20:13:16,804 INFO [train.py:715] (6/8) Epoch 13, batch 22600, loss[loss=0.1326, simple_loss=0.1976, pruned_loss=0.03383, over 4753.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03067, over 971471.90 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 20:13:54,710 INFO [train.py:715] (6/8) Epoch 13, batch 22650, loss[loss=0.1439, simple_loss=0.229, pruned_loss=0.02936, over 4830.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2099, pruned_loss=0.03089, over 971099.54 frames.], batch size: 26, lr: 1.67e-04 +2022-05-07 20:14:32,803 INFO [train.py:715] (6/8) Epoch 13, batch 22700, loss[loss=0.1304, simple_loss=0.1893, pruned_loss=0.03578, over 4855.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.0308, over 971257.65 frames.], batch size: 32, lr: 1.67e-04 +2022-05-07 20:15:11,034 INFO [train.py:715] (6/8) Epoch 13, batch 22750, loss[loss=0.1362, simple_loss=0.2084, pruned_loss=0.03194, over 4968.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2091, pruned_loss=0.03034, over 972197.08 frames.], batch size: 25, lr: 1.67e-04 +2022-05-07 20:15:49,015 INFO [train.py:715] (6/8) Epoch 13, batch 22800, loss[loss=0.1302, simple_loss=0.207, pruned_loss=0.02673, over 4915.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03106, over 971095.51 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 20:16:27,580 INFO [train.py:715] (6/8) Epoch 13, batch 22850, loss[loss=0.1312, simple_loss=0.2103, pruned_loss=0.02601, over 4788.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03105, over 971451.63 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:17:06,829 INFO [train.py:715] (6/8) Epoch 13, batch 22900, loss[loss=0.136, simple_loss=0.2224, pruned_loss=0.0248, over 4795.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2101, pruned_loss=0.03083, over 971379.93 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 20:17:44,514 INFO [train.py:715] (6/8) Epoch 13, batch 22950, loss[loss=0.1531, simple_loss=0.2221, pruned_loss=0.0421, over 4815.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2108, pruned_loss=0.03137, over 970767.33 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:18:23,096 
INFO [train.py:715] (6/8) Epoch 13, batch 23000, loss[loss=0.1276, simple_loss=0.1966, pruned_loss=0.02929, over 4788.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03179, over 971156.82 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 20:19:01,744 INFO [train.py:715] (6/8) Epoch 13, batch 23050, loss[loss=0.1415, simple_loss=0.2111, pruned_loss=0.03593, over 4856.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03136, over 971005.92 frames.], batch size: 32, lr: 1.67e-04 +2022-05-07 20:19:40,069 INFO [train.py:715] (6/8) Epoch 13, batch 23100, loss[loss=0.1555, simple_loss=0.2234, pruned_loss=0.0438, over 4689.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2105, pruned_loss=0.03136, over 970770.56 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:20:17,986 INFO [train.py:715] (6/8) Epoch 13, batch 23150, loss[loss=0.1377, simple_loss=0.2194, pruned_loss=0.02797, over 4825.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2112, pruned_loss=0.03178, over 971880.32 frames.], batch size: 26, lr: 1.67e-04 +2022-05-07 20:20:56,164 INFO [train.py:715] (6/8) Epoch 13, batch 23200, loss[loss=0.1362, simple_loss=0.2123, pruned_loss=0.03003, over 4959.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03126, over 972719.62 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 20:21:34,316 INFO [train.py:715] (6/8) Epoch 13, batch 23250, loss[loss=0.13, simple_loss=0.2075, pruned_loss=0.02624, over 4850.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03113, over 972237.71 frames.], batch size: 20, lr: 1.67e-04 +2022-05-07 20:22:11,786 INFO [train.py:715] (6/8) Epoch 13, batch 23300, loss[loss=0.1581, simple_loss=0.2375, pruned_loss=0.03936, over 4977.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.03126, over 971778.70 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 20:22:50,102 INFO [train.py:715] (6/8) Epoch 13, batch 23350, loss[loss=0.1273, simple_loss=0.1997, pruned_loss=0.02741, over 4937.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03115, over 972328.32 frames.], batch size: 29, lr: 1.67e-04 +2022-05-07 20:23:28,677 INFO [train.py:715] (6/8) Epoch 13, batch 23400, loss[loss=0.1383, simple_loss=0.2308, pruned_loss=0.02284, over 4944.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03103, over 972219.67 frames.], batch size: 24, lr: 1.67e-04 +2022-05-07 20:24:06,993 INFO [train.py:715] (6/8) Epoch 13, batch 23450, loss[loss=0.1296, simple_loss=0.2093, pruned_loss=0.02498, over 4989.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03089, over 972297.36 frames.], batch size: 25, lr: 1.67e-04 +2022-05-07 20:24:45,013 INFO [train.py:715] (6/8) Epoch 13, batch 23500, loss[loss=0.1275, simple_loss=0.2012, pruned_loss=0.02689, over 4751.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03065, over 972093.35 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 20:25:23,769 INFO [train.py:715] (6/8) Epoch 13, batch 23550, loss[loss=0.1342, simple_loss=0.2095, pruned_loss=0.02948, over 4755.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.03039, over 972390.95 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 20:26:02,268 INFO [train.py:715] (6/8) Epoch 13, batch 23600, loss[loss=0.1192, simple_loss=0.1895, pruned_loss=0.02442, over 4926.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03041, over 971304.59 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 20:26:39,839 INFO 
[train.py:715] (6/8) Epoch 13, batch 23650, loss[loss=0.1606, simple_loss=0.2362, pruned_loss=0.04251, over 4961.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2089, pruned_loss=0.03074, over 971293.23 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 20:27:18,103 INFO [train.py:715] (6/8) Epoch 13, batch 23700, loss[loss=0.1522, simple_loss=0.2336, pruned_loss=0.03543, over 4784.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03053, over 971334.36 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 20:27:56,588 INFO [train.py:715] (6/8) Epoch 13, batch 23750, loss[loss=0.1284, simple_loss=0.2072, pruned_loss=0.0248, over 4800.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2088, pruned_loss=0.03068, over 971979.44 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 20:28:34,761 INFO [train.py:715] (6/8) Epoch 13, batch 23800, loss[loss=0.1214, simple_loss=0.1931, pruned_loss=0.02482, over 4983.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03058, over 971954.03 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 20:29:12,136 INFO [train.py:715] (6/8) Epoch 13, batch 23850, loss[loss=0.1305, simple_loss=0.2134, pruned_loss=0.02379, over 4865.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03068, over 972153.18 frames.], batch size: 20, lr: 1.67e-04 +2022-05-07 20:29:51,249 INFO [train.py:715] (6/8) Epoch 13, batch 23900, loss[loss=0.1796, simple_loss=0.2693, pruned_loss=0.04497, over 4923.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03094, over 972635.04 frames.], batch size: 23, lr: 1.67e-04 +2022-05-07 20:30:29,201 INFO [train.py:715] (6/8) Epoch 13, batch 23950, loss[loss=0.1279, simple_loss=0.2065, pruned_loss=0.0247, over 4921.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2088, pruned_loss=0.03077, over 972837.53 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 20:31:06,579 INFO [train.py:715] (6/8) Epoch 13, batch 24000, loss[loss=0.1094, simple_loss=0.1784, pruned_loss=0.02016, over 4974.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.0306, over 972579.26 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 20:31:06,579 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 20:31:16,109 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1053, simple_loss=0.1891, pruned_loss=0.01069, over 914524.00 frames. 
+2022-05-07 20:31:53,723 INFO [train.py:715] (6/8) Epoch 13, batch 24050, loss[loss=0.1514, simple_loss=0.2264, pruned_loss=0.03821, over 4746.00 frames.], tot_loss[loss=0.135, simple_loss=0.2084, pruned_loss=0.03078, over 973122.26 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 20:32:31,541 INFO [train.py:715] (6/8) Epoch 13, batch 24100, loss[loss=0.1381, simple_loss=0.2206, pruned_loss=0.02783, over 4803.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2077, pruned_loss=0.03063, over 972364.73 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:33:10,917 INFO [train.py:715] (6/8) Epoch 13, batch 24150, loss[loss=0.1181, simple_loss=0.1982, pruned_loss=0.01898, over 4944.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03078, over 972855.40 frames.], batch size: 29, lr: 1.67e-04 +2022-05-07 20:33:49,884 INFO [train.py:715] (6/8) Epoch 13, batch 24200, loss[loss=0.1318, simple_loss=0.2191, pruned_loss=0.02229, over 4881.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03113, over 973011.14 frames.], batch size: 22, lr: 1.67e-04 +2022-05-07 20:34:28,087 INFO [train.py:715] (6/8) Epoch 13, batch 24250, loss[loss=0.1068, simple_loss=0.1825, pruned_loss=0.01554, over 4983.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.0313, over 972395.13 frames.], batch size: 28, lr: 1.67e-04 +2022-05-07 20:35:06,951 INFO [train.py:715] (6/8) Epoch 13, batch 24300, loss[loss=0.1149, simple_loss=0.1884, pruned_loss=0.02067, over 4769.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2088, pruned_loss=0.03118, over 972055.41 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 20:35:45,649 INFO [train.py:715] (6/8) Epoch 13, batch 24350, loss[loss=0.1161, simple_loss=0.1941, pruned_loss=0.01904, over 4946.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2085, pruned_loss=0.03095, over 971867.62 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:36:23,176 INFO [train.py:715] (6/8) Epoch 13, batch 24400, loss[loss=0.1176, simple_loss=0.2005, pruned_loss=0.01738, over 4931.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03082, over 971595.60 frames.], batch size: 23, lr: 1.67e-04 +2022-05-07 20:37:01,582 INFO [train.py:715] (6/8) Epoch 13, batch 24450, loss[loss=0.1266, simple_loss=0.194, pruned_loss=0.0296, over 4646.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03087, over 971581.35 frames.], batch size: 13, lr: 1.67e-04 +2022-05-07 20:37:40,238 INFO [train.py:715] (6/8) Epoch 13, batch 24500, loss[loss=0.1324, simple_loss=0.2062, pruned_loss=0.02929, over 4851.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03077, over 972231.69 frames.], batch size: 30, lr: 1.67e-04 +2022-05-07 20:38:18,537 INFO [train.py:715] (6/8) Epoch 13, batch 24550, loss[loss=0.1363, simple_loss=0.2156, pruned_loss=0.02844, over 4798.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03079, over 971634.11 frames.], batch size: 24, lr: 1.67e-04 +2022-05-07 20:38:56,898 INFO [train.py:715] (6/8) Epoch 13, batch 24600, loss[loss=0.1264, simple_loss=0.2035, pruned_loss=0.02468, over 4797.00 frames.], tot_loss[loss=0.135, simple_loss=0.2092, pruned_loss=0.03041, over 971704.49 frames.], batch size: 24, lr: 1.67e-04 +2022-05-07 20:39:36,089 INFO [train.py:715] (6/8) Epoch 13, batch 24650, loss[loss=0.1313, simple_loss=0.2051, pruned_loss=0.02878, over 4789.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2097, pruned_loss=0.03067, over 971900.55 frames.], batch size: 24, lr: 1.67e-04 +2022-05-07 
20:40:14,989 INFO [train.py:715] (6/8) Epoch 13, batch 24700, loss[loss=0.1227, simple_loss=0.2008, pruned_loss=0.02228, over 4938.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03108, over 971648.46 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:40:52,894 INFO [train.py:715] (6/8) Epoch 13, batch 24750, loss[loss=0.1144, simple_loss=0.1901, pruned_loss=0.01939, over 4990.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03097, over 971523.80 frames.], batch size: 25, lr: 1.67e-04 +2022-05-07 20:41:31,285 INFO [train.py:715] (6/8) Epoch 13, batch 24800, loss[loss=0.1298, simple_loss=0.2092, pruned_loss=0.02523, over 4774.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2104, pruned_loss=0.03131, over 972929.48 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 20:42:10,093 INFO [train.py:715] (6/8) Epoch 13, batch 24850, loss[loss=0.1433, simple_loss=0.2255, pruned_loss=0.03057, over 4982.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03074, over 972133.17 frames.], batch size: 27, lr: 1.66e-04 +2022-05-07 20:42:48,217 INFO [train.py:715] (6/8) Epoch 13, batch 24900, loss[loss=0.1367, simple_loss=0.2203, pruned_loss=0.02651, over 4915.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2096, pruned_loss=0.03064, over 973322.55 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 20:43:26,336 INFO [train.py:715] (6/8) Epoch 13, batch 24950, loss[loss=0.1279, simple_loss=0.2067, pruned_loss=0.02452, over 4787.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03066, over 973241.87 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 20:44:04,943 INFO [train.py:715] (6/8) Epoch 13, batch 25000, loss[loss=0.1316, simple_loss=0.2121, pruned_loss=0.02556, over 4857.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03104, over 973986.93 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 20:44:43,238 INFO [train.py:715] (6/8) Epoch 13, batch 25050, loss[loss=0.1409, simple_loss=0.2201, pruned_loss=0.03084, over 4759.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2104, pruned_loss=0.03105, over 974213.85 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 20:45:20,929 INFO [train.py:715] (6/8) Epoch 13, batch 25100, loss[loss=0.1164, simple_loss=0.1968, pruned_loss=0.018, over 4862.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.03093, over 974068.61 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 20:46:00,046 INFO [train.py:715] (6/8) Epoch 13, batch 25150, loss[loss=0.09136, simple_loss=0.159, pruned_loss=0.01184, over 4716.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03016, over 973727.84 frames.], batch size: 12, lr: 1.66e-04 +2022-05-07 20:46:38,589 INFO [train.py:715] (6/8) Epoch 13, batch 25200, loss[loss=0.1359, simple_loss=0.2022, pruned_loss=0.03485, over 4817.00 frames.], tot_loss[loss=0.1357, simple_loss=0.21, pruned_loss=0.03073, over 973292.55 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 20:47:17,727 INFO [train.py:715] (6/8) Epoch 13, batch 25250, loss[loss=0.1616, simple_loss=0.2287, pruned_loss=0.04724, over 4978.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2106, pruned_loss=0.03112, over 973150.20 frames.], batch size: 35, lr: 1.66e-04 +2022-05-07 20:47:55,932 INFO [train.py:715] (6/8) Epoch 13, batch 25300, loss[loss=0.1398, simple_loss=0.2237, pruned_loss=0.02798, over 4774.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03105, over 972464.83 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 20:48:34,495 
INFO [train.py:715] (6/8) Epoch 13, batch 25350, loss[loss=0.1346, simple_loss=0.2118, pruned_loss=0.0287, over 4806.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03087, over 972603.82 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 20:49:13,704 INFO [train.py:715] (6/8) Epoch 13, batch 25400, loss[loss=0.1353, simple_loss=0.2172, pruned_loss=0.02663, over 4819.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03099, over 971484.86 frames.], batch size: 25, lr: 1.66e-04 +2022-05-07 20:49:51,568 INFO [train.py:715] (6/8) Epoch 13, batch 25450, loss[loss=0.1129, simple_loss=0.1899, pruned_loss=0.01788, over 4912.00 frames.], tot_loss[loss=0.136, simple_loss=0.21, pruned_loss=0.03099, over 971733.43 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 20:50:30,631 INFO [train.py:715] (6/8) Epoch 13, batch 25500, loss[loss=0.1502, simple_loss=0.2095, pruned_loss=0.04546, over 4687.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03109, over 972599.75 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 20:51:09,204 INFO [train.py:715] (6/8) Epoch 13, batch 25550, loss[loss=0.1422, simple_loss=0.2138, pruned_loss=0.03525, over 4756.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2104, pruned_loss=0.03133, over 972787.28 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 20:51:47,750 INFO [train.py:715] (6/8) Epoch 13, batch 25600, loss[loss=0.1221, simple_loss=0.2049, pruned_loss=0.0197, over 4991.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2104, pruned_loss=0.03154, over 973427.84 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 20:52:25,800 INFO [train.py:715] (6/8) Epoch 13, batch 25650, loss[loss=0.1313, simple_loss=0.1984, pruned_loss=0.0321, over 4830.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03155, over 973087.15 frames.], batch size: 12, lr: 1.66e-04 +2022-05-07 20:53:05,138 INFO [train.py:715] (6/8) Epoch 13, batch 25700, loss[loss=0.1501, simple_loss=0.2197, pruned_loss=0.04024, over 4983.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03174, over 972749.05 frames.], batch size: 24, lr: 1.66e-04 +2022-05-07 20:53:43,491 INFO [train.py:715] (6/8) Epoch 13, batch 25750, loss[loss=0.1546, simple_loss=0.2229, pruned_loss=0.04314, over 4977.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2114, pruned_loss=0.03205, over 972787.75 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 20:54:21,677 INFO [train.py:715] (6/8) Epoch 13, batch 25800, loss[loss=0.143, simple_loss=0.2136, pruned_loss=0.0362, over 4850.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03181, over 973855.57 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 20:55:00,569 INFO [train.py:715] (6/8) Epoch 13, batch 25850, loss[loss=0.1458, simple_loss=0.2143, pruned_loss=0.03862, over 4864.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03167, over 972715.45 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 20:55:39,362 INFO [train.py:715] (6/8) Epoch 13, batch 25900, loss[loss=0.142, simple_loss=0.2155, pruned_loss=0.03424, over 4837.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03138, over 972278.64 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 20:56:18,181 INFO [train.py:715] (6/8) Epoch 13, batch 25950, loss[loss=0.1267, simple_loss=0.2037, pruned_loss=0.02488, over 4821.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03104, over 972200.98 frames.], batch size: 27, lr: 1.66e-04 +2022-05-07 20:56:57,183 INFO 
[train.py:715] (6/8) Epoch 13, batch 26000, loss[loss=0.204, simple_loss=0.2657, pruned_loss=0.07119, over 4981.00 frames.], tot_loss[loss=0.136, simple_loss=0.2092, pruned_loss=0.03141, over 972138.30 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 20:57:36,545 INFO [train.py:715] (6/8) Epoch 13, batch 26050, loss[loss=0.1325, simple_loss=0.2049, pruned_loss=0.02998, over 4937.00 frames.], tot_loss[loss=0.136, simple_loss=0.2093, pruned_loss=0.03132, over 971864.66 frames.], batch size: 29, lr: 1.66e-04 +2022-05-07 20:58:15,741 INFO [train.py:715] (6/8) Epoch 13, batch 26100, loss[loss=0.1428, simple_loss=0.2151, pruned_loss=0.03529, over 4778.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03112, over 971284.22 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 20:58:54,124 INFO [train.py:715] (6/8) Epoch 13, batch 26150, loss[loss=0.1258, simple_loss=0.2054, pruned_loss=0.02308, over 4880.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2089, pruned_loss=0.03102, over 971016.58 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 20:59:33,349 INFO [train.py:715] (6/8) Epoch 13, batch 26200, loss[loss=0.145, simple_loss=0.2241, pruned_loss=0.03292, over 4799.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03181, over 971048.43 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:00:12,172 INFO [train.py:715] (6/8) Epoch 13, batch 26250, loss[loss=0.1328, simple_loss=0.2064, pruned_loss=0.02962, over 4836.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2093, pruned_loss=0.03143, over 971014.53 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:00:50,346 INFO [train.py:715] (6/8) Epoch 13, batch 26300, loss[loss=0.1527, simple_loss=0.2349, pruned_loss=0.03522, over 4820.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03155, over 970923.63 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:01:28,300 INFO [train.py:715] (6/8) Epoch 13, batch 26350, loss[loss=0.1454, simple_loss=0.23, pruned_loss=0.03038, over 4985.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03174, over 971158.13 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 21:02:07,168 INFO [train.py:715] (6/8) Epoch 13, batch 26400, loss[loss=0.1466, simple_loss=0.2256, pruned_loss=0.03382, over 4964.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03153, over 971479.43 frames.], batch size: 35, lr: 1.66e-04 +2022-05-07 21:02:46,107 INFO [train.py:715] (6/8) Epoch 13, batch 26450, loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02806, over 4766.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03138, over 971793.31 frames.], batch size: 18, lr: 1.66e-04 +2022-05-07 21:03:24,279 INFO [train.py:715] (6/8) Epoch 13, batch 26500, loss[loss=0.1348, simple_loss=0.2101, pruned_loss=0.02974, over 4859.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03132, over 971702.21 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 21:04:03,401 INFO [train.py:715] (6/8) Epoch 13, batch 26550, loss[loss=0.151, simple_loss=0.2173, pruned_loss=0.04234, over 4754.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03096, over 970541.51 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 21:04:41,841 INFO [train.py:715] (6/8) Epoch 13, batch 26600, loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02939, over 4904.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03138, over 971126.55 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:05:20,069 INFO [train.py:715] 
(6/8) Epoch 13, batch 26650, loss[loss=0.1213, simple_loss=0.1978, pruned_loss=0.02245, over 4840.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03112, over 971311.89 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:05:58,319 INFO [train.py:715] (6/8) Epoch 13, batch 26700, loss[loss=0.1276, simple_loss=0.1981, pruned_loss=0.02851, over 4982.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03111, over 971133.86 frames.], batch size: 35, lr: 1.66e-04 +2022-05-07 21:06:37,483 INFO [train.py:715] (6/8) Epoch 13, batch 26750, loss[loss=0.1858, simple_loss=0.2481, pruned_loss=0.06174, over 4983.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03176, over 971700.42 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:07:15,992 INFO [train.py:715] (6/8) Epoch 13, batch 26800, loss[loss=0.1465, simple_loss=0.2261, pruned_loss=0.03342, over 4834.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2114, pruned_loss=0.03214, over 972388.86 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:07:54,605 INFO [train.py:715] (6/8) Epoch 13, batch 26850, loss[loss=0.1463, simple_loss=0.2235, pruned_loss=0.03456, over 4849.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03204, over 972084.33 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:08:33,348 INFO [train.py:715] (6/8) Epoch 13, batch 26900, loss[loss=0.1047, simple_loss=0.1779, pruned_loss=0.01574, over 4928.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03099, over 972798.31 frames.], batch size: 23, lr: 1.66e-04 +2022-05-07 21:09:11,799 INFO [train.py:715] (6/8) Epoch 13, batch 26950, loss[loss=0.1249, simple_loss=0.1972, pruned_loss=0.02634, over 4752.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03133, over 972859.31 frames.], batch size: 12, lr: 1.66e-04 +2022-05-07 21:09:50,380 INFO [train.py:715] (6/8) Epoch 13, batch 27000, loss[loss=0.1184, simple_loss=0.1954, pruned_loss=0.02067, over 4926.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2104, pruned_loss=0.03109, over 972112.59 frames.], batch size: 18, lr: 1.66e-04 +2022-05-07 21:09:50,381 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 21:09:59,936 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1053, simple_loss=0.1891, pruned_loss=0.01077, over 914524.00 frames. 
+2022-05-07 21:10:39,030 INFO [train.py:715] (6/8) Epoch 13, batch 27050, loss[loss=0.1188, simple_loss=0.1919, pruned_loss=0.02286, over 4811.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2096, pruned_loss=0.03066, over 971503.81 frames.], batch size: 27, lr: 1.66e-04
+2022-05-07 21:11:17,918 INFO [train.py:715] (6/8) Epoch 13, batch 27100, loss[loss=0.1357, simple_loss=0.2162, pruned_loss=0.0276, over 4928.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03021, over 970329.65 frames.], batch size: 18, lr: 1.66e-04
+2022-05-07 21:11:57,150 INFO [train.py:715] (6/8) Epoch 13, batch 27150, loss[loss=0.1623, simple_loss=0.2446, pruned_loss=0.03998, over 4813.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.0306, over 970575.17 frames.], batch size: 26, lr: 1.66e-04
+2022-05-07 21:12:36,112 INFO [train.py:715] (6/8) Epoch 13, batch 27200, loss[loss=0.1511, simple_loss=0.2185, pruned_loss=0.04184, over 4782.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03088, over 970559.31 frames.], batch size: 14, lr: 1.66e-04
+2022-05-07 21:13:14,913 INFO [train.py:715] (6/8) Epoch 13, batch 27250, loss[loss=0.1257, simple_loss=0.2043, pruned_loss=0.02355, over 4950.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03102, over 970999.40 frames.], batch size: 21, lr: 1.66e-04
+2022-05-07 21:13:54,910 INFO [train.py:715] (6/8) Epoch 13, batch 27300, loss[loss=0.1719, simple_loss=0.2497, pruned_loss=0.04704, over 4914.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03093, over 971795.90 frames.], batch size: 17, lr: 1.66e-04
+2022-05-07 21:14:33,866 INFO [train.py:715] (6/8) Epoch 13, batch 27350, loss[loss=0.1215, simple_loss=0.1898, pruned_loss=0.02659, over 4825.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.0311, over 971460.53 frames.], batch size: 25, lr: 1.66e-04
+2022-05-07 21:15:11,635 INFO [train.py:715] (6/8) Epoch 13, batch 27400, loss[loss=0.1784, simple_loss=0.2461, pruned_loss=0.05537, over 4981.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03101, over 971248.20 frames.], batch size: 35, lr: 1.66e-04
+2022-05-07 21:15:49,746 INFO [train.py:715] (6/8) Epoch 13, batch 27450, loss[loss=0.1338, simple_loss=0.1967, pruned_loss=0.03546, over 4864.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03097, over 970983.61 frames.], batch size: 32, lr: 1.66e-04
+2022-05-07 21:16:30,592 INFO [train.py:715] (6/8) Epoch 13, batch 27500, loss[loss=0.1334, simple_loss=0.2048, pruned_loss=0.03099, over 4920.00 frames.], tot_loss[loss=0.1345, simple_loss=0.208, pruned_loss=0.03046, over 972425.81 frames.], batch size: 23, lr: 1.66e-04
+2022-05-07 21:17:08,831 INFO [train.py:715] (6/8) Epoch 13, batch 27550, loss[loss=0.1256, simple_loss=0.1834, pruned_loss=0.03394, over 4846.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03144, over 971900.08 frames.], batch size: 13, lr: 1.66e-04
+2022-05-07 21:17:46,780 INFO [train.py:715] (6/8) Epoch 13, batch 27600, loss[loss=0.1211, simple_loss=0.1984, pruned_loss=0.02187, over 4822.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2095, pruned_loss=0.03149, over 971406.15 frames.], batch size: 15, lr: 1.66e-04
+2022-05-07 21:18:25,959 INFO [train.py:715] (6/8) Epoch 13, batch 27650, loss[loss=0.1335, simple_loss=0.2096, pruned_loss=0.02877, over 4908.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03177, over 971350.12 frames.], batch size: 19, lr: 1.66e-04
+2022-05-07 21:19:03,879 INFO [train.py:715] (6/8) Epoch 13, batch 27700, loss[loss=0.1254, simple_loss=0.1945, pruned_loss=0.02813, over 4791.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2093, pruned_loss=0.03124, over 971029.46 frames.], batch size: 12, lr: 1.66e-04 +2022-05-07 21:19:42,883 INFO [train.py:715] (6/8) Epoch 13, batch 27750, loss[loss=0.1395, simple_loss=0.2162, pruned_loss=0.03138, over 4917.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.03093, over 972230.64 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:20:21,395 INFO [train.py:715] (6/8) Epoch 13, batch 27800, loss[loss=0.1608, simple_loss=0.2319, pruned_loss=0.04488, over 4878.00 frames.], tot_loss[loss=0.136, simple_loss=0.2093, pruned_loss=0.03141, over 972114.39 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:21:00,122 INFO [train.py:715] (6/8) Epoch 13, batch 27850, loss[loss=0.128, simple_loss=0.2077, pruned_loss=0.02412, over 4766.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03132, over 972576.60 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:21:38,317 INFO [train.py:715] (6/8) Epoch 13, batch 27900, loss[loss=0.1227, simple_loss=0.1979, pruned_loss=0.02374, over 4740.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03152, over 972508.55 frames.], batch size: 12, lr: 1.66e-04 +2022-05-07 21:22:16,100 INFO [train.py:715] (6/8) Epoch 13, batch 27950, loss[loss=0.146, simple_loss=0.2269, pruned_loss=0.03259, over 4897.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03155, over 972913.69 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:22:55,057 INFO [train.py:715] (6/8) Epoch 13, batch 28000, loss[loss=0.1167, simple_loss=0.1914, pruned_loss=0.02098, over 4801.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03124, over 973399.65 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:23:33,526 INFO [train.py:715] (6/8) Epoch 13, batch 28050, loss[loss=0.1369, simple_loss=0.2235, pruned_loss=0.02515, over 4778.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03137, over 973090.85 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:24:11,556 INFO [train.py:715] (6/8) Epoch 13, batch 28100, loss[loss=0.1465, simple_loss=0.2146, pruned_loss=0.03923, over 4686.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03164, over 972482.87 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:24:49,600 INFO [train.py:715] (6/8) Epoch 13, batch 28150, loss[loss=0.1241, simple_loss=0.2005, pruned_loss=0.02381, over 4741.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2102, pruned_loss=0.031, over 973147.07 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:25:28,818 INFO [train.py:715] (6/8) Epoch 13, batch 28200, loss[loss=0.1314, simple_loss=0.2077, pruned_loss=0.02753, over 4987.00 frames.], tot_loss[loss=0.137, simple_loss=0.2107, pruned_loss=0.03167, over 972712.35 frames.], batch size: 28, lr: 1.66e-04 +2022-05-07 21:26:06,613 INFO [train.py:715] (6/8) Epoch 13, batch 28250, loss[loss=0.1499, simple_loss=0.2208, pruned_loss=0.03949, over 4887.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.03159, over 971733.33 frames.], batch size: 22, lr: 1.66e-04 +2022-05-07 21:26:44,758 INFO [train.py:715] (6/8) Epoch 13, batch 28300, loss[loss=0.1779, simple_loss=0.2502, pruned_loss=0.05282, over 4989.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03195, over 971527.09 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 
21:27:23,467 INFO [train.py:715] (6/8) Epoch 13, batch 28350, loss[loss=0.1284, simple_loss=0.19, pruned_loss=0.03344, over 4767.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03179, over 971531.56 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 21:28:01,612 INFO [train.py:715] (6/8) Epoch 13, batch 28400, loss[loss=0.1989, simple_loss=0.2568, pruned_loss=0.07053, over 4965.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.03153, over 973111.58 frames.], batch size: 39, lr: 1.66e-04 +2022-05-07 21:28:40,047 INFO [train.py:715] (6/8) Epoch 13, batch 28450, loss[loss=0.137, simple_loss=0.2134, pruned_loss=0.03031, over 4699.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03133, over 973323.25 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:29:18,389 INFO [train.py:715] (6/8) Epoch 13, batch 28500, loss[loss=0.1286, simple_loss=0.2107, pruned_loss=0.02324, over 4878.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03068, over 973056.23 frames.], batch size: 38, lr: 1.66e-04 +2022-05-07 21:29:57,064 INFO [train.py:715] (6/8) Epoch 13, batch 28550, loss[loss=0.1466, simple_loss=0.2203, pruned_loss=0.03648, over 4985.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03078, over 972690.12 frames.], batch size: 35, lr: 1.66e-04 +2022-05-07 21:30:35,262 INFO [train.py:715] (6/8) Epoch 13, batch 28600, loss[loss=0.1201, simple_loss=0.1974, pruned_loss=0.0214, over 4950.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03095, over 973052.65 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:31:13,618 INFO [train.py:715] (6/8) Epoch 13, batch 28650, loss[loss=0.1297, simple_loss=0.207, pruned_loss=0.02621, over 4952.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.03057, over 972886.84 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 21:31:52,266 INFO [train.py:715] (6/8) Epoch 13, batch 28700, loss[loss=0.1066, simple_loss=0.185, pruned_loss=0.01409, over 4802.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.0305, over 972912.37 frames.], batch size: 24, lr: 1.66e-04 +2022-05-07 21:32:30,336 INFO [train.py:715] (6/8) Epoch 13, batch 28750, loss[loss=0.1157, simple_loss=0.192, pruned_loss=0.01971, over 4906.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03073, over 972537.56 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:33:08,639 INFO [train.py:715] (6/8) Epoch 13, batch 28800, loss[loss=0.1353, simple_loss=0.2148, pruned_loss=0.02785, over 4791.00 frames.], tot_loss[loss=0.1359, simple_loss=0.21, pruned_loss=0.03092, over 971896.52 frames.], batch size: 18, lr: 1.66e-04 +2022-05-07 21:33:47,848 INFO [train.py:715] (6/8) Epoch 13, batch 28850, loss[loss=0.1349, simple_loss=0.2024, pruned_loss=0.03367, over 4833.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.0309, over 971203.39 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:34:26,370 INFO [train.py:715] (6/8) Epoch 13, batch 28900, loss[loss=0.1068, simple_loss=0.1821, pruned_loss=0.01582, over 4808.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2098, pruned_loss=0.03084, over 971557.17 frames.], batch size: 25, lr: 1.66e-04 +2022-05-07 21:35:04,281 INFO [train.py:715] (6/8) Epoch 13, batch 28950, loss[loss=0.1304, simple_loss=0.2094, pruned_loss=0.0257, over 4827.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2103, pruned_loss=0.03111, over 971035.10 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:35:42,448 INFO 
[train.py:715] (6/8) Epoch 13, batch 29000, loss[loss=0.1413, simple_loss=0.2156, pruned_loss=0.03353, over 4926.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03173, over 970618.07 frames.], batch size: 23, lr: 1.66e-04 +2022-05-07 21:36:21,624 INFO [train.py:715] (6/8) Epoch 13, batch 29050, loss[loss=0.1223, simple_loss=0.186, pruned_loss=0.02934, over 4824.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03186, over 970970.50 frames.], batch size: 13, lr: 1.66e-04 +2022-05-07 21:37:00,160 INFO [train.py:715] (6/8) Epoch 13, batch 29100, loss[loss=0.1324, simple_loss=0.1992, pruned_loss=0.03285, over 4843.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2096, pruned_loss=0.03174, over 970651.74 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 21:37:38,207 INFO [train.py:715] (6/8) Epoch 13, batch 29150, loss[loss=0.1493, simple_loss=0.2231, pruned_loss=0.03771, over 4794.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.0315, over 971151.01 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:38:16,964 INFO [train.py:715] (6/8) Epoch 13, batch 29200, loss[loss=0.1519, simple_loss=0.2295, pruned_loss=0.03715, over 4766.00 frames.], tot_loss[loss=0.135, simple_loss=0.2082, pruned_loss=0.03091, over 970696.19 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:38:55,214 INFO [train.py:715] (6/8) Epoch 13, batch 29250, loss[loss=0.1197, simple_loss=0.1999, pruned_loss=0.01975, over 4756.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2094, pruned_loss=0.03164, over 971074.73 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:39:34,055 INFO [train.py:715] (6/8) Epoch 13, batch 29300, loss[loss=0.1427, simple_loss=0.2107, pruned_loss=0.03732, over 4856.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2086, pruned_loss=0.03112, over 970615.69 frames.], batch size: 34, lr: 1.66e-04 +2022-05-07 21:40:12,803 INFO [train.py:715] (6/8) Epoch 13, batch 29350, loss[loss=0.1242, simple_loss=0.1969, pruned_loss=0.02572, over 4981.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03075, over 970806.76 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:40:51,679 INFO [train.py:715] (6/8) Epoch 13, batch 29400, loss[loss=0.1478, simple_loss=0.2132, pruned_loss=0.04119, over 4923.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.0312, over 971264.99 frames.], batch size: 39, lr: 1.66e-04 +2022-05-07 21:41:29,701 INFO [train.py:715] (6/8) Epoch 13, batch 29450, loss[loss=0.1231, simple_loss=0.1914, pruned_loss=0.02739, over 4973.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2093, pruned_loss=0.03155, over 971551.93 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 21:42:08,740 INFO [train.py:715] (6/8) Epoch 13, batch 29500, loss[loss=0.1378, simple_loss=0.2069, pruned_loss=0.03439, over 4752.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2078, pruned_loss=0.03085, over 971576.36 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:42:47,375 INFO [train.py:715] (6/8) Epoch 13, batch 29550, loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03164, over 4697.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2074, pruned_loss=0.03045, over 971691.13 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:43:25,736 INFO [train.py:715] (6/8) Epoch 13, batch 29600, loss[loss=0.1477, simple_loss=0.2312, pruned_loss=0.03209, over 4817.00 frames.], tot_loss[loss=0.135, simple_loss=0.2082, pruned_loss=0.03084, over 971513.73 frames.], batch size: 25, lr: 1.66e-04 +2022-05-07 21:44:03,484 INFO 
[train.py:715] (6/8) Epoch 13, batch 29650, loss[loss=0.1548, simple_loss=0.2113, pruned_loss=0.04916, over 4853.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2084, pruned_loss=0.03113, over 971988.00 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:44:41,769 INFO [train.py:715] (6/8) Epoch 13, batch 29700, loss[loss=0.1227, simple_loss=0.1969, pruned_loss=0.02422, over 4921.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.03105, over 972725.69 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:45:20,122 INFO [train.py:715] (6/8) Epoch 13, batch 29750, loss[loss=0.1273, simple_loss=0.1978, pruned_loss=0.0284, over 4790.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03082, over 972061.48 frames.], batch size: 12, lr: 1.66e-04 +2022-05-07 21:45:59,493 INFO [train.py:715] (6/8) Epoch 13, batch 29800, loss[loss=0.1289, simple_loss=0.2079, pruned_loss=0.02495, over 4894.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.0309, over 971630.50 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:46:38,722 INFO [train.py:715] (6/8) Epoch 13, batch 29850, loss[loss=0.145, simple_loss=0.2202, pruned_loss=0.03489, over 4889.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03142, over 971460.19 frames.], batch size: 22, lr: 1.66e-04 +2022-05-07 21:47:18,329 INFO [train.py:715] (6/8) Epoch 13, batch 29900, loss[loss=0.1596, simple_loss=0.235, pruned_loss=0.04211, over 4777.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03138, over 971028.76 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:47:57,741 INFO [train.py:715] (6/8) Epoch 13, batch 29950, loss[loss=0.1316, simple_loss=0.2113, pruned_loss=0.02589, over 4853.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03129, over 971167.66 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 21:48:36,360 INFO [train.py:715] (6/8) Epoch 13, batch 30000, loss[loss=0.1528, simple_loss=0.2284, pruned_loss=0.03856, over 4866.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03135, over 971818.54 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 21:48:36,361 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 21:48:45,862 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1054, simple_loss=0.1891, pruned_loss=0.01083, over 914524.00 frames. 
+2022-05-07 21:49:25,290 INFO [train.py:715] (6/8) Epoch 13, batch 30050, loss[loss=0.1506, simple_loss=0.2299, pruned_loss=0.03562, over 4877.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03127, over 972050.20 frames.], batch size: 22, lr: 1.66e-04 +2022-05-07 21:50:05,104 INFO [train.py:715] (6/8) Epoch 13, batch 30100, loss[loss=0.141, simple_loss=0.2274, pruned_loss=0.02729, over 4921.00 frames.], tot_loss[loss=0.1359, simple_loss=0.21, pruned_loss=0.03087, over 971766.70 frames.], batch size: 29, lr: 1.66e-04 +2022-05-07 21:50:44,574 INFO [train.py:715] (6/8) Epoch 13, batch 30150, loss[loss=0.1215, simple_loss=0.2019, pruned_loss=0.02057, over 4812.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03092, over 971764.10 frames.], batch size: 27, lr: 1.66e-04 +2022-05-07 21:51:23,148 INFO [train.py:715] (6/8) Epoch 13, batch 30200, loss[loss=0.1172, simple_loss=0.1971, pruned_loss=0.01863, over 4807.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03049, over 971601.49 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:52:02,986 INFO [train.py:715] (6/8) Epoch 13, batch 30250, loss[loss=0.1296, simple_loss=0.2062, pruned_loss=0.02651, over 4884.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03108, over 971859.43 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:52:42,785 INFO [train.py:715] (6/8) Epoch 13, batch 30300, loss[loss=0.1769, simple_loss=0.2381, pruned_loss=0.05784, over 4743.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.03098, over 971731.69 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:53:22,302 INFO [train.py:715] (6/8) Epoch 13, batch 30350, loss[loss=0.1336, simple_loss=0.2066, pruned_loss=0.03024, over 4854.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03145, over 971730.47 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:54:01,880 INFO [train.py:715] (6/8) Epoch 13, batch 30400, loss[loss=0.1428, simple_loss=0.2109, pruned_loss=0.03736, over 4892.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.03163, over 972036.53 frames.], batch size: 22, lr: 1.66e-04 +2022-05-07 21:54:42,498 INFO [train.py:715] (6/8) Epoch 13, batch 30450, loss[loss=0.1493, simple_loss=0.2173, pruned_loss=0.04071, over 4980.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03171, over 972531.28 frames.], batch size: 33, lr: 1.66e-04 +2022-05-07 21:55:22,608 INFO [train.py:715] (6/8) Epoch 13, batch 30500, loss[loss=0.1429, simple_loss=0.2093, pruned_loss=0.03822, over 4976.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03165, over 972529.47 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:56:02,393 INFO [train.py:715] (6/8) Epoch 13, batch 30550, loss[loss=0.1299, simple_loss=0.2102, pruned_loss=0.02476, over 4887.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03153, over 972452.26 frames.], batch size: 22, lr: 1.66e-04 +2022-05-07 21:56:43,842 INFO [train.py:715] (6/8) Epoch 13, batch 30600, loss[loss=0.1158, simple_loss=0.1872, pruned_loss=0.02219, over 4950.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2092, pruned_loss=0.03114, over 973154.09 frames.], batch size: 23, lr: 1.66e-04 +2022-05-07 21:57:24,955 INFO [train.py:715] (6/8) Epoch 13, batch 30650, loss[loss=0.122, simple_loss=0.1999, pruned_loss=0.02204, over 4846.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2093, pruned_loss=0.03142, over 972827.52 frames.], batch size: 20, lr: 1.65e-04 +2022-05-07 
21:58:05,362 INFO [train.py:715] (6/8) Epoch 13, batch 30700, loss[loss=0.1375, simple_loss=0.2179, pruned_loss=0.02859, over 4955.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.0312, over 972875.30 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 21:58:45,827 INFO [train.py:715] (6/8) Epoch 13, batch 30750, loss[loss=0.1276, simple_loss=0.2037, pruned_loss=0.02578, over 4754.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.03175, over 972114.64 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 21:59:26,820 INFO [train.py:715] (6/8) Epoch 13, batch 30800, loss[loss=0.1385, simple_loss=0.2153, pruned_loss=0.03083, over 4925.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.0314, over 971227.65 frames.], batch size: 23, lr: 1.65e-04 +2022-05-07 22:00:07,580 INFO [train.py:715] (6/8) Epoch 13, batch 30850, loss[loss=0.1348, simple_loss=0.2066, pruned_loss=0.0315, over 4872.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2093, pruned_loss=0.03123, over 972503.48 frames.], batch size: 16, lr: 1.65e-04 +2022-05-07 22:00:48,211 INFO [train.py:715] (6/8) Epoch 13, batch 30900, loss[loss=0.124, simple_loss=0.2023, pruned_loss=0.02287, over 4812.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03129, over 971755.66 frames.], batch size: 26, lr: 1.65e-04 +2022-05-07 22:01:29,255 INFO [train.py:715] (6/8) Epoch 13, batch 30950, loss[loss=0.1302, simple_loss=0.2067, pruned_loss=0.02685, over 4700.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2094, pruned_loss=0.03167, over 971830.89 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:02:09,963 INFO [train.py:715] (6/8) Epoch 13, batch 31000, loss[loss=0.1113, simple_loss=0.1953, pruned_loss=0.01363, over 4797.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03163, over 972239.27 frames.], batch size: 25, lr: 1.65e-04 +2022-05-07 22:02:50,144 INFO [train.py:715] (6/8) Epoch 13, batch 31050, loss[loss=0.1685, simple_loss=0.2449, pruned_loss=0.04603, over 4922.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2098, pruned_loss=0.03169, over 972440.05 frames.], batch size: 29, lr: 1.65e-04 +2022-05-07 22:03:30,715 INFO [train.py:715] (6/8) Epoch 13, batch 31100, loss[loss=0.1518, simple_loss=0.2189, pruned_loss=0.04238, over 4719.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03155, over 972003.44 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:04:11,682 INFO [train.py:715] (6/8) Epoch 13, batch 31150, loss[loss=0.1594, simple_loss=0.2475, pruned_loss=0.03562, over 4940.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.03155, over 972679.51 frames.], batch size: 21, lr: 1.65e-04 +2022-05-07 22:04:52,786 INFO [train.py:715] (6/8) Epoch 13, batch 31200, loss[loss=0.1225, simple_loss=0.1944, pruned_loss=0.02527, over 4922.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.0313, over 971827.26 frames.], batch size: 18, lr: 1.65e-04 +2022-05-07 22:05:32,915 INFO [train.py:715] (6/8) Epoch 13, batch 31250, loss[loss=0.117, simple_loss=0.1947, pruned_loss=0.01969, over 4946.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03181, over 972929.17 frames.], batch size: 23, lr: 1.65e-04 +2022-05-07 22:06:13,245 INFO [train.py:715] (6/8) Epoch 13, batch 31300, loss[loss=0.1246, simple_loss=0.202, pruned_loss=0.02357, over 4810.00 frames.], tot_loss[loss=0.1372, simple_loss=0.211, pruned_loss=0.03169, over 972340.97 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:06:53,514 INFO 
[train.py:715] (6/8) Epoch 13, batch 31350, loss[loss=0.1529, simple_loss=0.226, pruned_loss=0.03994, over 4801.00 frames.], tot_loss[loss=0.1371, simple_loss=0.211, pruned_loss=0.03159, over 972996.41 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:07:33,263 INFO [train.py:715] (6/8) Epoch 13, batch 31400, loss[loss=0.1169, simple_loss=0.1953, pruned_loss=0.01926, over 4859.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03124, over 972563.61 frames.], batch size: 20, lr: 1.65e-04 +2022-05-07 22:08:13,735 INFO [train.py:715] (6/8) Epoch 13, batch 31450, loss[loss=0.1378, simple_loss=0.2178, pruned_loss=0.02897, over 4980.00 frames.], tot_loss[loss=0.1359, simple_loss=0.21, pruned_loss=0.03091, over 972458.49 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:08:54,101 INFO [train.py:715] (6/8) Epoch 13, batch 31500, loss[loss=0.1411, simple_loss=0.2209, pruned_loss=0.03063, over 4856.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03092, over 973002.97 frames.], batch size: 20, lr: 1.65e-04 +2022-05-07 22:09:33,924 INFO [train.py:715] (6/8) Epoch 13, batch 31550, loss[loss=0.1161, simple_loss=0.1902, pruned_loss=0.02096, over 4790.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03146, over 972969.89 frames.], batch size: 17, lr: 1.65e-04 +2022-05-07 22:10:14,450 INFO [train.py:715] (6/8) Epoch 13, batch 31600, loss[loss=0.1183, simple_loss=0.1825, pruned_loss=0.02705, over 4822.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03143, over 972207.47 frames.], batch size: 13, lr: 1.65e-04 +2022-05-07 22:10:55,017 INFO [train.py:715] (6/8) Epoch 13, batch 31650, loss[loss=0.1312, simple_loss=0.1999, pruned_loss=0.03125, over 4771.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2094, pruned_loss=0.03141, over 972593.24 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:11:35,405 INFO [train.py:715] (6/8) Epoch 13, batch 31700, loss[loss=0.1292, simple_loss=0.1963, pruned_loss=0.03103, over 4755.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2087, pruned_loss=0.03118, over 972725.47 frames.], batch size: 16, lr: 1.65e-04 +2022-05-07 22:12:15,837 INFO [train.py:715] (6/8) Epoch 13, batch 31750, loss[loss=0.1602, simple_loss=0.2301, pruned_loss=0.04519, over 4880.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.0311, over 971683.89 frames.], batch size: 16, lr: 1.65e-04 +2022-05-07 22:12:56,388 INFO [train.py:715] (6/8) Epoch 13, batch 31800, loss[loss=0.1337, simple_loss=0.2125, pruned_loss=0.02743, over 4973.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03081, over 972068.13 frames.], batch size: 35, lr: 1.65e-04 +2022-05-07 22:13:37,271 INFO [train.py:715] (6/8) Epoch 13, batch 31850, loss[loss=0.1363, simple_loss=0.2172, pruned_loss=0.02774, over 4712.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03111, over 971958.44 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:14:18,120 INFO [train.py:715] (6/8) Epoch 13, batch 31900, loss[loss=0.1153, simple_loss=0.1982, pruned_loss=0.01615, over 4809.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03108, over 971544.12 frames.], batch size: 26, lr: 1.65e-04 +2022-05-07 22:14:59,145 INFO [train.py:715] (6/8) Epoch 13, batch 31950, loss[loss=0.1221, simple_loss=0.2084, pruned_loss=0.01785, over 4779.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03119, over 971019.13 frames.], batch size: 18, lr: 1.65e-04 +2022-05-07 22:15:39,540 INFO 
[train.py:715] (6/8) Epoch 13, batch 32000, loss[loss=0.1391, simple_loss=0.2068, pruned_loss=0.03563, over 4891.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2097, pruned_loss=0.03086, over 970862.39 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:16:20,152 INFO [train.py:715] (6/8) Epoch 13, batch 32050, loss[loss=0.1473, simple_loss=0.2248, pruned_loss=0.03491, over 4693.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03103, over 971414.01 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:17:00,693 INFO [train.py:715] (6/8) Epoch 13, batch 32100, loss[loss=0.1303, simple_loss=0.2129, pruned_loss=0.02384, over 4964.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03126, over 971857.36 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:17:41,704 INFO [train.py:715] (6/8) Epoch 13, batch 32150, loss[loss=0.1549, simple_loss=0.2231, pruned_loss=0.04336, over 4900.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03167, over 972587.50 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:18:22,395 INFO [train.py:715] (6/8) Epoch 13, batch 32200, loss[loss=0.1159, simple_loss=0.191, pruned_loss=0.02038, over 4889.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03135, over 972782.43 frames.], batch size: 32, lr: 1.65e-04 +2022-05-07 22:19:03,054 INFO [train.py:715] (6/8) Epoch 13, batch 32250, loss[loss=0.1356, simple_loss=0.2171, pruned_loss=0.0271, over 4768.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03093, over 973201.16 frames.], batch size: 17, lr: 1.65e-04 +2022-05-07 22:19:43,881 INFO [train.py:715] (6/8) Epoch 13, batch 32300, loss[loss=0.1144, simple_loss=0.1921, pruned_loss=0.01836, over 4775.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03098, over 973329.56 frames.], batch size: 18, lr: 1.65e-04 +2022-05-07 22:20:24,944 INFO [train.py:715] (6/8) Epoch 13, batch 32350, loss[loss=0.144, simple_loss=0.2226, pruned_loss=0.03265, over 4976.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2106, pruned_loss=0.03112, over 973067.45 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:21:06,368 INFO [train.py:715] (6/8) Epoch 13, batch 32400, loss[loss=0.1268, simple_loss=0.2048, pruned_loss=0.0244, over 4793.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2107, pruned_loss=0.03075, over 972211.01 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:21:47,426 INFO [train.py:715] (6/8) Epoch 13, batch 32450, loss[loss=0.1493, simple_loss=0.2178, pruned_loss=0.04042, over 4870.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2107, pruned_loss=0.03131, over 972159.39 frames.], batch size: 32, lr: 1.65e-04 +2022-05-07 22:22:28,224 INFO [train.py:715] (6/8) Epoch 13, batch 32500, loss[loss=0.1137, simple_loss=0.1945, pruned_loss=0.01646, over 4878.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2096, pruned_loss=0.03057, over 972553.02 frames.], batch size: 22, lr: 1.65e-04 +2022-05-07 22:23:09,259 INFO [train.py:715] (6/8) Epoch 13, batch 32550, loss[loss=0.1244, simple_loss=0.2003, pruned_loss=0.02427, over 4799.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03057, over 972006.36 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:23:49,649 INFO [train.py:715] (6/8) Epoch 13, batch 32600, loss[loss=0.1757, simple_loss=0.2497, pruned_loss=0.05089, over 4834.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2097, pruned_loss=0.03064, over 971571.00 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:24:30,000 INFO 
[train.py:715] (6/8) Epoch 13, batch 32650, loss[loss=0.1163, simple_loss=0.191, pruned_loss=0.02082, over 4944.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2101, pruned_loss=0.0309, over 971815.01 frames.], batch size: 21, lr: 1.65e-04 +2022-05-07 22:25:10,590 INFO [train.py:715] (6/8) Epoch 13, batch 32700, loss[loss=0.1196, simple_loss=0.1949, pruned_loss=0.0222, over 4810.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2106, pruned_loss=0.03105, over 971957.76 frames.], batch size: 27, lr: 1.65e-04 +2022-05-07 22:25:50,913 INFO [train.py:715] (6/8) Epoch 13, batch 32750, loss[loss=0.1276, simple_loss=0.2038, pruned_loss=0.0257, over 4706.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2107, pruned_loss=0.03092, over 971395.17 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:26:31,937 INFO [train.py:715] (6/8) Epoch 13, batch 32800, loss[loss=0.1264, simple_loss=0.2102, pruned_loss=0.02131, over 4853.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2108, pruned_loss=0.03125, over 972072.03 frames.], batch size: 20, lr: 1.65e-04 +2022-05-07 22:27:12,668 INFO [train.py:715] (6/8) Epoch 13, batch 32850, loss[loss=0.1546, simple_loss=0.2376, pruned_loss=0.03584, over 4889.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2098, pruned_loss=0.03084, over 971122.56 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:27:53,753 INFO [train.py:715] (6/8) Epoch 13, batch 32900, loss[loss=0.1457, simple_loss=0.2113, pruned_loss=0.04002, over 4946.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03092, over 971267.85 frames.], batch size: 39, lr: 1.65e-04 +2022-05-07 22:28:33,959 INFO [train.py:715] (6/8) Epoch 13, batch 32950, loss[loss=0.1362, simple_loss=0.2093, pruned_loss=0.03156, over 4812.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2104, pruned_loss=0.03086, over 971399.94 frames.], batch size: 13, lr: 1.65e-04 +2022-05-07 22:29:14,626 INFO [train.py:715] (6/8) Epoch 13, batch 33000, loss[loss=0.1224, simple_loss=0.1998, pruned_loss=0.02246, over 4863.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2101, pruned_loss=0.03073, over 971334.03 frames.], batch size: 20, lr: 1.65e-04 +2022-05-07 22:29:14,627 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 22:29:24,504 INFO [train.py:742] (6/8) Epoch 13, validation: loss=0.1054, simple_loss=0.1892, pruned_loss=0.01081, over 914524.00 frames. 
+2022-05-07 22:30:05,558 INFO [train.py:715] (6/8) Epoch 13, batch 33050, loss[loss=0.1526, simple_loss=0.2311, pruned_loss=0.03698, over 4927.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2094, pruned_loss=0.03019, over 970514.24 frames.], batch size: 29, lr: 1.65e-04 +2022-05-07 22:30:45,209 INFO [train.py:715] (6/8) Epoch 13, batch 33100, loss[loss=0.1334, simple_loss=0.1904, pruned_loss=0.03823, over 4776.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03071, over 971541.76 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:31:25,147 INFO [train.py:715] (6/8) Epoch 13, batch 33150, loss[loss=0.1301, simple_loss=0.2098, pruned_loss=0.02514, over 4888.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2109, pruned_loss=0.03148, over 971624.58 frames.], batch size: 22, lr: 1.65e-04 +2022-05-07 22:32:05,571 INFO [train.py:715] (6/8) Epoch 13, batch 33200, loss[loss=0.1464, simple_loss=0.2123, pruned_loss=0.04021, over 4985.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2106, pruned_loss=0.03111, over 972868.32 frames.], batch size: 31, lr: 1.65e-04 +2022-05-07 22:32:46,036 INFO [train.py:715] (6/8) Epoch 13, batch 33250, loss[loss=0.1601, simple_loss=0.2291, pruned_loss=0.04556, over 4977.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2113, pruned_loss=0.03126, over 972682.25 frames.], batch size: 40, lr: 1.65e-04 +2022-05-07 22:33:26,592 INFO [train.py:715] (6/8) Epoch 13, batch 33300, loss[loss=0.1348, simple_loss=0.212, pruned_loss=0.02885, over 4980.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2104, pruned_loss=0.03094, over 972799.76 frames.], batch size: 27, lr: 1.65e-04 +2022-05-07 22:34:07,017 INFO [train.py:715] (6/8) Epoch 13, batch 33350, loss[loss=0.1192, simple_loss=0.1974, pruned_loss=0.02049, over 4855.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2109, pruned_loss=0.03149, over 973119.45 frames.], batch size: 20, lr: 1.65e-04 +2022-05-07 22:34:47,637 INFO [train.py:715] (6/8) Epoch 13, batch 33400, loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.03, over 4701.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03174, over 972383.72 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:35:28,236 INFO [train.py:715] (6/8) Epoch 13, batch 33450, loss[loss=0.1272, simple_loss=0.2029, pruned_loss=0.02578, over 4802.00 frames.], tot_loss[loss=0.138, simple_loss=0.2119, pruned_loss=0.03203, over 972433.47 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:36:08,958 INFO [train.py:715] (6/8) Epoch 13, batch 33500, loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03867, over 4798.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2119, pruned_loss=0.03182, over 973209.74 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:36:49,608 INFO [train.py:715] (6/8) Epoch 13, batch 33550, loss[loss=0.1381, simple_loss=0.2022, pruned_loss=0.03694, over 4963.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2111, pruned_loss=0.03166, over 972580.03 frames.], batch size: 31, lr: 1.65e-04 +2022-05-07 22:37:30,308 INFO [train.py:715] (6/8) Epoch 13, batch 33600, loss[loss=0.1103, simple_loss=0.1825, pruned_loss=0.01908, over 4756.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2105, pruned_loss=0.03121, over 972457.15 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:38:10,830 INFO [train.py:715] (6/8) Epoch 13, batch 33650, loss[loss=0.115, simple_loss=0.1938, pruned_loss=0.01814, over 4818.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2104, pruned_loss=0.03114, over 973002.28 frames.], batch size: 13, lr: 1.65e-04 +2022-05-07 
22:38:51,066 INFO [train.py:715] (6/8) Epoch 13, batch 33700, loss[loss=0.1309, simple_loss=0.1989, pruned_loss=0.03141, over 4897.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.0313, over 971755.98 frames.], batch size: 39, lr: 1.65e-04 +2022-05-07 22:39:32,043 INFO [train.py:715] (6/8) Epoch 13, batch 33750, loss[loss=0.1347, simple_loss=0.2163, pruned_loss=0.02658, over 4952.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03115, over 971932.47 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:40:12,827 INFO [train.py:715] (6/8) Epoch 13, batch 33800, loss[loss=0.1347, simple_loss=0.1965, pruned_loss=0.03643, over 4747.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.0315, over 971397.05 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:40:53,577 INFO [train.py:715] (6/8) Epoch 13, batch 33850, loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03012, over 4891.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03184, over 971701.14 frames.], batch size: 22, lr: 1.65e-04 +2022-05-07 22:41:34,023 INFO [train.py:715] (6/8) Epoch 13, batch 33900, loss[loss=0.1329, simple_loss=0.2009, pruned_loss=0.03245, over 4648.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03112, over 971996.30 frames.], batch size: 13, lr: 1.65e-04 +2022-05-07 22:42:15,282 INFO [train.py:715] (6/8) Epoch 13, batch 33950, loss[loss=0.1235, simple_loss=0.204, pruned_loss=0.02146, over 4831.00 frames.], tot_loss[loss=0.136, simple_loss=0.21, pruned_loss=0.03105, over 971780.66 frames.], batch size: 25, lr: 1.65e-04 +2022-05-07 22:42:56,289 INFO [train.py:715] (6/8) Epoch 13, batch 34000, loss[loss=0.125, simple_loss=0.2022, pruned_loss=0.02395, over 4976.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03112, over 972065.69 frames.], batch size: 35, lr: 1.65e-04 +2022-05-07 22:43:36,860 INFO [train.py:715] (6/8) Epoch 13, batch 34050, loss[loss=0.1571, simple_loss=0.2222, pruned_loss=0.04602, over 4948.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03133, over 972874.62 frames.], batch size: 21, lr: 1.65e-04 +2022-05-07 22:44:17,683 INFO [train.py:715] (6/8) Epoch 13, batch 34100, loss[loss=0.1301, simple_loss=0.2004, pruned_loss=0.0299, over 4902.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03168, over 972097.64 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:44:57,555 INFO [train.py:715] (6/8) Epoch 13, batch 34150, loss[loss=0.1395, simple_loss=0.2143, pruned_loss=0.03235, over 4832.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.0311, over 971691.85 frames.], batch size: 13, lr: 1.65e-04 +2022-05-07 22:45:38,245 INFO [train.py:715] (6/8) Epoch 13, batch 34200, loss[loss=0.1176, simple_loss=0.1984, pruned_loss=0.01837, over 4880.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03068, over 971475.06 frames.], batch size: 22, lr: 1.65e-04 +2022-05-07 22:46:18,604 INFO [train.py:715] (6/8) Epoch 13, batch 34250, loss[loss=0.1155, simple_loss=0.1885, pruned_loss=0.02126, over 4813.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.0301, over 972727.92 frames.], batch size: 27, lr: 1.65e-04 +2022-05-07 22:46:59,526 INFO [train.py:715] (6/8) Epoch 13, batch 34300, loss[loss=0.1175, simple_loss=0.1921, pruned_loss=0.02141, over 4868.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03047, over 972012.20 frames.], batch size: 20, lr: 1.65e-04 +2022-05-07 22:47:39,596 INFO 
[train.py:715] (6/8) Epoch 13, batch 34350, loss[loss=0.1722, simple_loss=0.241, pruned_loss=0.05172, over 4970.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03116, over 973145.85 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:48:20,188 INFO [train.py:715] (6/8) Epoch 13, batch 34400, loss[loss=0.1317, simple_loss=0.2135, pruned_loss=0.02494, over 4948.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03143, over 973769.06 frames.], batch size: 29, lr: 1.65e-04 +2022-05-07 22:49:01,284 INFO [train.py:715] (6/8) Epoch 13, batch 34450, loss[loss=0.1172, simple_loss=0.1978, pruned_loss=0.01829, over 4946.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2107, pruned_loss=0.03118, over 973662.29 frames.], batch size: 21, lr: 1.65e-04 +2022-05-07 22:49:41,690 INFO [train.py:715] (6/8) Epoch 13, batch 34500, loss[loss=0.1112, simple_loss=0.1893, pruned_loss=0.01656, over 4845.00 frames.], tot_loss[loss=0.137, simple_loss=0.2112, pruned_loss=0.03142, over 973236.22 frames.], batch size: 13, lr: 1.65e-04 +2022-05-07 22:50:21,496 INFO [train.py:715] (6/8) Epoch 13, batch 34550, loss[loss=0.1417, simple_loss=0.2205, pruned_loss=0.03141, over 4748.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2108, pruned_loss=0.03115, over 973063.33 frames.], batch size: 16, lr: 1.65e-04 +2022-05-07 22:51:01,502 INFO [train.py:715] (6/8) Epoch 13, batch 34600, loss[loss=0.1712, simple_loss=0.2202, pruned_loss=0.06112, over 4972.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2113, pruned_loss=0.03144, over 972897.96 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:51:40,869 INFO [train.py:715] (6/8) Epoch 13, batch 34650, loss[loss=0.1261, simple_loss=0.2093, pruned_loss=0.02141, over 4870.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2119, pruned_loss=0.03194, over 973730.83 frames.], batch size: 20, lr: 1.65e-04 +2022-05-07 22:52:20,404 INFO [train.py:715] (6/8) Epoch 13, batch 34700, loss[loss=0.1297, simple_loss=0.2098, pruned_loss=0.02484, over 4917.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2122, pruned_loss=0.0321, over 973383.45 frames.], batch size: 17, lr: 1.65e-04 +2022-05-07 22:52:59,362 INFO [train.py:715] (6/8) Epoch 13, batch 34750, loss[loss=0.1055, simple_loss=0.1888, pruned_loss=0.01108, over 4930.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2112, pruned_loss=0.03167, over 972707.34 frames.], batch size: 18, lr: 1.65e-04 +2022-05-07 22:53:36,110 INFO [train.py:715] (6/8) Epoch 13, batch 34800, loss[loss=0.1037, simple_loss=0.1755, pruned_loss=0.01596, over 4824.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03102, over 972671.02 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:54:25,047 INFO [train.py:715] (6/8) Epoch 14, batch 0, loss[loss=0.1319, simple_loss=0.2099, pruned_loss=0.02692, over 4819.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2099, pruned_loss=0.02692, over 4819.00 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 22:55:04,008 INFO [train.py:715] (6/8) Epoch 14, batch 50, loss[loss=0.138, simple_loss=0.2059, pruned_loss=0.03506, over 4835.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2069, pruned_loss=0.02964, over 219709.39 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 22:55:42,423 INFO [train.py:715] (6/8) Epoch 14, batch 100, loss[loss=0.139, simple_loss=0.2079, pruned_loss=0.03503, over 4871.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.03005, over 385971.90 frames.], batch size: 16, lr: 1.59e-04 +2022-05-07 22:56:21,310 INFO [train.py:715] (6/8) 
Epoch 14, batch 150, loss[loss=0.1374, simple_loss=0.2119, pruned_loss=0.0315, over 4981.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03011, over 515789.32 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 22:56:59,878 INFO [train.py:715] (6/8) Epoch 14, batch 200, loss[loss=0.1329, simple_loss=0.2063, pruned_loss=0.02977, over 4962.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03093, over 616321.09 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 22:57:38,471 INFO [train.py:715] (6/8) Epoch 14, batch 250, loss[loss=0.1155, simple_loss=0.1975, pruned_loss=0.01675, over 4685.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03045, over 695226.08 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 22:58:17,254 INFO [train.py:715] (6/8) Epoch 14, batch 300, loss[loss=0.1587, simple_loss=0.2427, pruned_loss=0.03737, over 4878.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2104, pruned_loss=0.03127, over 756849.62 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 22:58:56,806 INFO [train.py:715] (6/8) Epoch 14, batch 350, loss[loss=0.1375, simple_loss=0.2143, pruned_loss=0.03029, over 4709.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.03092, over 804125.17 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 22:59:35,349 INFO [train.py:715] (6/8) Epoch 14, batch 400, loss[loss=0.1221, simple_loss=0.1897, pruned_loss=0.02724, over 4866.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.03119, over 841405.17 frames.], batch size: 16, lr: 1.59e-04 +2022-05-07 23:00:14,791 INFO [train.py:715] (6/8) Epoch 14, batch 450, loss[loss=0.1444, simple_loss=0.2134, pruned_loss=0.03768, over 4951.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03071, over 871037.21 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:00:54,075 INFO [train.py:715] (6/8) Epoch 14, batch 500, loss[loss=0.1203, simple_loss=0.2012, pruned_loss=0.01969, over 4956.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03032, over 894451.04 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:01:33,696 INFO [train.py:715] (6/8) Epoch 14, batch 550, loss[loss=0.1301, simple_loss=0.189, pruned_loss=0.03558, over 4825.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.03101, over 912070.48 frames.], batch size: 13, lr: 1.59e-04 +2022-05-07 23:02:12,473 INFO [train.py:715] (6/8) Epoch 14, batch 600, loss[loss=0.1535, simple_loss=0.2266, pruned_loss=0.0402, over 4953.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03037, over 924991.73 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:02:51,124 INFO [train.py:715] (6/8) Epoch 14, batch 650, loss[loss=0.1554, simple_loss=0.2269, pruned_loss=0.042, over 4805.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02994, over 935744.49 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:03:32,629 INFO [train.py:715] (6/8) Epoch 14, batch 700, loss[loss=0.1151, simple_loss=0.1954, pruned_loss=0.01742, over 4813.00 frames.], tot_loss[loss=0.1336, simple_loss=0.207, pruned_loss=0.03006, over 943535.59 frames.], batch size: 27, lr: 1.59e-04 +2022-05-07 23:04:11,050 INFO [train.py:715] (6/8) Epoch 14, batch 750, loss[loss=0.1469, simple_loss=0.2239, pruned_loss=0.03492, over 4780.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.0303, over 949425.78 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:04:51,178 INFO [train.py:715] (6/8) Epoch 14, batch 800, loss[loss=0.1283, 
simple_loss=0.2039, pruned_loss=0.02639, over 4835.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.0303, over 954503.67 frames.], batch size: 13, lr: 1.59e-04 +2022-05-07 23:05:30,252 INFO [train.py:715] (6/8) Epoch 14, batch 850, loss[loss=0.1395, simple_loss=0.207, pruned_loss=0.03603, over 4900.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03035, over 958214.66 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:06:09,709 INFO [train.py:715] (6/8) Epoch 14, batch 900, loss[loss=0.1439, simple_loss=0.2259, pruned_loss=0.03097, over 4822.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03034, over 961562.35 frames.], batch size: 26, lr: 1.59e-04 +2022-05-07 23:06:48,332 INFO [train.py:715] (6/8) Epoch 14, batch 950, loss[loss=0.1374, simple_loss=0.2071, pruned_loss=0.03386, over 4879.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03041, over 963765.85 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 23:07:27,881 INFO [train.py:715] (6/8) Epoch 14, batch 1000, loss[loss=0.1249, simple_loss=0.1972, pruned_loss=0.02625, over 4916.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03034, over 965134.63 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:08:07,942 INFO [train.py:715] (6/8) Epoch 14, batch 1050, loss[loss=0.1369, simple_loss=0.2065, pruned_loss=0.03362, over 4863.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03091, over 966308.14 frames.], batch size: 16, lr: 1.59e-04 +2022-05-07 23:08:47,254 INFO [train.py:715] (6/8) Epoch 14, batch 1100, loss[loss=0.1234, simple_loss=0.198, pruned_loss=0.02441, over 4962.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03062, over 968266.32 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 23:09:26,960 INFO [train.py:715] (6/8) Epoch 14, batch 1150, loss[loss=0.135, simple_loss=0.2008, pruned_loss=0.0346, over 4877.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03015, over 969476.11 frames.], batch size: 32, lr: 1.59e-04 +2022-05-07 23:10:07,014 INFO [train.py:715] (6/8) Epoch 14, batch 1200, loss[loss=0.1358, simple_loss=0.2299, pruned_loss=0.02087, over 4812.00 frames.], tot_loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.03055, over 970443.96 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 23:10:47,172 INFO [train.py:715] (6/8) Epoch 14, batch 1250, loss[loss=0.1455, simple_loss=0.2125, pruned_loss=0.03922, over 4840.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2078, pruned_loss=0.03038, over 971511.70 frames.], batch size: 30, lr: 1.59e-04 +2022-05-07 23:11:26,191 INFO [train.py:715] (6/8) Epoch 14, batch 1300, loss[loss=0.1148, simple_loss=0.1846, pruned_loss=0.02251, over 4744.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2078, pruned_loss=0.03041, over 971741.72 frames.], batch size: 19, lr: 1.59e-04 +2022-05-07 23:12:05,713 INFO [train.py:715] (6/8) Epoch 14, batch 1350, loss[loss=0.1157, simple_loss=0.1901, pruned_loss=0.02065, over 4810.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2071, pruned_loss=0.03036, over 972601.37 frames.], batch size: 27, lr: 1.59e-04 +2022-05-07 23:12:45,084 INFO [train.py:715] (6/8) Epoch 14, batch 1400, loss[loss=0.1487, simple_loss=0.2102, pruned_loss=0.04355, over 4971.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2069, pruned_loss=0.03023, over 973511.31 frames.], batch size: 31, lr: 1.59e-04 +2022-05-07 23:13:24,652 INFO [train.py:715] (6/8) Epoch 14, batch 1450, loss[loss=0.1303, simple_loss=0.2098, 
pruned_loss=0.02545, over 4795.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2077, pruned_loss=0.03034, over 973530.55 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:14:04,618 INFO [train.py:715] (6/8) Epoch 14, batch 1500, loss[loss=0.151, simple_loss=0.2114, pruned_loss=0.04526, over 4867.00 frames.], tot_loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.03062, over 972936.65 frames.], batch size: 20, lr: 1.59e-04 +2022-05-07 23:14:44,293 INFO [train.py:715] (6/8) Epoch 14, batch 1550, loss[loss=0.1251, simple_loss=0.1978, pruned_loss=0.02619, over 4906.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03104, over 973259.57 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:15:24,190 INFO [train.py:715] (6/8) Epoch 14, batch 1600, loss[loss=0.1433, simple_loss=0.2151, pruned_loss=0.03571, over 4863.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03096, over 972077.81 frames.], batch size: 32, lr: 1.59e-04 +2022-05-07 23:16:03,425 INFO [train.py:715] (6/8) Epoch 14, batch 1650, loss[loss=0.1484, simple_loss=0.2218, pruned_loss=0.03751, over 4929.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03132, over 972436.62 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:16:43,079 INFO [train.py:715] (6/8) Epoch 14, batch 1700, loss[loss=0.1419, simple_loss=0.2214, pruned_loss=0.03123, over 4918.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03109, over 972940.58 frames.], batch size: 29, lr: 1.59e-04 +2022-05-07 23:17:22,565 INFO [train.py:715] (6/8) Epoch 14, batch 1750, loss[loss=0.1144, simple_loss=0.1945, pruned_loss=0.01717, over 4790.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03042, over 972046.34 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:18:02,281 INFO [train.py:715] (6/8) Epoch 14, batch 1800, loss[loss=0.1139, simple_loss=0.1884, pruned_loss=0.01974, over 4777.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03085, over 972249.54 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:18:40,627 INFO [train.py:715] (6/8) Epoch 14, batch 1850, loss[loss=0.1316, simple_loss=0.2048, pruned_loss=0.02917, over 4949.00 frames.], tot_loss[loss=0.135, simple_loss=0.2084, pruned_loss=0.03074, over 972249.14 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:19:19,859 INFO [train.py:715] (6/8) Epoch 14, batch 1900, loss[loss=0.1298, simple_loss=0.2049, pruned_loss=0.02739, over 4976.00 frames.], tot_loss[loss=0.1349, simple_loss=0.208, pruned_loss=0.0309, over 972027.31 frames.], batch size: 35, lr: 1.59e-04 +2022-05-07 23:19:59,660 INFO [train.py:715] (6/8) Epoch 14, batch 1950, loss[loss=0.1387, simple_loss=0.2242, pruned_loss=0.02655, over 4923.00 frames.], tot_loss[loss=0.1345, simple_loss=0.208, pruned_loss=0.03055, over 972768.90 frames.], batch size: 23, lr: 1.59e-04 +2022-05-07 23:20:39,805 INFO [train.py:715] (6/8) Epoch 14, batch 2000, loss[loss=0.1386, simple_loss=0.2067, pruned_loss=0.03521, over 4921.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03012, over 972776.62 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:21:19,092 INFO [train.py:715] (6/8) Epoch 14, batch 2050, loss[loss=0.1327, simple_loss=0.2065, pruned_loss=0.02945, over 4891.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02996, over 973023.76 frames.], batch size: 17, lr: 1.59e-04 +2022-05-07 23:21:58,531 INFO [train.py:715] (6/8) Epoch 14, batch 2100, loss[loss=0.125, simple_loss=0.197, pruned_loss=0.02645, over 
4926.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2081, pruned_loss=0.03046, over 973061.24 frames.], batch size: 29, lr: 1.59e-04 +2022-05-07 23:22:38,243 INFO [train.py:715] (6/8) Epoch 14, batch 2150, loss[loss=0.1234, simple_loss=0.2096, pruned_loss=0.01861, over 4878.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.02999, over 972556.44 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 23:23:16,935 INFO [train.py:715] (6/8) Epoch 14, batch 2200, loss[loss=0.148, simple_loss=0.2149, pruned_loss=0.04052, over 4871.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03004, over 973166.73 frames.], batch size: 32, lr: 1.59e-04 +2022-05-07 23:23:55,885 INFO [train.py:715] (6/8) Epoch 14, batch 2250, loss[loss=0.138, simple_loss=0.2146, pruned_loss=0.03064, over 4913.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03038, over 974013.84 frames.], batch size: 29, lr: 1.59e-04 +2022-05-07 23:24:34,962 INFO [train.py:715] (6/8) Epoch 14, batch 2300, loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03208, over 4912.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03056, over 974175.88 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:25:14,124 INFO [train.py:715] (6/8) Epoch 14, batch 2350, loss[loss=0.1309, simple_loss=0.2046, pruned_loss=0.02863, over 4705.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03106, over 973793.77 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 23:25:53,237 INFO [train.py:715] (6/8) Epoch 14, batch 2400, loss[loss=0.1202, simple_loss=0.2014, pruned_loss=0.01954, over 4815.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03147, over 974223.43 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 23:26:32,283 INFO [train.py:715] (6/8) Epoch 14, batch 2450, loss[loss=0.1351, simple_loss=0.1995, pruned_loss=0.0354, over 4902.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2089, pruned_loss=0.03122, over 973534.91 frames.], batch size: 29, lr: 1.59e-04 +2022-05-07 23:27:11,604 INFO [train.py:715] (6/8) Epoch 14, batch 2500, loss[loss=0.1421, simple_loss=0.2149, pruned_loss=0.03466, over 4891.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03066, over 973132.20 frames.], batch size: 19, lr: 1.59e-04 +2022-05-07 23:27:50,104 INFO [train.py:715] (6/8) Epoch 14, batch 2550, loss[loss=0.1542, simple_loss=0.2294, pruned_loss=0.03953, over 4695.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03084, over 973195.23 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 23:28:29,683 INFO [train.py:715] (6/8) Epoch 14, batch 2600, loss[loss=0.1387, simple_loss=0.2158, pruned_loss=0.03077, over 4913.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2099, pruned_loss=0.0309, over 974298.27 frames.], batch size: 23, lr: 1.59e-04 +2022-05-07 23:29:09,138 INFO [train.py:715] (6/8) Epoch 14, batch 2650, loss[loss=0.1295, simple_loss=0.1999, pruned_loss=0.02958, over 4973.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03112, over 974554.83 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:29:48,493 INFO [train.py:715] (6/8) Epoch 14, batch 2700, loss[loss=0.1444, simple_loss=0.2183, pruned_loss=0.0352, over 4877.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03086, over 973816.88 frames.], batch size: 32, lr: 1.59e-04 +2022-05-07 23:30:27,052 INFO [train.py:715] (6/8) Epoch 14, batch 2750, loss[loss=0.1231, simple_loss=0.1901, pruned_loss=0.02807, over 4786.00 frames.], 
tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.03059, over 973742.18 frames.], batch size: 12, lr: 1.59e-04 +2022-05-07 23:31:06,249 INFO [train.py:715] (6/8) Epoch 14, batch 2800, loss[loss=0.1427, simple_loss=0.218, pruned_loss=0.03371, over 4856.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03036, over 974028.23 frames.], batch size: 20, lr: 1.59e-04 +2022-05-07 23:31:45,883 INFO [train.py:715] (6/8) Epoch 14, batch 2850, loss[loss=0.1261, simple_loss=0.1986, pruned_loss=0.0268, over 4899.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03088, over 974090.68 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 23:32:24,327 INFO [train.py:715] (6/8) Epoch 14, batch 2900, loss[loss=0.1313, simple_loss=0.2115, pruned_loss=0.0255, over 4804.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03077, over 973861.91 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:33:06,132 INFO [train.py:715] (6/8) Epoch 14, batch 2950, loss[loss=0.1593, simple_loss=0.2179, pruned_loss=0.0504, over 4945.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2084, pruned_loss=0.03099, over 973343.89 frames.], batch size: 35, lr: 1.59e-04 +2022-05-07 23:33:45,667 INFO [train.py:715] (6/8) Epoch 14, batch 3000, loss[loss=0.1343, simple_loss=0.2135, pruned_loss=0.02756, over 4951.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2083, pruned_loss=0.03104, over 972864.36 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:33:45,667 INFO [train.py:733] (6/8) Computing validation loss +2022-05-07 23:33:55,240 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.1052, simple_loss=0.1891, pruned_loss=0.01067, over 914524.00 frames. +2022-05-07 23:34:34,252 INFO [train.py:715] (6/8) Epoch 14, batch 3050, loss[loss=0.1189, simple_loss=0.1966, pruned_loss=0.0206, over 4821.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03078, over 972209.55 frames.], batch size: 27, lr: 1.59e-04 +2022-05-07 23:35:14,220 INFO [train.py:715] (6/8) Epoch 14, batch 3100, loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.0315, over 4961.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03135, over 972809.21 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:35:53,771 INFO [train.py:715] (6/8) Epoch 14, batch 3150, loss[loss=0.1551, simple_loss=0.2333, pruned_loss=0.03851, over 4885.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2105, pruned_loss=0.03125, over 973241.05 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 23:36:33,461 INFO [train.py:715] (6/8) Epoch 14, batch 3200, loss[loss=0.1217, simple_loss=0.1937, pruned_loss=0.02486, over 4849.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.0314, over 972916.55 frames.], batch size: 12, lr: 1.59e-04 +2022-05-07 23:37:14,485 INFO [train.py:715] (6/8) Epoch 14, batch 3250, loss[loss=0.1479, simple_loss=0.2232, pruned_loss=0.03629, over 4658.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03158, over 972176.60 frames.], batch size: 13, lr: 1.59e-04 +2022-05-07 23:37:54,310 INFO [train.py:715] (6/8) Epoch 14, batch 3300, loss[loss=0.1285, simple_loss=0.2101, pruned_loss=0.02346, over 4935.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03171, over 971753.69 frames.], batch size: 29, lr: 1.59e-04 +2022-05-07 23:38:34,438 INFO [train.py:715] (6/8) Epoch 14, batch 3350, loss[loss=0.09212, simple_loss=0.1558, pruned_loss=0.01424, over 4756.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03183, over 
971905.61 frames.], batch size: 12, lr: 1.59e-04 +2022-05-07 23:39:15,380 INFO [train.py:715] (6/8) Epoch 14, batch 3400, loss[loss=0.1429, simple_loss=0.2197, pruned_loss=0.03301, over 4785.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03175, over 971122.84 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:39:56,033 INFO [train.py:715] (6/8) Epoch 14, batch 3450, loss[loss=0.1267, simple_loss=0.1926, pruned_loss=0.03043, over 4824.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2097, pruned_loss=0.032, over 971359.89 frames.], batch size: 13, lr: 1.59e-04 +2022-05-07 23:40:35,907 INFO [train.py:715] (6/8) Epoch 14, batch 3500, loss[loss=0.1374, simple_loss=0.2077, pruned_loss=0.03358, over 4905.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2094, pruned_loss=0.03158, over 971546.11 frames.], batch size: 19, lr: 1.59e-04 +2022-05-07 23:41:15,985 INFO [train.py:715] (6/8) Epoch 14, batch 3550, loss[loss=0.1526, simple_loss=0.231, pruned_loss=0.03712, over 4781.00 frames.], tot_loss[loss=0.136, simple_loss=0.209, pruned_loss=0.03145, over 971585.66 frames.], batch size: 17, lr: 1.59e-04 +2022-05-07 23:41:56,124 INFO [train.py:715] (6/8) Epoch 14, batch 3600, loss[loss=0.1285, simple_loss=0.1957, pruned_loss=0.03058, over 4940.00 frames.], tot_loss[loss=0.135, simple_loss=0.2081, pruned_loss=0.03096, over 972099.31 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:42:36,129 INFO [train.py:715] (6/8) Epoch 14, batch 3650, loss[loss=0.1446, simple_loss=0.223, pruned_loss=0.03306, over 4927.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.03065, over 972143.56 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:43:16,032 INFO [train.py:715] (6/8) Epoch 14, batch 3700, loss[loss=0.1146, simple_loss=0.1893, pruned_loss=0.01989, over 4982.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03028, over 972265.45 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:43:56,762 INFO [train.py:715] (6/8) Epoch 14, batch 3750, loss[loss=0.1525, simple_loss=0.2237, pruned_loss=0.04067, over 4947.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03041, over 973282.96 frames.], batch size: 23, lr: 1.59e-04 +2022-05-07 23:44:36,923 INFO [train.py:715] (6/8) Epoch 14, batch 3800, loss[loss=0.1683, simple_loss=0.2488, pruned_loss=0.04386, over 4837.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03031, over 972734.57 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 23:45:16,168 INFO [train.py:715] (6/8) Epoch 14, batch 3850, loss[loss=0.1294, simple_loss=0.2032, pruned_loss=0.02783, over 4805.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03037, over 972281.71 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:45:56,641 INFO [train.py:715] (6/8) Epoch 14, batch 3900, loss[loss=0.1212, simple_loss=0.1962, pruned_loss=0.02311, over 4854.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2075, pruned_loss=0.03015, over 972113.47 frames.], batch size: 32, lr: 1.59e-04 +2022-05-07 23:46:37,885 INFO [train.py:715] (6/8) Epoch 14, batch 3950, loss[loss=0.1102, simple_loss=0.182, pruned_loss=0.0192, over 4936.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2076, pruned_loss=0.03036, over 971667.09 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:47:18,825 INFO [train.py:715] (6/8) Epoch 14, batch 4000, loss[loss=0.1504, simple_loss=0.225, pruned_loss=0.03794, over 4690.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2074, pruned_loss=0.0302, over 971795.16 frames.], batch 
size: 15, lr: 1.59e-04 +2022-05-07 23:47:59,309 INFO [train.py:715] (6/8) Epoch 14, batch 4050, loss[loss=0.1245, simple_loss=0.2076, pruned_loss=0.02066, over 4768.00 frames.], tot_loss[loss=0.1346, simple_loss=0.208, pruned_loss=0.03057, over 971800.03 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:48:40,128 INFO [train.py:715] (6/8) Epoch 14, batch 4100, loss[loss=0.1487, simple_loss=0.2242, pruned_loss=0.03656, over 4813.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03066, over 971724.20 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:49:21,558 INFO [train.py:715] (6/8) Epoch 14, batch 4150, loss[loss=0.1441, simple_loss=0.2054, pruned_loss=0.04143, over 4776.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.0308, over 971568.94 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:50:02,218 INFO [train.py:715] (6/8) Epoch 14, batch 4200, loss[loss=0.1346, simple_loss=0.2038, pruned_loss=0.03276, over 4867.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03039, over 971910.03 frames.], batch size: 32, lr: 1.59e-04 +2022-05-07 23:50:43,286 INFO [train.py:715] (6/8) Epoch 14, batch 4250, loss[loss=0.1532, simple_loss=0.2251, pruned_loss=0.04062, over 4866.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03068, over 972994.91 frames.], batch size: 32, lr: 1.59e-04 +2022-05-07 23:51:25,172 INFO [train.py:715] (6/8) Epoch 14, batch 4300, loss[loss=0.142, simple_loss=0.234, pruned_loss=0.02507, over 4924.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.03084, over 972772.14 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:52:06,426 INFO [train.py:715] (6/8) Epoch 14, batch 4350, loss[loss=0.1252, simple_loss=0.2009, pruned_loss=0.0248, over 4910.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03088, over 972644.38 frames.], batch size: 23, lr: 1.59e-04 +2022-05-07 23:52:46,952 INFO [train.py:715] (6/8) Epoch 14, batch 4400, loss[loss=0.1526, simple_loss=0.2265, pruned_loss=0.03935, over 4968.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2095, pruned_loss=0.03155, over 972876.16 frames.], batch size: 35, lr: 1.59e-04 +2022-05-07 23:53:27,637 INFO [train.py:715] (6/8) Epoch 14, batch 4450, loss[loss=0.1445, simple_loss=0.233, pruned_loss=0.02803, over 4781.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03115, over 973367.66 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:54:08,633 INFO [train.py:715] (6/8) Epoch 14, batch 4500, loss[loss=0.1025, simple_loss=0.1815, pruned_loss=0.01171, over 4874.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03104, over 973177.95 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 23:54:48,677 INFO [train.py:715] (6/8) Epoch 14, batch 4550, loss[loss=0.1455, simple_loss=0.2142, pruned_loss=0.03839, over 4831.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03087, over 973287.16 frames.], batch size: 13, lr: 1.59e-04 +2022-05-07 23:55:27,631 INFO [train.py:715] (6/8) Epoch 14, batch 4600, loss[loss=0.1298, simple_loss=0.2134, pruned_loss=0.02313, over 4828.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03123, over 973205.91 frames.], batch size: 26, lr: 1.59e-04 +2022-05-07 23:56:08,466 INFO [train.py:715] (6/8) Epoch 14, batch 4650, loss[loss=0.1417, simple_loss=0.2163, pruned_loss=0.03358, over 4805.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03119, over 972834.20 frames.], batch size: 24, lr: 1.59e-04 
+2022-05-07 23:56:48,196 INFO [train.py:715] (6/8) Epoch 14, batch 4700, loss[loss=0.1149, simple_loss=0.1803, pruned_loss=0.02472, over 4904.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03091, over 972718.05 frames.], batch size: 17, lr: 1.59e-04 +2022-05-07 23:57:26,878 INFO [train.py:715] (6/8) Epoch 14, batch 4750, loss[loss=0.1241, simple_loss=0.1854, pruned_loss=0.03137, over 4821.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2088, pruned_loss=0.03143, over 972578.46 frames.], batch size: 13, lr: 1.58e-04 +2022-05-07 23:58:06,244 INFO [train.py:715] (6/8) Epoch 14, batch 4800, loss[loss=0.1261, simple_loss=0.2171, pruned_loss=0.01755, over 4802.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2087, pruned_loss=0.03106, over 972152.99 frames.], batch size: 21, lr: 1.58e-04 +2022-05-07 23:58:46,078 INFO [train.py:715] (6/8) Epoch 14, batch 4850, loss[loss=0.1241, simple_loss=0.1958, pruned_loss=0.02623, over 4786.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2082, pruned_loss=0.0308, over 971716.23 frames.], batch size: 17, lr: 1.58e-04 +2022-05-07 23:59:25,004 INFO [train.py:715] (6/8) Epoch 14, batch 4900, loss[loss=0.1676, simple_loss=0.2432, pruned_loss=0.04596, over 4874.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2084, pruned_loss=0.03089, over 972441.82 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 00:00:04,153 INFO [train.py:715] (6/8) Epoch 14, batch 4950, loss[loss=0.1531, simple_loss=0.2156, pruned_loss=0.04524, over 4841.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03029, over 972773.38 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:00:44,232 INFO [train.py:715] (6/8) Epoch 14, batch 5000, loss[loss=0.1171, simple_loss=0.1888, pruned_loss=0.02269, over 4777.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03076, over 972607.85 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:01:23,510 INFO [train.py:715] (6/8) Epoch 14, batch 5050, loss[loss=0.1367, simple_loss=0.2113, pruned_loss=0.03107, over 4864.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03107, over 972649.25 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 00:02:02,197 INFO [train.py:715] (6/8) Epoch 14, batch 5100, loss[loss=0.1275, simple_loss=0.2013, pruned_loss=0.0268, over 4773.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03097, over 973171.75 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 00:02:41,797 INFO [train.py:715] (6/8) Epoch 14, batch 5150, loss[loss=0.1505, simple_loss=0.2283, pruned_loss=0.03633, over 4988.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.031, over 973074.64 frames.], batch size: 39, lr: 1.58e-04 +2022-05-08 00:03:21,427 INFO [train.py:715] (6/8) Epoch 14, batch 5200, loss[loss=0.1091, simple_loss=0.1866, pruned_loss=0.01579, over 4973.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03073, over 973341.05 frames.], batch size: 25, lr: 1.58e-04 +2022-05-08 00:03:59,950 INFO [train.py:715] (6/8) Epoch 14, batch 5250, loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03329, over 4800.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.0315, over 972974.76 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:04:38,445 INFO [train.py:715] (6/8) Epoch 14, batch 5300, loss[loss=0.1161, simple_loss=0.1827, pruned_loss=0.02471, over 4853.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03123, over 973294.33 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 00:05:17,627 
INFO [train.py:715] (6/8) Epoch 14, batch 5350, loss[loss=0.1593, simple_loss=0.2265, pruned_loss=0.04606, over 4703.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03085, over 973857.33 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:05:56,200 INFO [train.py:715] (6/8) Epoch 14, batch 5400, loss[loss=0.1168, simple_loss=0.1893, pruned_loss=0.02214, over 4832.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03045, over 973792.45 frames.], batch size: 26, lr: 1.58e-04 +2022-05-08 00:06:34,703 INFO [train.py:715] (6/8) Epoch 14, batch 5450, loss[loss=0.1322, simple_loss=0.2102, pruned_loss=0.02711, over 4986.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03044, over 973952.24 frames.], batch size: 25, lr: 1.58e-04 +2022-05-08 00:07:13,538 INFO [train.py:715] (6/8) Epoch 14, batch 5500, loss[loss=0.1298, simple_loss=0.2117, pruned_loss=0.02392, over 4751.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03059, over 974157.47 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:07:53,215 INFO [train.py:715] (6/8) Epoch 14, batch 5550, loss[loss=0.1329, simple_loss=0.2103, pruned_loss=0.0277, over 4910.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03089, over 973089.08 frames.], batch size: 29, lr: 1.58e-04 +2022-05-08 00:08:31,548 INFO [train.py:715] (6/8) Epoch 14, batch 5600, loss[loss=0.1337, simple_loss=0.2022, pruned_loss=0.03263, over 4803.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03109, over 973002.66 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:09:10,034 INFO [train.py:715] (6/8) Epoch 14, batch 5650, loss[loss=0.1542, simple_loss=0.2225, pruned_loss=0.04291, over 4855.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03119, over 972089.26 frames.], batch size: 34, lr: 1.58e-04 +2022-05-08 00:09:49,146 INFO [train.py:715] (6/8) Epoch 14, batch 5700, loss[loss=0.1474, simple_loss=0.2278, pruned_loss=0.03351, over 4792.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2101, pruned_loss=0.03066, over 973024.59 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:10:27,421 INFO [train.py:715] (6/8) Epoch 14, batch 5750, loss[loss=0.1134, simple_loss=0.1903, pruned_loss=0.01823, over 4793.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2103, pruned_loss=0.03114, over 972591.82 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:11:05,799 INFO [train.py:715] (6/8) Epoch 14, batch 5800, loss[loss=0.1593, simple_loss=0.2151, pruned_loss=0.05171, over 4779.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2102, pruned_loss=0.03062, over 973466.60 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:11:44,417 INFO [train.py:715] (6/8) Epoch 14, batch 5850, loss[loss=0.107, simple_loss=0.1708, pruned_loss=0.0216, over 4793.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.03007, over 972641.16 frames.], batch size: 12, lr: 1.58e-04 +2022-05-08 00:12:23,187 INFO [train.py:715] (6/8) Epoch 14, batch 5900, loss[loss=0.1674, simple_loss=0.2367, pruned_loss=0.049, over 4745.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.0298, over 972518.52 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:13:02,945 INFO [train.py:715] (6/8) Epoch 14, batch 5950, loss[loss=0.1422, simple_loss=0.2203, pruned_loss=0.03203, over 4806.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02978, over 972573.65 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:13:42,637 INFO [train.py:715] (6/8) 
Epoch 14, batch 6000, loss[loss=0.1187, simple_loss=0.1966, pruned_loss=0.02041, over 4862.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02955, over 973075.60 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 00:13:42,638 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 00:13:52,503 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.105, simple_loss=0.1888, pruned_loss=0.01057, over 914524.00 frames. +2022-05-08 00:14:31,598 INFO [train.py:715] (6/8) Epoch 14, batch 6050, loss[loss=0.1222, simple_loss=0.1992, pruned_loss=0.02257, over 4951.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.0299, over 973819.69 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:15:10,793 INFO [train.py:715] (6/8) Epoch 14, batch 6100, loss[loss=0.1243, simple_loss=0.2016, pruned_loss=0.02351, over 4970.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2089, pruned_loss=0.02995, over 973342.36 frames.], batch size: 29, lr: 1.58e-04 +2022-05-08 00:15:50,797 INFO [train.py:715] (6/8) Epoch 14, batch 6150, loss[loss=0.1226, simple_loss=0.2005, pruned_loss=0.02239, over 4769.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03011, over 972958.21 frames.], batch size: 12, lr: 1.58e-04 +2022-05-08 00:16:30,417 INFO [train.py:715] (6/8) Epoch 14, batch 6200, loss[loss=0.1144, simple_loss=0.1834, pruned_loss=0.02269, over 4694.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03096, over 972034.47 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:17:10,269 INFO [train.py:715] (6/8) Epoch 14, batch 6250, loss[loss=0.142, simple_loss=0.2137, pruned_loss=0.03509, over 4788.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03083, over 971529.06 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:17:49,645 INFO [train.py:715] (6/8) Epoch 14, batch 6300, loss[loss=0.1439, simple_loss=0.2232, pruned_loss=0.03227, over 4921.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03075, over 970611.55 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:18:29,674 INFO [train.py:715] (6/8) Epoch 14, batch 6350, loss[loss=0.1446, simple_loss=0.2195, pruned_loss=0.03485, over 4858.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.03079, over 970212.26 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 00:19:09,451 INFO [train.py:715] (6/8) Epoch 14, batch 6400, loss[loss=0.1436, simple_loss=0.217, pruned_loss=0.03511, over 4954.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03071, over 970241.18 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:19:49,536 INFO [train.py:715] (6/8) Epoch 14, batch 6450, loss[loss=0.1091, simple_loss=0.189, pruned_loss=0.01463, over 4846.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03094, over 969992.88 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:20:29,527 INFO [train.py:715] (6/8) Epoch 14, batch 6500, loss[loss=0.1028, simple_loss=0.1755, pruned_loss=0.01504, over 4796.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03087, over 969694.46 frames.], batch size: 12, lr: 1.58e-04 +2022-05-08 00:21:09,184 INFO [train.py:715] (6/8) Epoch 14, batch 6550, loss[loss=0.1328, simple_loss=0.2084, pruned_loss=0.02864, over 4825.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03111, over 970809.09 frames.], batch size: 13, lr: 1.58e-04 +2022-05-08 00:21:49,068 INFO [train.py:715] (6/8) Epoch 14, batch 6600, loss[loss=0.128, simple_loss=0.1999, 
pruned_loss=0.02799, over 4773.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03071, over 971842.25 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:22:29,240 INFO [train.py:715] (6/8) Epoch 14, batch 6650, loss[loss=0.1164, simple_loss=0.1855, pruned_loss=0.02367, over 4768.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03109, over 971798.39 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:23:08,969 INFO [train.py:715] (6/8) Epoch 14, batch 6700, loss[loss=0.1091, simple_loss=0.1894, pruned_loss=0.01442, over 4776.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03076, over 971817.52 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 00:23:48,869 INFO [train.py:715] (6/8) Epoch 14, batch 6750, loss[loss=0.1411, simple_loss=0.2128, pruned_loss=0.03466, over 4882.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.03099, over 971725.35 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:24:28,849 INFO [train.py:715] (6/8) Epoch 14, batch 6800, loss[loss=0.137, simple_loss=0.2171, pruned_loss=0.0285, over 4987.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03071, over 972188.06 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 00:25:08,852 INFO [train.py:715] (6/8) Epoch 14, batch 6850, loss[loss=0.146, simple_loss=0.2294, pruned_loss=0.03124, over 4756.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03129, over 971696.52 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:25:48,272 INFO [train.py:715] (6/8) Epoch 14, batch 6900, loss[loss=0.1419, simple_loss=0.2246, pruned_loss=0.02964, over 4785.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03085, over 971130.17 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 00:26:28,464 INFO [train.py:715] (6/8) Epoch 14, batch 6950, loss[loss=0.1262, simple_loss=0.1993, pruned_loss=0.02652, over 4916.00 frames.], tot_loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.03061, over 971037.63 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:27:08,573 INFO [train.py:715] (6/8) Epoch 14, batch 7000, loss[loss=0.1199, simple_loss=0.1943, pruned_loss=0.02274, over 4806.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03047, over 971467.47 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:27:48,559 INFO [train.py:715] (6/8) Epoch 14, batch 7050, loss[loss=0.1514, simple_loss=0.2301, pruned_loss=0.03634, over 4798.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03061, over 971581.91 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:28:27,887 INFO [train.py:715] (6/8) Epoch 14, batch 7100, loss[loss=0.142, simple_loss=0.2105, pruned_loss=0.03669, over 4812.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03114, over 971444.71 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:29:07,988 INFO [train.py:715] (6/8) Epoch 14, batch 7150, loss[loss=0.1206, simple_loss=0.1968, pruned_loss=0.02219, over 4696.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03131, over 971880.78 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:29:48,179 INFO [train.py:715] (6/8) Epoch 14, batch 7200, loss[loss=0.1503, simple_loss=0.2278, pruned_loss=0.03635, over 4959.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.0313, over 971506.74 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:30:28,021 INFO [train.py:715] (6/8) Epoch 14, batch 7250, loss[loss=0.1289, simple_loss=0.1975, pruned_loss=0.03014, over 
4782.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03104, over 972550.81 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 00:31:08,146 INFO [train.py:715] (6/8) Epoch 14, batch 7300, loss[loss=0.1384, simple_loss=0.2095, pruned_loss=0.03364, over 4970.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03134, over 973461.93 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:31:48,274 INFO [train.py:715] (6/8) Epoch 14, batch 7350, loss[loss=0.1277, simple_loss=0.2105, pruned_loss=0.02249, over 4860.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03127, over 973731.01 frames.], batch size: 30, lr: 1.58e-04 +2022-05-08 00:32:28,615 INFO [train.py:715] (6/8) Epoch 14, batch 7400, loss[loss=0.1339, simple_loss=0.2055, pruned_loss=0.03113, over 4855.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03129, over 973906.19 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 00:33:08,067 INFO [train.py:715] (6/8) Epoch 14, batch 7450, loss[loss=0.1171, simple_loss=0.1849, pruned_loss=0.02461, over 4689.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03126, over 973669.77 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:33:47,758 INFO [train.py:715] (6/8) Epoch 14, batch 7500, loss[loss=0.1245, simple_loss=0.1927, pruned_loss=0.02815, over 4834.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03127, over 972356.81 frames.], batch size: 27, lr: 1.58e-04 +2022-05-08 00:34:27,410 INFO [train.py:715] (6/8) Epoch 14, batch 7550, loss[loss=0.1063, simple_loss=0.1754, pruned_loss=0.0186, over 4887.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.03093, over 973103.61 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:35:06,416 INFO [train.py:715] (6/8) Epoch 14, batch 7600, loss[loss=0.1227, simple_loss=0.1964, pruned_loss=0.02455, over 4862.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03107, over 972977.43 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 00:35:46,371 INFO [train.py:715] (6/8) Epoch 14, batch 7650, loss[loss=0.1338, simple_loss=0.2127, pruned_loss=0.02746, over 4953.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03101, over 973314.20 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:36:25,273 INFO [train.py:715] (6/8) Epoch 14, batch 7700, loss[loss=0.1631, simple_loss=0.2333, pruned_loss=0.04648, over 4916.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2101, pruned_loss=0.03089, over 973361.91 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:37:05,577 INFO [train.py:715] (6/8) Epoch 14, batch 7750, loss[loss=0.1256, simple_loss=0.2096, pruned_loss=0.0208, over 4983.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03118, over 973393.35 frames.], batch size: 27, lr: 1.58e-04 +2022-05-08 00:37:44,382 INFO [train.py:715] (6/8) Epoch 14, batch 7800, loss[loss=0.122, simple_loss=0.1876, pruned_loss=0.02824, over 4752.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03131, over 972956.33 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:38:23,473 INFO [train.py:715] (6/8) Epoch 14, batch 7850, loss[loss=0.1528, simple_loss=0.2245, pruned_loss=0.04056, over 4795.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03134, over 972009.04 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 00:39:03,278 INFO [train.py:715] (6/8) Epoch 14, batch 7900, loss[loss=0.1452, simple_loss=0.1994, pruned_loss=0.04554, over 4795.00 frames.], 
tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03136, over 971635.47 frames.], batch size: 12, lr: 1.58e-04 +2022-05-08 00:39:42,079 INFO [train.py:715] (6/8) Epoch 14, batch 7950, loss[loss=0.1407, simple_loss=0.2099, pruned_loss=0.03578, over 4988.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03118, over 971807.55 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 00:40:21,694 INFO [train.py:715] (6/8) Epoch 14, batch 8000, loss[loss=0.153, simple_loss=0.2213, pruned_loss=0.04236, over 4807.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03124, over 971745.43 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:41:00,526 INFO [train.py:715] (6/8) Epoch 14, batch 8050, loss[loss=0.132, simple_loss=0.2142, pruned_loss=0.02497, over 4792.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03136, over 970594.74 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:41:40,056 INFO [train.py:715] (6/8) Epoch 14, batch 8100, loss[loss=0.1432, simple_loss=0.2183, pruned_loss=0.03404, over 4780.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03143, over 969701.07 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:42:18,787 INFO [train.py:715] (6/8) Epoch 14, batch 8150, loss[loss=0.1122, simple_loss=0.1812, pruned_loss=0.02159, over 4930.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03166, over 970146.62 frames.], batch size: 23, lr: 1.58e-04 +2022-05-08 00:42:58,273 INFO [train.py:715] (6/8) Epoch 14, batch 8200, loss[loss=0.1163, simple_loss=0.1977, pruned_loss=0.01748, over 4947.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03102, over 969680.44 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:43:37,716 INFO [train.py:715] (6/8) Epoch 14, batch 8250, loss[loss=0.1498, simple_loss=0.234, pruned_loss=0.03278, over 4940.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.03093, over 970368.51 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:44:17,183 INFO [train.py:715] (6/8) Epoch 14, batch 8300, loss[loss=0.1338, simple_loss=0.2154, pruned_loss=0.02616, over 4747.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2105, pruned_loss=0.03137, over 970263.79 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:44:56,128 INFO [train.py:715] (6/8) Epoch 14, batch 8350, loss[loss=0.1368, simple_loss=0.2034, pruned_loss=0.03509, over 4821.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03104, over 970552.89 frames.], batch size: 26, lr: 1.58e-04 +2022-05-08 00:45:35,328 INFO [train.py:715] (6/8) Epoch 14, batch 8400, loss[loss=0.1161, simple_loss=0.1944, pruned_loss=0.0189, over 4823.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03033, over 971185.71 frames.], batch size: 26, lr: 1.58e-04 +2022-05-08 00:46:14,805 INFO [train.py:715] (6/8) Epoch 14, batch 8450, loss[loss=0.1521, simple_loss=0.2293, pruned_loss=0.03748, over 4745.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03014, over 972185.75 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:46:53,361 INFO [train.py:715] (6/8) Epoch 14, batch 8500, loss[loss=0.1435, simple_loss=0.2129, pruned_loss=0.03703, over 4853.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.0301, over 971740.01 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 00:47:32,469 INFO [train.py:715] (6/8) Epoch 14, batch 8550, loss[loss=0.1371, simple_loss=0.208, pruned_loss=0.03304, over 4824.00 frames.], tot_loss[loss=0.1343, 
simple_loss=0.2084, pruned_loss=0.03009, over 972017.52 frames.], batch size: 13, lr: 1.58e-04 +2022-05-08 00:48:13,444 INFO [train.py:715] (6/8) Epoch 14, batch 8600, loss[loss=0.1442, simple_loss=0.2006, pruned_loss=0.0439, over 4900.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03002, over 971836.63 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:48:52,734 INFO [train.py:715] (6/8) Epoch 14, batch 8650, loss[loss=0.1524, simple_loss=0.2167, pruned_loss=0.04405, over 4833.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03037, over 971641.32 frames.], batch size: 30, lr: 1.58e-04 +2022-05-08 00:49:34,160 INFO [train.py:715] (6/8) Epoch 14, batch 8700, loss[loss=0.1203, simple_loss=0.1904, pruned_loss=0.02507, over 4891.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2081, pruned_loss=0.03056, over 971470.93 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:50:13,530 INFO [train.py:715] (6/8) Epoch 14, batch 8750, loss[loss=0.1225, simple_loss=0.1949, pruned_loss=0.02507, over 4762.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2077, pruned_loss=0.03044, over 971725.08 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 00:50:53,248 INFO [train.py:715] (6/8) Epoch 14, batch 8800, loss[loss=0.1373, simple_loss=0.2217, pruned_loss=0.02645, over 4962.00 frames.], tot_loss[loss=0.134, simple_loss=0.2074, pruned_loss=0.0303, over 971581.28 frames.], batch size: 28, lr: 1.58e-04 +2022-05-08 00:51:32,828 INFO [train.py:715] (6/8) Epoch 14, batch 8850, loss[loss=0.1585, simple_loss=0.2226, pruned_loss=0.04719, over 4942.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2076, pruned_loss=0.03052, over 971748.90 frames.], batch size: 29, lr: 1.58e-04 +2022-05-08 00:52:13,347 INFO [train.py:715] (6/8) Epoch 14, batch 8900, loss[loss=0.1389, simple_loss=0.2201, pruned_loss=0.02888, over 4948.00 frames.], tot_loss[loss=0.1346, simple_loss=0.208, pruned_loss=0.03062, over 971896.90 frames.], batch size: 39, lr: 1.58e-04 +2022-05-08 00:52:53,218 INFO [train.py:715] (6/8) Epoch 14, batch 8950, loss[loss=0.1339, simple_loss=0.2041, pruned_loss=0.03187, over 4860.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.03098, over 972615.27 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 00:53:33,014 INFO [train.py:715] (6/8) Epoch 14, batch 9000, loss[loss=0.1283, simple_loss=0.1932, pruned_loss=0.03169, over 4829.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2087, pruned_loss=0.03129, over 971383.29 frames.], batch size: 27, lr: 1.58e-04 +2022-05-08 00:53:33,015 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 00:53:47,941 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.1052, simple_loss=0.189, pruned_loss=0.01074, over 914524.00 frames. 
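Each training entry above records the current batch's loss over a few thousand frames together with a running tot_loss, the batch size, and the learning rate. To turn a long stretch of such entries into a learning curve, a small parser is enough; the sketch below is purely illustrative, and the regular expression, the helper name parse_train_log, and the file name train-6.log are the editor's assumptions, not code or paths from train.py.

    import re

    # Matches the per-batch training entries in this log, e.g.
    #   Epoch 14, batch 9050, loss[...], tot_loss[loss=0.1364, ...], batch size: 14, lr: 1.58e-04
    # Pattern and helper name are illustrative assumptions, not taken from train.py.
    ENTRY = re.compile(
        r"Epoch (\d+), batch (\d+), .*?tot_loss\[loss=([\d.]+).*?\].*?lr: ([\d.e-]+)"
    )

    def parse_train_log(path):
        """Yield (epoch, batch, tot_loss, lr) for every training entry in the file."""
        with open(path) as f:
            for line in f:
                for m in ENTRY.finditer(line):
                    yield (int(m.group(1)), int(m.group(2)),
                           float(m.group(3)), float(m.group(4)))

    # Example: keep one point per 1000 batches of epoch 14 for a quick learning curve.
    points = [(b, tot) for e, b, tot, lr in parse_train_log("train-6.log")
              if e == 14 and b % 1000 == 0]

Validation entries ("Epoch 14, validation: ...") carry no batch index and are skipped by the pattern, so the result contains only the training curve.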
+2022-05-08 00:54:27,486 INFO [train.py:715] (6/8) Epoch 14, batch 9050, loss[loss=0.142, simple_loss=0.2002, pruned_loss=0.0419, over 4805.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2095, pruned_loss=0.03164, over 970834.57 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 00:55:07,812 INFO [train.py:715] (6/8) Epoch 14, batch 9100, loss[loss=0.1233, simple_loss=0.2076, pruned_loss=0.0195, over 4824.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2096, pruned_loss=0.03157, over 971125.36 frames.], batch size: 26, lr: 1.58e-04 +2022-05-08 00:55:47,317 INFO [train.py:715] (6/8) Epoch 14, batch 9150, loss[loss=0.1038, simple_loss=0.1764, pruned_loss=0.01565, over 4957.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2093, pruned_loss=0.03139, over 971131.35 frames.], batch size: 28, lr: 1.58e-04 +2022-05-08 00:56:27,177 INFO [train.py:715] (6/8) Epoch 14, batch 9200, loss[loss=0.1187, simple_loss=0.2029, pruned_loss=0.01726, over 4913.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03073, over 971551.75 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:57:06,894 INFO [train.py:715] (6/8) Epoch 14, batch 9250, loss[loss=0.1437, simple_loss=0.2198, pruned_loss=0.03377, over 4951.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03082, over 971890.99 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:57:46,610 INFO [train.py:715] (6/8) Epoch 14, batch 9300, loss[loss=0.1492, simple_loss=0.2127, pruned_loss=0.04289, over 4864.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2099, pruned_loss=0.03093, over 972171.76 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 00:58:26,531 INFO [train.py:715] (6/8) Epoch 14, batch 9350, loss[loss=0.1376, simple_loss=0.2063, pruned_loss=0.03449, over 4902.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03059, over 971549.70 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:59:06,677 INFO [train.py:715] (6/8) Epoch 14, batch 9400, loss[loss=0.142, simple_loss=0.2095, pruned_loss=0.03725, over 4913.00 frames.], tot_loss[loss=0.1357, simple_loss=0.21, pruned_loss=0.03068, over 971562.15 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:59:46,296 INFO [train.py:715] (6/8) Epoch 14, batch 9450, loss[loss=0.1609, simple_loss=0.2235, pruned_loss=0.04916, over 4877.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.0306, over 972113.98 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 01:00:26,055 INFO [train.py:715] (6/8) Epoch 14, batch 9500, loss[loss=0.1438, simple_loss=0.2139, pruned_loss=0.03684, over 4936.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03056, over 972260.01 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 01:01:05,846 INFO [train.py:715] (6/8) Epoch 14, batch 9550, loss[loss=0.1333, simple_loss=0.2114, pruned_loss=0.02759, over 4740.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03018, over 972325.96 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 01:01:46,004 INFO [train.py:715] (6/8) Epoch 14, batch 9600, loss[loss=0.1344, simple_loss=0.2146, pruned_loss=0.02713, over 4924.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02992, over 971142.27 frames.], batch size: 29, lr: 1.58e-04 +2022-05-08 01:02:25,433 INFO [train.py:715] (6/8) Epoch 14, batch 9650, loss[loss=0.118, simple_loss=0.1882, pruned_loss=0.02392, over 4986.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03005, over 971910.42 frames.], batch size: 31, lr: 1.58e-04 +2022-05-08 01:03:05,457 
INFO [train.py:715] (6/8) Epoch 14, batch 9700, loss[loss=0.1077, simple_loss=0.1864, pruned_loss=0.01451, over 4835.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03048, over 972266.63 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 01:03:45,044 INFO [train.py:715] (6/8) Epoch 14, batch 9750, loss[loss=0.1266, simple_loss=0.192, pruned_loss=0.03061, over 4755.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03048, over 972193.72 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 01:04:25,350 INFO [train.py:715] (6/8) Epoch 14, batch 9800, loss[loss=0.1171, simple_loss=0.1889, pruned_loss=0.02262, over 4948.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03056, over 972740.23 frames.], batch size: 28, lr: 1.58e-04 +2022-05-08 01:05:04,570 INFO [train.py:715] (6/8) Epoch 14, batch 9850, loss[loss=0.1368, simple_loss=0.2028, pruned_loss=0.03545, over 4912.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03099, over 971634.97 frames.], batch size: 29, lr: 1.58e-04 +2022-05-08 01:05:44,641 INFO [train.py:715] (6/8) Epoch 14, batch 9900, loss[loss=0.1241, simple_loss=0.197, pruned_loss=0.0256, over 4825.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03101, over 971311.00 frames.], batch size: 25, lr: 1.58e-04 +2022-05-08 01:06:24,623 INFO [train.py:715] (6/8) Epoch 14, batch 9950, loss[loss=0.1253, simple_loss=0.2026, pruned_loss=0.02394, over 4981.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03145, over 971814.78 frames.], batch size: 31, lr: 1.58e-04 +2022-05-08 01:07:03,948 INFO [train.py:715] (6/8) Epoch 14, batch 10000, loss[loss=0.1199, simple_loss=0.1939, pruned_loss=0.02293, over 4826.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03116, over 972147.59 frames.], batch size: 26, lr: 1.58e-04 +2022-05-08 01:07:43,997 INFO [train.py:715] (6/8) Epoch 14, batch 10050, loss[loss=0.1161, simple_loss=0.1921, pruned_loss=0.02008, over 4768.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03116, over 972801.09 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 01:08:23,519 INFO [train.py:715] (6/8) Epoch 14, batch 10100, loss[loss=0.149, simple_loss=0.226, pruned_loss=0.03598, over 4790.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03109, over 972795.57 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 01:09:03,296 INFO [train.py:715] (6/8) Epoch 14, batch 10150, loss[loss=0.148, simple_loss=0.2123, pruned_loss=0.04186, over 4853.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03094, over 973491.00 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 01:09:42,491 INFO [train.py:715] (6/8) Epoch 14, batch 10200, loss[loss=0.1141, simple_loss=0.18, pruned_loss=0.02407, over 4896.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03067, over 973319.71 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 01:10:22,737 INFO [train.py:715] (6/8) Epoch 14, batch 10250, loss[loss=0.1229, simple_loss=0.2002, pruned_loss=0.02281, over 4863.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03076, over 973010.20 frames.], batch size: 22, lr: 1.58e-04 +2022-05-08 01:11:02,462 INFO [train.py:715] (6/8) Epoch 14, batch 10300, loss[loss=0.1445, simple_loss=0.2243, pruned_loss=0.03239, over 4807.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.03114, over 972034.23 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 01:11:41,912 INFO [train.py:715] 
(6/8) Epoch 14, batch 10350, loss[loss=0.1436, simple_loss=0.2072, pruned_loss=0.04005, over 4915.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03141, over 972479.63 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 01:12:22,114 INFO [train.py:715] (6/8) Epoch 14, batch 10400, loss[loss=0.1268, simple_loss=0.1991, pruned_loss=0.02724, over 4744.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03168, over 972328.77 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 01:13:01,503 INFO [train.py:715] (6/8) Epoch 14, batch 10450, loss[loss=0.1491, simple_loss=0.2174, pruned_loss=0.0404, over 4984.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03136, over 973046.71 frames.], batch size: 39, lr: 1.58e-04 +2022-05-08 01:13:41,732 INFO [train.py:715] (6/8) Epoch 14, batch 10500, loss[loss=0.1242, simple_loss=0.1868, pruned_loss=0.03082, over 4777.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03096, over 972387.36 frames.], batch size: 12, lr: 1.58e-04 +2022-05-08 01:14:21,006 INFO [train.py:715] (6/8) Epoch 14, batch 10550, loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02844, over 4818.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03085, over 972696.16 frames.], batch size: 27, lr: 1.58e-04 +2022-05-08 01:15:01,266 INFO [train.py:715] (6/8) Epoch 14, batch 10600, loss[loss=0.147, simple_loss=0.2136, pruned_loss=0.04019, over 4969.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.03075, over 972484.52 frames.], batch size: 35, lr: 1.58e-04 +2022-05-08 01:15:40,592 INFO [train.py:715] (6/8) Epoch 14, batch 10650, loss[loss=0.1221, simple_loss=0.2021, pruned_loss=0.02107, over 4777.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03073, over 972385.70 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 01:16:19,726 INFO [train.py:715] (6/8) Epoch 14, batch 10700, loss[loss=0.1346, simple_loss=0.2004, pruned_loss=0.03438, over 4776.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03066, over 972303.07 frames.], batch size: 12, lr: 1.58e-04 +2022-05-08 01:16:58,899 INFO [train.py:715] (6/8) Epoch 14, batch 10750, loss[loss=0.1437, simple_loss=0.2206, pruned_loss=0.03339, over 4814.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03069, over 971686.40 frames.], batch size: 27, lr: 1.58e-04 +2022-05-08 01:17:38,326 INFO [train.py:715] (6/8) Epoch 14, batch 10800, loss[loss=0.139, simple_loss=0.2149, pruned_loss=0.03154, over 4953.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2098, pruned_loss=0.03073, over 972152.74 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 01:18:17,865 INFO [train.py:715] (6/8) Epoch 14, batch 10850, loss[loss=0.1642, simple_loss=0.2302, pruned_loss=0.04912, over 4776.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03088, over 971965.61 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 01:18:56,529 INFO [train.py:715] (6/8) Epoch 14, batch 10900, loss[loss=0.1494, simple_loss=0.2193, pruned_loss=0.03973, over 4869.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2101, pruned_loss=0.03082, over 972477.43 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 01:19:36,726 INFO [train.py:715] (6/8) Epoch 14, batch 10950, loss[loss=0.1106, simple_loss=0.1786, pruned_loss=0.02132, over 4805.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2098, pruned_loss=0.03086, over 973244.09 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 01:20:17,494 INFO [train.py:715] (6/8) Epoch 
14, batch 11000, loss[loss=0.1208, simple_loss=0.2007, pruned_loss=0.0205, over 4958.00 frames.], tot_loss[loss=0.135, simple_loss=0.2093, pruned_loss=0.03039, over 972767.37 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 01:20:56,620 INFO [train.py:715] (6/8) Epoch 14, batch 11050, loss[loss=0.1406, simple_loss=0.2107, pruned_loss=0.0352, over 4924.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03075, over 972082.98 frames.], batch size: 39, lr: 1.57e-04 +2022-05-08 01:21:37,664 INFO [train.py:715] (6/8) Epoch 14, batch 11100, loss[loss=0.1273, simple_loss=0.2108, pruned_loss=0.0219, over 4934.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03071, over 972310.09 frames.], batch size: 23, lr: 1.57e-04 +2022-05-08 01:22:18,219 INFO [train.py:715] (6/8) Epoch 14, batch 11150, loss[loss=0.1202, simple_loss=0.1889, pruned_loss=0.02568, over 4887.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03116, over 972024.61 frames.], batch size: 22, lr: 1.57e-04 +2022-05-08 01:22:58,451 INFO [train.py:715] (6/8) Epoch 14, batch 11200, loss[loss=0.1434, simple_loss=0.2162, pruned_loss=0.0353, over 4734.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03115, over 972148.47 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:23:37,876 INFO [train.py:715] (6/8) Epoch 14, batch 11250, loss[loss=0.1391, simple_loss=0.2111, pruned_loss=0.0336, over 4859.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03071, over 971351.84 frames.], batch size: 13, lr: 1.57e-04 +2022-05-08 01:24:18,306 INFO [train.py:715] (6/8) Epoch 14, batch 11300, loss[loss=0.1644, simple_loss=0.2266, pruned_loss=0.05112, over 4788.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03004, over 970258.11 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 01:24:58,565 INFO [train.py:715] (6/8) Epoch 14, batch 11350, loss[loss=0.1472, simple_loss=0.2121, pruned_loss=0.04115, over 4983.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03037, over 971595.90 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 01:25:37,730 INFO [train.py:715] (6/8) Epoch 14, batch 11400, loss[loss=0.1466, simple_loss=0.2233, pruned_loss=0.03497, over 4894.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03022, over 971945.25 frames.], batch size: 22, lr: 1.57e-04 +2022-05-08 01:26:18,741 INFO [train.py:715] (6/8) Epoch 14, batch 11450, loss[loss=0.1206, simple_loss=0.1858, pruned_loss=0.02765, over 4794.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02996, over 972006.61 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 01:26:59,114 INFO [train.py:715] (6/8) Epoch 14, batch 11500, loss[loss=0.1297, simple_loss=0.1958, pruned_loss=0.03178, over 4975.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03072, over 971950.29 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 01:27:39,026 INFO [train.py:715] (6/8) Epoch 14, batch 11550, loss[loss=0.138, simple_loss=0.2093, pruned_loss=0.03331, over 4980.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.03094, over 971983.00 frames.], batch size: 31, lr: 1.57e-04 +2022-05-08 01:28:18,477 INFO [train.py:715] (6/8) Epoch 14, batch 11600, loss[loss=0.1413, simple_loss=0.2229, pruned_loss=0.02989, over 4862.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03083, over 972084.46 frames.], batch size: 20, lr: 1.57e-04 +2022-05-08 01:28:58,180 INFO [train.py:715] (6/8) Epoch 14, batch 
11650, loss[loss=0.1198, simple_loss=0.1998, pruned_loss=0.01986, over 4778.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2076, pruned_loss=0.03034, over 971999.28 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 01:29:37,889 INFO [train.py:715] (6/8) Epoch 14, batch 11700, loss[loss=0.1308, simple_loss=0.2199, pruned_loss=0.0209, over 4795.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02997, over 972598.70 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 01:30:17,154 INFO [train.py:715] (6/8) Epoch 14, batch 11750, loss[loss=0.1357, simple_loss=0.2149, pruned_loss=0.02822, over 4779.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2089, pruned_loss=0.03008, over 972795.32 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 01:30:56,857 INFO [train.py:715] (6/8) Epoch 14, batch 11800, loss[loss=0.1189, simple_loss=0.1925, pruned_loss=0.02267, over 4906.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.0299, over 973203.69 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 01:31:35,984 INFO [train.py:715] (6/8) Epoch 14, batch 11850, loss[loss=0.1284, simple_loss=0.2026, pruned_loss=0.02712, over 4931.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03017, over 973197.14 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 01:32:14,894 INFO [train.py:715] (6/8) Epoch 14, batch 11900, loss[loss=0.1194, simple_loss=0.2018, pruned_loss=0.01849, over 4985.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03049, over 973061.75 frames.], batch size: 28, lr: 1.57e-04 +2022-05-08 01:32:54,219 INFO [train.py:715] (6/8) Epoch 14, batch 11950, loss[loss=0.1296, simple_loss=0.1982, pruned_loss=0.03055, over 4856.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03092, over 973101.36 frames.], batch size: 20, lr: 1.57e-04 +2022-05-08 01:33:33,590 INFO [train.py:715] (6/8) Epoch 14, batch 12000, loss[loss=0.1393, simple_loss=0.2056, pruned_loss=0.03648, over 4933.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03098, over 973088.41 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 01:33:33,590 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 01:33:43,199 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.1051, simple_loss=0.1889, pruned_loss=0.01067, over 914524.00 frames. 
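Alongside the per-batch loss, every entry carries a tot_loss measured over roughly 970k-974k frames; it moves far more smoothly than the per-batch numbers, which is the behaviour of a frame-weighted running average over recent batches. The sketch below illustrates that idea in minimal form; the class name RunningLoss and the 500-batch window are assumptions for illustration, not the training script's actual bookkeeping.

    from collections import deque

    class RunningLoss:
        """Frame-weighted running average over the most recent batches.

        Minimal illustration of a 'tot_loss[...] over N frames' style statistic;
        the window size is an assumption, not the training script's value.
        """

        def __init__(self, max_batches=500):
            self.window = deque(maxlen=max_batches)  # (loss * frames, frames) pairs

        def update(self, batch_loss, num_frames):
            # Weight each batch by its frame count so long and short batches
            # contribute in proportion to the audio they contain.
            self.window.append((batch_loss * num_frames, num_frames))

        @property
        def value(self):
            frames = sum(n for _, n in self.window)
            return sum(s for s, _ in self.window) / frames if frames else float("inf")

    tracker = RunningLoss()
    tracker.update(0.1296, 4856.0)   # per-batch loss of batch 11950 above
    tracker.update(0.1393, 4933.0)   # per-batch loss of batch 12000 above
    print(round(tracker.value, 4))   # frame-weighted average of the two batches -> 0.1345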
+2022-05-08 01:34:22,503 INFO [train.py:715] (6/8) Epoch 14, batch 12050, loss[loss=0.1119, simple_loss=0.1901, pruned_loss=0.01685, over 4778.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03109, over 974326.50 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 01:35:01,859 INFO [train.py:715] (6/8) Epoch 14, batch 12100, loss[loss=0.1278, simple_loss=0.1963, pruned_loss=0.02968, over 4900.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.03057, over 973909.26 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 01:35:41,281 INFO [train.py:715] (6/8) Epoch 14, batch 12150, loss[loss=0.1733, simple_loss=0.2472, pruned_loss=0.04966, over 4957.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03088, over 973619.67 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 01:36:20,620 INFO [train.py:715] (6/8) Epoch 14, batch 12200, loss[loss=0.1492, simple_loss=0.2072, pruned_loss=0.0456, over 4756.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03113, over 972738.60 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:37:00,488 INFO [train.py:715] (6/8) Epoch 14, batch 12250, loss[loss=0.1312, simple_loss=0.2032, pruned_loss=0.02956, over 4855.00 frames.], tot_loss[loss=0.1357, simple_loss=0.209, pruned_loss=0.03124, over 972765.92 frames.], batch size: 20, lr: 1.57e-04 +2022-05-08 01:37:39,680 INFO [train.py:715] (6/8) Epoch 14, batch 12300, loss[loss=0.1489, simple_loss=0.2116, pruned_loss=0.04316, over 4933.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2089, pruned_loss=0.03119, over 973069.40 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 01:38:19,191 INFO [train.py:715] (6/8) Epoch 14, batch 12350, loss[loss=0.1385, simple_loss=0.2148, pruned_loss=0.03105, over 4982.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03116, over 973763.13 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:38:58,801 INFO [train.py:715] (6/8) Epoch 14, batch 12400, loss[loss=0.1301, simple_loss=0.2057, pruned_loss=0.02723, over 4946.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03107, over 973915.17 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:39:37,831 INFO [train.py:715] (6/8) Epoch 14, batch 12450, loss[loss=0.1473, simple_loss=0.2184, pruned_loss=0.0381, over 4968.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03096, over 973851.00 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 01:40:17,259 INFO [train.py:715] (6/8) Epoch 14, batch 12500, loss[loss=0.115, simple_loss=0.1933, pruned_loss=0.01837, over 4796.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.03114, over 973425.01 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 01:40:57,010 INFO [train.py:715] (6/8) Epoch 14, batch 12550, loss[loss=0.1509, simple_loss=0.2236, pruned_loss=0.03912, over 4834.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03104, over 974047.32 frames.], batch size: 13, lr: 1.57e-04 +2022-05-08 01:41:36,646 INFO [train.py:715] (6/8) Epoch 14, batch 12600, loss[loss=0.1314, simple_loss=0.2131, pruned_loss=0.02489, over 4960.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2084, pruned_loss=0.03096, over 973163.17 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:42:15,595 INFO [train.py:715] (6/8) Epoch 14, batch 12650, loss[loss=0.1726, simple_loss=0.2404, pruned_loss=0.05238, over 4771.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2087, pruned_loss=0.03133, over 972418.36 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 
01:42:55,472 INFO [train.py:715] (6/8) Epoch 14, batch 12700, loss[loss=0.1526, simple_loss=0.2296, pruned_loss=0.03785, over 4757.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03105, over 971649.45 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 01:43:35,528 INFO [train.py:715] (6/8) Epoch 14, batch 12750, loss[loss=0.1319, simple_loss=0.2045, pruned_loss=0.02969, over 4861.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03131, over 971128.75 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:44:15,505 INFO [train.py:715] (6/8) Epoch 14, batch 12800, loss[loss=0.1373, simple_loss=0.2018, pruned_loss=0.03637, over 4921.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03097, over 970597.87 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 01:44:55,319 INFO [train.py:715] (6/8) Epoch 14, batch 12850, loss[loss=0.1523, simple_loss=0.2157, pruned_loss=0.04445, over 4934.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2089, pruned_loss=0.03103, over 972319.34 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 01:45:35,526 INFO [train.py:715] (6/8) Epoch 14, batch 12900, loss[loss=0.1454, simple_loss=0.2187, pruned_loss=0.036, over 4965.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03107, over 972931.57 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 01:46:15,876 INFO [train.py:715] (6/8) Epoch 14, batch 12950, loss[loss=0.1874, simple_loss=0.2707, pruned_loss=0.05205, over 4847.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03088, over 972426.03 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:46:55,839 INFO [train.py:715] (6/8) Epoch 14, batch 13000, loss[loss=0.1125, simple_loss=0.1951, pruned_loss=0.01495, over 4947.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.0307, over 972173.07 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 01:47:36,083 INFO [train.py:715] (6/8) Epoch 14, batch 13050, loss[loss=0.1253, simple_loss=0.2039, pruned_loss=0.02334, over 4740.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.03025, over 972650.25 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:48:16,101 INFO [train.py:715] (6/8) Epoch 14, batch 13100, loss[loss=0.1362, simple_loss=0.2093, pruned_loss=0.03155, over 4639.00 frames.], tot_loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.03058, over 972004.62 frames.], batch size: 13, lr: 1.57e-04 +2022-05-08 01:48:56,286 INFO [train.py:715] (6/8) Epoch 14, batch 13150, loss[loss=0.1363, simple_loss=0.2109, pruned_loss=0.03083, over 4698.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03043, over 971367.62 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:49:36,379 INFO [train.py:715] (6/8) Epoch 14, batch 13200, loss[loss=0.125, simple_loss=0.1999, pruned_loss=0.02503, over 4796.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03101, over 971146.89 frames.], batch size: 12, lr: 1.57e-04 +2022-05-08 01:50:16,584 INFO [train.py:715] (6/8) Epoch 14, batch 13250, loss[loss=0.1197, simple_loss=0.1883, pruned_loss=0.02552, over 4758.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.031, over 971323.59 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:50:56,842 INFO [train.py:715] (6/8) Epoch 14, batch 13300, loss[loss=0.1252, simple_loss=0.188, pruned_loss=0.03122, over 4794.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03095, over 972086.77 frames.], batch size: 12, lr: 1.57e-04 +2022-05-08 01:51:36,428 
INFO [train.py:715] (6/8) Epoch 14, batch 13350, loss[loss=0.1451, simple_loss=0.2173, pruned_loss=0.03642, over 4948.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03072, over 972996.83 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 01:52:15,921 INFO [train.py:715] (6/8) Epoch 14, batch 13400, loss[loss=0.1285, simple_loss=0.1979, pruned_loss=0.02953, over 4864.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03098, over 972957.29 frames.], batch size: 32, lr: 1.57e-04 +2022-05-08 01:52:55,518 INFO [train.py:715] (6/8) Epoch 14, batch 13450, loss[loss=0.1319, simple_loss=0.2083, pruned_loss=0.02773, over 4970.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03086, over 972090.29 frames.], batch size: 28, lr: 1.57e-04 +2022-05-08 01:53:35,080 INFO [train.py:715] (6/8) Epoch 14, batch 13500, loss[loss=0.1355, simple_loss=0.2065, pruned_loss=0.0322, over 4883.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03092, over 972257.97 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:54:14,287 INFO [train.py:715] (6/8) Epoch 14, batch 13550, loss[loss=0.1165, simple_loss=0.1918, pruned_loss=0.02059, over 4964.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03044, over 972145.66 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:54:53,679 INFO [train.py:715] (6/8) Epoch 14, batch 13600, loss[loss=0.1458, simple_loss=0.2253, pruned_loss=0.03313, over 4780.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03069, over 971786.87 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 01:55:32,975 INFO [train.py:715] (6/8) Epoch 14, batch 13650, loss[loss=0.1172, simple_loss=0.1927, pruned_loss=0.02081, over 4796.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2089, pruned_loss=0.03029, over 973066.42 frames.], batch size: 12, lr: 1.57e-04 +2022-05-08 01:56:12,541 INFO [train.py:715] (6/8) Epoch 14, batch 13700, loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03086, over 4902.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03038, over 972333.24 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 01:56:51,586 INFO [train.py:715] (6/8) Epoch 14, batch 13750, loss[loss=0.1405, simple_loss=0.2212, pruned_loss=0.02994, over 4692.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2092, pruned_loss=0.03049, over 972075.93 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:57:30,927 INFO [train.py:715] (6/8) Epoch 14, batch 13800, loss[loss=0.1001, simple_loss=0.1763, pruned_loss=0.01194, over 4788.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03112, over 972451.48 frames.], batch size: 12, lr: 1.57e-04 +2022-05-08 01:58:12,475 INFO [train.py:715] (6/8) Epoch 14, batch 13850, loss[loss=0.1555, simple_loss=0.2436, pruned_loss=0.03372, over 4815.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03129, over 972372.59 frames.], batch size: 27, lr: 1.57e-04 +2022-05-08 01:58:51,824 INFO [train.py:715] (6/8) Epoch 14, batch 13900, loss[loss=0.1326, simple_loss=0.2137, pruned_loss=0.02573, over 4914.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.03172, over 973554.07 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 01:59:31,450 INFO [train.py:715] (6/8) Epoch 14, batch 13950, loss[loss=0.1384, simple_loss=0.217, pruned_loss=0.02993, over 4883.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03144, over 974027.58 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 02:00:10,945 INFO 
[train.py:715] (6/8) Epoch 14, batch 14000, loss[loss=0.151, simple_loss=0.2246, pruned_loss=0.03863, over 4749.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03142, over 973459.74 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 02:00:50,384 INFO [train.py:715] (6/8) Epoch 14, batch 14050, loss[loss=0.1328, simple_loss=0.2112, pruned_loss=0.02721, over 4819.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.0313, over 973678.47 frames.], batch size: 27, lr: 1.57e-04 +2022-05-08 02:01:30,049 INFO [train.py:715] (6/8) Epoch 14, batch 14100, loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02893, over 4847.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2089, pruned_loss=0.03075, over 973391.17 frames.], batch size: 20, lr: 1.57e-04 +2022-05-08 02:02:09,598 INFO [train.py:715] (6/8) Epoch 14, batch 14150, loss[loss=0.1224, simple_loss=0.1931, pruned_loss=0.0258, over 4889.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03096, over 972785.84 frames.], batch size: 22, lr: 1.57e-04 +2022-05-08 02:02:49,098 INFO [train.py:715] (6/8) Epoch 14, batch 14200, loss[loss=0.1211, simple_loss=0.1899, pruned_loss=0.02618, over 4778.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03086, over 972207.52 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 02:03:28,356 INFO [train.py:715] (6/8) Epoch 14, batch 14250, loss[loss=0.1152, simple_loss=0.1873, pruned_loss=0.0216, over 4932.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03028, over 973003.82 frames.], batch size: 23, lr: 1.57e-04 +2022-05-08 02:04:08,209 INFO [train.py:715] (6/8) Epoch 14, batch 14300, loss[loss=0.1165, simple_loss=0.1927, pruned_loss=0.02016, over 4975.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03038, over 973585.28 frames.], batch size: 39, lr: 1.57e-04 +2022-05-08 02:04:47,400 INFO [train.py:715] (6/8) Epoch 14, batch 14350, loss[loss=0.1511, simple_loss=0.2223, pruned_loss=0.03993, over 4915.00 frames.], tot_loss[loss=0.135, simple_loss=0.2092, pruned_loss=0.03043, over 973574.38 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 02:05:26,854 INFO [train.py:715] (6/8) Epoch 14, batch 14400, loss[loss=0.13, simple_loss=0.194, pruned_loss=0.03297, over 4965.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03028, over 973482.02 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 02:06:06,346 INFO [train.py:715] (6/8) Epoch 14, batch 14450, loss[loss=0.1414, simple_loss=0.2076, pruned_loss=0.0376, over 4941.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03024, over 973239.37 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 02:06:45,903 INFO [train.py:715] (6/8) Epoch 14, batch 14500, loss[loss=0.1269, simple_loss=0.2082, pruned_loss=0.02278, over 4771.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2094, pruned_loss=0.03049, over 972524.47 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 02:07:25,179 INFO [train.py:715] (6/8) Epoch 14, batch 14550, loss[loss=0.1335, simple_loss=0.2109, pruned_loss=0.02802, over 4911.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2098, pruned_loss=0.03045, over 972500.60 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 02:08:04,459 INFO [train.py:715] (6/8) Epoch 14, batch 14600, loss[loss=0.1318, simple_loss=0.2073, pruned_loss=0.02817, over 4926.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2097, pruned_loss=0.03052, over 972824.26 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 02:08:44,682 INFO [train.py:715] 
(6/8) Epoch 14, batch 14650, loss[loss=0.1247, simple_loss=0.202, pruned_loss=0.02364, over 4971.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03029, over 971696.07 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 02:09:24,110 INFO [train.py:715] (6/8) Epoch 14, batch 14700, loss[loss=0.1223, simple_loss=0.1897, pruned_loss=0.02751, over 4964.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03066, over 971285.24 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 02:10:03,941 INFO [train.py:715] (6/8) Epoch 14, batch 14750, loss[loss=0.1145, simple_loss=0.1888, pruned_loss=0.02009, over 4780.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.0311, over 971058.53 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 02:10:43,099 INFO [train.py:715] (6/8) Epoch 14, batch 14800, loss[loss=0.1382, simple_loss=0.2195, pruned_loss=0.02846, over 4782.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03065, over 971008.20 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 02:11:23,015 INFO [train.py:715] (6/8) Epoch 14, batch 14850, loss[loss=0.153, simple_loss=0.2206, pruned_loss=0.04272, over 4833.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03057, over 970680.16 frames.], batch size: 30, lr: 1.57e-04 +2022-05-08 02:12:02,555 INFO [train.py:715] (6/8) Epoch 14, batch 14900, loss[loss=0.121, simple_loss=0.1933, pruned_loss=0.02436, over 4962.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03081, over 971290.71 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 02:12:42,006 INFO [train.py:715] (6/8) Epoch 14, batch 14950, loss[loss=0.1301, simple_loss=0.2093, pruned_loss=0.02544, over 4929.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2088, pruned_loss=0.03119, over 971720.20 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 02:13:22,062 INFO [train.py:715] (6/8) Epoch 14, batch 15000, loss[loss=0.1467, simple_loss=0.2253, pruned_loss=0.03401, over 4760.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03091, over 971568.99 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 02:13:22,063 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 02:13:31,707 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.1052, simple_loss=0.1889, pruned_loss=0.01079, over 914524.00 frames. 
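The validation passes at batches 6000, 9000, 12000 and 15000 are all computed over the same 914524 dev frames, so their losses (0.105, 0.1052, 0.1051, 0.1052) are directly comparable and essentially flat at this stage of epoch 14. A common way to act on such numbers is to keep the checkpoint with the lowest validation loss; the sketch below shows that pattern generically, with the class name BestValidTracker chosen for illustration rather than taken from the recipe.

    import math

    class BestValidTracker:
        """Remember the lowest validation loss seen so far (generic pattern)."""

        def __init__(self):
            self.best_loss = math.inf
            self.best_batch = None

        def update(self, batch_idx, valid_loss):
            improved = valid_loss < self.best_loss
            if improved:
                self.best_loss = valid_loss
                self.best_batch = batch_idx
            return improved  # caller can save a checkpoint when True

    # Fed with the four validation points visible in this stretch of the log:
    tracker = BestValidTracker()
    for batch, loss in [(6000, 0.105), (9000, 0.1052), (12000, 0.1051), (15000, 0.1052)]:
        tracker.update(batch, loss)
    print(tracker.best_batch, tracker.best_loss)   # -> 6000 0.105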
+2022-05-08 02:14:12,556 INFO [train.py:715] (6/8) Epoch 14, batch 15050, loss[loss=0.1054, simple_loss=0.18, pruned_loss=0.01542, over 4700.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03088, over 972026.22 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:14:52,643 INFO [train.py:715] (6/8) Epoch 14, batch 15100, loss[loss=0.1159, simple_loss=0.1885, pruned_loss=0.02168, over 4931.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03087, over 972145.81 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 02:15:33,289 INFO [train.py:715] (6/8) Epoch 14, batch 15150, loss[loss=0.1181, simple_loss=0.2043, pruned_loss=0.01595, over 4800.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.03113, over 971911.18 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 02:16:13,417 INFO [train.py:715] (6/8) Epoch 14, batch 15200, loss[loss=0.1157, simple_loss=0.1893, pruned_loss=0.021, over 4935.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03091, over 972083.39 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 02:16:54,061 INFO [train.py:715] (6/8) Epoch 14, batch 15250, loss[loss=0.1381, simple_loss=0.2167, pruned_loss=0.02975, over 4756.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.0309, over 971098.58 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 02:17:33,930 INFO [train.py:715] (6/8) Epoch 14, batch 15300, loss[loss=0.1471, simple_loss=0.2246, pruned_loss=0.03478, over 4798.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03082, over 971161.51 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 02:18:13,479 INFO [train.py:715] (6/8) Epoch 14, batch 15350, loss[loss=0.14, simple_loss=0.2127, pruned_loss=0.0337, over 4962.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03084, over 971586.44 frames.], batch size: 39, lr: 1.57e-04 +2022-05-08 02:18:53,587 INFO [train.py:715] (6/8) Epoch 14, batch 15400, loss[loss=0.1495, simple_loss=0.2248, pruned_loss=0.0371, over 4868.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03099, over 972009.63 frames.], batch size: 20, lr: 1.57e-04 +2022-05-08 02:19:32,979 INFO [train.py:715] (6/8) Epoch 14, batch 15450, loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02921, over 4798.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03098, over 972664.74 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 02:20:12,216 INFO [train.py:715] (6/8) Epoch 14, batch 15500, loss[loss=0.1255, simple_loss=0.1972, pruned_loss=0.02693, over 4886.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2086, pruned_loss=0.03115, over 971757.69 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 02:20:51,552 INFO [train.py:715] (6/8) Epoch 14, batch 15550, loss[loss=0.1515, simple_loss=0.2232, pruned_loss=0.03987, over 4922.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03139, over 971084.40 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 02:21:31,496 INFO [train.py:715] (6/8) Epoch 14, batch 15600, loss[loss=0.1321, simple_loss=0.2106, pruned_loss=0.02679, over 4818.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03092, over 971094.95 frames.], batch size: 27, lr: 1.57e-04 +2022-05-08 02:22:10,939 INFO [train.py:715] (6/8) Epoch 14, batch 15650, loss[loss=0.1395, simple_loss=0.2073, pruned_loss=0.03582, over 4762.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03095, over 971182.67 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 
02:22:49,324 INFO [train.py:715] (6/8) Epoch 14, batch 15700, loss[loss=0.1344, simple_loss=0.2132, pruned_loss=0.02781, over 4785.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03092, over 971413.03 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 02:23:29,532 INFO [train.py:715] (6/8) Epoch 14, batch 15750, loss[loss=0.1399, simple_loss=0.2106, pruned_loss=0.03457, over 4688.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03082, over 971597.46 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:24:09,059 INFO [train.py:715] (6/8) Epoch 14, batch 15800, loss[loss=0.1369, simple_loss=0.2085, pruned_loss=0.03263, over 4978.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03066, over 971555.47 frames.], batch size: 28, lr: 1.57e-04 +2022-05-08 02:24:48,301 INFO [train.py:715] (6/8) Epoch 14, batch 15850, loss[loss=0.1243, simple_loss=0.2017, pruned_loss=0.0234, over 4921.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.03039, over 971069.16 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 02:25:27,579 INFO [train.py:715] (6/8) Epoch 14, batch 15900, loss[loss=0.1318, simple_loss=0.1964, pruned_loss=0.03358, over 4817.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03106, over 971234.94 frames.], batch size: 25, lr: 1.57e-04 +2022-05-08 02:26:07,621 INFO [train.py:715] (6/8) Epoch 14, batch 15950, loss[loss=0.1334, simple_loss=0.2098, pruned_loss=0.0285, over 4956.00 frames.], tot_loss[loss=0.136, simple_loss=0.2092, pruned_loss=0.03137, over 970886.22 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 02:26:47,033 INFO [train.py:715] (6/8) Epoch 14, batch 16000, loss[loss=0.126, simple_loss=0.1919, pruned_loss=0.03004, over 4845.00 frames.], tot_loss[loss=0.136, simple_loss=0.209, pruned_loss=0.03147, over 970296.83 frames.], batch size: 34, lr: 1.57e-04 +2022-05-08 02:27:25,753 INFO [train.py:715] (6/8) Epoch 14, batch 16050, loss[loss=0.145, simple_loss=0.2209, pruned_loss=0.03452, over 4878.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2092, pruned_loss=0.03118, over 970978.05 frames.], batch size: 22, lr: 1.57e-04 +2022-05-08 02:28:04,469 INFO [train.py:715] (6/8) Epoch 14, batch 16100, loss[loss=0.1427, simple_loss=0.2188, pruned_loss=0.03327, over 4954.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03137, over 971129.71 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 02:28:42,585 INFO [train.py:715] (6/8) Epoch 14, batch 16150, loss[loss=0.1999, simple_loss=0.2531, pruned_loss=0.07331, over 4947.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03123, over 971158.19 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 02:29:20,835 INFO [train.py:715] (6/8) Epoch 14, batch 16200, loss[loss=0.1278, simple_loss=0.2009, pruned_loss=0.02731, over 4815.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03098, over 971788.41 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 02:29:59,441 INFO [train.py:715] (6/8) Epoch 14, batch 16250, loss[loss=0.1496, simple_loss=0.2252, pruned_loss=0.03705, over 4812.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2093, pruned_loss=0.03068, over 972416.22 frames.], batch size: 27, lr: 1.57e-04 +2022-05-08 02:30:38,581 INFO [train.py:715] (6/8) Epoch 14, batch 16300, loss[loss=0.151, simple_loss=0.2268, pruned_loss=0.03762, over 4890.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03062, over 972096.70 frames.], batch size: 22, lr: 1.57e-04 +2022-05-08 02:31:16,528 
INFO [train.py:715] (6/8) Epoch 14, batch 16350, loss[loss=0.1687, simple_loss=0.2323, pruned_loss=0.05251, over 4850.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2094, pruned_loss=0.03052, over 972621.88 frames.], batch size: 34, lr: 1.57e-04 +2022-05-08 02:31:55,703 INFO [train.py:715] (6/8) Epoch 14, batch 16400, loss[loss=0.1178, simple_loss=0.195, pruned_loss=0.02034, over 4947.00 frames.], tot_loss[loss=0.1357, simple_loss=0.21, pruned_loss=0.03073, over 972732.61 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 02:32:35,413 INFO [train.py:715] (6/8) Epoch 14, batch 16450, loss[loss=0.1226, simple_loss=0.1999, pruned_loss=0.02264, over 4781.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2093, pruned_loss=0.03042, over 972447.33 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 02:33:14,851 INFO [train.py:715] (6/8) Epoch 14, batch 16500, loss[loss=0.1284, simple_loss=0.1996, pruned_loss=0.02858, over 4956.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02977, over 972642.24 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 02:33:53,744 INFO [train.py:715] (6/8) Epoch 14, batch 16550, loss[loss=0.1497, simple_loss=0.229, pruned_loss=0.03518, over 4685.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03057, over 972608.30 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:34:34,124 INFO [train.py:715] (6/8) Epoch 14, batch 16600, loss[loss=0.1258, simple_loss=0.1939, pruned_loss=0.02882, over 4900.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.03012, over 972650.62 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 02:35:13,400 INFO [train.py:715] (6/8) Epoch 14, batch 16650, loss[loss=0.1578, simple_loss=0.2259, pruned_loss=0.04483, over 4907.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03065, over 973057.56 frames.], batch size: 23, lr: 1.57e-04 +2022-05-08 02:35:55,020 INFO [train.py:715] (6/8) Epoch 14, batch 16700, loss[loss=0.1282, simple_loss=0.1935, pruned_loss=0.03149, over 4854.00 frames.], tot_loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.03057, over 972103.96 frames.], batch size: 30, lr: 1.57e-04 +2022-05-08 02:36:34,914 INFO [train.py:715] (6/8) Epoch 14, batch 16750, loss[loss=0.1359, simple_loss=0.2041, pruned_loss=0.03389, over 4868.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03054, over 973566.13 frames.], batch size: 32, lr: 1.57e-04 +2022-05-08 02:37:15,256 INFO [train.py:715] (6/8) Epoch 14, batch 16800, loss[loss=0.1359, simple_loss=0.2213, pruned_loss=0.02525, over 4749.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.03064, over 972888.37 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 02:37:54,772 INFO [train.py:715] (6/8) Epoch 14, batch 16850, loss[loss=0.1594, simple_loss=0.239, pruned_loss=0.03985, over 4891.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03099, over 972684.10 frames.], batch size: 22, lr: 1.57e-04 +2022-05-08 02:38:34,394 INFO [train.py:715] (6/8) Epoch 14, batch 16900, loss[loss=0.1303, simple_loss=0.2003, pruned_loss=0.03016, over 4697.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03098, over 971976.18 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:39:15,372 INFO [train.py:715] (6/8) Epoch 14, batch 16950, loss[loss=0.1591, simple_loss=0.2333, pruned_loss=0.04245, over 4866.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.0311, over 971439.27 frames.], batch size: 20, lr: 1.57e-04 +2022-05-08 02:39:56,925 INFO 
[train.py:715] (6/8) Epoch 14, batch 17000, loss[loss=0.1396, simple_loss=0.2259, pruned_loss=0.02666, over 4824.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03071, over 970604.42 frames.], batch size: 13, lr: 1.57e-04 +2022-05-08 02:40:37,815 INFO [train.py:715] (6/8) Epoch 14, batch 17050, loss[loss=0.1093, simple_loss=0.1856, pruned_loss=0.0165, over 4866.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03085, over 971197.93 frames.], batch size: 20, lr: 1.57e-04 +2022-05-08 02:41:18,911 INFO [train.py:715] (6/8) Epoch 14, batch 17100, loss[loss=0.1221, simple_loss=0.1983, pruned_loss=0.02291, over 4887.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.03102, over 972212.67 frames.], batch size: 22, lr: 1.57e-04 +2022-05-08 02:42:01,003 INFO [train.py:715] (6/8) Epoch 14, batch 17150, loss[loss=0.1504, simple_loss=0.227, pruned_loss=0.03695, over 4859.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03122, over 972004.85 frames.], batch size: 20, lr: 1.57e-04 +2022-05-08 02:42:41,747 INFO [train.py:715] (6/8) Epoch 14, batch 17200, loss[loss=0.148, simple_loss=0.2388, pruned_loss=0.02862, over 4920.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.0309, over 971635.53 frames.], batch size: 23, lr: 1.57e-04 +2022-05-08 02:43:22,726 INFO [train.py:715] (6/8) Epoch 14, batch 17250, loss[loss=0.1441, simple_loss=0.2216, pruned_loss=0.03326, over 4816.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03127, over 970893.99 frames.], batch size: 25, lr: 1.57e-04 +2022-05-08 02:44:04,211 INFO [train.py:715] (6/8) Epoch 14, batch 17300, loss[loss=0.1414, simple_loss=0.2151, pruned_loss=0.03379, over 4765.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03157, over 971318.49 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 02:44:45,868 INFO [train.py:715] (6/8) Epoch 14, batch 17350, loss[loss=0.138, simple_loss=0.2064, pruned_loss=0.03481, over 4773.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2105, pruned_loss=0.03181, over 972503.03 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 02:45:26,240 INFO [train.py:715] (6/8) Epoch 14, batch 17400, loss[loss=0.1103, simple_loss=0.1863, pruned_loss=0.01711, over 4913.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03143, over 971924.15 frames.], batch size: 29, lr: 1.56e-04 +2022-05-08 02:46:07,489 INFO [train.py:715] (6/8) Epoch 14, batch 17450, loss[loss=0.1347, simple_loss=0.2005, pruned_loss=0.03448, over 4857.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.0311, over 972514.27 frames.], batch size: 32, lr: 1.56e-04 +2022-05-08 02:46:49,068 INFO [train.py:715] (6/8) Epoch 14, batch 17500, loss[loss=0.1304, simple_loss=0.205, pruned_loss=0.02787, over 4794.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.0307, over 972606.69 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 02:47:29,791 INFO [train.py:715] (6/8) Epoch 14, batch 17550, loss[loss=0.125, simple_loss=0.2019, pruned_loss=0.02408, over 4779.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03035, over 972291.49 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 02:48:10,328 INFO [train.py:715] (6/8) Epoch 14, batch 17600, loss[loss=0.1434, simple_loss=0.2084, pruned_loss=0.03923, over 4818.00 frames.], tot_loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.03063, over 970935.75 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 02:48:52,034 INFO [train.py:715] 
(6/8) Epoch 14, batch 17650, loss[loss=0.1267, simple_loss=0.2089, pruned_loss=0.02225, over 4796.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.0304, over 970783.46 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 02:49:33,167 INFO [train.py:715] (6/8) Epoch 14, batch 17700, loss[loss=0.1497, simple_loss=0.2186, pruned_loss=0.0404, over 4908.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03039, over 970646.15 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 02:50:13,655 INFO [train.py:715] (6/8) Epoch 14, batch 17750, loss[loss=0.1503, simple_loss=0.2296, pruned_loss=0.03552, over 4791.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2093, pruned_loss=0.03043, over 971721.32 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 02:50:55,019 INFO [train.py:715] (6/8) Epoch 14, batch 17800, loss[loss=0.1172, simple_loss=0.1899, pruned_loss=0.02222, over 4808.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03029, over 971682.02 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 02:51:35,968 INFO [train.py:715] (6/8) Epoch 14, batch 17850, loss[loss=0.1661, simple_loss=0.2361, pruned_loss=0.04806, over 4840.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02996, over 970819.90 frames.], batch size: 32, lr: 1.56e-04 +2022-05-08 02:52:16,734 INFO [train.py:715] (6/8) Epoch 14, batch 17900, loss[loss=0.1674, simple_loss=0.2426, pruned_loss=0.04608, over 4990.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02997, over 971319.90 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 02:52:57,212 INFO [train.py:715] (6/8) Epoch 14, batch 17950, loss[loss=0.1283, simple_loss=0.2045, pruned_loss=0.02607, over 4914.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2092, pruned_loss=0.03031, over 971596.98 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 02:53:38,603 INFO [train.py:715] (6/8) Epoch 14, batch 18000, loss[loss=0.1676, simple_loss=0.2424, pruned_loss=0.04643, over 4913.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2099, pruned_loss=0.03063, over 972029.47 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 02:53:38,604 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 02:53:48,448 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.1052, simple_loss=0.1889, pruned_loss=0.01075, over 914524.00 frames. 
+2022-05-08 02:54:29,838 INFO [train.py:715] (6/8) Epoch 14, batch 18050, loss[loss=0.1327, simple_loss=0.2111, pruned_loss=0.02719, over 4836.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2097, pruned_loss=0.0307, over 972200.06 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 02:55:10,985 INFO [train.py:715] (6/8) Epoch 14, batch 18100, loss[loss=0.1623, simple_loss=0.2398, pruned_loss=0.04235, over 4771.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03107, over 972598.22 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 02:55:52,584 INFO [train.py:715] (6/8) Epoch 14, batch 18150, loss[loss=0.1287, simple_loss=0.2056, pruned_loss=0.02584, over 4922.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03137, over 973023.22 frames.], batch size: 29, lr: 1.56e-04 +2022-05-08 02:56:33,503 INFO [train.py:715] (6/8) Epoch 14, batch 18200, loss[loss=0.179, simple_loss=0.2287, pruned_loss=0.06464, over 4988.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03187, over 972843.95 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 02:57:15,447 INFO [train.py:715] (6/8) Epoch 14, batch 18250, loss[loss=0.1366, simple_loss=0.2177, pruned_loss=0.02775, over 4763.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03142, over 972878.03 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 02:57:56,900 INFO [train.py:715] (6/8) Epoch 14, batch 18300, loss[loss=0.1644, simple_loss=0.254, pruned_loss=0.03738, over 4842.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03145, over 973391.68 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 02:58:36,500 INFO [train.py:715] (6/8) Epoch 14, batch 18350, loss[loss=0.1407, simple_loss=0.2217, pruned_loss=0.02985, over 4756.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03102, over 973516.25 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 02:59:17,360 INFO [train.py:715] (6/8) Epoch 14, batch 18400, loss[loss=0.1365, simple_loss=0.2172, pruned_loss=0.02794, over 4879.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03046, over 972763.68 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 02:59:57,997 INFO [train.py:715] (6/8) Epoch 14, batch 18450, loss[loss=0.1274, simple_loss=0.2076, pruned_loss=0.02363, over 4887.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02991, over 972341.34 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 03:00:38,224 INFO [train.py:715] (6/8) Epoch 14, batch 18500, loss[loss=0.1181, simple_loss=0.1961, pruned_loss=0.02004, over 4782.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03054, over 972902.18 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 03:01:18,700 INFO [train.py:715] (6/8) Epoch 14, batch 18550, loss[loss=0.1218, simple_loss=0.199, pruned_loss=0.02235, over 4742.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03012, over 971750.24 frames.], batch size: 12, lr: 1.56e-04 +2022-05-08 03:01:59,560 INFO [train.py:715] (6/8) Epoch 14, batch 18600, loss[loss=0.1504, simple_loss=0.22, pruned_loss=0.04036, over 4983.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03007, over 971278.43 frames.], batch size: 35, lr: 1.56e-04 +2022-05-08 03:02:39,873 INFO [train.py:715] (6/8) Epoch 14, batch 18650, loss[loss=0.1019, simple_loss=0.1774, pruned_loss=0.01318, over 4950.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03018, over 971724.27 frames.], batch size: 24, lr: 1.56e-04 +2022-05-08 
03:03:20,571 INFO [train.py:715] (6/8) Epoch 14, batch 18700, loss[loss=0.09528, simple_loss=0.1729, pruned_loss=0.008838, over 4783.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03009, over 971889.20 frames.], batch size: 12, lr: 1.56e-04 +2022-05-08 03:04:01,159 INFO [train.py:715] (6/8) Epoch 14, batch 18750, loss[loss=0.1251, simple_loss=0.1937, pruned_loss=0.02824, over 4861.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03048, over 971626.33 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:04:41,134 INFO [train.py:715] (6/8) Epoch 14, batch 18800, loss[loss=0.1636, simple_loss=0.2352, pruned_loss=0.046, over 4947.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03057, over 971847.91 frames.], batch size: 29, lr: 1.56e-04 +2022-05-08 03:05:21,084 INFO [train.py:715] (6/8) Epoch 14, batch 18850, loss[loss=0.1282, simple_loss=0.2152, pruned_loss=0.02057, over 4935.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03063, over 972535.97 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 03:06:01,838 INFO [train.py:715] (6/8) Epoch 14, batch 18900, loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03327, over 4849.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03086, over 972600.82 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 03:06:42,902 INFO [train.py:715] (6/8) Epoch 14, batch 18950, loss[loss=0.1673, simple_loss=0.2463, pruned_loss=0.04411, over 4913.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03087, over 971930.57 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:07:23,148 INFO [train.py:715] (6/8) Epoch 14, batch 19000, loss[loss=0.1272, simple_loss=0.2086, pruned_loss=0.02289, over 4771.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03106, over 971400.18 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:08:04,092 INFO [train.py:715] (6/8) Epoch 14, batch 19050, loss[loss=0.1361, simple_loss=0.213, pruned_loss=0.02961, over 4905.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2099, pruned_loss=0.03083, over 971474.98 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:08:45,085 INFO [train.py:715] (6/8) Epoch 14, batch 19100, loss[loss=0.135, simple_loss=0.2055, pruned_loss=0.03222, over 4941.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2097, pruned_loss=0.03065, over 972107.69 frames.], batch size: 29, lr: 1.56e-04 +2022-05-08 03:09:25,472 INFO [train.py:715] (6/8) Epoch 14, batch 19150, loss[loss=0.1552, simple_loss=0.2381, pruned_loss=0.03615, over 4773.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03038, over 972775.05 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:10:04,877 INFO [train.py:715] (6/8) Epoch 14, batch 19200, loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02994, over 4748.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03061, over 973095.35 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:10:45,986 INFO [train.py:715] (6/8) Epoch 14, batch 19250, loss[loss=0.1628, simple_loss=0.2452, pruned_loss=0.04022, over 4779.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03036, over 972921.91 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:11:26,905 INFO [train.py:715] (6/8) Epoch 14, batch 19300, loss[loss=0.1371, simple_loss=0.2001, pruned_loss=0.0371, over 4744.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03016, over 971991.82 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:12:06,955 
INFO [train.py:715] (6/8) Epoch 14, batch 19350, loss[loss=0.1154, simple_loss=0.1811, pruned_loss=0.02487, over 4824.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03069, over 971940.82 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:12:47,203 INFO [train.py:715] (6/8) Epoch 14, batch 19400, loss[loss=0.1586, simple_loss=0.2414, pruned_loss=0.0379, over 4979.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03073, over 971137.86 frames.], batch size: 24, lr: 1.56e-04 +2022-05-08 03:13:28,652 INFO [train.py:715] (6/8) Epoch 14, batch 19450, loss[loss=0.1094, simple_loss=0.1883, pruned_loss=0.01525, over 4813.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03075, over 972097.33 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 03:14:08,963 INFO [train.py:715] (6/8) Epoch 14, batch 19500, loss[loss=0.1283, simple_loss=0.1957, pruned_loss=0.03041, over 4935.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03063, over 972425.92 frames.], batch size: 23, lr: 1.56e-04 +2022-05-08 03:14:49,615 INFO [train.py:715] (6/8) Epoch 14, batch 19550, loss[loss=0.1283, simple_loss=0.2041, pruned_loss=0.02626, over 4817.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03124, over 971736.04 frames.], batch size: 26, lr: 1.56e-04 +2022-05-08 03:15:30,077 INFO [train.py:715] (6/8) Epoch 14, batch 19600, loss[loss=0.1741, simple_loss=0.2405, pruned_loss=0.05391, over 4779.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.031, over 971473.71 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:16:11,005 INFO [train.py:715] (6/8) Epoch 14, batch 19650, loss[loss=0.1191, simple_loss=0.1958, pruned_loss=0.02117, over 4809.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2098, pruned_loss=0.03038, over 972033.89 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 03:16:51,975 INFO [train.py:715] (6/8) Epoch 14, batch 19700, loss[loss=0.1063, simple_loss=0.1712, pruned_loss=0.02068, over 4835.00 frames.], tot_loss[loss=0.1358, simple_loss=0.21, pruned_loss=0.03077, over 972035.66 frames.], batch size: 12, lr: 1.56e-04 +2022-05-08 03:17:32,734 INFO [train.py:715] (6/8) Epoch 14, batch 19750, loss[loss=0.1395, simple_loss=0.2081, pruned_loss=0.03543, over 4883.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2105, pruned_loss=0.03111, over 972380.30 frames.], batch size: 39, lr: 1.56e-04 +2022-05-08 03:18:13,652 INFO [train.py:715] (6/8) Epoch 14, batch 19800, loss[loss=0.1274, simple_loss=0.2096, pruned_loss=0.02259, over 4794.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2109, pruned_loss=0.03142, over 973785.30 frames.], batch size: 24, lr: 1.56e-04 +2022-05-08 03:18:54,281 INFO [train.py:715] (6/8) Epoch 14, batch 19850, loss[loss=0.1395, simple_loss=0.2116, pruned_loss=0.03369, over 4740.00 frames.], tot_loss[loss=0.136, simple_loss=0.2103, pruned_loss=0.03091, over 974296.31 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:19:35,283 INFO [train.py:715] (6/8) Epoch 14, batch 19900, loss[loss=0.141, simple_loss=0.2111, pruned_loss=0.03544, over 4794.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2103, pruned_loss=0.03112, over 973433.60 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:20:15,394 INFO [train.py:715] (6/8) Epoch 14, batch 19950, loss[loss=0.1357, simple_loss=0.2157, pruned_loss=0.02783, over 4987.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03104, over 973627.15 frames.], batch size: 27, lr: 1.56e-04 +2022-05-08 03:20:55,693 INFO 
[train.py:715] (6/8) Epoch 14, batch 20000, loss[loss=0.1531, simple_loss=0.2311, pruned_loss=0.03755, over 4921.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03092, over 973411.49 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 03:21:35,501 INFO [train.py:715] (6/8) Epoch 14, batch 20050, loss[loss=0.1207, simple_loss=0.192, pruned_loss=0.02467, over 4890.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03049, over 973196.90 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 03:22:15,346 INFO [train.py:715] (6/8) Epoch 14, batch 20100, loss[loss=0.1619, simple_loss=0.2267, pruned_loss=0.04852, over 4705.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03056, over 972863.64 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:22:55,790 INFO [train.py:715] (6/8) Epoch 14, batch 20150, loss[loss=0.1064, simple_loss=0.1778, pruned_loss=0.01751, over 4758.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03027, over 971900.75 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 03:23:35,888 INFO [train.py:715] (6/8) Epoch 14, batch 20200, loss[loss=0.1314, simple_loss=0.2108, pruned_loss=0.02598, over 4778.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03009, over 971581.43 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:24:16,359 INFO [train.py:715] (6/8) Epoch 14, batch 20250, loss[loss=0.1151, simple_loss=0.1903, pruned_loss=0.01996, over 4781.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03002, over 972326.49 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:24:56,505 INFO [train.py:715] (6/8) Epoch 14, batch 20300, loss[loss=0.1276, simple_loss=0.1995, pruned_loss=0.02783, over 4794.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03012, over 972557.64 frames.], batch size: 24, lr: 1.56e-04 +2022-05-08 03:25:37,283 INFO [train.py:715] (6/8) Epoch 14, batch 20350, loss[loss=0.1263, simple_loss=0.2033, pruned_loss=0.02462, over 4828.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03012, over 971880.71 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:26:17,610 INFO [train.py:715] (6/8) Epoch 14, batch 20400, loss[loss=0.1782, simple_loss=0.2481, pruned_loss=0.05413, over 4873.00 frames.], tot_loss[loss=0.1358, simple_loss=0.21, pruned_loss=0.03076, over 971946.09 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:26:58,060 INFO [train.py:715] (6/8) Epoch 14, batch 20450, loss[loss=0.1362, simple_loss=0.2079, pruned_loss=0.03226, over 4992.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.0307, over 972390.28 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:27:39,218 INFO [train.py:715] (6/8) Epoch 14, batch 20500, loss[loss=0.1503, simple_loss=0.2256, pruned_loss=0.03757, over 4749.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.0306, over 971796.37 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 03:28:19,575 INFO [train.py:715] (6/8) Epoch 14, batch 20550, loss[loss=0.1865, simple_loss=0.2598, pruned_loss=0.05663, over 4861.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2099, pruned_loss=0.03087, over 972812.51 frames.], batch size: 38, lr: 1.56e-04 +2022-05-08 03:29:00,475 INFO [train.py:715] (6/8) Epoch 14, batch 20600, loss[loss=0.1452, simple_loss=0.2118, pruned_loss=0.03928, over 4848.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2097, pruned_loss=0.03042, over 972682.62 frames.], batch size: 32, lr: 1.56e-04 +2022-05-08 03:29:41,273 INFO 
[train.py:715] (6/8) Epoch 14, batch 20650, loss[loss=0.1521, simple_loss=0.2102, pruned_loss=0.04698, over 4752.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03062, over 972439.78 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:30:22,926 INFO [train.py:715] (6/8) Epoch 14, batch 20700, loss[loss=0.1261, simple_loss=0.2003, pruned_loss=0.02599, over 4892.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02993, over 972858.53 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 03:31:03,265 INFO [train.py:715] (6/8) Epoch 14, batch 20750, loss[loss=0.1563, simple_loss=0.2372, pruned_loss=0.03768, over 4873.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2096, pruned_loss=0.03051, over 972360.72 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:31:43,461 INFO [train.py:715] (6/8) Epoch 14, batch 20800, loss[loss=0.1137, simple_loss=0.1879, pruned_loss=0.01978, over 4969.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2092, pruned_loss=0.03033, over 972177.23 frames.], batch size: 24, lr: 1.56e-04 +2022-05-08 03:32:24,159 INFO [train.py:715] (6/8) Epoch 14, batch 20850, loss[loss=0.148, simple_loss=0.2301, pruned_loss=0.03296, over 4787.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03063, over 972490.24 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:33:04,694 INFO [train.py:715] (6/8) Epoch 14, batch 20900, loss[loss=0.1484, simple_loss=0.2075, pruned_loss=0.04471, over 4966.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03124, over 973314.23 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:33:45,371 INFO [train.py:715] (6/8) Epoch 14, batch 20950, loss[loss=0.1601, simple_loss=0.2348, pruned_loss=0.04269, over 4925.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03103, over 972651.67 frames.], batch size: 23, lr: 1.56e-04 +2022-05-08 03:34:25,919 INFO [train.py:715] (6/8) Epoch 14, batch 21000, loss[loss=0.1423, simple_loss=0.2106, pruned_loss=0.03703, over 4857.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.031, over 972603.19 frames.], batch size: 32, lr: 1.56e-04 +2022-05-08 03:34:25,920 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 03:34:37,000 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.1051, simple_loss=0.1889, pruned_loss=0.0107, over 914524.00 frames. 
+2022-05-08 03:35:17,906 INFO [train.py:715] (6/8) Epoch 14, batch 21050, loss[loss=0.1507, simple_loss=0.2145, pruned_loss=0.04347, over 4979.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2111, pruned_loss=0.03114, over 972604.82 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:35:58,610 INFO [train.py:715] (6/8) Epoch 14, batch 21100, loss[loss=0.1117, simple_loss=0.1899, pruned_loss=0.01675, over 4831.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2103, pruned_loss=0.03071, over 972691.46 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:36:39,416 INFO [train.py:715] (6/8) Epoch 14, batch 21150, loss[loss=0.1446, simple_loss=0.2173, pruned_loss=0.03591, over 4798.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2105, pruned_loss=0.03122, over 973663.17 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 03:37:18,918 INFO [train.py:715] (6/8) Epoch 14, batch 21200, loss[loss=0.1221, simple_loss=0.1909, pruned_loss=0.02665, over 4827.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03075, over 972837.21 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:37:59,331 INFO [train.py:715] (6/8) Epoch 14, batch 21250, loss[loss=0.138, simple_loss=0.215, pruned_loss=0.03052, over 4911.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03033, over 973139.86 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:38:39,037 INFO [train.py:715] (6/8) Epoch 14, batch 21300, loss[loss=0.109, simple_loss=0.1794, pruned_loss=0.01925, over 4822.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03088, over 972379.91 frames.], batch size: 26, lr: 1.56e-04 +2022-05-08 03:39:17,955 INFO [train.py:715] (6/8) Epoch 14, batch 21350, loss[loss=0.1488, simple_loss=0.229, pruned_loss=0.03433, over 4873.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03068, over 973255.89 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:39:58,397 INFO [train.py:715] (6/8) Epoch 14, batch 21400, loss[loss=0.1213, simple_loss=0.1944, pruned_loss=0.02414, over 4922.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.0309, over 973034.64 frames.], batch size: 29, lr: 1.56e-04 +2022-05-08 03:40:38,646 INFO [train.py:715] (6/8) Epoch 14, batch 21450, loss[loss=0.1296, simple_loss=0.2021, pruned_loss=0.02859, over 4853.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.03063, over 972614.62 frames.], batch size: 32, lr: 1.56e-04 +2022-05-08 03:41:18,064 INFO [train.py:715] (6/8) Epoch 14, batch 21500, loss[loss=0.1257, simple_loss=0.2077, pruned_loss=0.02181, over 4772.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.0298, over 972266.90 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:41:57,083 INFO [train.py:715] (6/8) Epoch 14, batch 21550, loss[loss=0.1253, simple_loss=0.2063, pruned_loss=0.02216, over 4826.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2078, pruned_loss=0.03041, over 972874.85 frames.], batch size: 27, lr: 1.56e-04 +2022-05-08 03:42:37,075 INFO [train.py:715] (6/8) Epoch 14, batch 21600, loss[loss=0.1391, simple_loss=0.2069, pruned_loss=0.03563, over 4914.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03059, over 973119.66 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 03:43:16,849 INFO [train.py:715] (6/8) Epoch 14, batch 21650, loss[loss=0.1428, simple_loss=0.207, pruned_loss=0.03935, over 4948.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03111, over 973256.59 frames.], batch size: 35, lr: 1.56e-04 +2022-05-08 
03:43:55,952 INFO [train.py:715] (6/8) Epoch 14, batch 21700, loss[loss=0.1508, simple_loss=0.2335, pruned_loss=0.03403, over 4874.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03109, over 973435.37 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 03:44:36,358 INFO [train.py:715] (6/8) Epoch 14, batch 21750, loss[loss=0.1251, simple_loss=0.1842, pruned_loss=0.03306, over 4733.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03033, over 973734.83 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:45:16,752 INFO [train.py:715] (6/8) Epoch 14, batch 21800, loss[loss=0.1443, simple_loss=0.2114, pruned_loss=0.03859, over 4859.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03093, over 973402.95 frames.], batch size: 20, lr: 1.56e-04 +2022-05-08 03:45:56,146 INFO [train.py:715] (6/8) Epoch 14, batch 21850, loss[loss=0.1527, simple_loss=0.221, pruned_loss=0.04216, over 4986.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03146, over 973919.33 frames.], batch size: 33, lr: 1.56e-04 +2022-05-08 03:46:35,759 INFO [train.py:715] (6/8) Epoch 14, batch 21900, loss[loss=0.1177, simple_loss=0.1978, pruned_loss=0.01883, over 4916.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03157, over 972987.69 frames.], batch size: 23, lr: 1.56e-04 +2022-05-08 03:47:16,029 INFO [train.py:715] (6/8) Epoch 14, batch 21950, loss[loss=0.1021, simple_loss=0.1774, pruned_loss=0.01346, over 4731.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03129, over 972735.09 frames.], batch size: 12, lr: 1.56e-04 +2022-05-08 03:47:55,290 INFO [train.py:715] (6/8) Epoch 14, batch 22000, loss[loss=0.1436, simple_loss=0.2182, pruned_loss=0.0345, over 4947.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03089, over 972525.61 frames.], batch size: 23, lr: 1.56e-04 +2022-05-08 03:48:34,009 INFO [train.py:715] (6/8) Epoch 14, batch 22050, loss[loss=0.1347, simple_loss=0.2168, pruned_loss=0.02635, over 4960.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03061, over 972499.33 frames.], batch size: 35, lr: 1.56e-04 +2022-05-08 03:49:14,110 INFO [train.py:715] (6/8) Epoch 14, batch 22100, loss[loss=0.1301, simple_loss=0.2009, pruned_loss=0.02966, over 4711.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03013, over 971496.96 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:49:53,820 INFO [train.py:715] (6/8) Epoch 14, batch 22150, loss[loss=0.1027, simple_loss=0.1745, pruned_loss=0.0154, over 4780.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02982, over 971544.84 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:50:32,843 INFO [train.py:715] (6/8) Epoch 14, batch 22200, loss[loss=0.1142, simple_loss=0.1898, pruned_loss=0.01931, over 4878.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02987, over 971068.08 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 03:51:12,587 INFO [train.py:715] (6/8) Epoch 14, batch 22250, loss[loss=0.1863, simple_loss=0.2618, pruned_loss=0.05538, over 4741.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03055, over 970940.87 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:51:52,770 INFO [train.py:715] (6/8) Epoch 14, batch 22300, loss[loss=0.1172, simple_loss=0.1881, pruned_loss=0.0232, over 4749.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2093, pruned_loss=0.03052, over 970518.14 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 03:52:32,255 
INFO [train.py:715] (6/8) Epoch 14, batch 22350, loss[loss=0.147, simple_loss=0.2055, pruned_loss=0.04424, over 4843.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03052, over 971656.10 frames.], batch size: 32, lr: 1.56e-04 +2022-05-08 03:53:11,402 INFO [train.py:715] (6/8) Epoch 14, batch 22400, loss[loss=0.1647, simple_loss=0.2315, pruned_loss=0.04896, over 4902.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2088, pruned_loss=0.03067, over 971624.51 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:53:51,753 INFO [train.py:715] (6/8) Epoch 14, batch 22450, loss[loss=0.1453, simple_loss=0.2231, pruned_loss=0.03375, over 4913.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03007, over 972379.99 frames.], batch size: 23, lr: 1.56e-04 +2022-05-08 03:54:31,163 INFO [train.py:715] (6/8) Epoch 14, batch 22500, loss[loss=0.1573, simple_loss=0.2252, pruned_loss=0.04475, over 4970.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2079, pruned_loss=0.03052, over 972265.00 frames.], batch size: 35, lr: 1.56e-04 +2022-05-08 03:55:10,459 INFO [train.py:715] (6/8) Epoch 14, batch 22550, loss[loss=0.1235, simple_loss=0.1937, pruned_loss=0.02663, over 4833.00 frames.], tot_loss[loss=0.134, simple_loss=0.2073, pruned_loss=0.0303, over 971452.13 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 03:55:50,815 INFO [train.py:715] (6/8) Epoch 14, batch 22600, loss[loss=0.1163, simple_loss=0.1908, pruned_loss=0.02086, over 4867.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2069, pruned_loss=0.02997, over 971517.58 frames.], batch size: 20, lr: 1.56e-04 +2022-05-08 03:56:31,699 INFO [train.py:715] (6/8) Epoch 14, batch 22650, loss[loss=0.1265, simple_loss=0.2019, pruned_loss=0.02556, over 4984.00 frames.], tot_loss[loss=0.134, simple_loss=0.2073, pruned_loss=0.03038, over 971314.60 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:57:11,536 INFO [train.py:715] (6/8) Epoch 14, batch 22700, loss[loss=0.1328, simple_loss=0.2028, pruned_loss=0.03145, over 4807.00 frames.], tot_loss[loss=0.134, simple_loss=0.2072, pruned_loss=0.03041, over 970917.86 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:57:50,670 INFO [train.py:715] (6/8) Epoch 14, batch 22750, loss[loss=0.1372, simple_loss=0.1981, pruned_loss=0.0381, over 4918.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2085, pruned_loss=0.03091, over 972049.30 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 03:58:31,999 INFO [train.py:715] (6/8) Epoch 14, batch 22800, loss[loss=0.121, simple_loss=0.1956, pruned_loss=0.02318, over 4783.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2087, pruned_loss=0.03099, over 972708.15 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:59:12,918 INFO [train.py:715] (6/8) Epoch 14, batch 22850, loss[loss=0.1429, simple_loss=0.2235, pruned_loss=0.0312, over 4814.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.0312, over 972070.71 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:59:53,208 INFO [train.py:715] (6/8) Epoch 14, batch 22900, loss[loss=0.1842, simple_loss=0.2561, pruned_loss=0.0562, over 4961.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2094, pruned_loss=0.03139, over 972121.94 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 04:00:33,081 INFO [train.py:715] (6/8) Epoch 14, batch 22950, loss[loss=0.1246, simple_loss=0.1993, pruned_loss=0.02498, over 4784.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03139, over 972012.16 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 04:01:13,586 INFO 
[train.py:715] (6/8) Epoch 14, batch 23000, loss[loss=0.1095, simple_loss=0.1832, pruned_loss=0.01785, over 4754.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03132, over 971868.92 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 04:01:53,102 INFO [train.py:715] (6/8) Epoch 14, batch 23050, loss[loss=0.1395, simple_loss=0.2219, pruned_loss=0.02853, over 4813.00 frames.], tot_loss[loss=0.137, simple_loss=0.2107, pruned_loss=0.03166, over 973038.95 frames.], batch size: 26, lr: 1.56e-04 +2022-05-08 04:02:32,427 INFO [train.py:715] (6/8) Epoch 14, batch 23100, loss[loss=0.1558, simple_loss=0.2184, pruned_loss=0.0466, over 4763.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2105, pruned_loss=0.03149, over 972914.54 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 04:03:13,071 INFO [train.py:715] (6/8) Epoch 14, batch 23150, loss[loss=0.1415, simple_loss=0.2173, pruned_loss=0.03286, over 4827.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.03103, over 972860.11 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 04:03:54,324 INFO [train.py:715] (6/8) Epoch 14, batch 23200, loss[loss=0.1222, simple_loss=0.198, pruned_loss=0.02316, over 4910.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03015, over 972390.18 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 04:04:33,068 INFO [train.py:715] (6/8) Epoch 14, batch 23250, loss[loss=0.1302, simple_loss=0.1953, pruned_loss=0.03252, over 4696.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03026, over 971971.21 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 04:05:13,479 INFO [train.py:715] (6/8) Epoch 14, batch 23300, loss[loss=0.1215, simple_loss=0.2021, pruned_loss=0.02047, over 4986.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03078, over 971997.14 frames.], batch size: 28, lr: 1.56e-04 +2022-05-08 04:05:54,158 INFO [train.py:715] (6/8) Epoch 14, batch 23350, loss[loss=0.1522, simple_loss=0.2205, pruned_loss=0.04195, over 4800.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.03111, over 971857.68 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 04:06:33,757 INFO [train.py:715] (6/8) Epoch 14, batch 23400, loss[loss=0.1078, simple_loss=0.1809, pruned_loss=0.01737, over 4887.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.03092, over 972246.49 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 04:07:12,805 INFO [train.py:715] (6/8) Epoch 14, batch 23450, loss[loss=0.1413, simple_loss=0.212, pruned_loss=0.03533, over 4853.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03053, over 972819.88 frames.], batch size: 30, lr: 1.56e-04 +2022-05-08 04:07:53,402 INFO [train.py:715] (6/8) Epoch 14, batch 23500, loss[loss=0.1199, simple_loss=0.1913, pruned_loss=0.02426, over 4817.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2078, pruned_loss=0.03041, over 972401.20 frames.], batch size: 27, lr: 1.56e-04 +2022-05-08 04:08:34,057 INFO [train.py:715] (6/8) Epoch 14, batch 23550, loss[loss=0.1179, simple_loss=0.1872, pruned_loss=0.02428, over 4687.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02987, over 972262.60 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 04:09:13,316 INFO [train.py:715] (6/8) Epoch 14, batch 23600, loss[loss=0.1388, simple_loss=0.2096, pruned_loss=0.03402, over 4705.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03005, over 972314.29 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 04:09:52,597 INFO 
[train.py:715] (6/8) Epoch 14, batch 23650, loss[loss=0.1677, simple_loss=0.2412, pruned_loss=0.04709, over 4750.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2083, pruned_loss=0.03056, over 971686.19 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 04:10:32,139 INFO [train.py:715] (6/8) Epoch 14, batch 23700, loss[loss=0.1391, simple_loss=0.2055, pruned_loss=0.03636, over 4864.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2081, pruned_loss=0.03053, over 971803.37 frames.], batch size: 32, lr: 1.56e-04 +2022-05-08 04:11:11,203 INFO [train.py:715] (6/8) Epoch 14, batch 23750, loss[loss=0.124, simple_loss=0.1982, pruned_loss=0.02495, over 4969.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2081, pruned_loss=0.03066, over 971651.93 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 04:11:50,485 INFO [train.py:715] (6/8) Epoch 14, batch 23800, loss[loss=0.1725, simple_loss=0.2433, pruned_loss=0.05088, over 4924.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03034, over 971652.48 frames.], batch size: 23, lr: 1.56e-04 +2022-05-08 04:12:30,659 INFO [train.py:715] (6/8) Epoch 14, batch 23850, loss[loss=0.149, simple_loss=0.2215, pruned_loss=0.03819, over 4764.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03084, over 971609.09 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 04:13:10,491 INFO [train.py:715] (6/8) Epoch 14, batch 23900, loss[loss=0.1343, simple_loss=0.1987, pruned_loss=0.03492, over 4754.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03067, over 971743.00 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 04:13:49,740 INFO [train.py:715] (6/8) Epoch 14, batch 23950, loss[loss=0.1318, simple_loss=0.2036, pruned_loss=0.03001, over 4952.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2077, pruned_loss=0.03047, over 972565.51 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 04:14:30,063 INFO [train.py:715] (6/8) Epoch 14, batch 24000, loss[loss=0.14, simple_loss=0.2088, pruned_loss=0.0356, over 4957.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2072, pruned_loss=0.03027, over 972390.62 frames.], batch size: 35, lr: 1.55e-04 +2022-05-08 04:14:30,064 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 04:14:41,438 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.1052, simple_loss=0.1889, pruned_loss=0.01074, over 914524.00 frames. 
+2022-05-08 04:15:21,383 INFO [train.py:715] (6/8) Epoch 14, batch 24050, loss[loss=0.1306, simple_loss=0.2121, pruned_loss=0.02453, over 4848.00 frames.], tot_loss[loss=0.135, simple_loss=0.2084, pruned_loss=0.03083, over 972667.96 frames.], batch size: 30, lr: 1.55e-04 +2022-05-08 04:16:02,436 INFO [train.py:715] (6/8) Epoch 14, batch 24100, loss[loss=0.1266, simple_loss=0.2073, pruned_loss=0.02296, over 4784.00 frames.], tot_loss[loss=0.1345, simple_loss=0.208, pruned_loss=0.0305, over 972785.76 frames.], batch size: 17, lr: 1.55e-04 +2022-05-08 04:16:41,514 INFO [train.py:715] (6/8) Epoch 14, batch 24150, loss[loss=0.132, simple_loss=0.2028, pruned_loss=0.03057, over 4766.00 frames.], tot_loss[loss=0.1346, simple_loss=0.208, pruned_loss=0.03057, over 972503.96 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 04:17:21,105 INFO [train.py:715] (6/8) Epoch 14, batch 24200, loss[loss=0.1304, simple_loss=0.2117, pruned_loss=0.02454, over 4902.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03072, over 971788.22 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 04:18:01,400 INFO [train.py:715] (6/8) Epoch 14, batch 24250, loss[loss=0.134, simple_loss=0.2122, pruned_loss=0.02785, over 4857.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03085, over 972528.57 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 04:18:41,647 INFO [train.py:715] (6/8) Epoch 14, batch 24300, loss[loss=0.1257, simple_loss=0.2, pruned_loss=0.02572, over 4806.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.03044, over 972968.71 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 04:19:20,608 INFO [train.py:715] (6/8) Epoch 14, batch 24350, loss[loss=0.1365, simple_loss=0.216, pruned_loss=0.0285, over 4936.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.0304, over 973483.70 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:20:01,383 INFO [train.py:715] (6/8) Epoch 14, batch 24400, loss[loss=0.1253, simple_loss=0.1972, pruned_loss=0.02667, over 4971.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2077, pruned_loss=0.03034, over 973487.49 frames.], batch size: 35, lr: 1.55e-04 +2022-05-08 04:20:43,004 INFO [train.py:715] (6/8) Epoch 14, batch 24450, loss[loss=0.117, simple_loss=0.1929, pruned_loss=0.02054, over 4793.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2083, pruned_loss=0.03053, over 972843.96 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 04:21:22,344 INFO [train.py:715] (6/8) Epoch 14, batch 24500, loss[loss=0.1402, simple_loss=0.2165, pruned_loss=0.03191, over 4944.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2075, pruned_loss=0.03037, over 973037.13 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 04:22:02,603 INFO [train.py:715] (6/8) Epoch 14, batch 24550, loss[loss=0.1391, simple_loss=0.2151, pruned_loss=0.03151, over 4977.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2077, pruned_loss=0.03028, over 973190.34 frames.], batch size: 26, lr: 1.55e-04 +2022-05-08 04:22:43,754 INFO [train.py:715] (6/8) Epoch 14, batch 24600, loss[loss=0.153, simple_loss=0.2172, pruned_loss=0.04434, over 4836.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03064, over 972910.47 frames.], batch size: 30, lr: 1.55e-04 +2022-05-08 04:23:25,379 INFO [train.py:715] (6/8) Epoch 14, batch 24650, loss[loss=0.1228, simple_loss=0.1991, pruned_loss=0.02322, over 4841.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03093, over 973068.75 frames.], batch size: 26, lr: 1.55e-04 +2022-05-08 
04:24:07,560 INFO [train.py:715] (6/8) Epoch 14, batch 24700, loss[loss=0.1388, simple_loss=0.2203, pruned_loss=0.02868, over 4868.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03098, over 973929.22 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 04:24:48,442 INFO [train.py:715] (6/8) Epoch 14, batch 24750, loss[loss=0.1388, simple_loss=0.2145, pruned_loss=0.03156, over 4980.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03109, over 973739.35 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 04:25:30,058 INFO [train.py:715] (6/8) Epoch 14, batch 24800, loss[loss=0.1438, simple_loss=0.2053, pruned_loss=0.04118, over 4846.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03128, over 973592.91 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 04:26:10,636 INFO [train.py:715] (6/8) Epoch 14, batch 24850, loss[loss=0.1296, simple_loss=0.2125, pruned_loss=0.02331, over 4782.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03128, over 973533.53 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:26:50,220 INFO [train.py:715] (6/8) Epoch 14, batch 24900, loss[loss=0.1066, simple_loss=0.1806, pruned_loss=0.01627, over 4735.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2089, pruned_loss=0.03102, over 973119.59 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 04:27:31,160 INFO [train.py:715] (6/8) Epoch 14, batch 24950, loss[loss=0.1672, simple_loss=0.2523, pruned_loss=0.04102, over 4887.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.0309, over 974298.82 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 04:28:12,054 INFO [train.py:715] (6/8) Epoch 14, batch 25000, loss[loss=0.1178, simple_loss=0.1836, pruned_loss=0.02596, over 4815.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.0308, over 973991.59 frames.], batch size: 26, lr: 1.55e-04 +2022-05-08 04:28:51,325 INFO [train.py:715] (6/8) Epoch 14, batch 25050, loss[loss=0.1206, simple_loss=0.1982, pruned_loss=0.02143, over 4906.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03032, over 974540.69 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:29:32,183 INFO [train.py:715] (6/8) Epoch 14, batch 25100, loss[loss=0.1249, simple_loss=0.2017, pruned_loss=0.02409, over 4819.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2074, pruned_loss=0.03017, over 973349.16 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 04:30:13,137 INFO [train.py:715] (6/8) Epoch 14, batch 25150, loss[loss=0.125, simple_loss=0.2079, pruned_loss=0.02104, over 4792.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2074, pruned_loss=0.03038, over 972160.83 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 04:30:53,335 INFO [train.py:715] (6/8) Epoch 14, batch 25200, loss[loss=0.1347, simple_loss=0.2159, pruned_loss=0.02677, over 4981.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2069, pruned_loss=0.03031, over 971897.19 frames.], batch size: 39, lr: 1.55e-04 +2022-05-08 04:31:31,969 INFO [train.py:715] (6/8) Epoch 14, batch 25250, loss[loss=0.1249, simple_loss=0.1977, pruned_loss=0.026, over 4828.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2075, pruned_loss=0.03033, over 972735.91 frames.], batch size: 13, lr: 1.55e-04 +2022-05-08 04:32:12,616 INFO [train.py:715] (6/8) Epoch 14, batch 25300, loss[loss=0.1512, simple_loss=0.2217, pruned_loss=0.04036, over 4808.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2074, pruned_loss=0.03022, over 972048.79 frames.], batch size: 25, lr: 1.55e-04 +2022-05-08 
04:32:53,031 INFO [train.py:715] (6/8) Epoch 14, batch 25350, loss[loss=0.145, simple_loss=0.207, pruned_loss=0.04154, over 4751.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03029, over 971586.00 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 04:33:31,585 INFO [train.py:715] (6/8) Epoch 14, batch 25400, loss[loss=0.1746, simple_loss=0.2437, pruned_loss=0.05272, over 4849.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03006, over 971613.43 frames.], batch size: 32, lr: 1.55e-04 +2022-05-08 04:34:11,986 INFO [train.py:715] (6/8) Epoch 14, batch 25450, loss[loss=0.1414, simple_loss=0.2128, pruned_loss=0.03502, over 4763.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03061, over 971493.09 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 04:34:52,385 INFO [train.py:715] (6/8) Epoch 14, batch 25500, loss[loss=0.1043, simple_loss=0.1776, pruned_loss=0.01554, over 4729.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03079, over 970994.81 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 04:35:31,821 INFO [train.py:715] (6/8) Epoch 14, batch 25550, loss[loss=0.1194, simple_loss=0.2004, pruned_loss=0.0192, over 4858.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.03096, over 971913.54 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 04:36:10,564 INFO [train.py:715] (6/8) Epoch 14, batch 25600, loss[loss=0.1149, simple_loss=0.185, pruned_loss=0.02241, over 4905.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2088, pruned_loss=0.03069, over 972254.43 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:36:50,637 INFO [train.py:715] (6/8) Epoch 14, batch 25650, loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03195, over 4913.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.0303, over 972494.98 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:37:30,751 INFO [train.py:715] (6/8) Epoch 14, batch 25700, loss[loss=0.1447, simple_loss=0.2139, pruned_loss=0.03775, over 4888.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03047, over 972908.10 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 04:38:09,217 INFO [train.py:715] (6/8) Epoch 14, batch 25750, loss[loss=0.1622, simple_loss=0.2311, pruned_loss=0.04668, over 4798.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03036, over 972980.04 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:38:48,529 INFO [train.py:715] (6/8) Epoch 14, batch 25800, loss[loss=0.1155, simple_loss=0.1924, pruned_loss=0.01934, over 4774.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03019, over 973076.54 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 04:39:28,744 INFO [train.py:715] (6/8) Epoch 14, batch 25850, loss[loss=0.1482, simple_loss=0.2202, pruned_loss=0.03811, over 4839.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2084, pruned_loss=0.0297, over 972647.37 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:40:07,963 INFO [train.py:715] (6/8) Epoch 14, batch 25900, loss[loss=0.1801, simple_loss=0.2515, pruned_loss=0.05437, over 4856.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2084, pruned_loss=0.02991, over 971787.69 frames.], batch size: 32, lr: 1.55e-04 +2022-05-08 04:40:46,743 INFO [train.py:715] (6/8) Epoch 14, batch 25950, loss[loss=0.1262, simple_loss=0.1836, pruned_loss=0.03444, over 4827.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02988, over 971230.34 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 04:41:26,887 
INFO [train.py:715] (6/8) Epoch 14, batch 26000, loss[loss=0.133, simple_loss=0.2113, pruned_loss=0.02735, over 4776.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03012, over 970807.16 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:42:06,872 INFO [train.py:715] (6/8) Epoch 14, batch 26050, loss[loss=0.1156, simple_loss=0.1864, pruned_loss=0.02236, over 4827.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03034, over 971352.02 frames.], batch size: 13, lr: 1.55e-04 +2022-05-08 04:42:44,777 INFO [train.py:715] (6/8) Epoch 14, batch 26100, loss[loss=0.1261, simple_loss=0.1853, pruned_loss=0.03345, over 4765.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03056, over 971650.74 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 04:43:24,717 INFO [train.py:715] (6/8) Epoch 14, batch 26150, loss[loss=0.1344, simple_loss=0.2105, pruned_loss=0.02918, over 4923.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03032, over 971680.55 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:44:05,201 INFO [train.py:715] (6/8) Epoch 14, batch 26200, loss[loss=0.1361, simple_loss=0.2203, pruned_loss=0.02594, over 4933.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03026, over 970532.00 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 04:44:44,003 INFO [train.py:715] (6/8) Epoch 14, batch 26250, loss[loss=0.1143, simple_loss=0.1809, pruned_loss=0.02385, over 4769.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2078, pruned_loss=0.03056, over 970329.82 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 04:45:23,188 INFO [train.py:715] (6/8) Epoch 14, batch 26300, loss[loss=0.116, simple_loss=0.1958, pruned_loss=0.01812, over 4815.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03079, over 970252.90 frames.], batch size: 25, lr: 1.55e-04 +2022-05-08 04:46:03,665 INFO [train.py:715] (6/8) Epoch 14, batch 26350, loss[loss=0.123, simple_loss=0.2062, pruned_loss=0.0199, over 4883.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03071, over 971098.73 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 04:46:43,192 INFO [train.py:715] (6/8) Epoch 14, batch 26400, loss[loss=0.145, simple_loss=0.215, pruned_loss=0.03752, over 4985.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2097, pruned_loss=0.03069, over 971035.79 frames.], batch size: 28, lr: 1.55e-04 +2022-05-08 04:47:21,827 INFO [train.py:715] (6/8) Epoch 14, batch 26450, loss[loss=0.1303, simple_loss=0.1955, pruned_loss=0.03251, over 4933.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2093, pruned_loss=0.0307, over 972488.95 frames.], batch size: 39, lr: 1.55e-04 +2022-05-08 04:48:02,187 INFO [train.py:715] (6/8) Epoch 14, batch 26500, loss[loss=0.09523, simple_loss=0.1573, pruned_loss=0.01658, over 4777.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03043, over 972528.39 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 04:48:42,603 INFO [train.py:715] (6/8) Epoch 14, batch 26550, loss[loss=0.1069, simple_loss=0.1808, pruned_loss=0.01648, over 4810.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03059, over 972348.28 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 04:49:21,898 INFO [train.py:715] (6/8) Epoch 14, batch 26600, loss[loss=0.1335, simple_loss=0.1985, pruned_loss=0.03429, over 4964.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.0306, over 972161.94 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:50:00,870 INFO 
[train.py:715] (6/8) Epoch 14, batch 26650, loss[loss=0.1459, simple_loss=0.2289, pruned_loss=0.03148, over 4792.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.0307, over 971109.58 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 04:50:41,181 INFO [train.py:715] (6/8) Epoch 14, batch 26700, loss[loss=0.1305, simple_loss=0.197, pruned_loss=0.03204, over 4843.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2084, pruned_loss=0.03094, over 971248.41 frames.], batch size: 30, lr: 1.55e-04 +2022-05-08 04:51:21,680 INFO [train.py:715] (6/8) Epoch 14, batch 26750, loss[loss=0.1376, simple_loss=0.2135, pruned_loss=0.03082, over 4806.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2078, pruned_loss=0.03035, over 972514.77 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 04:52:00,702 INFO [train.py:715] (6/8) Epoch 14, batch 26800, loss[loss=0.1186, simple_loss=0.197, pruned_loss=0.02016, over 4832.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03028, over 973404.13 frames.], batch size: 13, lr: 1.55e-04 +2022-05-08 04:52:40,485 INFO [train.py:715] (6/8) Epoch 14, batch 26850, loss[loss=0.1174, simple_loss=0.1948, pruned_loss=0.02006, over 4809.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03022, over 973242.58 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 04:53:20,916 INFO [train.py:715] (6/8) Epoch 14, batch 26900, loss[loss=0.1206, simple_loss=0.1981, pruned_loss=0.02158, over 4933.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2072, pruned_loss=0.02979, over 972529.24 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:54:00,764 INFO [train.py:715] (6/8) Epoch 14, batch 26950, loss[loss=0.1414, simple_loss=0.1999, pruned_loss=0.04143, over 4846.00 frames.], tot_loss[loss=0.1334, simple_loss=0.207, pruned_loss=0.02987, over 971792.57 frames.], batch size: 32, lr: 1.55e-04 +2022-05-08 04:54:39,969 INFO [train.py:715] (6/8) Epoch 14, batch 27000, loss[loss=0.1283, simple_loss=0.2013, pruned_loss=0.02761, over 4885.00 frames.], tot_loss[loss=0.135, simple_loss=0.2083, pruned_loss=0.03086, over 971760.26 frames.], batch size: 22, lr: 1.55e-04 +2022-05-08 04:54:39,970 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 04:54:49,614 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.1049, simple_loss=0.1886, pruned_loss=0.01053, over 914524.00 frames. 
+2022-05-08 04:55:29,148 INFO [train.py:715] (6/8) Epoch 14, batch 27050, loss[loss=0.128, simple_loss=0.2016, pruned_loss=0.02723, over 4809.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03046, over 972290.58 frames.], batch size: 27, lr: 1.55e-04 +2022-05-08 04:56:09,803 INFO [train.py:715] (6/8) Epoch 14, batch 27100, loss[loss=0.1391, simple_loss=0.2073, pruned_loss=0.03547, over 4850.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2081, pruned_loss=0.03042, over 972662.77 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 04:56:50,330 INFO [train.py:715] (6/8) Epoch 14, batch 27150, loss[loss=0.1681, simple_loss=0.2382, pruned_loss=0.04896, over 4778.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03043, over 972870.44 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 04:57:29,051 INFO [train.py:715] (6/8) Epoch 14, batch 27200, loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03256, over 4861.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03057, over 973509.86 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 04:58:08,439 INFO [train.py:715] (6/8) Epoch 14, batch 27250, loss[loss=0.1791, simple_loss=0.2464, pruned_loss=0.05584, over 4897.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03099, over 974524.65 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 04:58:48,576 INFO [train.py:715] (6/8) Epoch 14, batch 27300, loss[loss=0.1326, simple_loss=0.2056, pruned_loss=0.02978, over 4853.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03103, over 974285.98 frames.], batch size: 32, lr: 1.55e-04 +2022-05-08 04:59:28,196 INFO [train.py:715] (6/8) Epoch 14, batch 27350, loss[loss=0.1273, simple_loss=0.1971, pruned_loss=0.02878, over 4859.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03096, over 974384.99 frames.], batch size: 30, lr: 1.55e-04 +2022-05-08 05:00:06,592 INFO [train.py:715] (6/8) Epoch 14, batch 27400, loss[loss=0.1441, simple_loss=0.2136, pruned_loss=0.03731, over 4792.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2097, pruned_loss=0.03064, over 974451.44 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 05:00:46,866 INFO [train.py:715] (6/8) Epoch 14, batch 27450, loss[loss=0.1203, simple_loss=0.1946, pruned_loss=0.02299, over 4817.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.03058, over 974321.10 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 05:01:26,699 INFO [train.py:715] (6/8) Epoch 14, batch 27500, loss[loss=0.1582, simple_loss=0.2346, pruned_loss=0.04092, over 4787.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.031, over 973981.60 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 05:02:05,454 INFO [train.py:715] (6/8) Epoch 14, batch 27550, loss[loss=0.1182, simple_loss=0.1977, pruned_loss=0.01936, over 4926.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03102, over 973559.26 frames.], batch size: 23, lr: 1.55e-04 +2022-05-08 05:02:45,161 INFO [train.py:715] (6/8) Epoch 14, batch 27600, loss[loss=0.1276, simple_loss=0.2059, pruned_loss=0.02464, over 4980.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2085, pruned_loss=0.03072, over 973495.16 frames.], batch size: 31, lr: 1.55e-04 +2022-05-08 05:03:25,492 INFO [train.py:715] (6/8) Epoch 14, batch 27650, loss[loss=0.1147, simple_loss=0.194, pruned_loss=0.01776, over 4836.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2093, pruned_loss=0.03122, over 973150.68 frames.], batch size: 26, lr: 1.55e-04 
+2022-05-08 05:04:04,759 INFO [train.py:715] (6/8) Epoch 14, batch 27700, loss[loss=0.13, simple_loss=0.2109, pruned_loss=0.02453, over 4746.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2085, pruned_loss=0.03099, over 973135.00 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:04:43,281 INFO [train.py:715] (6/8) Epoch 14, batch 27750, loss[loss=0.1275, simple_loss=0.2076, pruned_loss=0.02368, over 4757.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2075, pruned_loss=0.03078, over 973266.77 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 05:05:23,450 INFO [train.py:715] (6/8) Epoch 14, batch 27800, loss[loss=0.1208, simple_loss=0.2006, pruned_loss=0.02054, over 4933.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2076, pruned_loss=0.03077, over 973704.95 frames.], batch size: 29, lr: 1.55e-04 +2022-05-08 05:06:03,192 INFO [train.py:715] (6/8) Epoch 14, batch 27850, loss[loss=0.1425, simple_loss=0.2206, pruned_loss=0.03223, over 4954.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2068, pruned_loss=0.0303, over 974167.00 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 05:06:41,705 INFO [train.py:715] (6/8) Epoch 14, batch 27900, loss[loss=0.1346, simple_loss=0.2027, pruned_loss=0.03327, over 4839.00 frames.], tot_loss[loss=0.1341, simple_loss=0.207, pruned_loss=0.03055, over 975021.06 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 05:07:21,730 INFO [train.py:715] (6/8) Epoch 14, batch 27950, loss[loss=0.1225, simple_loss=0.1877, pruned_loss=0.02868, over 4814.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2072, pruned_loss=0.0305, over 973935.62 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:08:01,580 INFO [train.py:715] (6/8) Epoch 14, batch 28000, loss[loss=0.1432, simple_loss=0.2146, pruned_loss=0.03595, over 4906.00 frames.], tot_loss[loss=0.1344, simple_loss=0.208, pruned_loss=0.03043, over 973210.44 frames.], batch size: 17, lr: 1.55e-04 +2022-05-08 05:08:40,622 INFO [train.py:715] (6/8) Epoch 14, batch 28050, loss[loss=0.1226, simple_loss=0.1963, pruned_loss=0.02448, over 4797.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.0299, over 973204.41 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 05:09:19,681 INFO [train.py:715] (6/8) Epoch 14, batch 28100, loss[loss=0.1132, simple_loss=0.1901, pruned_loss=0.0182, over 4981.00 frames.], tot_loss[loss=0.1346, simple_loss=0.208, pruned_loss=0.03059, over 973310.97 frames.], batch size: 28, lr: 1.55e-04 +2022-05-08 05:10:00,235 INFO [train.py:715] (6/8) Epoch 14, batch 28150, loss[loss=0.1601, simple_loss=0.2339, pruned_loss=0.04313, over 4907.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03092, over 972031.27 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:10:39,944 INFO [train.py:715] (6/8) Epoch 14, batch 28200, loss[loss=0.1339, simple_loss=0.2103, pruned_loss=0.02877, over 4850.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2087, pruned_loss=0.03104, over 972472.89 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 05:11:17,985 INFO [train.py:715] (6/8) Epoch 14, batch 28250, loss[loss=0.1275, simple_loss=0.2038, pruned_loss=0.02563, over 4964.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03139, over 973335.44 frames.], batch size: 28, lr: 1.55e-04 +2022-05-08 05:11:58,126 INFO [train.py:715] (6/8) Epoch 14, batch 28300, loss[loss=0.1718, simple_loss=0.2457, pruned_loss=0.0489, over 4817.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03168, over 972413.98 frames.], batch size: 25, lr: 1.55e-04 +2022-05-08 
05:12:38,004 INFO [train.py:715] (6/8) Epoch 14, batch 28350, loss[loss=0.1317, simple_loss=0.2142, pruned_loss=0.02464, over 4814.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03141, over 971971.30 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 05:13:16,550 INFO [train.py:715] (6/8) Epoch 14, batch 28400, loss[loss=0.1101, simple_loss=0.1931, pruned_loss=0.01352, over 4919.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03167, over 971788.86 frames.], batch size: 17, lr: 1.55e-04 +2022-05-08 05:13:56,136 INFO [train.py:715] (6/8) Epoch 14, batch 28450, loss[loss=0.1361, simple_loss=0.2123, pruned_loss=0.0299, over 4821.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03123, over 972030.97 frames.], batch size: 26, lr: 1.55e-04 +2022-05-08 05:14:36,403 INFO [train.py:715] (6/8) Epoch 14, batch 28500, loss[loss=0.1235, simple_loss=0.1974, pruned_loss=0.02478, over 4789.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03083, over 971838.73 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 05:15:15,664 INFO [train.py:715] (6/8) Epoch 14, batch 28550, loss[loss=0.1248, simple_loss=0.2089, pruned_loss=0.02035, over 4825.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03078, over 972473.87 frames.], batch size: 30, lr: 1.55e-04 +2022-05-08 05:15:54,183 INFO [train.py:715] (6/8) Epoch 14, batch 28600, loss[loss=0.1525, simple_loss=0.2247, pruned_loss=0.04021, over 4817.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2095, pruned_loss=0.03046, over 973069.75 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 05:16:34,513 INFO [train.py:715] (6/8) Epoch 14, batch 28650, loss[loss=0.145, simple_loss=0.215, pruned_loss=0.03751, over 4985.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03051, over 973436.28 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 05:17:14,559 INFO [train.py:715] (6/8) Epoch 14, batch 28700, loss[loss=0.1308, simple_loss=0.2039, pruned_loss=0.02884, over 4956.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03017, over 973657.96 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 05:17:52,654 INFO [train.py:715] (6/8) Epoch 14, batch 28750, loss[loss=0.1213, simple_loss=0.1942, pruned_loss=0.02417, over 4766.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03021, over 973909.00 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:18:32,374 INFO [train.py:715] (6/8) Epoch 14, batch 28800, loss[loss=0.1228, simple_loss=0.1992, pruned_loss=0.02323, over 4705.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2077, pruned_loss=0.03037, over 972540.78 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:19:12,484 INFO [train.py:715] (6/8) Epoch 14, batch 28850, loss[loss=0.1291, simple_loss=0.2045, pruned_loss=0.02683, over 4721.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2078, pruned_loss=0.03031, over 971739.45 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:19:52,375 INFO [train.py:715] (6/8) Epoch 14, batch 28900, loss[loss=0.12, simple_loss=0.1894, pruned_loss=0.02524, over 4814.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03038, over 972269.67 frames.], batch size: 25, lr: 1.55e-04 +2022-05-08 05:20:30,227 INFO [train.py:715] (6/8) Epoch 14, batch 28950, loss[loss=0.1546, simple_loss=0.2175, pruned_loss=0.04585, over 4863.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03096, over 971739.47 frames.], batch size: 32, lr: 1.55e-04 +2022-05-08 05:21:10,701 
INFO [train.py:715] (6/8) Epoch 14, batch 29000, loss[loss=0.1205, simple_loss=0.1902, pruned_loss=0.02542, over 4968.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03107, over 972968.86 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 05:21:50,338 INFO [train.py:715] (6/8) Epoch 14, batch 29050, loss[loss=0.1125, simple_loss=0.1941, pruned_loss=0.0154, over 4959.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03093, over 973093.12 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 05:22:29,108 INFO [train.py:715] (6/8) Epoch 14, batch 29100, loss[loss=0.1277, simple_loss=0.1968, pruned_loss=0.02926, over 4907.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03106, over 973052.48 frames.], batch size: 17, lr: 1.55e-04 +2022-05-08 05:23:08,496 INFO [train.py:715] (6/8) Epoch 14, batch 29150, loss[loss=0.125, simple_loss=0.1919, pruned_loss=0.02911, over 4885.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2089, pruned_loss=0.03101, over 973535.77 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 05:23:48,534 INFO [train.py:715] (6/8) Epoch 14, batch 29200, loss[loss=0.1297, simple_loss=0.2064, pruned_loss=0.02646, over 4859.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.03095, over 973016.01 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 05:24:28,396 INFO [train.py:715] (6/8) Epoch 14, batch 29250, loss[loss=0.1389, simple_loss=0.2131, pruned_loss=0.03233, over 4964.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2083, pruned_loss=0.03051, over 973197.39 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:25:06,492 INFO [train.py:715] (6/8) Epoch 14, batch 29300, loss[loss=0.1238, simple_loss=0.2095, pruned_loss=0.01902, over 4947.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03067, over 973588.31 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 05:25:46,610 INFO [train.py:715] (6/8) Epoch 14, batch 29350, loss[loss=0.1359, simple_loss=0.2075, pruned_loss=0.03214, over 4690.00 frames.], tot_loss[loss=0.1346, simple_loss=0.208, pruned_loss=0.0306, over 973631.79 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:26:26,519 INFO [train.py:715] (6/8) Epoch 14, batch 29400, loss[loss=0.1138, simple_loss=0.1862, pruned_loss=0.02072, over 4872.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03019, over 973203.31 frames.], batch size: 22, lr: 1.55e-04 +2022-05-08 05:27:05,394 INFO [train.py:715] (6/8) Epoch 14, batch 29450, loss[loss=0.1164, simple_loss=0.1894, pruned_loss=0.02175, over 4831.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2091, pruned_loss=0.03029, over 974116.40 frames.], batch size: 30, lr: 1.55e-04 +2022-05-08 05:27:45,245 INFO [train.py:715] (6/8) Epoch 14, batch 29500, loss[loss=0.1208, simple_loss=0.1869, pruned_loss=0.02733, over 4852.00 frames.], tot_loss[loss=0.135, simple_loss=0.2094, pruned_loss=0.03029, over 973439.03 frames.], batch size: 13, lr: 1.55e-04 +2022-05-08 05:28:25,580 INFO [train.py:715] (6/8) Epoch 14, batch 29550, loss[loss=0.1402, simple_loss=0.1901, pruned_loss=0.04513, over 4972.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02993, over 973884.04 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 05:29:05,391 INFO [train.py:715] (6/8) Epoch 14, batch 29600, loss[loss=0.1295, simple_loss=0.2063, pruned_loss=0.02632, over 4836.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.03011, over 973485.65 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:29:44,396 INFO 
[train.py:715] (6/8) Epoch 14, batch 29650, loss[loss=0.1342, simple_loss=0.2012, pruned_loss=0.03358, over 4729.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02983, over 972699.18 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 05:30:25,191 INFO [train.py:715] (6/8) Epoch 14, batch 29700, loss[loss=0.1175, simple_loss=0.1858, pruned_loss=0.02461, over 4806.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.0305, over 972054.22 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 05:31:06,278 INFO [train.py:715] (6/8) Epoch 14, batch 29750, loss[loss=0.1441, simple_loss=0.2225, pruned_loss=0.03282, over 4968.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.0309, over 971704.89 frames.], batch size: 25, lr: 1.55e-04 +2022-05-08 05:31:45,875 INFO [train.py:715] (6/8) Epoch 14, batch 29800, loss[loss=0.1183, simple_loss=0.1913, pruned_loss=0.02265, over 4822.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03095, over 972426.22 frames.], batch size: 26, lr: 1.55e-04 +2022-05-08 05:32:26,697 INFO [train.py:715] (6/8) Epoch 14, batch 29850, loss[loss=0.1384, simple_loss=0.2079, pruned_loss=0.03447, over 4974.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03058, over 972749.69 frames.], batch size: 35, lr: 1.55e-04 +2022-05-08 05:33:06,681 INFO [train.py:715] (6/8) Epoch 14, batch 29900, loss[loss=0.1243, simple_loss=0.198, pruned_loss=0.02533, over 4861.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03071, over 972732.71 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 05:33:46,337 INFO [train.py:715] (6/8) Epoch 14, batch 29950, loss[loss=0.1151, simple_loss=0.19, pruned_loss=0.02008, over 4814.00 frames.], tot_loss[loss=0.135, simple_loss=0.2092, pruned_loss=0.0304, over 972229.17 frames.], batch size: 26, lr: 1.55e-04 +2022-05-08 05:34:25,101 INFO [train.py:715] (6/8) Epoch 14, batch 30000, loss[loss=0.151, simple_loss=0.2211, pruned_loss=0.04045, over 4833.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2095, pruned_loss=0.03045, over 972615.94 frames.], batch size: 13, lr: 1.55e-04 +2022-05-08 05:34:25,102 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 05:34:42,242 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.1052, simple_loss=0.189, pruned_loss=0.01075, over 914524.00 frames. 
+2022-05-08 05:35:21,213 INFO [train.py:715] (6/8) Epoch 14, batch 30050, loss[loss=0.1283, simple_loss=0.2102, pruned_loss=0.02325, over 4831.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2097, pruned_loss=0.03053, over 973388.31 frames.], batch size: 27, lr: 1.55e-04 +2022-05-08 05:36:01,199 INFO [train.py:715] (6/8) Epoch 14, batch 30100, loss[loss=0.1398, simple_loss=0.2039, pruned_loss=0.03785, over 4853.00 frames.], tot_loss[loss=0.136, simple_loss=0.21, pruned_loss=0.03104, over 973410.30 frames.], batch size: 30, lr: 1.55e-04 +2022-05-08 05:36:42,311 INFO [train.py:715] (6/8) Epoch 14, batch 30150, loss[loss=0.1247, simple_loss=0.197, pruned_loss=0.02623, over 4974.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03045, over 972299.18 frames.], batch size: 35, lr: 1.55e-04 +2022-05-08 05:37:21,243 INFO [train.py:715] (6/8) Epoch 14, batch 30200, loss[loss=0.1421, simple_loss=0.2254, pruned_loss=0.0294, over 4689.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.0306, over 971679.63 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:38:01,180 INFO [train.py:715] (6/8) Epoch 14, batch 30250, loss[loss=0.1462, simple_loss=0.225, pruned_loss=0.03373, over 4870.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.03056, over 973500.24 frames.], batch size: 22, lr: 1.55e-04 +2022-05-08 05:38:41,856 INFO [train.py:715] (6/8) Epoch 14, batch 30300, loss[loss=0.1478, simple_loss=0.219, pruned_loss=0.03827, over 4915.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03038, over 972632.30 frames.], batch size: 23, lr: 1.55e-04 +2022-05-08 05:39:21,371 INFO [train.py:715] (6/8) Epoch 14, batch 30350, loss[loss=0.1348, simple_loss=0.211, pruned_loss=0.02929, over 4761.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03033, over 971908.25 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:40:00,603 INFO [train.py:715] (6/8) Epoch 14, batch 30400, loss[loss=0.1729, simple_loss=0.2356, pruned_loss=0.05512, over 4922.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2081, pruned_loss=0.03041, over 972592.27 frames.], batch size: 17, lr: 1.55e-04 +2022-05-08 05:40:40,497 INFO [train.py:715] (6/8) Epoch 14, batch 30450, loss[loss=0.1463, simple_loss=0.2203, pruned_loss=0.03618, over 4924.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03042, over 973042.46 frames.], batch size: 23, lr: 1.55e-04 +2022-05-08 05:41:20,821 INFO [train.py:715] (6/8) Epoch 14, batch 30500, loss[loss=0.1146, simple_loss=0.1927, pruned_loss=0.01827, over 4793.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03097, over 973679.76 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 05:41:59,760 INFO [train.py:715] (6/8) Epoch 14, batch 30550, loss[loss=0.1276, simple_loss=0.1981, pruned_loss=0.02856, over 4787.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2092, pruned_loss=0.03048, over 972798.16 frames.], batch size: 14, lr: 1.54e-04 +2022-05-08 05:42:39,649 INFO [train.py:715] (6/8) Epoch 14, batch 30600, loss[loss=0.1738, simple_loss=0.2515, pruned_loss=0.04806, over 4884.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03038, over 972231.73 frames.], batch size: 22, lr: 1.54e-04 +2022-05-08 05:43:20,417 INFO [train.py:715] (6/8) Epoch 14, batch 30650, loss[loss=0.1435, simple_loss=0.2086, pruned_loss=0.03913, over 4911.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2094, pruned_loss=0.03048, over 971544.38 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 
05:43:59,993 INFO [train.py:715] (6/8) Epoch 14, batch 30700, loss[loss=0.1471, simple_loss=0.2158, pruned_loss=0.0392, over 4779.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2099, pruned_loss=0.0306, over 971070.74 frames.], batch size: 14, lr: 1.54e-04 +2022-05-08 05:44:39,758 INFO [train.py:715] (6/8) Epoch 14, batch 30750, loss[loss=0.1341, simple_loss=0.2112, pruned_loss=0.02847, over 4889.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2091, pruned_loss=0.03035, over 971332.37 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 05:45:19,656 INFO [train.py:715] (6/8) Epoch 14, batch 30800, loss[loss=0.1484, simple_loss=0.2196, pruned_loss=0.03863, over 4963.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2091, pruned_loss=0.03028, over 971876.51 frames.], batch size: 24, lr: 1.54e-04 +2022-05-08 05:46:00,446 INFO [train.py:715] (6/8) Epoch 14, batch 30850, loss[loss=0.1218, simple_loss=0.196, pruned_loss=0.0238, over 4833.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03007, over 971535.81 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 05:46:39,514 INFO [train.py:715] (6/8) Epoch 14, batch 30900, loss[loss=0.1304, simple_loss=0.2085, pruned_loss=0.02616, over 4851.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03006, over 972326.57 frames.], batch size: 32, lr: 1.54e-04 +2022-05-08 05:47:18,044 INFO [train.py:715] (6/8) Epoch 14, batch 30950, loss[loss=0.1134, simple_loss=0.1889, pruned_loss=0.01897, over 4926.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02986, over 972121.58 frames.], batch size: 29, lr: 1.54e-04 +2022-05-08 05:47:57,805 INFO [train.py:715] (6/8) Epoch 14, batch 31000, loss[loss=0.1292, simple_loss=0.2059, pruned_loss=0.02622, over 4855.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2094, pruned_loss=0.03009, over 972217.98 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 05:48:37,486 INFO [train.py:715] (6/8) Epoch 14, batch 31050, loss[loss=0.1201, simple_loss=0.1987, pruned_loss=0.02078, over 4800.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2088, pruned_loss=0.02974, over 973411.38 frames.], batch size: 25, lr: 1.54e-04 +2022-05-08 05:49:17,855 INFO [train.py:715] (6/8) Epoch 14, batch 31100, loss[loss=0.1439, simple_loss=0.2079, pruned_loss=0.03999, over 4867.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2099, pruned_loss=0.03049, over 973566.02 frames.], batch size: 20, lr: 1.54e-04 +2022-05-08 05:49:58,981 INFO [train.py:715] (6/8) Epoch 14, batch 31150, loss[loss=0.1379, simple_loss=0.2088, pruned_loss=0.03345, over 4978.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2098, pruned_loss=0.03054, over 973268.74 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 05:50:40,133 INFO [train.py:715] (6/8) Epoch 14, batch 31200, loss[loss=0.1401, simple_loss=0.2113, pruned_loss=0.03447, over 4991.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2098, pruned_loss=0.03024, over 972389.22 frames.], batch size: 14, lr: 1.54e-04 +2022-05-08 05:51:19,912 INFO [train.py:715] (6/8) Epoch 14, batch 31250, loss[loss=0.1432, simple_loss=0.2173, pruned_loss=0.03451, over 4822.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.03022, over 972643.66 frames.], batch size: 26, lr: 1.54e-04 +2022-05-08 05:52:00,317 INFO [train.py:715] (6/8) Epoch 14, batch 31300, loss[loss=0.1586, simple_loss=0.2288, pruned_loss=0.04417, over 4874.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02994, over 972748.42 frames.], batch size: 30, lr: 1.54e-04 +2022-05-08 
05:52:41,158 INFO [train.py:715] (6/8) Epoch 14, batch 31350, loss[loss=0.1436, simple_loss=0.2126, pruned_loss=0.03731, over 4940.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03007, over 972712.90 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 05:53:21,047 INFO [train.py:715] (6/8) Epoch 14, batch 31400, loss[loss=0.1019, simple_loss=0.1748, pruned_loss=0.01453, over 4783.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02998, over 972658.42 frames.], batch size: 17, lr: 1.54e-04 +2022-05-08 05:54:00,730 INFO [train.py:715] (6/8) Epoch 14, batch 31450, loss[loss=0.128, simple_loss=0.1999, pruned_loss=0.028, over 4844.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03018, over 971374.97 frames.], batch size: 13, lr: 1.54e-04 +2022-05-08 05:54:40,751 INFO [train.py:715] (6/8) Epoch 14, batch 31500, loss[loss=0.1303, simple_loss=0.2118, pruned_loss=0.02445, over 4980.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.03, over 971613.12 frames.], batch size: 24, lr: 1.54e-04 +2022-05-08 05:55:21,340 INFO [train.py:715] (6/8) Epoch 14, batch 31550, loss[loss=0.1567, simple_loss=0.2329, pruned_loss=0.04026, over 4961.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03019, over 971908.37 frames.], batch size: 39, lr: 1.54e-04 +2022-05-08 05:56:01,192 INFO [train.py:715] (6/8) Epoch 14, batch 31600, loss[loss=0.13, simple_loss=0.2099, pruned_loss=0.02503, over 4923.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03064, over 972587.42 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 05:56:40,704 INFO [train.py:715] (6/8) Epoch 14, batch 31650, loss[loss=0.1399, simple_loss=0.2031, pruned_loss=0.0383, over 4805.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03002, over 972870.17 frames.], batch size: 26, lr: 1.54e-04 +2022-05-08 05:57:21,077 INFO [train.py:715] (6/8) Epoch 14, batch 31700, loss[loss=0.1334, simple_loss=0.2056, pruned_loss=0.03056, over 4824.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.0303, over 972430.03 frames.], batch size: 30, lr: 1.54e-04 +2022-05-08 05:58:00,667 INFO [train.py:715] (6/8) Epoch 14, batch 31750, loss[loss=0.1269, simple_loss=0.2104, pruned_loss=0.02167, over 4952.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2084, pruned_loss=0.02974, over 972931.84 frames.], batch size: 24, lr: 1.54e-04 +2022-05-08 05:58:40,579 INFO [train.py:715] (6/8) Epoch 14, batch 31800, loss[loss=0.1205, simple_loss=0.1949, pruned_loss=0.02308, over 4825.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2087, pruned_loss=0.02998, over 972572.30 frames.], batch size: 26, lr: 1.54e-04 +2022-05-08 05:59:20,884 INFO [train.py:715] (6/8) Epoch 14, batch 31850, loss[loss=0.1258, simple_loss=0.2046, pruned_loss=0.02349, over 4962.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2089, pruned_loss=0.02987, over 972955.58 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 06:00:01,590 INFO [train.py:715] (6/8) Epoch 14, batch 31900, loss[loss=0.1301, simple_loss=0.1938, pruned_loss=0.03323, over 4755.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.03012, over 972828.50 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:00:40,985 INFO [train.py:715] (6/8) Epoch 14, batch 31950, loss[loss=0.1294, simple_loss=0.2131, pruned_loss=0.02286, over 4938.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03039, over 972651.34 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 06:01:20,569 INFO 
[train.py:715] (6/8) Epoch 14, batch 32000, loss[loss=0.1177, simple_loss=0.195, pruned_loss=0.02026, over 4805.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03009, over 972673.95 frames.], batch size: 25, lr: 1.54e-04 +2022-05-08 06:02:01,143 INFO [train.py:715] (6/8) Epoch 14, batch 32050, loss[loss=0.128, simple_loss=0.2119, pruned_loss=0.02205, over 4912.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03005, over 972256.71 frames.], batch size: 17, lr: 1.54e-04 +2022-05-08 06:02:40,618 INFO [train.py:715] (6/8) Epoch 14, batch 32100, loss[loss=0.1736, simple_loss=0.2395, pruned_loss=0.0539, over 4883.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03019, over 972326.43 frames.], batch size: 22, lr: 1.54e-04 +2022-05-08 06:03:20,382 INFO [train.py:715] (6/8) Epoch 14, batch 32150, loss[loss=0.1197, simple_loss=0.1984, pruned_loss=0.02055, over 4953.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03014, over 972180.05 frames.], batch size: 39, lr: 1.54e-04 +2022-05-08 06:04:00,813 INFO [train.py:715] (6/8) Epoch 14, batch 32200, loss[loss=0.1505, simple_loss=0.2217, pruned_loss=0.03959, over 4975.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03058, over 971950.37 frames.], batch size: 25, lr: 1.54e-04 +2022-05-08 06:04:41,249 INFO [train.py:715] (6/8) Epoch 14, batch 32250, loss[loss=0.1281, simple_loss=0.2026, pruned_loss=0.02679, over 4952.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03011, over 972254.59 frames.], batch size: 35, lr: 1.54e-04 +2022-05-08 06:05:20,520 INFO [train.py:715] (6/8) Epoch 14, batch 32300, loss[loss=0.1642, simple_loss=0.2443, pruned_loss=0.04207, over 4819.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02993, over 972239.00 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 06:06:00,153 INFO [train.py:715] (6/8) Epoch 14, batch 32350, loss[loss=0.1117, simple_loss=0.1923, pruned_loss=0.01561, over 4828.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02988, over 971128.58 frames.], batch size: 27, lr: 1.54e-04 +2022-05-08 06:06:40,261 INFO [train.py:715] (6/8) Epoch 14, batch 32400, loss[loss=0.1488, simple_loss=0.2299, pruned_loss=0.03385, over 4795.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03035, over 971005.79 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 06:07:19,956 INFO [train.py:715] (6/8) Epoch 14, batch 32450, loss[loss=0.1266, simple_loss=0.2014, pruned_loss=0.0259, over 4959.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03069, over 969854.98 frames.], batch size: 14, lr: 1.54e-04 +2022-05-08 06:07:59,621 INFO [train.py:715] (6/8) Epoch 14, batch 32500, loss[loss=0.141, simple_loss=0.2067, pruned_loss=0.03767, over 4895.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03064, over 970764.25 frames.], batch size: 17, lr: 1.54e-04 +2022-05-08 06:08:39,990 INFO [train.py:715] (6/8) Epoch 14, batch 32550, loss[loss=0.1653, simple_loss=0.2314, pruned_loss=0.0496, over 4830.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03033, over 970723.24 frames.], batch size: 30, lr: 1.54e-04 +2022-05-08 06:09:20,736 INFO [train.py:715] (6/8) Epoch 14, batch 32600, loss[loss=0.1202, simple_loss=0.1995, pruned_loss=0.02046, over 4947.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03041, over 971237.62 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 06:10:00,315 INFO [train.py:715] 
(6/8) Epoch 14, batch 32650, loss[loss=0.1246, simple_loss=0.1989, pruned_loss=0.02516, over 4877.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03052, over 971704.74 frames.], batch size: 32, lr: 1.54e-04 +2022-05-08 06:10:43,609 INFO [train.py:715] (6/8) Epoch 14, batch 32700, loss[loss=0.1219, simple_loss=0.1893, pruned_loss=0.02729, over 4985.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03002, over 971712.30 frames.], batch size: 28, lr: 1.54e-04 +2022-05-08 06:11:24,772 INFO [train.py:715] (6/8) Epoch 14, batch 32750, loss[loss=0.1207, simple_loss=0.1976, pruned_loss=0.02195, over 4944.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02963, over 971872.78 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 06:12:05,089 INFO [train.py:715] (6/8) Epoch 14, batch 32800, loss[loss=0.1259, simple_loss=0.2027, pruned_loss=0.02451, over 4664.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02932, over 971965.10 frames.], batch size: 14, lr: 1.54e-04 +2022-05-08 06:12:45,516 INFO [train.py:715] (6/8) Epoch 14, batch 32850, loss[loss=0.1418, simple_loss=0.2175, pruned_loss=0.03299, over 4741.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03007, over 971816.39 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:13:26,812 INFO [train.py:715] (6/8) Epoch 14, batch 32900, loss[loss=0.1668, simple_loss=0.2305, pruned_loss=0.05158, over 4942.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.03011, over 972024.78 frames.], batch size: 35, lr: 1.54e-04 +2022-05-08 06:14:07,979 INFO [train.py:715] (6/8) Epoch 14, batch 32950, loss[loss=0.1171, simple_loss=0.1878, pruned_loss=0.02316, over 4824.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2082, pruned_loss=0.03084, over 972545.03 frames.], batch size: 13, lr: 1.54e-04 +2022-05-08 06:14:47,670 INFO [train.py:715] (6/8) Epoch 14, batch 33000, loss[loss=0.1324, simple_loss=0.2111, pruned_loss=0.02687, over 4746.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2082, pruned_loss=0.03083, over 972363.79 frames.], batch size: 12, lr: 1.54e-04 +2022-05-08 06:14:47,671 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 06:15:25,560 INFO [train.py:742] (6/8) Epoch 14, validation: loss=0.1051, simple_loss=0.1889, pruned_loss=0.01071, over 914524.00 frames. 
+2022-05-08 06:16:05,302 INFO [train.py:715] (6/8) Epoch 14, batch 33050, loss[loss=0.1292, simple_loss=0.2111, pruned_loss=0.02366, over 4978.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2084, pruned_loss=0.03092, over 971805.71 frames.], batch size: 26, lr: 1.54e-04 +2022-05-08 06:16:46,159 INFO [train.py:715] (6/8) Epoch 14, batch 33100, loss[loss=0.1285, simple_loss=0.1897, pruned_loss=0.03363, over 4830.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2081, pruned_loss=0.03071, over 971182.54 frames.], batch size: 12, lr: 1.54e-04 +2022-05-08 06:17:27,373 INFO [train.py:715] (6/8) Epoch 14, batch 33150, loss[loss=0.1439, simple_loss=0.2175, pruned_loss=0.03515, over 4734.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03061, over 971928.10 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:18:07,431 INFO [train.py:715] (6/8) Epoch 14, batch 33200, loss[loss=0.1233, simple_loss=0.1911, pruned_loss=0.02773, over 4785.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03067, over 972083.97 frames.], batch size: 12, lr: 1.54e-04 +2022-05-08 06:18:47,778 INFO [train.py:715] (6/8) Epoch 14, batch 33250, loss[loss=0.1074, simple_loss=0.1839, pruned_loss=0.01543, over 4934.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03088, over 972468.92 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 06:19:28,530 INFO [train.py:715] (6/8) Epoch 14, batch 33300, loss[loss=0.1591, simple_loss=0.2276, pruned_loss=0.04529, over 4973.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03152, over 972706.62 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 06:20:09,726 INFO [train.py:715] (6/8) Epoch 14, batch 33350, loss[loss=0.1411, simple_loss=0.2145, pruned_loss=0.0339, over 4728.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03096, over 972440.12 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:20:49,903 INFO [train.py:715] (6/8) Epoch 14, batch 33400, loss[loss=0.1494, simple_loss=0.2131, pruned_loss=0.04281, over 4957.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2099, pruned_loss=0.03092, over 972573.00 frames.], batch size: 35, lr: 1.54e-04 +2022-05-08 06:21:30,273 INFO [train.py:715] (6/8) Epoch 14, batch 33450, loss[loss=0.1595, simple_loss=0.2418, pruned_loss=0.03859, over 4815.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2105, pruned_loss=0.03149, over 972464.71 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 06:22:11,512 INFO [train.py:715] (6/8) Epoch 14, batch 33500, loss[loss=0.1353, simple_loss=0.2056, pruned_loss=0.03253, over 4964.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03134, over 972245.29 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 06:22:51,812 INFO [train.py:715] (6/8) Epoch 14, batch 33550, loss[loss=0.133, simple_loss=0.2076, pruned_loss=0.02925, over 4972.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03105, over 971222.10 frames.], batch size: 28, lr: 1.54e-04 +2022-05-08 06:23:33,010 INFO [train.py:715] (6/8) Epoch 14, batch 33600, loss[loss=0.1237, simple_loss=0.2017, pruned_loss=0.02281, over 4959.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03057, over 971078.73 frames.], batch size: 24, lr: 1.54e-04 +2022-05-08 06:24:14,054 INFO [train.py:715] (6/8) Epoch 14, batch 33650, loss[loss=0.1177, simple_loss=0.1974, pruned_loss=0.01902, over 4898.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03053, over 971323.80 frames.], batch size: 17, lr: 1.54e-04 
+2022-05-08 06:24:54,975 INFO [train.py:715] (6/8) Epoch 14, batch 33700, loss[loss=0.1464, simple_loss=0.2254, pruned_loss=0.0337, over 4878.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03082, over 971854.51 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 06:25:35,116 INFO [train.py:715] (6/8) Epoch 14, batch 33750, loss[loss=0.1566, simple_loss=0.2269, pruned_loss=0.04311, over 4834.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03078, over 972272.49 frames.], batch size: 20, lr: 1.54e-04 +2022-05-08 06:26:15,689 INFO [train.py:715] (6/8) Epoch 14, batch 33800, loss[loss=0.1149, simple_loss=0.191, pruned_loss=0.01933, over 4916.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03126, over 972483.56 frames.], batch size: 23, lr: 1.54e-04 +2022-05-08 06:26:56,939 INFO [train.py:715] (6/8) Epoch 14, batch 33850, loss[loss=0.118, simple_loss=0.1867, pruned_loss=0.02467, over 4833.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03094, over 972177.94 frames.], batch size: 13, lr: 1.54e-04 +2022-05-08 06:27:37,013 INFO [train.py:715] (6/8) Epoch 14, batch 33900, loss[loss=0.1188, simple_loss=0.1936, pruned_loss=0.02198, over 4922.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03078, over 972488.62 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 06:28:17,553 INFO [train.py:715] (6/8) Epoch 14, batch 33950, loss[loss=0.1836, simple_loss=0.2382, pruned_loss=0.06449, over 4853.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2087, pruned_loss=0.03101, over 971683.09 frames.], batch size: 32, lr: 1.54e-04 +2022-05-08 06:28:58,273 INFO [train.py:715] (6/8) Epoch 14, batch 34000, loss[loss=0.1844, simple_loss=0.2583, pruned_loss=0.05529, over 4974.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.03108, over 971649.09 frames.], batch size: 39, lr: 1.54e-04 +2022-05-08 06:29:39,256 INFO [train.py:715] (6/8) Epoch 14, batch 34050, loss[loss=0.1359, simple_loss=0.2013, pruned_loss=0.03529, over 4961.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.03089, over 971528.52 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 06:30:19,213 INFO [train.py:715] (6/8) Epoch 14, batch 34100, loss[loss=0.1217, simple_loss=0.1954, pruned_loss=0.02399, over 4963.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2094, pruned_loss=0.03064, over 970904.34 frames.], batch size: 24, lr: 1.54e-04 +2022-05-08 06:30:59,706 INFO [train.py:715] (6/8) Epoch 14, batch 34150, loss[loss=0.1194, simple_loss=0.1902, pruned_loss=0.02428, over 4971.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03081, over 971150.08 frames.], batch size: 31, lr: 1.54e-04 +2022-05-08 06:31:40,140 INFO [train.py:715] (6/8) Epoch 14, batch 34200, loss[loss=0.1265, simple_loss=0.1943, pruned_loss=0.02938, over 4903.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03042, over 971525.33 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 06:32:20,303 INFO [train.py:715] (6/8) Epoch 14, batch 34250, loss[loss=0.1582, simple_loss=0.2183, pruned_loss=0.04904, over 4774.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03045, over 971463.57 frames.], batch size: 17, lr: 1.54e-04 +2022-05-08 06:33:00,840 INFO [train.py:715] (6/8) Epoch 14, batch 34300, loss[loss=0.1126, simple_loss=0.1869, pruned_loss=0.01918, over 4804.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.03001, over 972154.20 frames.], batch size: 21, lr: 1.54e-04 
+2022-05-08 06:33:41,487 INFO [train.py:715] (6/8) Epoch 14, batch 34350, loss[loss=0.09685, simple_loss=0.1679, pruned_loss=0.01291, over 4834.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03006, over 971739.17 frames.], batch size: 13, lr: 1.54e-04 +2022-05-08 06:34:22,188 INFO [train.py:715] (6/8) Epoch 14, batch 34400, loss[loss=0.1552, simple_loss=0.2077, pruned_loss=0.05139, over 4794.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03042, over 972387.72 frames.], batch size: 12, lr: 1.54e-04 +2022-05-08 06:35:01,774 INFO [train.py:715] (6/8) Epoch 14, batch 34450, loss[loss=0.1529, simple_loss=0.2205, pruned_loss=0.04262, over 4989.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03089, over 972368.70 frames.], batch size: 31, lr: 1.54e-04 +2022-05-08 06:35:42,630 INFO [train.py:715] (6/8) Epoch 14, batch 34500, loss[loss=0.1502, simple_loss=0.2197, pruned_loss=0.04029, over 4808.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03079, over 971557.72 frames.], batch size: 17, lr: 1.54e-04 +2022-05-08 06:36:23,327 INFO [train.py:715] (6/8) Epoch 14, batch 34550, loss[loss=0.133, simple_loss=0.2045, pruned_loss=0.03077, over 4754.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2096, pruned_loss=0.03068, over 971452.85 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:37:03,436 INFO [train.py:715] (6/8) Epoch 14, batch 34600, loss[loss=0.135, simple_loss=0.2057, pruned_loss=0.03213, over 4898.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03085, over 972042.95 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 06:37:43,655 INFO [train.py:715] (6/8) Epoch 14, batch 34650, loss[loss=0.137, simple_loss=0.2045, pruned_loss=0.03473, over 4927.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2093, pruned_loss=0.03064, over 972361.80 frames.], batch size: 23, lr: 1.54e-04 +2022-05-08 06:38:24,400 INFO [train.py:715] (6/8) Epoch 14, batch 34700, loss[loss=0.128, simple_loss=0.2009, pruned_loss=0.02754, over 4965.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2094, pruned_loss=0.03044, over 971939.73 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 06:39:03,270 INFO [train.py:715] (6/8) Epoch 14, batch 34750, loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03349, over 4840.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2095, pruned_loss=0.03018, over 972037.86 frames.], batch size: 13, lr: 1.54e-04 +2022-05-08 06:39:40,048 INFO [train.py:715] (6/8) Epoch 14, batch 34800, loss[loss=0.188, simple_loss=0.2635, pruned_loss=0.05631, over 4782.00 frames.], tot_loss[loss=0.1356, simple_loss=0.21, pruned_loss=0.03058, over 972251.66 frames.], batch size: 14, lr: 1.54e-04 +2022-05-08 06:40:33,617 INFO [train.py:715] (6/8) Epoch 15, batch 0, loss[loss=0.111, simple_loss=0.1745, pruned_loss=0.02373, over 4801.00 frames.], tot_loss[loss=0.111, simple_loss=0.1745, pruned_loss=0.02373, over 4801.00 frames.], batch size: 12, lr: 1.49e-04 +2022-05-08 06:41:12,925 INFO [train.py:715] (6/8) Epoch 15, batch 50, loss[loss=0.1255, simple_loss=0.1962, pruned_loss=0.02737, over 4941.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02924, over 219218.75 frames.], batch size: 23, lr: 1.49e-04 +2022-05-08 06:41:54,169 INFO [train.py:715] (6/8) Epoch 15, batch 100, loss[loss=0.1296, simple_loss=0.1881, pruned_loss=0.03551, over 4976.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03035, over 386357.37 frames.], batch size: 31, lr: 1.49e-04 +2022-05-08 06:42:35,664 
INFO [train.py:715] (6/8) Epoch 15, batch 150, loss[loss=0.1544, simple_loss=0.2208, pruned_loss=0.04403, over 4883.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.03025, over 515963.97 frames.], batch size: 32, lr: 1.49e-04 +2022-05-08 06:43:15,922 INFO [train.py:715] (6/8) Epoch 15, batch 200, loss[loss=0.1565, simple_loss=0.23, pruned_loss=0.0415, over 4969.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2098, pruned_loss=0.03082, over 617077.78 frames.], batch size: 24, lr: 1.49e-04 +2022-05-08 06:43:56,390 INFO [train.py:715] (6/8) Epoch 15, batch 250, loss[loss=0.1202, simple_loss=0.1925, pruned_loss=0.024, over 4941.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2095, pruned_loss=0.03044, over 695467.25 frames.], batch size: 18, lr: 1.49e-04 +2022-05-08 06:44:37,771 INFO [train.py:715] (6/8) Epoch 15, batch 300, loss[loss=0.1449, simple_loss=0.2108, pruned_loss=0.03955, over 4880.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03025, over 757778.33 frames.], batch size: 39, lr: 1.49e-04 +2022-05-08 06:45:18,791 INFO [train.py:715] (6/8) Epoch 15, batch 350, loss[loss=0.1357, simple_loss=0.2194, pruned_loss=0.02599, over 4915.00 frames.], tot_loss[loss=0.134, simple_loss=0.2085, pruned_loss=0.02978, over 806128.52 frames.], batch size: 18, lr: 1.49e-04 +2022-05-08 06:45:58,478 INFO [train.py:715] (6/8) Epoch 15, batch 400, loss[loss=0.1572, simple_loss=0.2335, pruned_loss=0.0405, over 4807.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2093, pruned_loss=0.0301, over 842827.62 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 06:46:39,372 INFO [train.py:715] (6/8) Epoch 15, batch 450, loss[loss=0.1522, simple_loss=0.2304, pruned_loss=0.03698, over 4767.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2085, pruned_loss=0.02965, over 871782.75 frames.], batch size: 18, lr: 1.49e-04 +2022-05-08 06:47:20,106 INFO [train.py:715] (6/8) Epoch 15, batch 500, loss[loss=0.1141, simple_loss=0.1916, pruned_loss=0.01836, over 4972.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2083, pruned_loss=0.02964, over 894401.24 frames.], batch size: 14, lr: 1.49e-04 +2022-05-08 06:48:00,515 INFO [train.py:715] (6/8) Epoch 15, batch 550, loss[loss=0.1507, simple_loss=0.2264, pruned_loss=0.03751, over 4806.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2089, pruned_loss=0.02996, over 911988.79 frames.], batch size: 21, lr: 1.49e-04 +2022-05-08 06:48:40,071 INFO [train.py:715] (6/8) Epoch 15, batch 600, loss[loss=0.1209, simple_loss=0.1906, pruned_loss=0.02561, over 4740.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.0308, over 925951.90 frames.], batch size: 12, lr: 1.49e-04 +2022-05-08 06:49:21,145 INFO [train.py:715] (6/8) Epoch 15, batch 650, loss[loss=0.1225, simple_loss=0.1896, pruned_loss=0.02766, over 4793.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.03118, over 936064.20 frames.], batch size: 12, lr: 1.49e-04 +2022-05-08 06:50:01,511 INFO [train.py:715] (6/8) Epoch 15, batch 700, loss[loss=0.1441, simple_loss=0.2152, pruned_loss=0.03653, over 4981.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03082, over 944004.30 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 06:50:41,533 INFO [train.py:715] (6/8) Epoch 15, batch 750, loss[loss=0.1227, simple_loss=0.201, pruned_loss=0.02224, over 4868.00 frames.], tot_loss[loss=0.136, simple_loss=0.21, pruned_loss=0.03098, over 950564.07 frames.], batch size: 16, lr: 1.49e-04 +2022-05-08 06:51:22,007 INFO [train.py:715] (6/8) Epoch 15, batch 
800, loss[loss=0.1271, simple_loss=0.1944, pruned_loss=0.02985, over 4987.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03089, over 954925.10 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 06:52:02,790 INFO [train.py:715] (6/8) Epoch 15, batch 850, loss[loss=0.1336, simple_loss=0.202, pruned_loss=0.03264, over 4910.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03025, over 958996.23 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 06:52:43,863 INFO [train.py:715] (6/8) Epoch 15, batch 900, loss[loss=0.1601, simple_loss=0.2287, pruned_loss=0.04575, over 4774.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.03016, over 962122.93 frames.], batch size: 14, lr: 1.49e-04 +2022-05-08 06:53:23,534 INFO [train.py:715] (6/8) Epoch 15, batch 950, loss[loss=0.135, simple_loss=0.2201, pruned_loss=0.02501, over 4789.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.02999, over 964550.46 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 06:54:04,074 INFO [train.py:715] (6/8) Epoch 15, batch 1000, loss[loss=0.1401, simple_loss=0.2133, pruned_loss=0.03346, over 4858.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.0299, over 965216.28 frames.], batch size: 13, lr: 1.49e-04 +2022-05-08 06:54:44,305 INFO [train.py:715] (6/8) Epoch 15, batch 1050, loss[loss=0.1221, simple_loss=0.1983, pruned_loss=0.02299, over 4946.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03035, over 967309.92 frames.], batch size: 21, lr: 1.49e-04 +2022-05-08 06:55:23,589 INFO [train.py:715] (6/8) Epoch 15, batch 1100, loss[loss=0.1197, simple_loss=0.1924, pruned_loss=0.02353, over 4885.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03014, over 969084.78 frames.], batch size: 22, lr: 1.49e-04 +2022-05-08 06:56:04,692 INFO [train.py:715] (6/8) Epoch 15, batch 1150, loss[loss=0.1205, simple_loss=0.1919, pruned_loss=0.02457, over 4809.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02976, over 969840.76 frames.], batch size: 13, lr: 1.49e-04 +2022-05-08 06:56:45,832 INFO [train.py:715] (6/8) Epoch 15, batch 1200, loss[loss=0.1861, simple_loss=0.2387, pruned_loss=0.06676, over 4932.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02976, over 970670.17 frames.], batch size: 18, lr: 1.49e-04 +2022-05-08 06:57:26,543 INFO [train.py:715] (6/8) Epoch 15, batch 1250, loss[loss=0.1429, simple_loss=0.2189, pruned_loss=0.03348, over 4925.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02978, over 970870.28 frames.], batch size: 18, lr: 1.49e-04 +2022-05-08 06:58:06,006 INFO [train.py:715] (6/8) Epoch 15, batch 1300, loss[loss=0.1206, simple_loss=0.2039, pruned_loss=0.01868, over 4922.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.02968, over 970599.00 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 06:58:46,687 INFO [train.py:715] (6/8) Epoch 15, batch 1350, loss[loss=0.1632, simple_loss=0.2239, pruned_loss=0.0513, over 4983.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03022, over 971349.31 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 06:59:27,347 INFO [train.py:715] (6/8) Epoch 15, batch 1400, loss[loss=0.1142, simple_loss=0.1934, pruned_loss=0.0175, over 4977.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03036, over 972227.60 frames.], batch size: 28, lr: 1.49e-04 +2022-05-08 07:00:07,539 INFO [train.py:715] (6/8) Epoch 15, batch 1450, loss[loss=0.1323, 
simple_loss=0.2111, pruned_loss=0.02673, over 4900.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02996, over 972191.37 frames.], batch size: 22, lr: 1.49e-04 +2022-05-08 07:00:47,346 INFO [train.py:715] (6/8) Epoch 15, batch 1500, loss[loss=0.09867, simple_loss=0.1706, pruned_loss=0.01337, over 4935.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.0301, over 972420.07 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 07:01:28,522 INFO [train.py:715] (6/8) Epoch 15, batch 1550, loss[loss=0.1222, simple_loss=0.1949, pruned_loss=0.02479, over 4989.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02969, over 972318.31 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 07:02:08,735 INFO [train.py:715] (6/8) Epoch 15, batch 1600, loss[loss=0.1418, simple_loss=0.2132, pruned_loss=0.03518, over 4911.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2077, pruned_loss=0.03034, over 972588.59 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 07:02:47,787 INFO [train.py:715] (6/8) Epoch 15, batch 1650, loss[loss=0.13, simple_loss=0.1989, pruned_loss=0.03049, over 4953.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03038, over 972865.60 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 07:03:28,308 INFO [train.py:715] (6/8) Epoch 15, batch 1700, loss[loss=0.1373, simple_loss=0.2133, pruned_loss=0.03063, over 4874.00 frames.], tot_loss[loss=0.135, simple_loss=0.2082, pruned_loss=0.03085, over 972762.08 frames.], batch size: 32, lr: 1.49e-04 +2022-05-08 07:04:08,889 INFO [train.py:715] (6/8) Epoch 15, batch 1750, loss[loss=0.1558, simple_loss=0.2293, pruned_loss=0.04111, over 4992.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03067, over 972887.22 frames.], batch size: 28, lr: 1.49e-04 +2022-05-08 07:04:48,969 INFO [train.py:715] (6/8) Epoch 15, batch 1800, loss[loss=0.1239, simple_loss=0.2062, pruned_loss=0.02078, over 4894.00 frames.], tot_loss[loss=0.1345, simple_loss=0.208, pruned_loss=0.03056, over 972516.81 frames.], batch size: 19, lr: 1.49e-04 +2022-05-08 07:05:28,947 INFO [train.py:715] (6/8) Epoch 15, batch 1850, loss[loss=0.1371, simple_loss=0.1989, pruned_loss=0.03766, over 4930.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.0306, over 972411.93 frames.], batch size: 18, lr: 1.49e-04 +2022-05-08 07:06:09,788 INFO [train.py:715] (6/8) Epoch 15, batch 1900, loss[loss=0.1652, simple_loss=0.233, pruned_loss=0.04872, over 4984.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03134, over 972185.98 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 07:06:50,239 INFO [train.py:715] (6/8) Epoch 15, batch 1950, loss[loss=0.1261, simple_loss=0.2036, pruned_loss=0.02428, over 4754.00 frames.], tot_loss[loss=0.135, simple_loss=0.2084, pruned_loss=0.03075, over 972754.90 frames.], batch size: 12, lr: 1.49e-04 +2022-05-08 07:07:29,410 INFO [train.py:715] (6/8) Epoch 15, batch 2000, loss[loss=0.1181, simple_loss=0.1944, pruned_loss=0.02091, over 4784.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03082, over 972864.47 frames.], batch size: 18, lr: 1.49e-04 +2022-05-08 07:08:10,507 INFO [train.py:715] (6/8) Epoch 15, batch 2050, loss[loss=0.1116, simple_loss=0.1886, pruned_loss=0.01732, over 4917.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03089, over 972889.02 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 07:08:50,819 INFO [train.py:715] (6/8) Epoch 15, batch 2100, loss[loss=0.09772, simple_loss=0.1644, 
pruned_loss=0.01552, over 4821.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03026, over 972311.63 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:09:30,727 INFO [train.py:715] (6/8) Epoch 15, batch 2150, loss[loss=0.1379, simple_loss=0.2123, pruned_loss=0.03172, over 4855.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.0301, over 971950.83 frames.], batch size: 34, lr: 1.49e-04 +2022-05-08 07:10:10,981 INFO [train.py:715] (6/8) Epoch 15, batch 2200, loss[loss=0.1319, simple_loss=0.2129, pruned_loss=0.02544, over 4794.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03002, over 972256.96 frames.], batch size: 21, lr: 1.49e-04 +2022-05-08 07:10:51,408 INFO [train.py:715] (6/8) Epoch 15, batch 2250, loss[loss=0.131, simple_loss=0.1996, pruned_loss=0.03119, over 4950.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02975, over 972211.40 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:11:31,553 INFO [train.py:715] (6/8) Epoch 15, batch 2300, loss[loss=0.1381, simple_loss=0.2149, pruned_loss=0.03069, over 4981.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02971, over 971644.20 frames.], batch size: 24, lr: 1.49e-04 +2022-05-08 07:12:11,047 INFO [train.py:715] (6/8) Epoch 15, batch 2350, loss[loss=0.1127, simple_loss=0.1919, pruned_loss=0.01676, over 4888.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02941, over 972376.85 frames.], batch size: 22, lr: 1.49e-04 +2022-05-08 07:12:51,322 INFO [train.py:715] (6/8) Epoch 15, batch 2400, loss[loss=0.1455, simple_loss=0.2199, pruned_loss=0.03556, over 4823.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02972, over 973191.22 frames.], batch size: 26, lr: 1.49e-04 +2022-05-08 07:13:31,564 INFO [train.py:715] (6/8) Epoch 15, batch 2450, loss[loss=0.1495, simple_loss=0.224, pruned_loss=0.03753, over 4875.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02991, over 972210.60 frames.], batch size: 16, lr: 1.49e-04 +2022-05-08 07:14:11,492 INFO [train.py:715] (6/8) Epoch 15, batch 2500, loss[loss=0.1415, simple_loss=0.2159, pruned_loss=0.03354, over 4771.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.0304, over 972313.71 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 07:14:50,610 INFO [train.py:715] (6/8) Epoch 15, batch 2550, loss[loss=0.1255, simple_loss=0.2035, pruned_loss=0.02372, over 4931.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.03011, over 972182.18 frames.], batch size: 23, lr: 1.49e-04 +2022-05-08 07:15:31,417 INFO [train.py:715] (6/8) Epoch 15, batch 2600, loss[loss=0.1237, simple_loss=0.1982, pruned_loss=0.02462, over 4815.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.0301, over 972146.29 frames.], batch size: 26, lr: 1.49e-04 +2022-05-08 07:16:12,108 INFO [train.py:715] (6/8) Epoch 15, batch 2650, loss[loss=0.1239, simple_loss=0.1969, pruned_loss=0.02539, over 4990.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02985, over 972401.95 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 07:16:51,599 INFO [train.py:715] (6/8) Epoch 15, batch 2700, loss[loss=0.1356, simple_loss=0.214, pruned_loss=0.02854, over 4823.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.02959, over 973077.01 frames.], batch size: 27, lr: 1.49e-04 +2022-05-08 07:17:33,121 INFO [train.py:715] (6/8) Epoch 15, batch 2750, loss[loss=0.1301, simple_loss=0.2158, pruned_loss=0.02223, over 
4817.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02973, over 972937.64 frames.], batch size: 26, lr: 1.49e-04 +2022-05-08 07:18:14,191 INFO [train.py:715] (6/8) Epoch 15, batch 2800, loss[loss=0.1504, simple_loss=0.2329, pruned_loss=0.034, over 4978.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02974, over 973257.54 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:18:54,891 INFO [train.py:715] (6/8) Epoch 15, batch 2850, loss[loss=0.1172, simple_loss=0.1914, pruned_loss=0.02146, over 4822.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.03005, over 972429.14 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 07:19:34,219 INFO [train.py:715] (6/8) Epoch 15, batch 2900, loss[loss=0.1498, simple_loss=0.2102, pruned_loss=0.04471, over 4816.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03022, over 972910.21 frames.], batch size: 13, lr: 1.49e-04 +2022-05-08 07:20:14,832 INFO [train.py:715] (6/8) Epoch 15, batch 2950, loss[loss=0.1561, simple_loss=0.2221, pruned_loss=0.045, over 4955.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03068, over 972899.47 frames.], batch size: 24, lr: 1.49e-04 +2022-05-08 07:20:55,622 INFO [train.py:715] (6/8) Epoch 15, batch 3000, loss[loss=0.1383, simple_loss=0.2139, pruned_loss=0.03134, over 4953.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.031, over 972269.50 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 07:20:55,623 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 07:21:13,096 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.1049, simple_loss=0.1887, pruned_loss=0.01057, over 914524.00 frames. +2022-05-08 07:21:54,027 INFO [train.py:715] (6/8) Epoch 15, batch 3050, loss[loss=0.1673, simple_loss=0.243, pruned_loss=0.04582, over 4810.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.03103, over 972544.55 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 07:22:33,944 INFO [train.py:715] (6/8) Epoch 15, batch 3100, loss[loss=0.1437, simple_loss=0.2137, pruned_loss=0.03683, over 4975.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.03103, over 973457.64 frames.], batch size: 35, lr: 1.49e-04 +2022-05-08 07:23:14,662 INFO [train.py:715] (6/8) Epoch 15, batch 3150, loss[loss=0.1034, simple_loss=0.1788, pruned_loss=0.01401, over 4702.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2076, pruned_loss=0.0307, over 972451.87 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:23:55,203 INFO [train.py:715] (6/8) Epoch 15, batch 3200, loss[loss=0.1346, simple_loss=0.2052, pruned_loss=0.03204, over 4915.00 frames.], tot_loss[loss=0.135, simple_loss=0.2083, pruned_loss=0.03084, over 972456.55 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 07:24:35,386 INFO [train.py:715] (6/8) Epoch 15, batch 3250, loss[loss=0.1386, simple_loss=0.1993, pruned_loss=0.03889, over 4808.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.03097, over 972834.86 frames.], batch size: 12, lr: 1.49e-04 +2022-05-08 07:25:15,327 INFO [train.py:715] (6/8) Epoch 15, batch 3300, loss[loss=0.1385, simple_loss=0.2205, pruned_loss=0.02827, over 4683.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2086, pruned_loss=0.03092, over 971859.15 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:25:56,112 INFO [train.py:715] (6/8) Epoch 15, batch 3350, loss[loss=0.1551, simple_loss=0.2307, pruned_loss=0.03974, over 4914.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, 
pruned_loss=0.03054, over 972195.89 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 07:26:36,439 INFO [train.py:715] (6/8) Epoch 15, batch 3400, loss[loss=0.1539, simple_loss=0.2292, pruned_loss=0.03931, over 4856.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2092, pruned_loss=0.03027, over 971859.15 frames.], batch size: 20, lr: 1.49e-04 +2022-05-08 07:27:16,669 INFO [train.py:715] (6/8) Epoch 15, batch 3450, loss[loss=0.1336, simple_loss=0.2045, pruned_loss=0.03134, over 4828.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2099, pruned_loss=0.03037, over 972042.66 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:27:56,933 INFO [train.py:715] (6/8) Epoch 15, batch 3500, loss[loss=0.1439, simple_loss=0.218, pruned_loss=0.03487, over 4953.00 frames.], tot_loss[loss=0.1354, simple_loss=0.21, pruned_loss=0.03045, over 972645.99 frames.], batch size: 39, lr: 1.49e-04 +2022-05-08 07:28:37,345 INFO [train.py:715] (6/8) Epoch 15, batch 3550, loss[loss=0.1486, simple_loss=0.2167, pruned_loss=0.0403, over 4911.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2106, pruned_loss=0.03102, over 972891.42 frames.], batch size: 35, lr: 1.49e-04 +2022-05-08 07:29:17,859 INFO [train.py:715] (6/8) Epoch 15, batch 3600, loss[loss=0.1327, simple_loss=0.2078, pruned_loss=0.02882, over 4764.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2106, pruned_loss=0.031, over 973108.47 frames.], batch size: 19, lr: 1.49e-04 +2022-05-08 07:29:57,639 INFO [train.py:715] (6/8) Epoch 15, batch 3650, loss[loss=0.1499, simple_loss=0.2219, pruned_loss=0.03888, over 4910.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.03054, over 972594.93 frames.], batch size: 39, lr: 1.48e-04 +2022-05-08 07:30:38,284 INFO [train.py:715] (6/8) Epoch 15, batch 3700, loss[loss=0.1361, simple_loss=0.2123, pruned_loss=0.02996, over 4697.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03057, over 972620.30 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 07:31:19,139 INFO [train.py:715] (6/8) Epoch 15, batch 3750, loss[loss=0.1526, simple_loss=0.226, pruned_loss=0.03965, over 4882.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03021, over 973623.13 frames.], batch size: 22, lr: 1.48e-04 +2022-05-08 07:31:58,801 INFO [train.py:715] (6/8) Epoch 15, batch 3800, loss[loss=0.1382, simple_loss=0.2059, pruned_loss=0.03521, over 4840.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03037, over 974123.01 frames.], batch size: 30, lr: 1.48e-04 +2022-05-08 07:32:38,812 INFO [train.py:715] (6/8) Epoch 15, batch 3850, loss[loss=0.1528, simple_loss=0.2242, pruned_loss=0.04072, over 4884.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03053, over 973448.00 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 07:33:19,086 INFO [train.py:715] (6/8) Epoch 15, batch 3900, loss[loss=0.1134, simple_loss=0.1829, pruned_loss=0.02195, over 4985.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2076, pruned_loss=0.03042, over 972832.12 frames.], batch size: 28, lr: 1.48e-04 +2022-05-08 07:33:58,253 INFO [train.py:715] (6/8) Epoch 15, batch 3950, loss[loss=0.1523, simple_loss=0.2298, pruned_loss=0.03742, over 4921.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03066, over 972372.06 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 07:34:37,996 INFO [train.py:715] (6/8) Epoch 15, batch 4000, loss[loss=0.119, simple_loss=0.1916, pruned_loss=0.02319, over 4881.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03098, over 
973077.33 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 07:35:17,776 INFO [train.py:715] (6/8) Epoch 15, batch 4050, loss[loss=0.1272, simple_loss=0.2029, pruned_loss=0.0258, over 4828.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03083, over 973036.57 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 07:35:58,784 INFO [train.py:715] (6/8) Epoch 15, batch 4100, loss[loss=0.1226, simple_loss=0.2047, pruned_loss=0.02026, over 4961.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03062, over 973204.57 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 07:36:37,615 INFO [train.py:715] (6/8) Epoch 15, batch 4150, loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03064, over 4759.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03079, over 972608.01 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 07:37:17,781 INFO [train.py:715] (6/8) Epoch 15, batch 4200, loss[loss=0.1157, simple_loss=0.191, pruned_loss=0.02016, over 4929.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03065, over 972160.05 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 07:37:58,204 INFO [train.py:715] (6/8) Epoch 15, batch 4250, loss[loss=0.1389, simple_loss=0.2276, pruned_loss=0.02511, over 4756.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03069, over 972082.37 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 07:38:38,208 INFO [train.py:715] (6/8) Epoch 15, batch 4300, loss[loss=0.151, simple_loss=0.2254, pruned_loss=0.03828, over 4883.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03096, over 971948.32 frames.], batch size: 22, lr: 1.48e-04 +2022-05-08 07:39:18,231 INFO [train.py:715] (6/8) Epoch 15, batch 4350, loss[loss=0.1472, simple_loss=0.2194, pruned_loss=0.03746, over 4913.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2092, pruned_loss=0.03114, over 972268.33 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 07:39:58,273 INFO [train.py:715] (6/8) Epoch 15, batch 4400, loss[loss=0.1283, simple_loss=0.2085, pruned_loss=0.024, over 4805.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2087, pruned_loss=0.03104, over 971888.34 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 07:40:38,807 INFO [train.py:715] (6/8) Epoch 15, batch 4450, loss[loss=0.104, simple_loss=0.1784, pruned_loss=0.01477, over 4931.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2087, pruned_loss=0.03112, over 971424.69 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 07:41:18,469 INFO [train.py:715] (6/8) Epoch 15, batch 4500, loss[loss=0.1377, simple_loss=0.2222, pruned_loss=0.02663, over 4779.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2097, pruned_loss=0.03124, over 971204.60 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 07:41:58,882 INFO [train.py:715] (6/8) Epoch 15, batch 4550, loss[loss=0.1382, simple_loss=0.2127, pruned_loss=0.03184, over 4934.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.03086, over 971944.39 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 07:42:39,506 INFO [train.py:715] (6/8) Epoch 15, batch 4600, loss[loss=0.1371, simple_loss=0.2094, pruned_loss=0.03247, over 4854.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03089, over 971147.71 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 07:43:19,671 INFO [train.py:715] (6/8) Epoch 15, batch 4650, loss[loss=0.1498, simple_loss=0.2264, pruned_loss=0.0366, over 4955.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03122, over 971147.47 frames.], batch 
size: 21, lr: 1.48e-04 +2022-05-08 07:43:59,071 INFO [train.py:715] (6/8) Epoch 15, batch 4700, loss[loss=0.1371, simple_loss=0.2091, pruned_loss=0.03254, over 4920.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03027, over 971481.19 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 07:44:39,328 INFO [train.py:715] (6/8) Epoch 15, batch 4750, loss[loss=0.1324, simple_loss=0.2114, pruned_loss=0.02671, over 4902.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03007, over 972041.95 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 07:45:20,571 INFO [train.py:715] (6/8) Epoch 15, batch 4800, loss[loss=0.1363, simple_loss=0.2187, pruned_loss=0.02694, over 4987.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03007, over 972267.47 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 07:46:00,537 INFO [train.py:715] (6/8) Epoch 15, batch 4850, loss[loss=0.1417, simple_loss=0.2156, pruned_loss=0.03391, over 4938.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03005, over 972585.79 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 07:46:41,242 INFO [train.py:715] (6/8) Epoch 15, batch 4900, loss[loss=0.1272, simple_loss=0.1981, pruned_loss=0.02815, over 4858.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02983, over 972232.80 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 07:47:21,682 INFO [train.py:715] (6/8) Epoch 15, batch 4950, loss[loss=0.1389, simple_loss=0.2037, pruned_loss=0.03711, over 4932.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02984, over 971889.85 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 07:48:02,264 INFO [train.py:715] (6/8) Epoch 15, batch 5000, loss[loss=0.1361, simple_loss=0.2258, pruned_loss=0.0232, over 4876.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2077, pruned_loss=0.03024, over 972050.37 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 07:48:41,756 INFO [train.py:715] (6/8) Epoch 15, batch 5050, loss[loss=0.1208, simple_loss=0.1904, pruned_loss=0.02558, over 4791.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2073, pruned_loss=0.02988, over 972652.77 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 07:49:21,843 INFO [train.py:715] (6/8) Epoch 15, batch 5100, loss[loss=0.1336, simple_loss=0.2025, pruned_loss=0.03235, over 4778.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.02999, over 972411.72 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 07:50:02,146 INFO [train.py:715] (6/8) Epoch 15, batch 5150, loss[loss=0.1536, simple_loss=0.2177, pruned_loss=0.0448, over 4787.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.03006, over 972717.63 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 07:50:42,067 INFO [train.py:715] (6/8) Epoch 15, batch 5200, loss[loss=0.1591, simple_loss=0.2377, pruned_loss=0.0402, over 4702.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03025, over 973001.98 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 07:51:22,088 INFO [train.py:715] (6/8) Epoch 15, batch 5250, loss[loss=0.1299, simple_loss=0.1953, pruned_loss=0.03224, over 4898.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03035, over 972201.21 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 07:52:03,631 INFO [train.py:715] (6/8) Epoch 15, batch 5300, loss[loss=0.152, simple_loss=0.2286, pruned_loss=0.03773, over 4890.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03029, over 971779.51 frames.], batch size: 16, lr: 1.48e-04 
+2022-05-08 07:52:45,867 INFO [train.py:715] (6/8) Epoch 15, batch 5350, loss[loss=0.1626, simple_loss=0.2267, pruned_loss=0.04928, over 4983.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03007, over 971802.42 frames.], batch size: 24, lr: 1.48e-04 +2022-05-08 07:53:26,860 INFO [train.py:715] (6/8) Epoch 15, batch 5400, loss[loss=0.1352, simple_loss=0.2103, pruned_loss=0.03007, over 4812.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.03011, over 971884.17 frames.], batch size: 26, lr: 1.48e-04 +2022-05-08 07:54:08,833 INFO [train.py:715] (6/8) Epoch 15, batch 5450, loss[loss=0.1325, simple_loss=0.2152, pruned_loss=0.02486, over 4823.00 frames.], tot_loss[loss=0.1334, simple_loss=0.207, pruned_loss=0.0299, over 971853.31 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 07:54:50,467 INFO [train.py:715] (6/8) Epoch 15, batch 5500, loss[loss=0.1747, simple_loss=0.245, pruned_loss=0.05223, over 4746.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.03004, over 971863.93 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 07:55:32,149 INFO [train.py:715] (6/8) Epoch 15, batch 5550, loss[loss=0.1437, simple_loss=0.21, pruned_loss=0.0387, over 4913.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02985, over 972982.85 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 07:56:12,917 INFO [train.py:715] (6/8) Epoch 15, batch 5600, loss[loss=0.119, simple_loss=0.2032, pruned_loss=0.01744, over 4855.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.0297, over 973268.34 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 07:56:54,783 INFO [train.py:715] (6/8) Epoch 15, batch 5650, loss[loss=0.107, simple_loss=0.1912, pruned_loss=0.01141, over 4812.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02937, over 973220.50 frames.], batch size: 26, lr: 1.48e-04 +2022-05-08 07:57:37,294 INFO [train.py:715] (6/8) Epoch 15, batch 5700, loss[loss=0.1207, simple_loss=0.1993, pruned_loss=0.02103, over 4948.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02988, over 972651.15 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 07:58:18,522 INFO [train.py:715] (6/8) Epoch 15, batch 5750, loss[loss=0.1331, simple_loss=0.2122, pruned_loss=0.02705, over 4818.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02987, over 973337.04 frames.], batch size: 13, lr: 1.48e-04 +2022-05-08 07:58:59,985 INFO [train.py:715] (6/8) Epoch 15, batch 5800, loss[loss=0.1454, simple_loss=0.2091, pruned_loss=0.04082, over 4891.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03, over 972933.37 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 07:59:41,228 INFO [train.py:715] (6/8) Epoch 15, batch 5850, loss[loss=0.1244, simple_loss=0.2022, pruned_loss=0.0233, over 4695.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2088, pruned_loss=0.02986, over 973175.44 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:00:25,529 INFO [train.py:715] (6/8) Epoch 15, batch 5900, loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03212, over 4929.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02994, over 972745.43 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:01:06,123 INFO [train.py:715] (6/8) Epoch 15, batch 5950, loss[loss=0.1259, simple_loss=0.1979, pruned_loss=0.02689, over 4866.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03014, over 972200.20 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 08:01:47,585 INFO 
[train.py:715] (6/8) Epoch 15, batch 6000, loss[loss=0.1515, simple_loss=0.2095, pruned_loss=0.04676, over 4790.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.02998, over 971886.21 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 08:01:47,586 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 08:01:57,158 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.1051, simple_loss=0.1887, pruned_loss=0.01077, over 914524.00 frames. +2022-05-08 08:02:38,327 INFO [train.py:715] (6/8) Epoch 15, batch 6050, loss[loss=0.141, simple_loss=0.2232, pruned_loss=0.0294, over 4852.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2084, pruned_loss=0.02948, over 971986.83 frames.], batch size: 34, lr: 1.48e-04 +2022-05-08 08:03:20,377 INFO [train.py:715] (6/8) Epoch 15, batch 6100, loss[loss=0.1303, simple_loss=0.2051, pruned_loss=0.02777, over 4849.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2095, pruned_loss=0.0302, over 972345.75 frames.], batch size: 30, lr: 1.48e-04 +2022-05-08 08:04:00,132 INFO [train.py:715] (6/8) Epoch 15, batch 6150, loss[loss=0.1372, simple_loss=0.21, pruned_loss=0.03221, over 4950.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2094, pruned_loss=0.03011, over 972795.38 frames.], batch size: 35, lr: 1.48e-04 +2022-05-08 08:04:41,011 INFO [train.py:715] (6/8) Epoch 15, batch 6200, loss[loss=0.1212, simple_loss=0.1967, pruned_loss=0.02288, over 4937.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.03023, over 972207.23 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:05:20,642 INFO [train.py:715] (6/8) Epoch 15, batch 6250, loss[loss=0.1067, simple_loss=0.184, pruned_loss=0.01468, over 4822.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.0306, over 972009.77 frames.], batch size: 26, lr: 1.48e-04 +2022-05-08 08:06:01,397 INFO [train.py:715] (6/8) Epoch 15, batch 6300, loss[loss=0.1555, simple_loss=0.2428, pruned_loss=0.03409, over 4748.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2081, pruned_loss=0.03053, over 972641.59 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:06:41,236 INFO [train.py:715] (6/8) Epoch 15, batch 6350, loss[loss=0.1297, simple_loss=0.2067, pruned_loss=0.02633, over 4834.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03069, over 972254.81 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:07:21,275 INFO [train.py:715] (6/8) Epoch 15, batch 6400, loss[loss=0.1425, simple_loss=0.2037, pruned_loss=0.04062, over 4857.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03025, over 971161.74 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 08:08:01,849 INFO [train.py:715] (6/8) Epoch 15, batch 6450, loss[loss=0.1378, simple_loss=0.2142, pruned_loss=0.03067, over 4799.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.0302, over 972193.86 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 08:08:41,393 INFO [train.py:715] (6/8) Epoch 15, batch 6500, loss[loss=0.1234, simple_loss=0.1831, pruned_loss=0.03189, over 4773.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02975, over 972535.16 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 08:09:21,822 INFO [train.py:715] (6/8) Epoch 15, batch 6550, loss[loss=0.1788, simple_loss=0.2649, pruned_loss=0.04631, over 4930.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03016, over 972298.44 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:10:02,128 INFO [train.py:715] (6/8) Epoch 15, batch 6600, loss[loss=0.138, 
simple_loss=0.2128, pruned_loss=0.03158, over 4944.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03005, over 972955.24 frames.], batch size: 39, lr: 1.48e-04 +2022-05-08 08:10:42,816 INFO [train.py:715] (6/8) Epoch 15, batch 6650, loss[loss=0.1206, simple_loss=0.2035, pruned_loss=0.01888, over 4810.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.02965, over 972500.85 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 08:11:22,255 INFO [train.py:715] (6/8) Epoch 15, batch 6700, loss[loss=0.134, simple_loss=0.2024, pruned_loss=0.03278, over 4925.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02952, over 972207.36 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:12:02,758 INFO [train.py:715] (6/8) Epoch 15, batch 6750, loss[loss=0.147, simple_loss=0.2156, pruned_loss=0.03914, over 4908.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03014, over 973169.58 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:12:44,120 INFO [train.py:715] (6/8) Epoch 15, batch 6800, loss[loss=0.137, simple_loss=0.2195, pruned_loss=0.02721, over 4751.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02986, over 973213.09 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:13:23,952 INFO [train.py:715] (6/8) Epoch 15, batch 6850, loss[loss=0.1313, simple_loss=0.1988, pruned_loss=0.0319, over 4951.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2066, pruned_loss=0.0295, over 973244.11 frames.], batch size: 35, lr: 1.48e-04 +2022-05-08 08:14:03,537 INFO [train.py:715] (6/8) Epoch 15, batch 6900, loss[loss=0.152, simple_loss=0.2187, pruned_loss=0.0426, over 4844.00 frames.], tot_loss[loss=0.133, simple_loss=0.2067, pruned_loss=0.02958, over 973572.43 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:14:44,362 INFO [train.py:715] (6/8) Epoch 15, batch 6950, loss[loss=0.1339, simple_loss=0.2039, pruned_loss=0.03192, over 4866.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2075, pruned_loss=0.03013, over 974387.91 frames.], batch size: 39, lr: 1.48e-04 +2022-05-08 08:15:24,998 INFO [train.py:715] (6/8) Epoch 15, batch 7000, loss[loss=0.1882, simple_loss=0.2681, pruned_loss=0.05411, over 4763.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03036, over 973150.98 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:16:03,961 INFO [train.py:715] (6/8) Epoch 15, batch 7050, loss[loss=0.1233, simple_loss=0.196, pruned_loss=0.02532, over 4990.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03005, over 972481.80 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 08:16:44,706 INFO [train.py:715] (6/8) Epoch 15, batch 7100, loss[loss=0.1164, simple_loss=0.1896, pruned_loss=0.02158, over 4942.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03052, over 972514.23 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 08:17:25,237 INFO [train.py:715] (6/8) Epoch 15, batch 7150, loss[loss=0.1377, simple_loss=0.2186, pruned_loss=0.02842, over 4846.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03, over 972548.42 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:18:05,136 INFO [train.py:715] (6/8) Epoch 15, batch 7200, loss[loss=0.1256, simple_loss=0.1943, pruned_loss=0.02843, over 4806.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03019, over 972098.08 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:18:44,340 INFO [train.py:715] (6/8) Epoch 15, batch 7250, loss[loss=0.1506, simple_loss=0.2139, 
pruned_loss=0.04366, over 4871.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03046, over 972570.13 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 08:19:25,088 INFO [train.py:715] (6/8) Epoch 15, batch 7300, loss[loss=0.1299, simple_loss=0.2115, pruned_loss=0.02418, over 4962.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03048, over 972834.17 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 08:20:06,078 INFO [train.py:715] (6/8) Epoch 15, batch 7350, loss[loss=0.1415, simple_loss=0.226, pruned_loss=0.02854, over 4921.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03048, over 971998.12 frames.], batch size: 39, lr: 1.48e-04 +2022-05-08 08:20:45,519 INFO [train.py:715] (6/8) Epoch 15, batch 7400, loss[loss=0.1281, simple_loss=0.2092, pruned_loss=0.02355, over 4821.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.03038, over 972296.72 frames.], batch size: 27, lr: 1.48e-04 +2022-05-08 08:21:25,989 INFO [train.py:715] (6/8) Epoch 15, batch 7450, loss[loss=0.1449, simple_loss=0.2109, pruned_loss=0.03943, over 4787.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03003, over 973140.42 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:22:06,377 INFO [train.py:715] (6/8) Epoch 15, batch 7500, loss[loss=0.1181, simple_loss=0.1911, pruned_loss=0.0226, over 4988.00 frames.], tot_loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.02983, over 973442.41 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 08:22:46,680 INFO [train.py:715] (6/8) Epoch 15, batch 7550, loss[loss=0.1264, simple_loss=0.2106, pruned_loss=0.02103, over 4929.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02959, over 973928.19 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 08:23:25,908 INFO [train.py:715] (6/8) Epoch 15, batch 7600, loss[loss=0.1375, simple_loss=0.2085, pruned_loss=0.03319, over 4933.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02991, over 973456.78 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:24:05,912 INFO [train.py:715] (6/8) Epoch 15, batch 7650, loss[loss=0.1489, simple_loss=0.2142, pruned_loss=0.0418, over 4863.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03012, over 973840.51 frames.], batch size: 13, lr: 1.48e-04 +2022-05-08 08:24:45,958 INFO [train.py:715] (6/8) Epoch 15, batch 7700, loss[loss=0.1354, simple_loss=0.2109, pruned_loss=0.02997, over 4876.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03053, over 973350.63 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:25:24,893 INFO [train.py:715] (6/8) Epoch 15, batch 7750, loss[loss=0.1474, simple_loss=0.2232, pruned_loss=0.03584, over 4818.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03073, over 973550.34 frames.], batch size: 27, lr: 1.48e-04 +2022-05-08 08:26:04,532 INFO [train.py:715] (6/8) Epoch 15, batch 7800, loss[loss=0.09962, simple_loss=0.1659, pruned_loss=0.01667, over 4748.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03136, over 972934.96 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 08:26:43,771 INFO [train.py:715] (6/8) Epoch 15, batch 7850, loss[loss=0.1321, simple_loss=0.2033, pruned_loss=0.03049, over 4869.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03137, over 972339.47 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:27:23,766 INFO [train.py:715] (6/8) Epoch 15, batch 7900, loss[loss=0.1877, simple_loss=0.2507, pruned_loss=0.06236, over 
4913.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2106, pruned_loss=0.03136, over 972219.32 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:28:01,915 INFO [train.py:715] (6/8) Epoch 15, batch 7950, loss[loss=0.1407, simple_loss=0.2146, pruned_loss=0.03341, over 4803.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2105, pruned_loss=0.03141, over 971932.37 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:28:41,234 INFO [train.py:715] (6/8) Epoch 15, batch 8000, loss[loss=0.1403, simple_loss=0.2109, pruned_loss=0.03483, over 4741.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2096, pruned_loss=0.03067, over 971406.55 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:29:20,799 INFO [train.py:715] (6/8) Epoch 15, batch 8050, loss[loss=0.1394, simple_loss=0.2268, pruned_loss=0.026, over 4972.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2094, pruned_loss=0.0305, over 971093.23 frames.], batch size: 28, lr: 1.48e-04 +2022-05-08 08:29:59,766 INFO [train.py:715] (6/8) Epoch 15, batch 8100, loss[loss=0.1264, simple_loss=0.2023, pruned_loss=0.0252, over 4817.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.03026, over 971110.92 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 08:30:38,764 INFO [train.py:715] (6/8) Epoch 15, batch 8150, loss[loss=0.1311, simple_loss=0.1988, pruned_loss=0.03168, over 4868.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03047, over 970533.95 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 08:31:18,843 INFO [train.py:715] (6/8) Epoch 15, batch 8200, loss[loss=0.1405, simple_loss=0.2086, pruned_loss=0.03616, over 4953.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03073, over 970898.76 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:31:57,573 INFO [train.py:715] (6/8) Epoch 15, batch 8250, loss[loss=0.106, simple_loss=0.1854, pruned_loss=0.0133, over 4913.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03017, over 971978.36 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:32:36,546 INFO [train.py:715] (6/8) Epoch 15, batch 8300, loss[loss=0.1186, simple_loss=0.2025, pruned_loss=0.01736, over 4991.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03064, over 971145.44 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:33:15,772 INFO [train.py:715] (6/8) Epoch 15, batch 8350, loss[loss=0.1126, simple_loss=0.1907, pruned_loss=0.01722, over 4827.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03048, over 970460.22 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:33:55,967 INFO [train.py:715] (6/8) Epoch 15, batch 8400, loss[loss=0.1406, simple_loss=0.2106, pruned_loss=0.0353, over 4940.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03048, over 971097.48 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 08:34:35,506 INFO [train.py:715] (6/8) Epoch 15, batch 8450, loss[loss=0.1302, simple_loss=0.2137, pruned_loss=0.02336, over 4938.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.0308, over 971147.48 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:35:14,651 INFO [train.py:715] (6/8) Epoch 15, batch 8500, loss[loss=0.1165, simple_loss=0.2034, pruned_loss=0.01475, over 4785.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2074, pruned_loss=0.03054, over 970871.29 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:35:54,836 INFO [train.py:715] (6/8) Epoch 15, batch 8550, loss[loss=0.1258, simple_loss=0.1913, pruned_loss=0.03013, over 4928.00 frames.], 
tot_loss[loss=0.1346, simple_loss=0.2078, pruned_loss=0.03074, over 971648.15 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 08:36:33,509 INFO [train.py:715] (6/8) Epoch 15, batch 8600, loss[loss=0.1291, simple_loss=0.1954, pruned_loss=0.03136, over 4822.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2079, pruned_loss=0.03096, over 971500.46 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:37:12,322 INFO [train.py:715] (6/8) Epoch 15, batch 8650, loss[loss=0.1733, simple_loss=0.2435, pruned_loss=0.05151, over 4982.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2084, pruned_loss=0.03106, over 971738.98 frames.], batch size: 26, lr: 1.48e-04 +2022-05-08 08:37:51,174 INFO [train.py:715] (6/8) Epoch 15, batch 8700, loss[loss=0.1569, simple_loss=0.243, pruned_loss=0.03544, over 4963.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03046, over 973008.93 frames.], batch size: 24, lr: 1.48e-04 +2022-05-08 08:38:30,424 INFO [train.py:715] (6/8) Epoch 15, batch 8750, loss[loss=0.1246, simple_loss=0.2032, pruned_loss=0.02295, over 4763.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.0302, over 972523.58 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:39:08,913 INFO [train.py:715] (6/8) Epoch 15, batch 8800, loss[loss=0.1557, simple_loss=0.2296, pruned_loss=0.04088, over 4951.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.03078, over 973546.30 frames.], batch size: 24, lr: 1.48e-04 +2022-05-08 08:39:47,402 INFO [train.py:715] (6/8) Epoch 15, batch 8850, loss[loss=0.1016, simple_loss=0.1751, pruned_loss=0.014, over 4816.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.0308, over 972636.69 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 08:40:26,825 INFO [train.py:715] (6/8) Epoch 15, batch 8900, loss[loss=0.1591, simple_loss=0.2386, pruned_loss=0.03983, over 4956.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03045, over 972230.46 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:41:06,361 INFO [train.py:715] (6/8) Epoch 15, batch 8950, loss[loss=0.1423, simple_loss=0.2088, pruned_loss=0.03792, over 4790.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.0304, over 972112.10 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:41:45,474 INFO [train.py:715] (6/8) Epoch 15, batch 9000, loss[loss=0.1408, simple_loss=0.224, pruned_loss=0.02882, over 4977.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03094, over 972617.87 frames.], batch size: 28, lr: 1.48e-04 +2022-05-08 08:41:45,474 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 08:42:05,028 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.1051, simple_loss=0.1887, pruned_loss=0.01074, over 914524.00 frames. 
+2022-05-08 08:42:44,048 INFO [train.py:715] (6/8) Epoch 15, batch 9050, loss[loss=0.1471, simple_loss=0.2291, pruned_loss=0.0325, over 4921.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03024, over 972108.26 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 08:43:23,566 INFO [train.py:715] (6/8) Epoch 15, batch 9100, loss[loss=0.1284, simple_loss=0.2085, pruned_loss=0.02411, over 4756.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03007, over 971582.63 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:44:03,261 INFO [train.py:715] (6/8) Epoch 15, batch 9150, loss[loss=0.1605, simple_loss=0.2331, pruned_loss=0.04398, over 4879.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03022, over 972399.69 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:44:42,060 INFO [train.py:715] (6/8) Epoch 15, batch 9200, loss[loss=0.1403, simple_loss=0.2088, pruned_loss=0.03592, over 4953.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02976, over 972679.69 frames.], batch size: 24, lr: 1.48e-04 +2022-05-08 08:45:21,337 INFO [train.py:715] (6/8) Epoch 15, batch 9250, loss[loss=0.1568, simple_loss=0.224, pruned_loss=0.04485, over 4967.00 frames.], tot_loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.02981, over 972571.77 frames.], batch size: 33, lr: 1.48e-04 +2022-05-08 08:46:01,215 INFO [train.py:715] (6/8) Epoch 15, batch 9300, loss[loss=0.1425, simple_loss=0.2111, pruned_loss=0.03699, over 4771.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03038, over 972701.07 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 08:46:41,140 INFO [train.py:715] (6/8) Epoch 15, batch 9350, loss[loss=0.1593, simple_loss=0.2309, pruned_loss=0.04382, over 4781.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03037, over 972336.85 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:47:19,985 INFO [train.py:715] (6/8) Epoch 15, batch 9400, loss[loss=0.1192, simple_loss=0.195, pruned_loss=0.02173, over 4838.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02989, over 972938.96 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:47:59,299 INFO [train.py:715] (6/8) Epoch 15, batch 9450, loss[loss=0.1444, simple_loss=0.2196, pruned_loss=0.03457, over 4823.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02995, over 972946.46 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:48:38,583 INFO [train.py:715] (6/8) Epoch 15, batch 9500, loss[loss=0.1296, simple_loss=0.2065, pruned_loss=0.02636, over 4949.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03033, over 972079.47 frames.], batch size: 24, lr: 1.48e-04 +2022-05-08 08:49:16,970 INFO [train.py:715] (6/8) Epoch 15, batch 9550, loss[loss=0.157, simple_loss=0.2309, pruned_loss=0.04151, over 4919.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03071, over 972174.69 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:49:56,304 INFO [train.py:715] (6/8) Epoch 15, batch 9600, loss[loss=0.1201, simple_loss=0.1938, pruned_loss=0.0232, over 4689.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03072, over 972196.48 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:50:35,891 INFO [train.py:715] (6/8) Epoch 15, batch 9650, loss[loss=0.1281, simple_loss=0.2036, pruned_loss=0.02629, over 4793.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2083, pruned_loss=0.03079, over 972237.92 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:51:15,437 
INFO [train.py:715] (6/8) Epoch 15, batch 9700, loss[loss=0.1381, simple_loss=0.2172, pruned_loss=0.02949, over 4905.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2074, pruned_loss=0.03045, over 972359.76 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 08:51:53,982 INFO [train.py:715] (6/8) Epoch 15, batch 9750, loss[loss=0.1232, simple_loss=0.2074, pruned_loss=0.01947, over 4883.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2077, pruned_loss=0.03059, over 972720.05 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:52:33,224 INFO [train.py:715] (6/8) Epoch 15, batch 9800, loss[loss=0.1326, simple_loss=0.2073, pruned_loss=0.02892, over 4885.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03089, over 972698.51 frames.], batch size: 22, lr: 1.48e-04 +2022-05-08 08:53:12,403 INFO [train.py:715] (6/8) Epoch 15, batch 9850, loss[loss=0.1093, simple_loss=0.1749, pruned_loss=0.02185, over 4766.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03102, over 972670.99 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:53:50,990 INFO [train.py:715] (6/8) Epoch 15, batch 9900, loss[loss=0.1384, simple_loss=0.1985, pruned_loss=0.0392, over 4908.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03111, over 972569.03 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:54:30,412 INFO [train.py:715] (6/8) Epoch 15, batch 9950, loss[loss=0.1371, simple_loss=0.2049, pruned_loss=0.03471, over 4704.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03109, over 972664.22 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:55:09,381 INFO [train.py:715] (6/8) Epoch 15, batch 10000, loss[loss=0.1383, simple_loss=0.212, pruned_loss=0.03228, over 4761.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03057, over 972872.16 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:55:48,594 INFO [train.py:715] (6/8) Epoch 15, batch 10050, loss[loss=0.13, simple_loss=0.2035, pruned_loss=0.02831, over 4970.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03035, over 974046.13 frames.], batch size: 35, lr: 1.48e-04 +2022-05-08 08:56:26,955 INFO [train.py:715] (6/8) Epoch 15, batch 10100, loss[loss=0.1245, simple_loss=0.2, pruned_loss=0.02447, over 4894.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2078, pruned_loss=0.03029, over 974251.39 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:57:05,756 INFO [train.py:715] (6/8) Epoch 15, batch 10150, loss[loss=0.1308, simple_loss=0.2113, pruned_loss=0.02514, over 4775.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03047, over 973815.77 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:57:45,595 INFO [train.py:715] (6/8) Epoch 15, batch 10200, loss[loss=0.1617, simple_loss=0.235, pruned_loss=0.04416, over 4901.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03048, over 972934.11 frames.], batch size: 39, lr: 1.48e-04 +2022-05-08 08:58:23,935 INFO [train.py:715] (6/8) Epoch 15, batch 10250, loss[loss=0.1921, simple_loss=0.2559, pruned_loss=0.06416, over 4770.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03072, over 972498.33 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:59:03,231 INFO [train.py:715] (6/8) Epoch 15, batch 10300, loss[loss=0.1193, simple_loss=0.2011, pruned_loss=0.01873, over 4855.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03056, over 972670.99 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:59:42,673 INFO [train.py:715] 
(6/8) Epoch 15, batch 10350, loss[loss=0.1726, simple_loss=0.2342, pruned_loss=0.05549, over 4692.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03034, over 972653.75 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 09:00:21,800 INFO [train.py:715] (6/8) Epoch 15, batch 10400, loss[loss=0.1255, simple_loss=0.2093, pruned_loss=0.02084, over 4889.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03069, over 972627.08 frames.], batch size: 22, lr: 1.48e-04 +2022-05-08 09:00:59,815 INFO [train.py:715] (6/8) Epoch 15, batch 10450, loss[loss=0.1248, simple_loss=0.1946, pruned_loss=0.02753, over 4972.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03069, over 972232.03 frames.], batch size: 35, lr: 1.48e-04 +2022-05-08 09:01:38,799 INFO [train.py:715] (6/8) Epoch 15, batch 10500, loss[loss=0.1337, simple_loss=0.2085, pruned_loss=0.02938, over 4984.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03041, over 973361.34 frames.], batch size: 28, lr: 1.48e-04 +2022-05-08 09:02:18,511 INFO [train.py:715] (6/8) Epoch 15, batch 10550, loss[loss=0.1317, simple_loss=0.1956, pruned_loss=0.03389, over 4959.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03069, over 973935.95 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 09:02:56,676 INFO [train.py:715] (6/8) Epoch 15, batch 10600, loss[loss=0.1151, simple_loss=0.1827, pruned_loss=0.02379, over 4738.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03039, over 973389.46 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 09:03:35,331 INFO [train.py:715] (6/8) Epoch 15, batch 10650, loss[loss=0.1311, simple_loss=0.2151, pruned_loss=0.02356, over 4975.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03016, over 973464.60 frames.], batch size: 24, lr: 1.48e-04 +2022-05-08 09:04:14,413 INFO [train.py:715] (6/8) Epoch 15, batch 10700, loss[loss=0.1715, simple_loss=0.2411, pruned_loss=0.05099, over 4704.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02988, over 972373.89 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 09:04:53,622 INFO [train.py:715] (6/8) Epoch 15, batch 10750, loss[loss=0.1187, simple_loss=0.2013, pruned_loss=0.0181, over 4813.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02985, over 972247.94 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 09:05:31,570 INFO [train.py:715] (6/8) Epoch 15, batch 10800, loss[loss=0.1679, simple_loss=0.2452, pruned_loss=0.04535, over 4893.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03028, over 971891.90 frames.], batch size: 39, lr: 1.47e-04 +2022-05-08 09:06:11,097 INFO [train.py:715] (6/8) Epoch 15, batch 10850, loss[loss=0.1431, simple_loss=0.2149, pruned_loss=0.03561, over 4981.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03046, over 971872.35 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:06:50,381 INFO [train.py:715] (6/8) Epoch 15, batch 10900, loss[loss=0.1313, simple_loss=0.2041, pruned_loss=0.02932, over 4959.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.0302, over 972418.97 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 09:07:28,750 INFO [train.py:715] (6/8) Epoch 15, batch 10950, loss[loss=0.1378, simple_loss=0.2123, pruned_loss=0.03163, over 4814.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02975, over 972647.67 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 09:08:06,751 INFO [train.py:715] (6/8) 
Epoch 15, batch 11000, loss[loss=0.1298, simple_loss=0.2034, pruned_loss=0.02813, over 4907.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02911, over 972958.94 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:08:45,833 INFO [train.py:715] (6/8) Epoch 15, batch 11050, loss[loss=0.1549, simple_loss=0.2208, pruned_loss=0.04446, over 4859.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02964, over 972980.37 frames.], batch size: 30, lr: 1.47e-04 +2022-05-08 09:09:25,313 INFO [train.py:715] (6/8) Epoch 15, batch 11100, loss[loss=0.1136, simple_loss=0.184, pruned_loss=0.02154, over 4831.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02921, over 973523.54 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 09:10:03,218 INFO [train.py:715] (6/8) Epoch 15, batch 11150, loss[loss=0.1199, simple_loss=0.1899, pruned_loss=0.02496, over 4789.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02914, over 972174.53 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:10:41,864 INFO [train.py:715] (6/8) Epoch 15, batch 11200, loss[loss=0.1631, simple_loss=0.233, pruned_loss=0.04662, over 4710.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02975, over 972105.65 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:11:20,807 INFO [train.py:715] (6/8) Epoch 15, batch 11250, loss[loss=0.1248, simple_loss=0.2007, pruned_loss=0.02449, over 4743.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2076, pruned_loss=0.02915, over 972383.04 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:11:59,325 INFO [train.py:715] (6/8) Epoch 15, batch 11300, loss[loss=0.1236, simple_loss=0.2078, pruned_loss=0.01966, over 4788.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2072, pruned_loss=0.02881, over 972882.69 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:12:37,830 INFO [train.py:715] (6/8) Epoch 15, batch 11350, loss[loss=0.1033, simple_loss=0.1664, pruned_loss=0.02012, over 4810.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02915, over 971692.76 frames.], batch size: 12, lr: 1.47e-04 +2022-05-08 09:13:17,185 INFO [train.py:715] (6/8) Epoch 15, batch 11400, loss[loss=0.1301, simple_loss=0.2028, pruned_loss=0.02866, over 4885.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02933, over 971546.70 frames.], batch size: 32, lr: 1.47e-04 +2022-05-08 09:13:55,476 INFO [train.py:715] (6/8) Epoch 15, batch 11450, loss[loss=0.1336, simple_loss=0.2015, pruned_loss=0.03291, over 4843.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03025, over 972584.63 frames.], batch size: 32, lr: 1.47e-04 +2022-05-08 09:14:34,170 INFO [train.py:715] (6/8) Epoch 15, batch 11500, loss[loss=0.1365, simple_loss=0.2036, pruned_loss=0.0347, over 4863.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2072, pruned_loss=0.02983, over 972241.02 frames.], batch size: 32, lr: 1.47e-04 +2022-05-08 09:15:13,130 INFO [train.py:715] (6/8) Epoch 15, batch 11550, loss[loss=0.1384, simple_loss=0.2179, pruned_loss=0.02942, over 4892.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02987, over 972646.15 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 09:15:52,412 INFO [train.py:715] (6/8) Epoch 15, batch 11600, loss[loss=0.1387, simple_loss=0.2129, pruned_loss=0.03223, over 4962.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03017, over 972851.02 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 09:16:30,746 INFO [train.py:715] (6/8) Epoch 15, 
batch 11650, loss[loss=0.1472, simple_loss=0.2168, pruned_loss=0.03879, over 4696.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.0303, over 972178.05 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:17:09,221 INFO [train.py:715] (6/8) Epoch 15, batch 11700, loss[loss=0.1274, simple_loss=0.1992, pruned_loss=0.02777, over 4684.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.03059, over 970181.56 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:17:48,438 INFO [train.py:715] (6/8) Epoch 15, batch 11750, loss[loss=0.118, simple_loss=0.1953, pruned_loss=0.02036, over 4803.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.03047, over 970248.86 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 09:18:27,441 INFO [train.py:715] (6/8) Epoch 15, batch 11800, loss[loss=0.1348, simple_loss=0.2078, pruned_loss=0.03083, over 4942.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2088, pruned_loss=0.031, over 971275.29 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 09:19:05,508 INFO [train.py:715] (6/8) Epoch 15, batch 11850, loss[loss=0.1391, simple_loss=0.2129, pruned_loss=0.0326, over 4945.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03078, over 971747.53 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 09:19:45,033 INFO [train.py:715] (6/8) Epoch 15, batch 11900, loss[loss=0.138, simple_loss=0.2076, pruned_loss=0.03416, over 4822.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03036, over 970916.66 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:20:25,113 INFO [train.py:715] (6/8) Epoch 15, batch 11950, loss[loss=0.1187, simple_loss=0.1994, pruned_loss=0.01895, over 4795.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03011, over 971740.29 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 09:21:03,703 INFO [train.py:715] (6/8) Epoch 15, batch 12000, loss[loss=0.111, simple_loss=0.1878, pruned_loss=0.01711, over 4893.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03035, over 971686.40 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 09:21:03,704 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 09:21:20,395 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.105, simple_loss=0.1887, pruned_loss=0.01066, over 914524.00 frames. 
+2022-05-08 09:21:59,106 INFO [train.py:715] (6/8) Epoch 15, batch 12050, loss[loss=0.154, simple_loss=0.2266, pruned_loss=0.04068, over 4912.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03017, over 971526.62 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:22:38,254 INFO [train.py:715] (6/8) Epoch 15, batch 12100, loss[loss=0.1326, simple_loss=0.2107, pruned_loss=0.02726, over 4949.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03016, over 972534.52 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 09:23:17,962 INFO [train.py:715] (6/8) Epoch 15, batch 12150, loss[loss=0.1595, simple_loss=0.2306, pruned_loss=0.04415, over 4836.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03018, over 972269.56 frames.], batch size: 30, lr: 1.47e-04 +2022-05-08 09:23:56,410 INFO [train.py:715] (6/8) Epoch 15, batch 12200, loss[loss=0.1308, simple_loss=0.2137, pruned_loss=0.02391, over 4894.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2083, pruned_loss=0.02982, over 972197.00 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 09:24:35,176 INFO [train.py:715] (6/8) Epoch 15, batch 12250, loss[loss=0.145, simple_loss=0.2035, pruned_loss=0.04324, over 4976.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2096, pruned_loss=0.03069, over 972479.87 frames.], batch size: 31, lr: 1.47e-04 +2022-05-08 09:25:14,190 INFO [train.py:715] (6/8) Epoch 15, batch 12300, loss[loss=0.142, simple_loss=0.2164, pruned_loss=0.03382, over 4759.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2093, pruned_loss=0.03063, over 972200.68 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:25:54,055 INFO [train.py:715] (6/8) Epoch 15, batch 12350, loss[loss=0.1257, simple_loss=0.2064, pruned_loss=0.02247, over 4799.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03045, over 972841.99 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 09:26:32,317 INFO [train.py:715] (6/8) Epoch 15, batch 12400, loss[loss=0.1876, simple_loss=0.2552, pruned_loss=0.05998, over 4956.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2098, pruned_loss=0.03053, over 972740.46 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 09:27:11,076 INFO [train.py:715] (6/8) Epoch 15, batch 12450, loss[loss=0.1518, simple_loss=0.2206, pruned_loss=0.04149, over 4965.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.0306, over 972386.04 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 09:27:51,091 INFO [train.py:715] (6/8) Epoch 15, batch 12500, loss[loss=0.1515, simple_loss=0.2216, pruned_loss=0.04073, over 4827.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03083, over 972362.97 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:28:29,281 INFO [train.py:715] (6/8) Epoch 15, batch 12550, loss[loss=0.1179, simple_loss=0.1908, pruned_loss=0.02247, over 4928.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03091, over 973206.51 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:29:08,341 INFO [train.py:715] (6/8) Epoch 15, batch 12600, loss[loss=0.1111, simple_loss=0.1887, pruned_loss=0.01677, over 4930.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03062, over 973601.20 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 09:29:46,867 INFO [train.py:715] (6/8) Epoch 15, batch 12650, loss[loss=0.1383, simple_loss=0.2212, pruned_loss=0.02774, over 4847.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03015, over 974107.47 frames.], batch size: 20, lr: 1.47e-04 
+2022-05-08 09:30:26,459 INFO [train.py:715] (6/8) Epoch 15, batch 12700, loss[loss=0.1053, simple_loss=0.1862, pruned_loss=0.01215, over 4984.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02989, over 974272.35 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 09:31:04,796 INFO [train.py:715] (6/8) Epoch 15, batch 12750, loss[loss=0.1358, simple_loss=0.2221, pruned_loss=0.02474, over 4888.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2089, pruned_loss=0.02984, over 973652.32 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:31:43,628 INFO [train.py:715] (6/8) Epoch 15, batch 12800, loss[loss=0.1658, simple_loss=0.2469, pruned_loss=0.04237, over 4875.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02992, over 973370.21 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 09:32:23,161 INFO [train.py:715] (6/8) Epoch 15, batch 12850, loss[loss=0.1099, simple_loss=0.1797, pruned_loss=0.02007, over 4667.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02968, over 973213.43 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 09:33:01,793 INFO [train.py:715] (6/8) Epoch 15, batch 12900, loss[loss=0.1124, simple_loss=0.1868, pruned_loss=0.01898, over 4968.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02913, over 973449.53 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 09:33:40,808 INFO [train.py:715] (6/8) Epoch 15, batch 12950, loss[loss=0.1455, simple_loss=0.2105, pruned_loss=0.04023, over 4787.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02925, over 973346.15 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:34:20,118 INFO [train.py:715] (6/8) Epoch 15, batch 13000, loss[loss=0.1195, simple_loss=0.1946, pruned_loss=0.02217, over 4970.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2083, pruned_loss=0.02963, over 972977.20 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 09:34:59,664 INFO [train.py:715] (6/8) Epoch 15, batch 13050, loss[loss=0.1617, simple_loss=0.2257, pruned_loss=0.04888, over 4878.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03031, over 973187.15 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:35:38,140 INFO [train.py:715] (6/8) Epoch 15, batch 13100, loss[loss=0.1434, simple_loss=0.213, pruned_loss=0.03692, over 4847.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.0301, over 972590.94 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:36:17,620 INFO [train.py:715] (6/8) Epoch 15, batch 13150, loss[loss=0.1246, simple_loss=0.1954, pruned_loss=0.02687, over 4817.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03008, over 972130.84 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 09:36:57,400 INFO [train.py:715] (6/8) Epoch 15, batch 13200, loss[loss=0.1178, simple_loss=0.1952, pruned_loss=0.02017, over 4928.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02967, over 972505.89 frames.], batch size: 29, lr: 1.47e-04 +2022-05-08 09:37:35,197 INFO [train.py:715] (6/8) Epoch 15, batch 13250, loss[loss=0.1101, simple_loss=0.1842, pruned_loss=0.01802, over 4780.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03016, over 972741.11 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:38:14,326 INFO [train.py:715] (6/8) Epoch 15, batch 13300, loss[loss=0.1551, simple_loss=0.2314, pruned_loss=0.03944, over 4927.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2089, pruned_loss=0.03024, over 973112.67 frames.], batch size: 18, lr: 1.47e-04 
+2022-05-08 09:38:53,944 INFO [train.py:715] (6/8) Epoch 15, batch 13350, loss[loss=0.1705, simple_loss=0.2367, pruned_loss=0.05213, over 4774.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03041, over 973244.75 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:39:34,574 INFO [train.py:715] (6/8) Epoch 15, batch 13400, loss[loss=0.1286, simple_loss=0.2109, pruned_loss=0.02311, over 4922.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.03083, over 972875.73 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 09:40:13,178 INFO [train.py:715] (6/8) Epoch 15, batch 13450, loss[loss=0.1545, simple_loss=0.2259, pruned_loss=0.04152, over 4908.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03075, over 972810.86 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:40:51,763 INFO [train.py:715] (6/8) Epoch 15, batch 13500, loss[loss=0.1301, simple_loss=0.2018, pruned_loss=0.02924, over 4842.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03099, over 972327.57 frames.], batch size: 30, lr: 1.47e-04 +2022-05-08 09:41:31,299 INFO [train.py:715] (6/8) Epoch 15, batch 13550, loss[loss=0.1377, simple_loss=0.2114, pruned_loss=0.03198, over 4700.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03087, over 972001.11 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:42:09,578 INFO [train.py:715] (6/8) Epoch 15, batch 13600, loss[loss=0.1389, simple_loss=0.2094, pruned_loss=0.0342, over 4776.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03102, over 971716.19 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:42:48,559 INFO [train.py:715] (6/8) Epoch 15, batch 13650, loss[loss=0.1324, simple_loss=0.2088, pruned_loss=0.02794, over 4908.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03066, over 971950.92 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:43:27,803 INFO [train.py:715] (6/8) Epoch 15, batch 13700, loss[loss=0.1314, simple_loss=0.1948, pruned_loss=0.03398, over 4745.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03027, over 971299.72 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:44:06,255 INFO [train.py:715] (6/8) Epoch 15, batch 13750, loss[loss=0.1492, simple_loss=0.2261, pruned_loss=0.03614, over 4978.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03017, over 971843.48 frames.], batch size: 39, lr: 1.47e-04 +2022-05-08 09:44:44,977 INFO [train.py:715] (6/8) Epoch 15, batch 13800, loss[loss=0.1243, simple_loss=0.1964, pruned_loss=0.0261, over 4894.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03033, over 972251.96 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:45:23,192 INFO [train.py:715] (6/8) Epoch 15, batch 13850, loss[loss=0.1146, simple_loss=0.1904, pruned_loss=0.01945, over 4738.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03033, over 972247.33 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:46:05,196 INFO [train.py:715] (6/8) Epoch 15, batch 13900, loss[loss=0.1165, simple_loss=0.1912, pruned_loss=0.02092, over 4935.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2078, pruned_loss=0.03046, over 972069.93 frames.], batch size: 29, lr: 1.47e-04 +2022-05-08 09:46:43,307 INFO [train.py:715] (6/8) Epoch 15, batch 13950, loss[loss=0.1338, simple_loss=0.2129, pruned_loss=0.02739, over 4969.00 frames.], tot_loss[loss=0.1345, simple_loss=0.208, pruned_loss=0.03052, over 971766.04 frames.], batch size: 15, lr: 1.47e-04 
+2022-05-08 09:47:21,600 INFO [train.py:715] (6/8) Epoch 15, batch 14000, loss[loss=0.09819, simple_loss=0.1761, pruned_loss=0.01015, over 4941.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2084, pruned_loss=0.03096, over 972038.46 frames.], batch size: 29, lr: 1.47e-04 +2022-05-08 09:48:00,878 INFO [train.py:715] (6/8) Epoch 15, batch 14050, loss[loss=0.1416, simple_loss=0.2075, pruned_loss=0.03782, over 4764.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2072, pruned_loss=0.03062, over 971783.83 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:48:38,844 INFO [train.py:715] (6/8) Epoch 15, batch 14100, loss[loss=0.1138, simple_loss=0.1881, pruned_loss=0.01975, over 4727.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2068, pruned_loss=0.03039, over 971656.05 frames.], batch size: 12, lr: 1.47e-04 +2022-05-08 09:49:17,892 INFO [train.py:715] (6/8) Epoch 15, batch 14150, loss[loss=0.1107, simple_loss=0.191, pruned_loss=0.01523, over 4900.00 frames.], tot_loss[loss=0.134, simple_loss=0.2072, pruned_loss=0.03039, over 972213.50 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:49:56,535 INFO [train.py:715] (6/8) Epoch 15, batch 14200, loss[loss=0.1098, simple_loss=0.1767, pruned_loss=0.02145, over 4826.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03016, over 972548.59 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 09:50:35,493 INFO [train.py:715] (6/8) Epoch 15, batch 14250, loss[loss=0.1086, simple_loss=0.1872, pruned_loss=0.01507, over 4971.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03029, over 972993.81 frames.], batch size: 28, lr: 1.47e-04 +2022-05-08 09:51:13,318 INFO [train.py:715] (6/8) Epoch 15, batch 14300, loss[loss=0.1087, simple_loss=0.1736, pruned_loss=0.0219, over 4647.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03058, over 973489.22 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 09:51:51,760 INFO [train.py:715] (6/8) Epoch 15, batch 14350, loss[loss=0.1682, simple_loss=0.2412, pruned_loss=0.04761, over 4817.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03039, over 973373.55 frames.], batch size: 14, lr: 1.47e-04 +2022-05-08 09:52:30,860 INFO [train.py:715] (6/8) Epoch 15, batch 14400, loss[loss=0.1273, simple_loss=0.1957, pruned_loss=0.02944, over 4835.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02951, over 972686.80 frames.], batch size: 20, lr: 1.47e-04 +2022-05-08 09:53:08,603 INFO [train.py:715] (6/8) Epoch 15, batch 14450, loss[loss=0.1371, simple_loss=0.2083, pruned_loss=0.03296, over 4893.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02993, over 973274.20 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:53:47,585 INFO [train.py:715] (6/8) Epoch 15, batch 14500, loss[loss=0.1568, simple_loss=0.2302, pruned_loss=0.04175, over 4784.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.03019, over 972927.60 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:54:25,848 INFO [train.py:715] (6/8) Epoch 15, batch 14550, loss[loss=0.1427, simple_loss=0.2211, pruned_loss=0.03213, over 4883.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03066, over 972244.46 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 09:55:04,845 INFO [train.py:715] (6/8) Epoch 15, batch 14600, loss[loss=0.1226, simple_loss=0.1886, pruned_loss=0.02832, over 4906.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03068, over 971872.07 frames.], batch size: 18, lr: 1.47e-04 
+2022-05-08 09:55:42,672 INFO [train.py:715] (6/8) Epoch 15, batch 14650, loss[loss=0.1351, simple_loss=0.1985, pruned_loss=0.03586, over 4644.00 frames.], tot_loss[loss=0.135, simple_loss=0.2084, pruned_loss=0.03077, over 971424.92 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 09:56:20,650 INFO [train.py:715] (6/8) Epoch 15, batch 14700, loss[loss=0.1343, simple_loss=0.2106, pruned_loss=0.02899, over 4860.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03102, over 971199.68 frames.], batch size: 38, lr: 1.47e-04 +2022-05-08 09:56:59,716 INFO [train.py:715] (6/8) Epoch 15, batch 14750, loss[loss=0.1224, simple_loss=0.2007, pruned_loss=0.02201, over 4971.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03096, over 971489.25 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 09:57:37,353 INFO [train.py:715] (6/8) Epoch 15, batch 14800, loss[loss=0.1337, simple_loss=0.2083, pruned_loss=0.02958, over 4962.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2089, pruned_loss=0.031, over 971937.94 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 09:58:16,193 INFO [train.py:715] (6/8) Epoch 15, batch 14850, loss[loss=0.154, simple_loss=0.2157, pruned_loss=0.04619, over 4821.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2087, pruned_loss=0.03115, over 972657.34 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:58:55,096 INFO [train.py:715] (6/8) Epoch 15, batch 14900, loss[loss=0.1287, simple_loss=0.2018, pruned_loss=0.02777, over 4701.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2087, pruned_loss=0.0311, over 972324.92 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:59:33,266 INFO [train.py:715] (6/8) Epoch 15, batch 14950, loss[loss=0.1456, simple_loss=0.2086, pruned_loss=0.04128, over 4927.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03078, over 973034.83 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:00:11,562 INFO [train.py:715] (6/8) Epoch 15, batch 15000, loss[loss=0.1267, simple_loss=0.1968, pruned_loss=0.02825, over 4791.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03017, over 972183.37 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 10:00:11,563 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 10:00:26,345 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.1051, simple_loss=0.1887, pruned_loss=0.01077, over 914524.00 frames. 
+2022-05-08 10:01:05,813 INFO [train.py:715] (6/8) Epoch 15, batch 15050, loss[loss=0.122, simple_loss=0.1955, pruned_loss=0.02427, over 4877.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03012, over 972420.26 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 10:01:43,978 INFO [train.py:715] (6/8) Epoch 15, batch 15100, loss[loss=0.1343, simple_loss=0.2036, pruned_loss=0.03245, over 4921.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03003, over 972137.56 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:02:23,329 INFO [train.py:715] (6/8) Epoch 15, batch 15150, loss[loss=0.1371, simple_loss=0.207, pruned_loss=0.03357, over 4907.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02998, over 972946.31 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:03:01,051 INFO [train.py:715] (6/8) Epoch 15, batch 15200, loss[loss=0.1308, simple_loss=0.2039, pruned_loss=0.02885, over 4817.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02964, over 972319.46 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 10:03:39,351 INFO [train.py:715] (6/8) Epoch 15, batch 15250, loss[loss=0.1142, simple_loss=0.191, pruned_loss=0.01867, over 4940.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2081, pruned_loss=0.02936, over 973194.50 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:04:18,901 INFO [train.py:715] (6/8) Epoch 15, batch 15300, loss[loss=0.142, simple_loss=0.2237, pruned_loss=0.03015, over 4965.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2102, pruned_loss=0.02986, over 973787.87 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:04:56,983 INFO [train.py:715] (6/8) Epoch 15, batch 15350, loss[loss=0.123, simple_loss=0.1926, pruned_loss=0.02676, over 4710.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2098, pruned_loss=0.02964, over 972884.12 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:05:35,892 INFO [train.py:715] (6/8) Epoch 15, batch 15400, loss[loss=0.1518, simple_loss=0.2264, pruned_loss=0.03859, over 4840.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2092, pruned_loss=0.02981, over 972252.77 frames.], batch size: 30, lr: 1.47e-04 +2022-05-08 10:06:13,984 INFO [train.py:715] (6/8) Epoch 15, batch 15450, loss[loss=0.1392, simple_loss=0.2083, pruned_loss=0.03505, over 4811.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02963, over 972072.06 frames.], batch size: 12, lr: 1.47e-04 +2022-05-08 10:06:52,880 INFO [train.py:715] (6/8) Epoch 15, batch 15500, loss[loss=0.1373, simple_loss=0.2117, pruned_loss=0.03146, over 4818.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02998, over 971338.86 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 10:07:31,440 INFO [train.py:715] (6/8) Epoch 15, batch 15550, loss[loss=0.1528, simple_loss=0.2341, pruned_loss=0.03573, over 4782.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03045, over 971405.79 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:08:10,327 INFO [train.py:715] (6/8) Epoch 15, batch 15600, loss[loss=0.1366, simple_loss=0.2127, pruned_loss=0.03029, over 4898.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03012, over 971142.24 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 10:08:49,134 INFO [train.py:715] (6/8) Epoch 15, batch 15650, loss[loss=0.1108, simple_loss=0.1878, pruned_loss=0.01694, over 4793.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03024, over 970681.09 frames.], batch size: 21, lr: 1.47e-04 
+2022-05-08 10:09:27,216 INFO [train.py:715] (6/8) Epoch 15, batch 15700, loss[loss=0.1241, simple_loss=0.2047, pruned_loss=0.02172, over 4810.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02985, over 971663.05 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:10:05,787 INFO [train.py:715] (6/8) Epoch 15, batch 15750, loss[loss=0.1161, simple_loss=0.1957, pruned_loss=0.01825, over 4931.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.0302, over 971607.88 frames.], batch size: 29, lr: 1.47e-04 +2022-05-08 10:10:44,347 INFO [train.py:715] (6/8) Epoch 15, batch 15800, loss[loss=0.1131, simple_loss=0.1898, pruned_loss=0.0182, over 4812.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.02996, over 971489.04 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 10:11:23,020 INFO [train.py:715] (6/8) Epoch 15, batch 15850, loss[loss=0.1111, simple_loss=0.189, pruned_loss=0.01667, over 4951.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03044, over 971703.71 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:12:01,146 INFO [train.py:715] (6/8) Epoch 15, batch 15900, loss[loss=0.1316, simple_loss=0.2085, pruned_loss=0.02733, over 4944.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03087, over 971337.11 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 10:12:39,299 INFO [train.py:715] (6/8) Epoch 15, batch 15950, loss[loss=0.1741, simple_loss=0.2421, pruned_loss=0.05301, over 4987.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03054, over 972094.91 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 10:13:18,370 INFO [train.py:715] (6/8) Epoch 15, batch 16000, loss[loss=0.1302, simple_loss=0.1978, pruned_loss=0.03128, over 4881.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03008, over 971499.06 frames.], batch size: 20, lr: 1.47e-04 +2022-05-08 10:13:56,002 INFO [train.py:715] (6/8) Epoch 15, batch 16050, loss[loss=0.09699, simple_loss=0.1748, pruned_loss=0.009599, over 4796.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.03, over 971116.65 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 10:14:34,593 INFO [train.py:715] (6/8) Epoch 15, batch 16100, loss[loss=0.1341, simple_loss=0.2112, pruned_loss=0.02852, over 4976.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03068, over 970401.49 frames.], batch size: 28, lr: 1.47e-04 +2022-05-08 10:15:13,036 INFO [train.py:715] (6/8) Epoch 15, batch 16150, loss[loss=0.1581, simple_loss=0.2248, pruned_loss=0.04572, over 4834.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03029, over 970847.80 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:15:51,554 INFO [train.py:715] (6/8) Epoch 15, batch 16200, loss[loss=0.09549, simple_loss=0.1729, pruned_loss=0.009042, over 4665.00 frames.], tot_loss[loss=0.1346, simple_loss=0.209, pruned_loss=0.03012, over 971131.70 frames.], batch size: 14, lr: 1.47e-04 +2022-05-08 10:16:29,838 INFO [train.py:715] (6/8) Epoch 15, batch 16250, loss[loss=0.1327, simple_loss=0.2045, pruned_loss=0.0304, over 4841.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02958, over 971308.53 frames.], batch size: 32, lr: 1.47e-04 +2022-05-08 10:17:08,215 INFO [train.py:715] (6/8) Epoch 15, batch 16300, loss[loss=0.1145, simple_loss=0.186, pruned_loss=0.02155, over 4836.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02981, over 971823.55 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 
10:17:46,751 INFO [train.py:715] (6/8) Epoch 15, batch 16350, loss[loss=0.1176, simple_loss=0.2083, pruned_loss=0.01344, over 4792.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02986, over 972284.55 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 10:18:24,620 INFO [train.py:715] (6/8) Epoch 15, batch 16400, loss[loss=0.1559, simple_loss=0.2329, pruned_loss=0.03945, over 4787.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03013, over 971964.02 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 10:19:03,503 INFO [train.py:715] (6/8) Epoch 15, batch 16450, loss[loss=0.1072, simple_loss=0.1848, pruned_loss=0.01476, over 4928.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03003, over 972135.63 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 10:19:41,758 INFO [train.py:715] (6/8) Epoch 15, batch 16500, loss[loss=0.1415, simple_loss=0.2052, pruned_loss=0.03896, over 4956.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03045, over 971784.26 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:20:20,130 INFO [train.py:715] (6/8) Epoch 15, batch 16550, loss[loss=0.117, simple_loss=0.1989, pruned_loss=0.01756, over 4937.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.03, over 971656.51 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:20:58,278 INFO [train.py:715] (6/8) Epoch 15, batch 16600, loss[loss=0.1247, simple_loss=0.2036, pruned_loss=0.02289, over 4756.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02989, over 972191.56 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 10:21:37,034 INFO [train.py:715] (6/8) Epoch 15, batch 16650, loss[loss=0.1377, simple_loss=0.2108, pruned_loss=0.03234, over 4837.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02984, over 972132.39 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:22:16,802 INFO [train.py:715] (6/8) Epoch 15, batch 16700, loss[loss=0.111, simple_loss=0.1924, pruned_loss=0.0148, over 4827.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02976, over 971775.08 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 10:22:55,522 INFO [train.py:715] (6/8) Epoch 15, batch 16750, loss[loss=0.1432, simple_loss=0.2127, pruned_loss=0.03685, over 4777.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02976, over 972097.36 frames.], batch size: 14, lr: 1.47e-04 +2022-05-08 10:23:34,513 INFO [train.py:715] (6/8) Epoch 15, batch 16800, loss[loss=0.1083, simple_loss=0.1856, pruned_loss=0.01547, over 4761.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02968, over 971802.59 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 10:24:13,669 INFO [train.py:715] (6/8) Epoch 15, batch 16850, loss[loss=0.1169, simple_loss=0.1928, pruned_loss=0.02051, over 4887.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02964, over 971008.78 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 10:24:52,752 INFO [train.py:715] (6/8) Epoch 15, batch 16900, loss[loss=0.132, simple_loss=0.2182, pruned_loss=0.02288, over 4965.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2072, pruned_loss=0.03014, over 971782.76 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 10:25:31,720 INFO [train.py:715] (6/8) Epoch 15, batch 16950, loss[loss=0.1258, simple_loss=0.1977, pruned_loss=0.02701, over 4804.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2069, pruned_loss=0.03032, over 971687.79 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:26:10,071 
INFO [train.py:715] (6/8) Epoch 15, batch 17000, loss[loss=0.1262, simple_loss=0.2021, pruned_loss=0.02519, over 4839.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2064, pruned_loss=0.02992, over 970898.97 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 10:26:49,333 INFO [train.py:715] (6/8) Epoch 15, batch 17050, loss[loss=0.1583, simple_loss=0.2291, pruned_loss=0.04373, over 4955.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02965, over 972285.56 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:27:27,261 INFO [train.py:715] (6/8) Epoch 15, batch 17100, loss[loss=0.1368, simple_loss=0.2097, pruned_loss=0.0319, over 4785.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.02994, over 972214.71 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 10:28:06,075 INFO [train.py:715] (6/8) Epoch 15, batch 17150, loss[loss=0.111, simple_loss=0.1912, pruned_loss=0.01538, over 4952.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03029, over 971094.09 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 10:28:44,478 INFO [train.py:715] (6/8) Epoch 15, batch 17200, loss[loss=0.1381, simple_loss=0.2198, pruned_loss=0.02815, over 4813.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03057, over 971495.64 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 10:29:23,148 INFO [train.py:715] (6/8) Epoch 15, batch 17250, loss[loss=0.1302, simple_loss=0.2017, pruned_loss=0.02931, over 4816.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03027, over 972036.02 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 10:30:01,738 INFO [train.py:715] (6/8) Epoch 15, batch 17300, loss[loss=0.1297, simple_loss=0.2053, pruned_loss=0.027, over 4787.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03004, over 971862.27 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:30:40,376 INFO [train.py:715] (6/8) Epoch 15, batch 17350, loss[loss=0.1363, simple_loss=0.2093, pruned_loss=0.03166, over 4785.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02995, over 971153.48 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:31:19,966 INFO [train.py:715] (6/8) Epoch 15, batch 17400, loss[loss=0.1146, simple_loss=0.1912, pruned_loss=0.01896, over 4729.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2093, pruned_loss=0.03014, over 970963.86 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 10:31:57,908 INFO [train.py:715] (6/8) Epoch 15, batch 17450, loss[loss=0.1217, simple_loss=0.1954, pruned_loss=0.02404, over 4861.00 frames.], tot_loss[loss=0.134, simple_loss=0.2086, pruned_loss=0.02975, over 971397.30 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 10:32:36,886 INFO [train.py:715] (6/8) Epoch 15, batch 17500, loss[loss=0.1263, simple_loss=0.2027, pruned_loss=0.02493, over 4853.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2085, pruned_loss=0.02964, over 971127.22 frames.], batch size: 32, lr: 1.47e-04 +2022-05-08 10:33:15,859 INFO [train.py:715] (6/8) Epoch 15, batch 17550, loss[loss=0.1257, simple_loss=0.1951, pruned_loss=0.02818, over 4925.00 frames.], tot_loss[loss=0.1353, simple_loss=0.21, pruned_loss=0.03026, over 972580.15 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 10:33:54,450 INFO [train.py:715] (6/8) Epoch 15, batch 17600, loss[loss=0.1789, simple_loss=0.241, pruned_loss=0.05839, over 4761.00 frames.], tot_loss[loss=0.134, simple_loss=0.2086, pruned_loss=0.02968, over 972425.58 frames.], batch size: 14, lr: 1.47e-04 +2022-05-08 10:34:32,829 INFO 
[train.py:715] (6/8) Epoch 15, batch 17650, loss[loss=0.09669, simple_loss=0.1763, pruned_loss=0.00852, over 4771.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2085, pruned_loss=0.0295, over 972260.35 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:35:11,449 INFO [train.py:715] (6/8) Epoch 15, batch 17700, loss[loss=0.1383, simple_loss=0.2188, pruned_loss=0.0289, over 4794.00 frames.], tot_loss[loss=0.134, simple_loss=0.2086, pruned_loss=0.02975, over 972738.81 frames.], batch size: 14, lr: 1.47e-04 +2022-05-08 10:35:50,339 INFO [train.py:715] (6/8) Epoch 15, batch 17750, loss[loss=0.1258, simple_loss=0.2017, pruned_loss=0.02498, over 4810.00 frames.], tot_loss[loss=0.1345, simple_loss=0.209, pruned_loss=0.02995, over 972407.86 frames.], batch size: 12, lr: 1.47e-04 +2022-05-08 10:36:28,700 INFO [train.py:715] (6/8) Epoch 15, batch 17800, loss[loss=0.1467, simple_loss=0.2288, pruned_loss=0.03234, over 4791.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03008, over 972919.89 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:37:07,684 INFO [train.py:715] (6/8) Epoch 15, batch 17850, loss[loss=0.1529, simple_loss=0.2144, pruned_loss=0.04568, over 4687.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03019, over 972507.03 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:37:46,672 INFO [train.py:715] (6/8) Epoch 15, batch 17900, loss[loss=0.1374, simple_loss=0.2088, pruned_loss=0.03303, over 4839.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02981, over 972933.04 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 10:38:25,489 INFO [train.py:715] (6/8) Epoch 15, batch 17950, loss[loss=0.1255, simple_loss=0.2063, pruned_loss=0.02232, over 4889.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03088, over 972758.86 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 10:39:03,819 INFO [train.py:715] (6/8) Epoch 15, batch 18000, loss[loss=0.126, simple_loss=0.2152, pruned_loss=0.01842, over 4850.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03096, over 972664.69 frames.], batch size: 20, lr: 1.47e-04 +2022-05-08 10:39:03,820 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 10:39:13,329 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.1048, simple_loss=0.1885, pruned_loss=0.01059, over 914524.00 frames. 
+2022-05-08 10:39:51,810 INFO [train.py:715] (6/8) Epoch 15, batch 18050, loss[loss=0.1115, simple_loss=0.1867, pruned_loss=0.01809, over 4918.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03124, over 972834.40 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 10:40:30,477 INFO [train.py:715] (6/8) Epoch 15, batch 18100, loss[loss=0.1331, simple_loss=0.2195, pruned_loss=0.02337, over 4834.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03068, over 973078.16 frames.], batch size: 26, lr: 1.46e-04 +2022-05-08 10:41:09,231 INFO [train.py:715] (6/8) Epoch 15, batch 18150, loss[loss=0.1363, simple_loss=0.2107, pruned_loss=0.03094, over 4860.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2085, pruned_loss=0.03079, over 973422.14 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 10:41:47,120 INFO [train.py:715] (6/8) Epoch 15, batch 18200, loss[loss=0.1313, simple_loss=0.2015, pruned_loss=0.03054, over 4753.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.0306, over 973673.26 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 10:42:25,781 INFO [train.py:715] (6/8) Epoch 15, batch 18250, loss[loss=0.1156, simple_loss=0.1884, pruned_loss=0.02141, over 4814.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03043, over 973099.62 frames.], batch size: 27, lr: 1.46e-04 +2022-05-08 10:43:04,442 INFO [train.py:715] (6/8) Epoch 15, batch 18300, loss[loss=0.1464, simple_loss=0.2357, pruned_loss=0.02859, over 4903.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2094, pruned_loss=0.03055, over 973815.57 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 10:43:42,533 INFO [train.py:715] (6/8) Epoch 15, batch 18350, loss[loss=0.1502, simple_loss=0.2248, pruned_loss=0.03784, over 4966.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03066, over 974047.51 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 10:44:21,120 INFO [train.py:715] (6/8) Epoch 15, batch 18400, loss[loss=0.1099, simple_loss=0.1814, pruned_loss=0.01916, over 4947.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03065, over 972707.10 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 10:44:59,618 INFO [train.py:715] (6/8) Epoch 15, batch 18450, loss[loss=0.1434, simple_loss=0.2186, pruned_loss=0.03404, over 4932.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2096, pruned_loss=0.03061, over 973719.72 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 10:45:38,878 INFO [train.py:715] (6/8) Epoch 15, batch 18500, loss[loss=0.1342, simple_loss=0.2037, pruned_loss=0.03235, over 4861.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.03052, over 973079.48 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 10:46:17,372 INFO [train.py:715] (6/8) Epoch 15, batch 18550, loss[loss=0.1343, simple_loss=0.2036, pruned_loss=0.03256, over 4746.00 frames.], tot_loss[loss=0.135, simple_loss=0.2091, pruned_loss=0.03043, over 973579.39 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 10:46:55,962 INFO [train.py:715] (6/8) Epoch 15, batch 18600, loss[loss=0.1243, simple_loss=0.1974, pruned_loss=0.02565, over 4789.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02998, over 973536.60 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 10:47:34,879 INFO [train.py:715] (6/8) Epoch 15, batch 18650, loss[loss=0.1302, simple_loss=0.1914, pruned_loss=0.03454, over 4890.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.0301, over 972998.64 frames.], batch size: 19, lr: 1.46e-04 
+2022-05-08 10:48:13,535 INFO [train.py:715] (6/8) Epoch 15, batch 18700, loss[loss=0.1598, simple_loss=0.2177, pruned_loss=0.05094, over 4840.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03029, over 972450.87 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 10:48:52,400 INFO [train.py:715] (6/8) Epoch 15, batch 18750, loss[loss=0.1422, simple_loss=0.215, pruned_loss=0.03471, over 4747.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2071, pruned_loss=0.03002, over 972486.62 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 10:49:31,689 INFO [train.py:715] (6/8) Epoch 15, batch 18800, loss[loss=0.1194, simple_loss=0.1963, pruned_loss=0.02129, over 4916.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02995, over 973235.50 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 10:50:10,932 INFO [train.py:715] (6/8) Epoch 15, batch 18850, loss[loss=0.1305, simple_loss=0.2004, pruned_loss=0.03033, over 4879.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.0299, over 972438.65 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 10:50:49,353 INFO [train.py:715] (6/8) Epoch 15, batch 18900, loss[loss=0.1207, simple_loss=0.2019, pruned_loss=0.01977, over 4898.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02998, over 972690.87 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 10:51:28,567 INFO [train.py:715] (6/8) Epoch 15, batch 18950, loss[loss=0.1146, simple_loss=0.1822, pruned_loss=0.02346, over 4978.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03035, over 973303.90 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 10:52:07,879 INFO [train.py:715] (6/8) Epoch 15, batch 19000, loss[loss=0.1054, simple_loss=0.1736, pruned_loss=0.01856, over 4806.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.0305, over 972867.97 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 10:52:46,235 INFO [train.py:715] (6/8) Epoch 15, batch 19050, loss[loss=0.1237, simple_loss=0.1997, pruned_loss=0.02385, over 4942.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03047, over 972389.20 frames.], batch size: 29, lr: 1.46e-04 +2022-05-08 10:53:25,396 INFO [train.py:715] (6/8) Epoch 15, batch 19100, loss[loss=0.1462, simple_loss=0.223, pruned_loss=0.03471, over 4871.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03038, over 972443.08 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 10:54:03,699 INFO [train.py:715] (6/8) Epoch 15, batch 19150, loss[loss=0.1062, simple_loss=0.1899, pruned_loss=0.01122, over 4822.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02996, over 972303.67 frames.], batch size: 27, lr: 1.46e-04 +2022-05-08 10:54:41,931 INFO [train.py:715] (6/8) Epoch 15, batch 19200, loss[loss=0.1433, simple_loss=0.2212, pruned_loss=0.03275, over 4950.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02991, over 972237.32 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 10:55:19,947 INFO [train.py:715] (6/8) Epoch 15, batch 19250, loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.02844, over 4851.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.0296, over 971772.65 frames.], batch size: 32, lr: 1.46e-04 +2022-05-08 10:55:58,062 INFO [train.py:715] (6/8) Epoch 15, batch 19300, loss[loss=0.1716, simple_loss=0.2443, pruned_loss=0.04943, over 4976.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.0294, over 971305.34 frames.], batch size: 33, lr: 1.46e-04 +2022-05-08 
10:56:36,948 INFO [train.py:715] (6/8) Epoch 15, batch 19350, loss[loss=0.1359, simple_loss=0.2023, pruned_loss=0.03473, over 4942.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.0296, over 972393.94 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 10:57:14,727 INFO [train.py:715] (6/8) Epoch 15, batch 19400, loss[loss=0.1693, simple_loss=0.2262, pruned_loss=0.0562, over 4925.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02944, over 971681.28 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 10:57:53,614 INFO [train.py:715] (6/8) Epoch 15, batch 19450, loss[loss=0.1697, simple_loss=0.2429, pruned_loss=0.04826, over 4936.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03007, over 972032.13 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 10:58:31,634 INFO [train.py:715] (6/8) Epoch 15, batch 19500, loss[loss=0.146, simple_loss=0.2128, pruned_loss=0.03965, over 4963.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03, over 972341.64 frames.], batch size: 35, lr: 1.46e-04 +2022-05-08 10:59:09,768 INFO [train.py:715] (6/8) Epoch 15, batch 19550, loss[loss=0.09703, simple_loss=0.1735, pruned_loss=0.01027, over 4957.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03001, over 972915.50 frames.], batch size: 29, lr: 1.46e-04 +2022-05-08 10:59:48,217 INFO [train.py:715] (6/8) Epoch 15, batch 19600, loss[loss=0.126, simple_loss=0.1891, pruned_loss=0.03149, over 4753.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02986, over 973631.93 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:00:26,253 INFO [train.py:715] (6/8) Epoch 15, batch 19650, loss[loss=0.1694, simple_loss=0.2427, pruned_loss=0.04805, over 4956.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.0301, over 974403.11 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 11:01:05,290 INFO [train.py:715] (6/8) Epoch 15, batch 19700, loss[loss=0.1305, simple_loss=0.2208, pruned_loss=0.02006, over 4930.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.03026, over 973990.77 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:01:42,979 INFO [train.py:715] (6/8) Epoch 15, batch 19750, loss[loss=0.1175, simple_loss=0.192, pruned_loss=0.02157, over 4806.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03014, over 973617.95 frames.], batch size: 25, lr: 1.46e-04 +2022-05-08 11:02:21,390 INFO [train.py:715] (6/8) Epoch 15, batch 19800, loss[loss=0.1164, simple_loss=0.19, pruned_loss=0.02144, over 4792.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03005, over 972982.51 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:02:59,696 INFO [train.py:715] (6/8) Epoch 15, batch 19850, loss[loss=0.1213, simple_loss=0.1951, pruned_loss=0.02372, over 4980.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02987, over 972922.39 frames.], batch size: 28, lr: 1.46e-04 +2022-05-08 11:03:37,786 INFO [train.py:715] (6/8) Epoch 15, batch 19900, loss[loss=0.1524, simple_loss=0.2301, pruned_loss=0.03739, over 4898.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02989, over 972838.02 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:04:16,956 INFO [train.py:715] (6/8) Epoch 15, batch 19950, loss[loss=0.1115, simple_loss=0.1994, pruned_loss=0.01179, over 4967.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.0301, over 972615.13 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:04:55,166 INFO 
[train.py:715] (6/8) Epoch 15, batch 20000, loss[loss=0.1448, simple_loss=0.2188, pruned_loss=0.03543, over 4802.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03005, over 973099.14 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:05:33,565 INFO [train.py:715] (6/8) Epoch 15, batch 20050, loss[loss=0.1303, simple_loss=0.2137, pruned_loss=0.02339, over 4921.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2092, pruned_loss=0.03052, over 973195.81 frames.], batch size: 29, lr: 1.46e-04 +2022-05-08 11:06:11,837 INFO [train.py:715] (6/8) Epoch 15, batch 20100, loss[loss=0.1013, simple_loss=0.1817, pruned_loss=0.01046, over 4800.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03057, over 973707.27 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:06:50,116 INFO [train.py:715] (6/8) Epoch 15, batch 20150, loss[loss=0.1311, simple_loss=0.2023, pruned_loss=0.02997, over 4904.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2094, pruned_loss=0.03048, over 973862.78 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:07:28,125 INFO [train.py:715] (6/8) Epoch 15, batch 20200, loss[loss=0.1642, simple_loss=0.2271, pruned_loss=0.05071, over 4869.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03063, over 973595.44 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 11:08:05,821 INFO [train.py:715] (6/8) Epoch 15, batch 20250, loss[loss=0.1529, simple_loss=0.2317, pruned_loss=0.03702, over 4799.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03078, over 973252.49 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:08:44,516 INFO [train.py:715] (6/8) Epoch 15, batch 20300, loss[loss=0.1688, simple_loss=0.2389, pruned_loss=0.04932, over 4833.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2094, pruned_loss=0.03019, over 973529.49 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 11:09:22,704 INFO [train.py:715] (6/8) Epoch 15, batch 20350, loss[loss=0.1391, simple_loss=0.2036, pruned_loss=0.0373, over 4970.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2099, pruned_loss=0.0307, over 973406.32 frames.], batch size: 35, lr: 1.46e-04 +2022-05-08 11:10:01,086 INFO [train.py:715] (6/8) Epoch 15, batch 20400, loss[loss=0.1241, simple_loss=0.2062, pruned_loss=0.02103, over 4979.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2099, pruned_loss=0.03042, over 972986.94 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:10:38,946 INFO [train.py:715] (6/8) Epoch 15, batch 20450, loss[loss=0.1246, simple_loss=0.1904, pruned_loss=0.02939, over 4857.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03017, over 972134.86 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:11:17,696 INFO [train.py:715] (6/8) Epoch 15, batch 20500, loss[loss=0.1277, simple_loss=0.2016, pruned_loss=0.02696, over 4900.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02983, over 972895.42 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:11:55,870 INFO [train.py:715] (6/8) Epoch 15, batch 20550, loss[loss=0.1123, simple_loss=0.1924, pruned_loss=0.01608, over 4937.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03003, over 972914.61 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 11:12:33,919 INFO [train.py:715] (6/8) Epoch 15, batch 20600, loss[loss=0.1168, simple_loss=0.1979, pruned_loss=0.01791, over 4752.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03029, over 971874.70 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:13:12,979 INFO 
[train.py:715] (6/8) Epoch 15, batch 20650, loss[loss=0.1198, simple_loss=0.2028, pruned_loss=0.01837, over 4860.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03021, over 972348.12 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 11:13:51,739 INFO [train.py:715] (6/8) Epoch 15, batch 20700, loss[loss=0.1087, simple_loss=0.188, pruned_loss=0.01469, over 4853.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.03004, over 971882.31 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 11:14:31,082 INFO [train.py:715] (6/8) Epoch 15, batch 20750, loss[loss=0.1093, simple_loss=0.1815, pruned_loss=0.0186, over 4770.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03012, over 972657.01 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:15:09,386 INFO [train.py:715] (6/8) Epoch 15, batch 20800, loss[loss=0.126, simple_loss=0.2, pruned_loss=0.02607, over 4871.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03017, over 971674.85 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 11:15:48,761 INFO [train.py:715] (6/8) Epoch 15, batch 20850, loss[loss=0.1382, simple_loss=0.2186, pruned_loss=0.02895, over 4803.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.0304, over 971741.25 frames.], batch size: 25, lr: 1.46e-04 +2022-05-08 11:16:27,991 INFO [train.py:715] (6/8) Epoch 15, batch 20900, loss[loss=0.1375, simple_loss=0.207, pruned_loss=0.03405, over 4769.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2093, pruned_loss=0.03043, over 972341.73 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 11:17:06,242 INFO [train.py:715] (6/8) Epoch 15, batch 20950, loss[loss=0.156, simple_loss=0.2281, pruned_loss=0.04193, over 4876.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2097, pruned_loss=0.03041, over 972102.97 frames.], batch size: 32, lr: 1.46e-04 +2022-05-08 11:17:45,527 INFO [train.py:715] (6/8) Epoch 15, batch 21000, loss[loss=0.1197, simple_loss=0.1901, pruned_loss=0.0246, over 4780.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2093, pruned_loss=0.0304, over 972645.03 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:17:45,528 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 11:17:56,038 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.1051, simple_loss=0.1887, pruned_loss=0.01075, over 914524.00 frames. 
+2022-05-08 11:18:35,286 INFO [train.py:715] (6/8) Epoch 15, batch 21050, loss[loss=0.1392, simple_loss=0.2207, pruned_loss=0.02887, over 4813.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03003, over 973244.06 frames.], batch size: 25, lr: 1.46e-04 +2022-05-08 11:19:14,768 INFO [train.py:715] (6/8) Epoch 15, batch 21100, loss[loss=0.1512, simple_loss=0.2138, pruned_loss=0.04428, over 4976.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03031, over 973887.24 frames.], batch size: 35, lr: 1.46e-04 +2022-05-08 11:19:53,786 INFO [train.py:715] (6/8) Epoch 15, batch 21150, loss[loss=0.1541, simple_loss=0.2246, pruned_loss=0.04178, over 4834.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2089, pruned_loss=0.0302, over 973606.54 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:20:32,269 INFO [train.py:715] (6/8) Epoch 15, batch 21200, loss[loss=0.1456, simple_loss=0.2097, pruned_loss=0.04076, over 4796.00 frames.], tot_loss[loss=0.1346, simple_loss=0.209, pruned_loss=0.03013, over 973517.29 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:21:11,106 INFO [train.py:715] (6/8) Epoch 15, batch 21250, loss[loss=0.1738, simple_loss=0.244, pruned_loss=0.05182, over 4780.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03021, over 973524.68 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:21:49,146 INFO [train.py:715] (6/8) Epoch 15, batch 21300, loss[loss=0.1347, simple_loss=0.2038, pruned_loss=0.03277, over 4792.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03015, over 972845.38 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:22:26,789 INFO [train.py:715] (6/8) Epoch 15, batch 21350, loss[loss=0.1392, simple_loss=0.2144, pruned_loss=0.03196, over 4878.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03033, over 972286.53 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:23:05,096 INFO [train.py:715] (6/8) Epoch 15, batch 21400, loss[loss=0.1334, simple_loss=0.2091, pruned_loss=0.02888, over 4936.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2072, pruned_loss=0.03012, over 972278.16 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 11:23:43,352 INFO [train.py:715] (6/8) Epoch 15, batch 21450, loss[loss=0.1393, simple_loss=0.2169, pruned_loss=0.0309, over 4702.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2072, pruned_loss=0.03009, over 971160.81 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:24:21,357 INFO [train.py:715] (6/8) Epoch 15, batch 21500, loss[loss=0.1357, simple_loss=0.2175, pruned_loss=0.02693, over 4878.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2072, pruned_loss=0.03004, over 971189.99 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 11:24:59,647 INFO [train.py:715] (6/8) Epoch 15, batch 21550, loss[loss=0.1249, simple_loss=0.2067, pruned_loss=0.02158, over 4871.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.03002, over 971094.05 frames.], batch size: 34, lr: 1.46e-04 +2022-05-08 11:25:38,152 INFO [train.py:715] (6/8) Epoch 15, batch 21600, loss[loss=0.14, simple_loss=0.216, pruned_loss=0.03197, over 4745.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03, over 970926.81 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:26:16,020 INFO [train.py:715] (6/8) Epoch 15, batch 21650, loss[loss=0.1551, simple_loss=0.2281, pruned_loss=0.041, over 4688.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03026, over 970885.92 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 
11:26:54,238 INFO [train.py:715] (6/8) Epoch 15, batch 21700, loss[loss=0.1381, simple_loss=0.2107, pruned_loss=0.03274, over 4758.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03006, over 971401.22 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:27:32,376 INFO [train.py:715] (6/8) Epoch 15, batch 21750, loss[loss=0.1259, simple_loss=0.1979, pruned_loss=0.02692, over 4770.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03003, over 970848.74 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:28:10,475 INFO [train.py:715] (6/8) Epoch 15, batch 21800, loss[loss=0.1414, simple_loss=0.2091, pruned_loss=0.0368, over 4937.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03052, over 971435.81 frames.], batch size: 35, lr: 1.46e-04 +2022-05-08 11:28:48,403 INFO [train.py:715] (6/8) Epoch 15, batch 21850, loss[loss=0.1283, simple_loss=0.1988, pruned_loss=0.02887, over 4819.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03004, over 971275.64 frames.], batch size: 27, lr: 1.46e-04 +2022-05-08 11:29:29,584 INFO [train.py:715] (6/8) Epoch 15, batch 21900, loss[loss=0.1118, simple_loss=0.1834, pruned_loss=0.02008, over 4770.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02992, over 971243.54 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:30:08,734 INFO [train.py:715] (6/8) Epoch 15, batch 21950, loss[loss=0.1498, simple_loss=0.2202, pruned_loss=0.03972, over 4846.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02964, over 971632.75 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 11:30:47,267 INFO [train.py:715] (6/8) Epoch 15, batch 22000, loss[loss=0.1202, simple_loss=0.1866, pruned_loss=0.02691, over 4741.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02947, over 972099.09 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:31:25,789 INFO [train.py:715] (6/8) Epoch 15, batch 22050, loss[loss=0.1362, simple_loss=0.2091, pruned_loss=0.03163, over 4934.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.0295, over 972199.53 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 11:32:05,112 INFO [train.py:715] (6/8) Epoch 15, batch 22100, loss[loss=0.1535, simple_loss=0.214, pruned_loss=0.04648, over 4983.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02939, over 972788.43 frames.], batch size: 35, lr: 1.46e-04 +2022-05-08 11:32:43,914 INFO [train.py:715] (6/8) Epoch 15, batch 22150, loss[loss=0.1093, simple_loss=0.1867, pruned_loss=0.01594, over 4726.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02953, over 972040.33 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:33:22,284 INFO [train.py:715] (6/8) Epoch 15, batch 22200, loss[loss=0.1208, simple_loss=0.1898, pruned_loss=0.02588, over 4779.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02955, over 972117.43 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:34:01,327 INFO [train.py:715] (6/8) Epoch 15, batch 22250, loss[loss=0.1832, simple_loss=0.2527, pruned_loss=0.05681, over 4969.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02942, over 972577.56 frames.], batch size: 35, lr: 1.46e-04 +2022-05-08 11:34:40,257 INFO [train.py:715] (6/8) Epoch 15, batch 22300, loss[loss=0.1412, simple_loss=0.2148, pruned_loss=0.03382, over 4895.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02954, over 972138.77 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 
11:35:18,793 INFO [train.py:715] (6/8) Epoch 15, batch 22350, loss[loss=0.1263, simple_loss=0.2003, pruned_loss=0.02611, over 4912.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03003, over 972925.57 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:35:57,366 INFO [train.py:715] (6/8) Epoch 15, batch 22400, loss[loss=0.1071, simple_loss=0.1833, pruned_loss=0.01544, over 4800.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.0299, over 972829.03 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:36:36,628 INFO [train.py:715] (6/8) Epoch 15, batch 22450, loss[loss=0.1376, simple_loss=0.1979, pruned_loss=0.0386, over 4822.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02974, over 973218.53 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:37:15,516 INFO [train.py:715] (6/8) Epoch 15, batch 22500, loss[loss=0.1447, simple_loss=0.2143, pruned_loss=0.03752, over 4739.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02969, over 973718.38 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:37:54,242 INFO [train.py:715] (6/8) Epoch 15, batch 22550, loss[loss=0.1205, simple_loss=0.1904, pruned_loss=0.02535, over 4831.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02974, over 972870.57 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 11:38:32,797 INFO [train.py:715] (6/8) Epoch 15, batch 22600, loss[loss=0.1666, simple_loss=0.2329, pruned_loss=0.05012, over 4974.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02994, over 972695.58 frames.], batch size: 40, lr: 1.46e-04 +2022-05-08 11:39:11,718 INFO [train.py:715] (6/8) Epoch 15, batch 22650, loss[loss=0.1254, simple_loss=0.2049, pruned_loss=0.02294, over 4793.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.03, over 973175.70 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:39:50,374 INFO [train.py:715] (6/8) Epoch 15, batch 22700, loss[loss=0.1137, simple_loss=0.1956, pruned_loss=0.0159, over 4806.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03004, over 973379.47 frames.], batch size: 27, lr: 1.46e-04 +2022-05-08 11:40:29,113 INFO [train.py:715] (6/8) Epoch 15, batch 22750, loss[loss=0.1408, simple_loss=0.2266, pruned_loss=0.02751, over 4751.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2089, pruned_loss=0.03022, over 972734.73 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:41:08,455 INFO [train.py:715] (6/8) Epoch 15, batch 22800, loss[loss=0.1277, simple_loss=0.2098, pruned_loss=0.02277, over 4979.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03009, over 972522.13 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:41:47,280 INFO [train.py:715] (6/8) Epoch 15, batch 22850, loss[loss=0.1297, simple_loss=0.2065, pruned_loss=0.02644, over 4836.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03093, over 973206.21 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 11:42:26,021 INFO [train.py:715] (6/8) Epoch 15, batch 22900, loss[loss=0.1812, simple_loss=0.2426, pruned_loss=0.0599, over 4986.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03055, over 973385.42 frames.], batch size: 31, lr: 1.46e-04 +2022-05-08 11:43:05,208 INFO [train.py:715] (6/8) Epoch 15, batch 22950, loss[loss=0.1164, simple_loss=0.1907, pruned_loss=0.02107, over 4697.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.0298, over 973549.88 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:43:43,826 
INFO [train.py:715] (6/8) Epoch 15, batch 23000, loss[loss=0.1276, simple_loss=0.2015, pruned_loss=0.02687, over 4864.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03, over 972608.55 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 11:44:22,239 INFO [train.py:715] (6/8) Epoch 15, batch 23050, loss[loss=0.1463, simple_loss=0.2189, pruned_loss=0.0368, over 4844.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03034, over 973230.07 frames.], batch size: 34, lr: 1.46e-04 +2022-05-08 11:45:00,625 INFO [train.py:715] (6/8) Epoch 15, batch 23100, loss[loss=0.1303, simple_loss=0.2197, pruned_loss=0.02048, over 4888.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2097, pruned_loss=0.03045, over 972827.10 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:45:39,479 INFO [train.py:715] (6/8) Epoch 15, batch 23150, loss[loss=0.1781, simple_loss=0.242, pruned_loss=0.05703, over 4943.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2096, pruned_loss=0.03053, over 972968.72 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 11:46:17,452 INFO [train.py:715] (6/8) Epoch 15, batch 23200, loss[loss=0.1297, simple_loss=0.2108, pruned_loss=0.02431, over 4896.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03057, over 971968.71 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:46:55,708 INFO [train.py:715] (6/8) Epoch 15, batch 23250, loss[loss=0.1283, simple_loss=0.2015, pruned_loss=0.02753, over 4969.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.0306, over 972258.25 frames.], batch size: 35, lr: 1.46e-04 +2022-05-08 11:47:34,386 INFO [train.py:715] (6/8) Epoch 15, batch 23300, loss[loss=0.1393, simple_loss=0.2105, pruned_loss=0.03407, over 4875.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03015, over 972264.84 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:48:12,427 INFO [train.py:715] (6/8) Epoch 15, batch 23350, loss[loss=0.1994, simple_loss=0.2627, pruned_loss=0.06801, over 4962.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03022, over 973054.06 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:48:50,738 INFO [train.py:715] (6/8) Epoch 15, batch 23400, loss[loss=0.1402, simple_loss=0.2125, pruned_loss=0.03397, over 4952.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02978, over 972934.81 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:49:28,561 INFO [train.py:715] (6/8) Epoch 15, batch 23450, loss[loss=0.1508, simple_loss=0.2271, pruned_loss=0.03722, over 4964.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02989, over 973496.35 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:50:07,084 INFO [train.py:715] (6/8) Epoch 15, batch 23500, loss[loss=0.1162, simple_loss=0.1884, pruned_loss=0.02203, over 4920.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2073, pruned_loss=0.02998, over 972990.22 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 11:50:44,859 INFO [train.py:715] (6/8) Epoch 15, batch 23550, loss[loss=0.1522, simple_loss=0.2249, pruned_loss=0.03973, over 4960.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2069, pruned_loss=0.02994, over 973563.02 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 11:51:22,853 INFO [train.py:715] (6/8) Epoch 15, batch 23600, loss[loss=0.1253, simple_loss=0.2043, pruned_loss=0.02317, over 4783.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02957, over 973059.74 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:52:01,277 INFO 
[train.py:715] (6/8) Epoch 15, batch 23650, loss[loss=0.123, simple_loss=0.2016, pruned_loss=0.0222, over 4851.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02932, over 972554.37 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:52:39,165 INFO [train.py:715] (6/8) Epoch 15, batch 23700, loss[loss=0.1469, simple_loss=0.2171, pruned_loss=0.03837, over 4770.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02959, over 972929.00 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:53:17,230 INFO [train.py:715] (6/8) Epoch 15, batch 23750, loss[loss=0.1407, simple_loss=0.2192, pruned_loss=0.03116, over 4888.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02988, over 973065.69 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 11:53:55,056 INFO [train.py:715] (6/8) Epoch 15, batch 23800, loss[loss=0.1364, simple_loss=0.2137, pruned_loss=0.0295, over 4895.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.0298, over 973152.84 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:54:33,061 INFO [train.py:715] (6/8) Epoch 15, batch 23850, loss[loss=0.1505, simple_loss=0.2204, pruned_loss=0.04024, over 4799.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03031, over 971786.84 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 11:55:11,362 INFO [train.py:715] (6/8) Epoch 15, batch 23900, loss[loss=0.1544, simple_loss=0.2139, pruned_loss=0.04743, over 4875.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03042, over 971971.46 frames.], batch size: 32, lr: 1.46e-04 +2022-05-08 11:55:48,902 INFO [train.py:715] (6/8) Epoch 15, batch 23950, loss[loss=0.1433, simple_loss=0.2213, pruned_loss=0.03271, over 4688.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.0303, over 971967.00 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:56:27,442 INFO [train.py:715] (6/8) Epoch 15, batch 24000, loss[loss=0.1383, simple_loss=0.2043, pruned_loss=0.0361, over 4855.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.0299, over 972093.11 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 11:56:27,443 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 11:56:37,034 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.105, simple_loss=0.1886, pruned_loss=0.01071, over 914524.00 frames. 
+2022-05-08 11:57:15,621 INFO [train.py:715] (6/8) Epoch 15, batch 24050, loss[loss=0.1149, simple_loss=0.1893, pruned_loss=0.02025, over 4916.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02966, over 972124.29 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:57:54,186 INFO [train.py:715] (6/8) Epoch 15, batch 24100, loss[loss=0.131, simple_loss=0.2009, pruned_loss=0.0306, over 4851.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02975, over 971853.24 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:58:32,190 INFO [train.py:715] (6/8) Epoch 15, batch 24150, loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02793, over 4930.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02951, over 971853.94 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 11:59:10,406 INFO [train.py:715] (6/8) Epoch 15, batch 24200, loss[loss=0.1339, simple_loss=0.1982, pruned_loss=0.03481, over 4843.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2081, pruned_loss=0.02946, over 972683.56 frames.], batch size: 32, lr: 1.46e-04 +2022-05-08 11:59:48,420 INFO [train.py:715] (6/8) Epoch 15, batch 24250, loss[loss=0.1316, simple_loss=0.212, pruned_loss=0.0256, over 4804.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02932, over 973408.68 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 12:00:26,746 INFO [train.py:715] (6/8) Epoch 15, batch 24300, loss[loss=0.1387, simple_loss=0.207, pruned_loss=0.03516, over 4750.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02927, over 973054.92 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 12:01:03,898 INFO [train.py:715] (6/8) Epoch 15, batch 24350, loss[loss=0.132, simple_loss=0.2075, pruned_loss=0.02822, over 4905.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02951, over 972397.74 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 12:01:42,320 INFO [train.py:715] (6/8) Epoch 15, batch 24400, loss[loss=0.1547, simple_loss=0.2357, pruned_loss=0.0368, over 4879.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.02922, over 972222.21 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 12:02:20,844 INFO [train.py:715] (6/8) Epoch 15, batch 24450, loss[loss=0.1284, simple_loss=0.2009, pruned_loss=0.028, over 4992.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02974, over 972119.47 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 12:02:58,825 INFO [train.py:715] (6/8) Epoch 15, batch 24500, loss[loss=0.1176, simple_loss=0.1889, pruned_loss=0.02314, over 4957.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03039, over 972195.50 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 12:03:36,484 INFO [train.py:715] (6/8) Epoch 15, batch 24550, loss[loss=0.1605, simple_loss=0.2222, pruned_loss=0.04942, over 4826.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2083, pruned_loss=0.03076, over 972167.77 frames.], batch size: 13, lr: 1.46e-04 +2022-05-08 12:04:14,725 INFO [train.py:715] (6/8) Epoch 15, batch 24600, loss[loss=0.1526, simple_loss=0.2202, pruned_loss=0.04243, over 4695.00 frames.], tot_loss[loss=0.1347, simple_loss=0.208, pruned_loss=0.03068, over 972602.06 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 12:04:53,493 INFO [train.py:715] (6/8) Epoch 15, batch 24650, loss[loss=0.1294, simple_loss=0.2023, pruned_loss=0.02825, over 4892.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2085, pruned_loss=0.03068, over 972713.96 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 
12:05:31,174 INFO [train.py:715] (6/8) Epoch 15, batch 24700, loss[loss=0.1445, simple_loss=0.2219, pruned_loss=0.03358, over 4972.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03075, over 972453.94 frames.], batch size: 25, lr: 1.46e-04 +2022-05-08 12:06:09,580 INFO [train.py:715] (6/8) Epoch 15, batch 24750, loss[loss=0.1261, simple_loss=0.1967, pruned_loss=0.02775, over 4914.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2083, pruned_loss=0.03071, over 972659.19 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 12:06:47,908 INFO [train.py:715] (6/8) Epoch 15, batch 24800, loss[loss=0.1226, simple_loss=0.1964, pruned_loss=0.02435, over 4918.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2073, pruned_loss=0.03001, over 973144.51 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 12:07:25,660 INFO [train.py:715] (6/8) Epoch 15, batch 24850, loss[loss=0.1622, simple_loss=0.231, pruned_loss=0.04668, over 4881.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2065, pruned_loss=0.02979, over 973297.98 frames.], batch size: 32, lr: 1.46e-04 +2022-05-08 12:08:03,592 INFO [train.py:715] (6/8) Epoch 15, batch 24900, loss[loss=0.1645, simple_loss=0.2415, pruned_loss=0.04378, over 4893.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2066, pruned_loss=0.02976, over 973039.89 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 12:08:41,840 INFO [train.py:715] (6/8) Epoch 15, batch 24950, loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03336, over 4743.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02943, over 972706.04 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 12:09:20,962 INFO [train.py:715] (6/8) Epoch 15, batch 25000, loss[loss=0.1176, simple_loss=0.1865, pruned_loss=0.02438, over 4792.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02963, over 972031.94 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 12:09:58,498 INFO [train.py:715] (6/8) Epoch 15, batch 25050, loss[loss=0.1429, simple_loss=0.2229, pruned_loss=0.03144, over 4740.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2075, pruned_loss=0.03014, over 972127.43 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 12:10:36,536 INFO [train.py:715] (6/8) Epoch 15, batch 25100, loss[loss=0.118, simple_loss=0.1915, pruned_loss=0.02222, over 4953.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02986, over 972422.48 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 12:11:14,987 INFO [train.py:715] (6/8) Epoch 15, batch 25150, loss[loss=0.1601, simple_loss=0.2355, pruned_loss=0.0423, over 4980.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03034, over 972345.52 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 12:11:53,003 INFO [train.py:715] (6/8) Epoch 15, batch 25200, loss[loss=0.1407, simple_loss=0.2095, pruned_loss=0.036, over 4939.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02996, over 972094.83 frames.], batch size: 29, lr: 1.46e-04 +2022-05-08 12:12:30,795 INFO [train.py:715] (6/8) Epoch 15, batch 25250, loss[loss=0.1358, simple_loss=0.2069, pruned_loss=0.03231, over 4754.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02993, over 971352.09 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 12:13:09,119 INFO [train.py:715] (6/8) Epoch 15, batch 25300, loss[loss=0.1327, simple_loss=0.2045, pruned_loss=0.03044, over 4823.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.0301, over 971680.94 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 12:13:47,200 
INFO [train.py:715] (6/8) Epoch 15, batch 25350, loss[loss=0.1161, simple_loss=0.1908, pruned_loss=0.02072, over 4865.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.03022, over 972455.79 frames.], batch size: 32, lr: 1.46e-04 +2022-05-08 12:14:24,745 INFO [train.py:715] (6/8) Epoch 15, batch 25400, loss[loss=0.1409, simple_loss=0.2076, pruned_loss=0.0371, over 4904.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2087, pruned_loss=0.02997, over 972998.21 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 12:15:02,818 INFO [train.py:715] (6/8) Epoch 15, batch 25450, loss[loss=0.1366, simple_loss=0.2068, pruned_loss=0.03317, over 4827.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03015, over 972860.35 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 12:15:41,206 INFO [train.py:715] (6/8) Epoch 15, batch 25500, loss[loss=0.1416, simple_loss=0.2085, pruned_loss=0.03732, over 4895.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02969, over 971916.33 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 12:16:18,763 INFO [train.py:715] (6/8) Epoch 15, batch 25550, loss[loss=0.1323, simple_loss=0.2033, pruned_loss=0.03061, over 4829.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02942, over 972412.07 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 12:16:56,915 INFO [train.py:715] (6/8) Epoch 15, batch 25600, loss[loss=0.1433, simple_loss=0.2082, pruned_loss=0.03922, over 4748.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03009, over 972871.05 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 12:17:35,537 INFO [train.py:715] (6/8) Epoch 15, batch 25650, loss[loss=0.1279, simple_loss=0.1975, pruned_loss=0.0291, over 4964.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02997, over 973555.68 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 12:18:13,814 INFO [train.py:715] (6/8) Epoch 15, batch 25700, loss[loss=0.1367, simple_loss=0.2038, pruned_loss=0.0348, over 4817.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02976, over 973163.84 frames.], batch size: 27, lr: 1.45e-04 +2022-05-08 12:18:51,200 INFO [train.py:715] (6/8) Epoch 15, batch 25750, loss[loss=0.1194, simple_loss=0.1945, pruned_loss=0.02219, over 4901.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02962, over 972536.01 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 12:19:29,348 INFO [train.py:715] (6/8) Epoch 15, batch 25800, loss[loss=0.1255, simple_loss=0.2066, pruned_loss=0.02221, over 4853.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2084, pruned_loss=0.02968, over 972526.91 frames.], batch size: 20, lr: 1.45e-04 +2022-05-08 12:20:07,975 INFO [train.py:715] (6/8) Epoch 15, batch 25850, loss[loss=0.1477, simple_loss=0.2186, pruned_loss=0.03836, over 4843.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2084, pruned_loss=0.0297, over 971742.18 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:20:45,416 INFO [train.py:715] (6/8) Epoch 15, batch 25900, loss[loss=0.116, simple_loss=0.186, pruned_loss=0.02299, over 4969.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02955, over 972569.35 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:21:24,014 INFO [train.py:715] (6/8) Epoch 15, batch 25950, loss[loss=0.1369, simple_loss=0.214, pruned_loss=0.02984, over 4796.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02968, over 973107.83 frames.], batch size: 25, lr: 1.45e-04 +2022-05-08 12:22:02,168 INFO 
[train.py:715] (6/8) Epoch 15, batch 26000, loss[loss=0.1337, simple_loss=0.2036, pruned_loss=0.03186, over 4799.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02984, over 972974.21 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 12:22:39,851 INFO [train.py:715] (6/8) Epoch 15, batch 26050, loss[loss=0.1212, simple_loss=0.2034, pruned_loss=0.01953, over 4785.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02993, over 972696.38 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 12:23:17,645 INFO [train.py:715] (6/8) Epoch 15, batch 26100, loss[loss=0.1303, simple_loss=0.2012, pruned_loss=0.02972, over 4979.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02972, over 973260.88 frames.], batch size: 28, lr: 1.45e-04 +2022-05-08 12:23:56,076 INFO [train.py:715] (6/8) Epoch 15, batch 26150, loss[loss=0.1774, simple_loss=0.2527, pruned_loss=0.05103, over 4861.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02977, over 972730.61 frames.], batch size: 20, lr: 1.45e-04 +2022-05-08 12:24:33,866 INFO [train.py:715] (6/8) Epoch 15, batch 26200, loss[loss=0.1542, simple_loss=0.2162, pruned_loss=0.04609, over 4875.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02974, over 973063.23 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:25:11,684 INFO [train.py:715] (6/8) Epoch 15, batch 26250, loss[loss=0.1072, simple_loss=0.1787, pruned_loss=0.01785, over 4799.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.0295, over 973231.85 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 12:25:50,003 INFO [train.py:715] (6/8) Epoch 15, batch 26300, loss[loss=0.1426, simple_loss=0.2209, pruned_loss=0.03217, over 4707.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2081, pruned_loss=0.02956, over 973347.04 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:26:28,461 INFO [train.py:715] (6/8) Epoch 15, batch 26350, loss[loss=0.1273, simple_loss=0.2017, pruned_loss=0.0265, over 4971.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02973, over 973749.24 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:27:06,219 INFO [train.py:715] (6/8) Epoch 15, batch 26400, loss[loss=0.1462, simple_loss=0.2217, pruned_loss=0.0354, over 4771.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.0293, over 973657.20 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 12:27:44,349 INFO [train.py:715] (6/8) Epoch 15, batch 26450, loss[loss=0.1371, simple_loss=0.2218, pruned_loss=0.02622, over 4793.00 frames.], tot_loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.02978, over 973755.74 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 12:28:22,636 INFO [train.py:715] (6/8) Epoch 15, batch 26500, loss[loss=0.1319, simple_loss=0.2055, pruned_loss=0.02911, over 4699.00 frames.], tot_loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.02983, over 972736.17 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:29:00,418 INFO [train.py:715] (6/8) Epoch 15, batch 26550, loss[loss=0.1167, simple_loss=0.1916, pruned_loss=0.02089, over 4735.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2081, pruned_loss=0.02927, over 971882.52 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 12:29:38,153 INFO [train.py:715] (6/8) Epoch 15, batch 26600, loss[loss=0.13, simple_loss=0.2103, pruned_loss=0.02491, over 4745.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2082, pruned_loss=0.02903, over 971895.38 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:30:16,185 INFO 
[train.py:715] (6/8) Epoch 15, batch 26650, loss[loss=0.1338, simple_loss=0.2105, pruned_loss=0.02852, over 4772.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2077, pruned_loss=0.02903, over 972253.15 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 12:30:54,316 INFO [train.py:715] (6/8) Epoch 15, batch 26700, loss[loss=0.1605, simple_loss=0.2154, pruned_loss=0.05285, over 4897.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2077, pruned_loss=0.02905, over 973078.88 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:31:31,943 INFO [train.py:715] (6/8) Epoch 15, batch 26750, loss[loss=0.1585, simple_loss=0.2281, pruned_loss=0.0445, over 4837.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2079, pruned_loss=0.02929, over 973290.88 frames.], batch size: 30, lr: 1.45e-04 +2022-05-08 12:32:10,362 INFO [train.py:715] (6/8) Epoch 15, batch 26800, loss[loss=0.1462, simple_loss=0.2191, pruned_loss=0.03669, over 4952.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03018, over 972873.30 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 12:32:48,681 INFO [train.py:715] (6/8) Epoch 15, batch 26850, loss[loss=0.1423, simple_loss=0.2184, pruned_loss=0.03315, over 4969.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.03, over 972787.88 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 12:33:26,760 INFO [train.py:715] (6/8) Epoch 15, batch 26900, loss[loss=0.1503, simple_loss=0.2332, pruned_loss=0.03366, over 4771.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03005, over 973245.30 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 12:34:04,499 INFO [train.py:715] (6/8) Epoch 15, batch 26950, loss[loss=0.168, simple_loss=0.2389, pruned_loss=0.04856, over 4820.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02986, over 972856.05 frames.], batch size: 27, lr: 1.45e-04 +2022-05-08 12:34:42,582 INFO [train.py:715] (6/8) Epoch 15, batch 27000, loss[loss=0.1437, simple_loss=0.2087, pruned_loss=0.0393, over 4850.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03001, over 971996.89 frames.], batch size: 30, lr: 1.45e-04 +2022-05-08 12:34:42,583 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 12:34:52,203 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.1049, simple_loss=0.1884, pruned_loss=0.01064, over 914524.00 frames. 
+2022-05-08 12:35:31,295 INFO [train.py:715] (6/8) Epoch 15, batch 27050, loss[loss=0.1192, simple_loss=0.1951, pruned_loss=0.02161, over 4907.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03016, over 972237.41 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 12:36:10,035 INFO [train.py:715] (6/8) Epoch 15, batch 27100, loss[loss=0.1281, simple_loss=0.2085, pruned_loss=0.02381, over 4932.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03037, over 971497.02 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 12:36:48,689 INFO [train.py:715] (6/8) Epoch 15, batch 27150, loss[loss=0.1356, simple_loss=0.2077, pruned_loss=0.03175, over 4984.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03061, over 971720.35 frames.], batch size: 39, lr: 1.45e-04 +2022-05-08 12:37:26,864 INFO [train.py:715] (6/8) Epoch 15, batch 27200, loss[loss=0.1072, simple_loss=0.1781, pruned_loss=0.0181, over 4819.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02994, over 971769.78 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 12:38:05,901 INFO [train.py:715] (6/8) Epoch 15, batch 27250, loss[loss=0.1798, simple_loss=0.2345, pruned_loss=0.06253, over 4847.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2078, pruned_loss=0.03028, over 971625.34 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:38:43,692 INFO [train.py:715] (6/8) Epoch 15, batch 27300, loss[loss=0.1033, simple_loss=0.1757, pruned_loss=0.01543, over 4884.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03044, over 972282.13 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:39:21,920 INFO [train.py:715] (6/8) Epoch 15, batch 27350, loss[loss=0.1254, simple_loss=0.1929, pruned_loss=0.02895, over 4946.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.0305, over 972732.24 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:40:00,099 INFO [train.py:715] (6/8) Epoch 15, batch 27400, loss[loss=0.1507, simple_loss=0.2275, pruned_loss=0.03697, over 4805.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03105, over 972342.89 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:40:38,394 INFO [train.py:715] (6/8) Epoch 15, batch 27450, loss[loss=0.1248, simple_loss=0.2022, pruned_loss=0.02367, over 4829.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03161, over 972829.12 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 12:41:16,659 INFO [train.py:715] (6/8) Epoch 15, batch 27500, loss[loss=0.1335, simple_loss=0.2149, pruned_loss=0.02603, over 4788.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03132, over 971186.50 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 12:41:54,848 INFO [train.py:715] (6/8) Epoch 15, batch 27550, loss[loss=0.1217, simple_loss=0.1955, pruned_loss=0.02401, over 4882.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03106, over 970925.24 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:42:33,401 INFO [train.py:715] (6/8) Epoch 15, batch 27600, loss[loss=0.1575, simple_loss=0.2277, pruned_loss=0.0437, over 4731.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03084, over 970795.58 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:43:10,760 INFO [train.py:715] (6/8) Epoch 15, batch 27650, loss[loss=0.1466, simple_loss=0.2193, pruned_loss=0.03688, over 4954.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03071, over 971006.13 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 
12:43:49,458 INFO [train.py:715] (6/8) Epoch 15, batch 27700, loss[loss=0.138, simple_loss=0.2135, pruned_loss=0.03127, over 4839.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03117, over 971730.24 frames.], batch size: 30, lr: 1.45e-04 +2022-05-08 12:44:27,756 INFO [train.py:715] (6/8) Epoch 15, batch 27750, loss[loss=0.1146, simple_loss=0.1828, pruned_loss=0.02324, over 4978.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03073, over 972527.38 frames.], batch size: 25, lr: 1.45e-04 +2022-05-08 12:45:06,221 INFO [train.py:715] (6/8) Epoch 15, batch 27800, loss[loss=0.1478, simple_loss=0.2262, pruned_loss=0.03469, over 4946.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.0313, over 972289.00 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 12:45:44,232 INFO [train.py:715] (6/8) Epoch 15, batch 27850, loss[loss=0.1442, simple_loss=0.2025, pruned_loss=0.04302, over 4853.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03128, over 972680.24 frames.], batch size: 30, lr: 1.45e-04 +2022-05-08 12:46:21,972 INFO [train.py:715] (6/8) Epoch 15, batch 27900, loss[loss=0.1181, simple_loss=0.1986, pruned_loss=0.01883, over 4756.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.03098, over 972074.21 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 12:47:00,797 INFO [train.py:715] (6/8) Epoch 15, batch 27950, loss[loss=0.1324, simple_loss=0.208, pruned_loss=0.02838, over 4897.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03032, over 972676.65 frames.], batch size: 39, lr: 1.45e-04 +2022-05-08 12:47:38,666 INFO [train.py:715] (6/8) Epoch 15, batch 28000, loss[loss=0.1604, simple_loss=0.2272, pruned_loss=0.0468, over 4884.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03052, over 972180.99 frames.], batch size: 30, lr: 1.45e-04 +2022-05-08 12:48:16,879 INFO [train.py:715] (6/8) Epoch 15, batch 28050, loss[loss=0.1212, simple_loss=0.1987, pruned_loss=0.02184, over 4963.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03062, over 971972.60 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 12:48:55,111 INFO [train.py:715] (6/8) Epoch 15, batch 28100, loss[loss=0.1545, simple_loss=0.2197, pruned_loss=0.04465, over 4939.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03056, over 972510.16 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 12:49:33,362 INFO [train.py:715] (6/8) Epoch 15, batch 28150, loss[loss=0.1719, simple_loss=0.2539, pruned_loss=0.04497, over 4789.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.0304, over 972054.89 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 12:50:11,124 INFO [train.py:715] (6/8) Epoch 15, batch 28200, loss[loss=0.1255, simple_loss=0.2034, pruned_loss=0.02375, over 4919.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03008, over 972657.15 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 12:50:49,026 INFO [train.py:715] (6/8) Epoch 15, batch 28250, loss[loss=0.1403, simple_loss=0.2099, pruned_loss=0.03539, over 4827.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.02998, over 972398.50 frames.], batch size: 27, lr: 1.45e-04 +2022-05-08 12:51:28,179 INFO [train.py:715] (6/8) Epoch 15, batch 28300, loss[loss=0.1578, simple_loss=0.2258, pruned_loss=0.04493, over 4817.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02975, over 972062.02 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 12:52:05,675 
INFO [train.py:715] (6/8) Epoch 15, batch 28350, loss[loss=0.1527, simple_loss=0.2331, pruned_loss=0.03619, over 4734.00 frames.], tot_loss[loss=0.134, simple_loss=0.2085, pruned_loss=0.0297, over 972088.29 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:52:43,905 INFO [train.py:715] (6/8) Epoch 15, batch 28400, loss[loss=0.151, simple_loss=0.2246, pruned_loss=0.03866, over 4651.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02987, over 972314.51 frames.], batch size: 13, lr: 1.45e-04 +2022-05-08 12:53:22,223 INFO [train.py:715] (6/8) Epoch 15, batch 28450, loss[loss=0.1403, simple_loss=0.2219, pruned_loss=0.02933, over 4821.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02958, over 972835.50 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 12:54:00,368 INFO [train.py:715] (6/8) Epoch 15, batch 28500, loss[loss=0.1524, simple_loss=0.2381, pruned_loss=0.03329, over 4944.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2081, pruned_loss=0.02932, over 973371.29 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 12:54:38,502 INFO [train.py:715] (6/8) Epoch 15, batch 28550, loss[loss=0.1584, simple_loss=0.2234, pruned_loss=0.04669, over 4818.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2089, pruned_loss=0.02965, over 973471.57 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:55:16,671 INFO [train.py:715] (6/8) Epoch 15, batch 28600, loss[loss=0.1162, simple_loss=0.1915, pruned_loss=0.02043, over 4883.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.02922, over 973042.80 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:55:55,087 INFO [train.py:715] (6/8) Epoch 15, batch 28650, loss[loss=0.1156, simple_loss=0.1933, pruned_loss=0.01893, over 4819.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02949, over 972569.88 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 12:56:32,940 INFO [train.py:715] (6/8) Epoch 15, batch 28700, loss[loss=0.1216, simple_loss=0.2015, pruned_loss=0.02084, over 4935.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02906, over 972837.78 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 12:57:11,382 INFO [train.py:715] (6/8) Epoch 15, batch 28750, loss[loss=0.1243, simple_loss=0.2008, pruned_loss=0.02386, over 4840.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02953, over 973187.35 frames.], batch size: 27, lr: 1.45e-04 +2022-05-08 12:57:50,112 INFO [train.py:715] (6/8) Epoch 15, batch 28800, loss[loss=0.1431, simple_loss=0.2208, pruned_loss=0.03275, over 4982.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.02998, over 971420.93 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 12:58:28,472 INFO [train.py:715] (6/8) Epoch 15, batch 28850, loss[loss=0.1706, simple_loss=0.2253, pruned_loss=0.05791, over 4768.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.03008, over 971842.97 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 12:59:06,964 INFO [train.py:715] (6/8) Epoch 15, batch 28900, loss[loss=0.1402, simple_loss=0.2082, pruned_loss=0.03605, over 4777.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03044, over 972089.60 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:59:45,680 INFO [train.py:715] (6/8) Epoch 15, batch 28950, loss[loss=0.1093, simple_loss=0.1851, pruned_loss=0.01677, over 4947.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.0301, over 972620.87 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:00:24,852 INFO 
[train.py:715] (6/8) Epoch 15, batch 29000, loss[loss=0.1252, simple_loss=0.199, pruned_loss=0.02567, over 4990.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02997, over 973020.93 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 13:01:03,427 INFO [train.py:715] (6/8) Epoch 15, batch 29050, loss[loss=0.1398, simple_loss=0.2014, pruned_loss=0.03908, over 4971.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2096, pruned_loss=0.03049, over 972477.19 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:01:42,350 INFO [train.py:715] (6/8) Epoch 15, batch 29100, loss[loss=0.1287, simple_loss=0.2105, pruned_loss=0.02342, over 4963.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2088, pruned_loss=0.02973, over 973142.65 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 13:02:21,518 INFO [train.py:715] (6/8) Epoch 15, batch 29150, loss[loss=0.1089, simple_loss=0.1823, pruned_loss=0.01769, over 4785.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2083, pruned_loss=0.02949, over 973553.98 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:03:00,528 INFO [train.py:715] (6/8) Epoch 15, batch 29200, loss[loss=0.1286, simple_loss=0.2093, pruned_loss=0.0239, over 4918.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.02957, over 972935.59 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 13:03:38,940 INFO [train.py:715] (6/8) Epoch 15, batch 29250, loss[loss=0.125, simple_loss=0.1966, pruned_loss=0.02671, over 4757.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.0295, over 972914.50 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:04:17,998 INFO [train.py:715] (6/8) Epoch 15, batch 29300, loss[loss=0.1353, simple_loss=0.2113, pruned_loss=0.02967, over 4756.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02936, over 972638.44 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:04:56,887 INFO [train.py:715] (6/8) Epoch 15, batch 29350, loss[loss=0.1368, simple_loss=0.2077, pruned_loss=0.03298, over 4821.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02961, over 972896.59 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:05:35,474 INFO [train.py:715] (6/8) Epoch 15, batch 29400, loss[loss=0.1316, simple_loss=0.199, pruned_loss=0.03216, over 4907.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03061, over 972945.43 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 13:06:14,532 INFO [train.py:715] (6/8) Epoch 15, batch 29450, loss[loss=0.1319, simple_loss=0.2018, pruned_loss=0.03099, over 4911.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.0308, over 973181.89 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 13:06:53,810 INFO [train.py:715] (6/8) Epoch 15, batch 29500, loss[loss=0.1066, simple_loss=0.1692, pruned_loss=0.02203, over 4750.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2085, pruned_loss=0.03115, over 973585.07 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 13:07:31,955 INFO [train.py:715] (6/8) Epoch 15, batch 29550, loss[loss=0.14, simple_loss=0.2124, pruned_loss=0.03377, over 4932.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2085, pruned_loss=0.03084, over 972374.73 frames.], batch size: 23, lr: 1.45e-04 +2022-05-08 13:08:09,732 INFO [train.py:715] (6/8) Epoch 15, batch 29600, loss[loss=0.1177, simple_loss=0.1903, pruned_loss=0.02256, over 4900.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03044, over 971947.04 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:08:48,779 INFO [train.py:715] 
(6/8) Epoch 15, batch 29650, loss[loss=0.1226, simple_loss=0.1964, pruned_loss=0.02441, over 4955.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02997, over 971986.95 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 13:09:27,542 INFO [train.py:715] (6/8) Epoch 15, batch 29700, loss[loss=0.1418, simple_loss=0.2188, pruned_loss=0.03239, over 4911.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03017, over 972090.43 frames.], batch size: 39, lr: 1.45e-04 +2022-05-08 13:10:05,827 INFO [train.py:715] (6/8) Epoch 15, batch 29750, loss[loss=0.1157, simple_loss=0.1933, pruned_loss=0.01903, over 4942.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02984, over 972353.77 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 13:10:43,494 INFO [train.py:715] (6/8) Epoch 15, batch 29800, loss[loss=0.1191, simple_loss=0.1962, pruned_loss=0.02094, over 4947.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02963, over 972456.34 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 13:11:22,784 INFO [train.py:715] (6/8) Epoch 15, batch 29850, loss[loss=0.1346, simple_loss=0.2067, pruned_loss=0.03124, over 4781.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02948, over 973091.02 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:12:04,494 INFO [train.py:715] (6/8) Epoch 15, batch 29900, loss[loss=0.1307, simple_loss=0.2104, pruned_loss=0.02555, over 4967.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02964, over 972785.83 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 13:12:43,054 INFO [train.py:715] (6/8) Epoch 15, batch 29950, loss[loss=0.1111, simple_loss=0.1849, pruned_loss=0.01859, over 4948.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.0292, over 973718.88 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 13:13:21,389 INFO [train.py:715] (6/8) Epoch 15, batch 30000, loss[loss=0.1338, simple_loss=0.2162, pruned_loss=0.02572, over 4976.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2093, pruned_loss=0.03027, over 973821.22 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 13:13:21,390 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 13:13:30,915 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.1049, simple_loss=0.1885, pruned_loss=0.01066, over 914524.00 frames. 
+2022-05-08 13:14:09,968 INFO [train.py:715] (6/8) Epoch 15, batch 30050, loss[loss=0.1559, simple_loss=0.2399, pruned_loss=0.03592, over 4762.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.03018, over 972917.68 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:14:49,057 INFO [train.py:715] (6/8) Epoch 15, batch 30100, loss[loss=0.1287, simple_loss=0.2034, pruned_loss=0.02701, over 4893.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2092, pruned_loss=0.03019, over 973501.84 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:15:28,215 INFO [train.py:715] (6/8) Epoch 15, batch 30150, loss[loss=0.1174, simple_loss=0.1961, pruned_loss=0.01939, over 4977.00 frames.], tot_loss[loss=0.135, simple_loss=0.2093, pruned_loss=0.03029, over 972918.76 frames.], batch size: 28, lr: 1.45e-04 +2022-05-08 13:16:07,078 INFO [train.py:715] (6/8) Epoch 15, batch 30200, loss[loss=0.1227, simple_loss=0.2016, pruned_loss=0.02191, over 4962.00 frames.], tot_loss[loss=0.135, simple_loss=0.2094, pruned_loss=0.03029, over 972969.13 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 13:16:46,373 INFO [train.py:715] (6/8) Epoch 15, batch 30250, loss[loss=0.1031, simple_loss=0.185, pruned_loss=0.01066, over 4975.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2088, pruned_loss=0.02973, over 973340.47 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:17:25,196 INFO [train.py:715] (6/8) Epoch 15, batch 30300, loss[loss=0.1374, simple_loss=0.2085, pruned_loss=0.03316, over 4931.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02959, over 973746.26 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 13:18:03,167 INFO [train.py:715] (6/8) Epoch 15, batch 30350, loss[loss=0.1363, simple_loss=0.2092, pruned_loss=0.03171, over 4801.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02926, over 972652.18 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:18:42,388 INFO [train.py:715] (6/8) Epoch 15, batch 30400, loss[loss=0.136, simple_loss=0.2088, pruned_loss=0.03158, over 4982.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02944, over 972293.97 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:19:21,253 INFO [train.py:715] (6/8) Epoch 15, batch 30450, loss[loss=0.1455, simple_loss=0.2281, pruned_loss=0.03149, over 4794.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02946, over 972903.35 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 13:20:00,153 INFO [train.py:715] (6/8) Epoch 15, batch 30500, loss[loss=0.1333, simple_loss=0.209, pruned_loss=0.02883, over 4821.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02951, over 973962.85 frames.], batch size: 27, lr: 1.45e-04 +2022-05-08 13:20:38,359 INFO [train.py:715] (6/8) Epoch 15, batch 30550, loss[loss=0.122, simple_loss=0.2028, pruned_loss=0.02059, over 4968.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02983, over 974132.54 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 13:21:17,404 INFO [train.py:715] (6/8) Epoch 15, batch 30600, loss[loss=0.1262, simple_loss=0.202, pruned_loss=0.02521, over 4969.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03008, over 974595.76 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 13:21:56,209 INFO [train.py:715] (6/8) Epoch 15, batch 30650, loss[loss=0.1581, simple_loss=0.2333, pruned_loss=0.0414, over 4885.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02986, over 973320.69 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 
13:22:34,351 INFO [train.py:715] (6/8) Epoch 15, batch 30700, loss[loss=0.149, simple_loss=0.2284, pruned_loss=0.03484, over 4869.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02986, over 973884.60 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:23:13,421 INFO [train.py:715] (6/8) Epoch 15, batch 30750, loss[loss=0.1265, simple_loss=0.2118, pruned_loss=0.02064, over 4946.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02985, over 973159.17 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 13:23:52,091 INFO [train.py:715] (6/8) Epoch 15, batch 30800, loss[loss=0.1361, simple_loss=0.1987, pruned_loss=0.03674, over 4851.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02977, over 972278.93 frames.], batch size: 32, lr: 1.45e-04 +2022-05-08 13:24:30,180 INFO [train.py:715] (6/8) Epoch 15, batch 30850, loss[loss=0.1246, simple_loss=0.2049, pruned_loss=0.02211, over 4786.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02958, over 972358.29 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 13:25:08,416 INFO [train.py:715] (6/8) Epoch 15, batch 30900, loss[loss=0.1241, simple_loss=0.1863, pruned_loss=0.03093, over 4816.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02976, over 971562.36 frames.], batch size: 13, lr: 1.45e-04 +2022-05-08 13:25:46,854 INFO [train.py:715] (6/8) Epoch 15, batch 30950, loss[loss=0.1259, simple_loss=0.2006, pruned_loss=0.0256, over 4765.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03048, over 972292.34 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:26:25,011 INFO [train.py:715] (6/8) Epoch 15, batch 31000, loss[loss=0.1427, simple_loss=0.2153, pruned_loss=0.03503, over 4791.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.0307, over 972733.73 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:27:02,424 INFO [train.py:715] (6/8) Epoch 15, batch 31050, loss[loss=0.1107, simple_loss=0.1823, pruned_loss=0.01959, over 4840.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03028, over 973260.19 frames.], batch size: 13, lr: 1.45e-04 +2022-05-08 13:27:40,734 INFO [train.py:715] (6/8) Epoch 15, batch 31100, loss[loss=0.1335, simple_loss=0.2051, pruned_loss=0.031, over 4741.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2075, pruned_loss=0.03013, over 972100.68 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:28:18,885 INFO [train.py:715] (6/8) Epoch 15, batch 31150, loss[loss=0.1604, simple_loss=0.2331, pruned_loss=0.04386, over 4747.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02954, over 972173.20 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:28:57,278 INFO [train.py:715] (6/8) Epoch 15, batch 31200, loss[loss=0.1439, simple_loss=0.2241, pruned_loss=0.03179, over 4885.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.0291, over 972669.89 frames.], batch size: 39, lr: 1.45e-04 +2022-05-08 13:29:34,876 INFO [train.py:715] (6/8) Epoch 15, batch 31250, loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02988, over 4838.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02959, over 973132.08 frames.], batch size: 30, lr: 1.45e-04 +2022-05-08 13:30:13,197 INFO [train.py:715] (6/8) Epoch 15, batch 31300, loss[loss=0.1577, simple_loss=0.2297, pruned_loss=0.04285, over 4854.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02982, over 973471.41 frames.], batch size: 34, lr: 1.45e-04 +2022-05-08 13:30:51,249 
INFO [train.py:715] (6/8) Epoch 15, batch 31350, loss[loss=0.1371, simple_loss=0.2144, pruned_loss=0.02986, over 4748.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02983, over 973436.86 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:31:28,510 INFO [train.py:715] (6/8) Epoch 15, batch 31400, loss[loss=0.1464, simple_loss=0.2178, pruned_loss=0.0375, over 4827.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02972, over 973086.19 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 13:32:06,856 INFO [train.py:715] (6/8) Epoch 15, batch 31450, loss[loss=0.1344, simple_loss=0.2123, pruned_loss=0.02826, over 4925.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02957, over 973263.08 frames.], batch size: 39, lr: 1.45e-04 +2022-05-08 13:32:45,119 INFO [train.py:715] (6/8) Epoch 15, batch 31500, loss[loss=0.1207, simple_loss=0.2012, pruned_loss=0.02012, over 4809.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02941, over 973761.56 frames.], batch size: 27, lr: 1.45e-04 +2022-05-08 13:33:23,445 INFO [train.py:715] (6/8) Epoch 15, batch 31550, loss[loss=0.1139, simple_loss=0.1903, pruned_loss=0.01875, over 4782.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02919, over 973242.45 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:34:01,212 INFO [train.py:715] (6/8) Epoch 15, batch 31600, loss[loss=0.1463, simple_loss=0.2183, pruned_loss=0.03716, over 4854.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02878, over 972255.04 frames.], batch size: 32, lr: 1.45e-04 +2022-05-08 13:34:39,668 INFO [train.py:715] (6/8) Epoch 15, batch 31650, loss[loss=0.1556, simple_loss=0.2241, pruned_loss=0.04348, over 4752.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2059, pruned_loss=0.02894, over 972761.82 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:35:18,002 INFO [train.py:715] (6/8) Epoch 15, batch 31700, loss[loss=0.1445, simple_loss=0.2257, pruned_loss=0.03164, over 4907.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02958, over 972071.90 frames.], batch size: 23, lr: 1.45e-04 +2022-05-08 13:35:55,497 INFO [train.py:715] (6/8) Epoch 15, batch 31750, loss[loss=0.116, simple_loss=0.1925, pruned_loss=0.01975, over 4809.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02987, over 971960.71 frames.], batch size: 25, lr: 1.45e-04 +2022-05-08 13:36:34,377 INFO [train.py:715] (6/8) Epoch 15, batch 31800, loss[loss=0.1239, simple_loss=0.1977, pruned_loss=0.02507, over 4858.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03006, over 972447.66 frames.], batch size: 20, lr: 1.45e-04 +2022-05-08 13:37:12,849 INFO [train.py:715] (6/8) Epoch 15, batch 31850, loss[loss=0.154, simple_loss=0.2261, pruned_loss=0.04096, over 4974.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02981, over 973278.48 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 13:37:52,388 INFO [train.py:715] (6/8) Epoch 15, batch 31900, loss[loss=0.1414, simple_loss=0.2061, pruned_loss=0.03839, over 4788.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02954, over 972884.16 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 13:38:29,677 INFO [train.py:715] (6/8) Epoch 15, batch 31950, loss[loss=0.122, simple_loss=0.1919, pruned_loss=0.02612, over 4782.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02956, over 972938.11 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 13:39:08,340 INFO 
[train.py:715] (6/8) Epoch 15, batch 32000, loss[loss=0.1139, simple_loss=0.1956, pruned_loss=0.01614, over 4904.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2069, pruned_loss=0.02963, over 972643.75 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:39:46,517 INFO [train.py:715] (6/8) Epoch 15, batch 32050, loss[loss=0.113, simple_loss=0.1833, pruned_loss=0.02139, over 4824.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2066, pruned_loss=0.02936, over 972696.37 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:40:23,947 INFO [train.py:715] (6/8) Epoch 15, batch 32100, loss[loss=0.1319, simple_loss=0.2086, pruned_loss=0.02757, over 4953.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02935, over 972934.70 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 13:41:02,341 INFO [train.py:715] (6/8) Epoch 15, batch 32150, loss[loss=0.1622, simple_loss=0.2358, pruned_loss=0.04428, over 4870.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02944, over 972102.38 frames.], batch size: 20, lr: 1.45e-04 +2022-05-08 13:41:40,504 INFO [train.py:715] (6/8) Epoch 15, batch 32200, loss[loss=0.1175, simple_loss=0.1931, pruned_loss=0.02094, over 4950.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02943, over 972107.02 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 13:42:19,023 INFO [train.py:715] (6/8) Epoch 15, batch 32250, loss[loss=0.1627, simple_loss=0.2343, pruned_loss=0.0456, over 4945.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.0299, over 971364.55 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 13:42:56,891 INFO [train.py:715] (6/8) Epoch 15, batch 32300, loss[loss=0.1754, simple_loss=0.2628, pruned_loss=0.04394, over 4859.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02995, over 971404.59 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:43:35,757 INFO [train.py:715] (6/8) Epoch 15, batch 32350, loss[loss=0.11, simple_loss=0.1796, pruned_loss=0.02021, over 4788.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02995, over 971665.85 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 13:44:14,153 INFO [train.py:715] (6/8) Epoch 15, batch 32400, loss[loss=0.133, simple_loss=0.209, pruned_loss=0.02851, over 4808.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03017, over 971784.08 frames.], batch size: 25, lr: 1.45e-04 +2022-05-08 13:44:51,906 INFO [train.py:715] (6/8) Epoch 15, batch 32450, loss[loss=0.12, simple_loss=0.1983, pruned_loss=0.02083, over 4740.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03047, over 971703.72 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:45:30,471 INFO [train.py:715] (6/8) Epoch 15, batch 32500, loss[loss=0.1306, simple_loss=0.206, pruned_loss=0.02756, over 4809.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03027, over 971525.41 frames.], batch size: 25, lr: 1.45e-04 +2022-05-08 13:46:08,934 INFO [train.py:715] (6/8) Epoch 15, batch 32550, loss[loss=0.136, simple_loss=0.2038, pruned_loss=0.03404, over 4772.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.0304, over 971662.57 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:46:47,828 INFO [train.py:715] (6/8) Epoch 15, batch 32600, loss[loss=0.1424, simple_loss=0.2267, pruned_loss=0.02901, over 4785.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03058, over 971508.30 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:47:26,419 INFO [train.py:715] (6/8) 
Epoch 15, batch 32650, loss[loss=0.1376, simple_loss=0.2155, pruned_loss=0.02985, over 4752.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03022, over 970742.10 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:48:05,085 INFO [train.py:715] (6/8) Epoch 15, batch 32700, loss[loss=0.1159, simple_loss=0.205, pruned_loss=0.01335, over 4945.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02959, over 971952.06 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 13:48:43,316 INFO [train.py:715] (6/8) Epoch 15, batch 32750, loss[loss=0.1651, simple_loss=0.2399, pruned_loss=0.04514, over 4824.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02968, over 972458.55 frames.], batch size: 25, lr: 1.45e-04 +2022-05-08 13:49:21,521 INFO [train.py:715] (6/8) Epoch 15, batch 32800, loss[loss=0.1403, simple_loss=0.2061, pruned_loss=0.03731, over 4842.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.0296, over 973016.50 frames.], batch size: 32, lr: 1.45e-04 +2022-05-08 13:49:59,269 INFO [train.py:715] (6/8) Epoch 15, batch 32850, loss[loss=0.1184, simple_loss=0.1944, pruned_loss=0.02125, over 4894.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02929, over 972319.76 frames.], batch size: 22, lr: 1.45e-04 +2022-05-08 13:50:37,484 INFO [train.py:715] (6/8) Epoch 15, batch 32900, loss[loss=0.1151, simple_loss=0.191, pruned_loss=0.01961, over 4906.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02923, over 971665.06 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 13:51:16,078 INFO [train.py:715] (6/8) Epoch 15, batch 32950, loss[loss=0.1323, simple_loss=0.2001, pruned_loss=0.03224, over 4790.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02948, over 971668.91 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:51:54,464 INFO [train.py:715] (6/8) Epoch 15, batch 33000, loss[loss=0.1143, simple_loss=0.1903, pruned_loss=0.01916, over 4917.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.029, over 972318.72 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:51:54,464 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 13:52:03,986 INFO [train.py:742] (6/8) Epoch 15, validation: loss=0.1052, simple_loss=0.1886, pruned_loss=0.01088, over 914524.00 frames. 
+2022-05-08 13:52:42,027 INFO [train.py:715] (6/8) Epoch 15, batch 33050, loss[loss=0.1399, simple_loss=0.2053, pruned_loss=0.03726, over 4988.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02913, over 971797.73 frames.], batch size: 31, lr: 1.45e-04 +2022-05-08 13:53:20,377 INFO [train.py:715] (6/8) Epoch 15, batch 33100, loss[loss=0.1498, simple_loss=0.227, pruned_loss=0.03628, over 4788.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2067, pruned_loss=0.02957, over 971837.78 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:53:58,081 INFO [train.py:715] (6/8) Epoch 15, batch 33150, loss[loss=0.1793, simple_loss=0.2431, pruned_loss=0.05771, over 4897.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03017, over 972163.84 frames.], batch size: 19, lr: 1.44e-04 +2022-05-08 13:54:37,161 INFO [train.py:715] (6/8) Epoch 15, batch 33200, loss[loss=0.134, simple_loss=0.2068, pruned_loss=0.03058, over 4856.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2068, pruned_loss=0.02969, over 971727.26 frames.], batch size: 20, lr: 1.44e-04 +2022-05-08 13:55:15,594 INFO [train.py:715] (6/8) Epoch 15, batch 33250, loss[loss=0.1693, simple_loss=0.2413, pruned_loss=0.04863, over 4854.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2072, pruned_loss=0.03002, over 972331.51 frames.], batch size: 30, lr: 1.44e-04 +2022-05-08 13:55:53,705 INFO [train.py:715] (6/8) Epoch 15, batch 33300, loss[loss=0.1146, simple_loss=0.1803, pruned_loss=0.02449, over 4803.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2065, pruned_loss=0.0296, over 972627.51 frames.], batch size: 25, lr: 1.44e-04 +2022-05-08 13:56:31,675 INFO [train.py:715] (6/8) Epoch 15, batch 33350, loss[loss=0.1725, simple_loss=0.2428, pruned_loss=0.05112, over 4916.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2073, pruned_loss=0.03003, over 972190.74 frames.], batch size: 23, lr: 1.44e-04 +2022-05-08 13:57:09,332 INFO [train.py:715] (6/8) Epoch 15, batch 33400, loss[loss=0.1583, simple_loss=0.2264, pruned_loss=0.04515, over 4645.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2078, pruned_loss=0.03043, over 971572.60 frames.], batch size: 13, lr: 1.44e-04 +2022-05-08 13:57:47,385 INFO [train.py:715] (6/8) Epoch 15, batch 33450, loss[loss=0.1041, simple_loss=0.1797, pruned_loss=0.01427, over 4813.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03057, over 971300.00 frames.], batch size: 27, lr: 1.44e-04 +2022-05-08 13:58:25,100 INFO [train.py:715] (6/8) Epoch 15, batch 33500, loss[loss=0.1194, simple_loss=0.1968, pruned_loss=0.02103, over 4856.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03078, over 971161.84 frames.], batch size: 20, lr: 1.44e-04 +2022-05-08 13:59:02,923 INFO [train.py:715] (6/8) Epoch 15, batch 33550, loss[loss=0.138, simple_loss=0.2134, pruned_loss=0.03127, over 4979.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03098, over 970832.90 frames.], batch size: 14, lr: 1.44e-04 +2022-05-08 13:59:40,601 INFO [train.py:715] (6/8) Epoch 15, batch 33600, loss[loss=0.1132, simple_loss=0.1915, pruned_loss=0.01746, over 4847.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.0303, over 970722.47 frames.], batch size: 13, lr: 1.44e-04 +2022-05-08 14:00:18,651 INFO [train.py:715] (6/8) Epoch 15, batch 33650, loss[loss=0.1293, simple_loss=0.2021, pruned_loss=0.02828, over 4773.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2088, pruned_loss=0.03072, over 971267.96 frames.], batch size: 14, lr: 1.44e-04 +2022-05-08 
14:00:56,118 INFO [train.py:715] (6/8) Epoch 15, batch 33700, loss[loss=0.1122, simple_loss=0.1939, pruned_loss=0.0153, over 4876.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03066, over 972265.14 frames.], batch size: 22, lr: 1.44e-04 +2022-05-08 14:01:33,644 INFO [train.py:715] (6/8) Epoch 15, batch 33750, loss[loss=0.1481, simple_loss=0.2194, pruned_loss=0.03841, over 4964.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2091, pruned_loss=0.03033, over 972869.84 frames.], batch size: 35, lr: 1.44e-04 +2022-05-08 14:02:11,482 INFO [train.py:715] (6/8) Epoch 15, batch 33800, loss[loss=0.1294, simple_loss=0.2116, pruned_loss=0.02354, over 4931.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03025, over 972746.95 frames.], batch size: 21, lr: 1.44e-04 +2022-05-08 14:02:48,673 INFO [train.py:715] (6/8) Epoch 15, batch 33850, loss[loss=0.1302, simple_loss=0.2022, pruned_loss=0.02909, over 4874.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03018, over 971670.64 frames.], batch size: 16, lr: 1.44e-04 +2022-05-08 14:03:26,484 INFO [train.py:715] (6/8) Epoch 15, batch 33900, loss[loss=0.1006, simple_loss=0.1816, pruned_loss=0.009849, over 4786.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03021, over 972063.84 frames.], batch size: 14, lr: 1.44e-04 +2022-05-08 14:04:04,821 INFO [train.py:715] (6/8) Epoch 15, batch 33950, loss[loss=0.1376, simple_loss=0.2162, pruned_loss=0.02946, over 4841.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02983, over 971332.10 frames.], batch size: 15, lr: 1.44e-04 +2022-05-08 14:04:42,872 INFO [train.py:715] (6/8) Epoch 15, batch 34000, loss[loss=0.117, simple_loss=0.1859, pruned_loss=0.02402, over 4892.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03014, over 971637.25 frames.], batch size: 39, lr: 1.44e-04 +2022-05-08 14:05:20,768 INFO [train.py:715] (6/8) Epoch 15, batch 34050, loss[loss=0.1543, simple_loss=0.2179, pruned_loss=0.04535, over 4870.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02988, over 970858.90 frames.], batch size: 34, lr: 1.44e-04 +2022-05-08 14:05:58,929 INFO [train.py:715] (6/8) Epoch 15, batch 34100, loss[loss=0.1439, simple_loss=0.2181, pruned_loss=0.03486, over 4912.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03018, over 971289.35 frames.], batch size: 29, lr: 1.44e-04 +2022-05-08 14:06:37,188 INFO [train.py:715] (6/8) Epoch 15, batch 34150, loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03255, over 4893.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02956, over 971872.98 frames.], batch size: 19, lr: 1.44e-04 +2022-05-08 14:07:14,888 INFO [train.py:715] (6/8) Epoch 15, batch 34200, loss[loss=0.1334, simple_loss=0.2116, pruned_loss=0.02758, over 4974.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02926, over 972578.00 frames.], batch size: 24, lr: 1.44e-04 +2022-05-08 14:07:52,717 INFO [train.py:715] (6/8) Epoch 15, batch 34250, loss[loss=0.1342, simple_loss=0.192, pruned_loss=0.0382, over 4813.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02932, over 972860.66 frames.], batch size: 13, lr: 1.44e-04 +2022-05-08 14:08:30,683 INFO [train.py:715] (6/8) Epoch 15, batch 34300, loss[loss=0.1318, simple_loss=0.2129, pruned_loss=0.02534, over 4816.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02906, over 972232.37 frames.], batch size: 25, lr: 1.44e-04 +2022-05-08 
14:09:08,609 INFO [train.py:715] (6/8) Epoch 15, batch 34350, loss[loss=0.1082, simple_loss=0.1889, pruned_loss=0.01377, over 4823.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2062, pruned_loss=0.02929, over 971754.56 frames.], batch size: 13, lr: 1.44e-04 +2022-05-08 14:09:45,970 INFO [train.py:715] (6/8) Epoch 15, batch 34400, loss[loss=0.09336, simple_loss=0.167, pruned_loss=0.009843, over 4782.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02941, over 972280.64 frames.], batch size: 14, lr: 1.44e-04 +2022-05-08 14:10:24,172 INFO [train.py:715] (6/8) Epoch 15, batch 34450, loss[loss=0.1114, simple_loss=0.185, pruned_loss=0.01888, over 4847.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2069, pruned_loss=0.0298, over 971846.52 frames.], batch size: 32, lr: 1.44e-04 +2022-05-08 14:11:02,052 INFO [train.py:715] (6/8) Epoch 15, batch 34500, loss[loss=0.135, simple_loss=0.2065, pruned_loss=0.03175, over 4938.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02966, over 972577.84 frames.], batch size: 39, lr: 1.44e-04 +2022-05-08 14:11:39,388 INFO [train.py:715] (6/8) Epoch 15, batch 34550, loss[loss=0.1242, simple_loss=0.1982, pruned_loss=0.02512, over 4777.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.02999, over 971988.53 frames.], batch size: 17, lr: 1.44e-04 +2022-05-08 14:12:17,003 INFO [train.py:715] (6/8) Epoch 15, batch 34600, loss[loss=0.1233, simple_loss=0.1887, pruned_loss=0.02893, over 4785.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.03019, over 971943.75 frames.], batch size: 12, lr: 1.44e-04 +2022-05-08 14:12:54,929 INFO [train.py:715] (6/8) Epoch 15, batch 34650, loss[loss=0.1378, simple_loss=0.2077, pruned_loss=0.034, over 4755.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03, over 971509.90 frames.], batch size: 19, lr: 1.44e-04 +2022-05-08 14:13:32,473 INFO [train.py:715] (6/8) Epoch 15, batch 34700, loss[loss=0.1415, simple_loss=0.2152, pruned_loss=0.0339, over 4980.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02997, over 972280.64 frames.], batch size: 25, lr: 1.44e-04 +2022-05-08 14:14:09,601 INFO [train.py:715] (6/8) Epoch 15, batch 34750, loss[loss=0.1276, simple_loss=0.2073, pruned_loss=0.02394, over 4901.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02969, over 972011.90 frames.], batch size: 18, lr: 1.44e-04 +2022-05-08 14:14:44,839 INFO [train.py:715] (6/8) Epoch 15, batch 34800, loss[loss=0.1448, simple_loss=0.2266, pruned_loss=0.03147, over 4906.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02964, over 971949.74 frames.], batch size: 18, lr: 1.44e-04 +2022-05-08 14:15:33,470 INFO [train.py:715] (6/8) Epoch 16, batch 0, loss[loss=0.1332, simple_loss=0.2087, pruned_loss=0.02888, over 4865.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2087, pruned_loss=0.02888, over 4865.00 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 14:16:11,658 INFO [train.py:715] (6/8) Epoch 16, batch 50, loss[loss=0.1141, simple_loss=0.1937, pruned_loss=0.01726, over 4791.00 frames.], tot_loss[loss=0.135, simple_loss=0.2099, pruned_loss=0.03004, over 219013.01 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:16:50,215 INFO [train.py:715] (6/8) Epoch 16, batch 100, loss[loss=0.1395, simple_loss=0.2105, pruned_loss=0.03428, over 4871.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02932, over 385599.40 frames.], batch size: 16, lr: 1.40e-04 +2022-05-08 14:17:27,946 INFO 
[train.py:715] (6/8) Epoch 16, batch 150, loss[loss=0.1302, simple_loss=0.204, pruned_loss=0.02822, over 4784.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.02999, over 515926.06 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:18:06,152 INFO [train.py:715] (6/8) Epoch 16, batch 200, loss[loss=0.1607, simple_loss=0.238, pruned_loss=0.0417, over 4759.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2084, pruned_loss=0.03091, over 617358.48 frames.], batch size: 19, lr: 1.40e-04 +2022-05-08 14:18:44,279 INFO [train.py:715] (6/8) Epoch 16, batch 250, loss[loss=0.1673, simple_loss=0.2395, pruned_loss=0.0476, over 4916.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03098, over 696288.87 frames.], batch size: 17, lr: 1.40e-04 +2022-05-08 14:19:22,600 INFO [train.py:715] (6/8) Epoch 16, batch 300, loss[loss=0.1242, simple_loss=0.201, pruned_loss=0.02367, over 4861.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03019, over 757487.23 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 14:20:01,027 INFO [train.py:715] (6/8) Epoch 16, batch 350, loss[loss=0.1428, simple_loss=0.2025, pruned_loss=0.04158, over 4860.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03002, over 804121.07 frames.], batch size: 12, lr: 1.40e-04 +2022-05-08 14:20:38,708 INFO [train.py:715] (6/8) Epoch 16, batch 400, loss[loss=0.1314, simple_loss=0.1957, pruned_loss=0.03352, over 4956.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03009, over 841512.86 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:21:17,413 INFO [train.py:715] (6/8) Epoch 16, batch 450, loss[loss=0.1488, simple_loss=0.2171, pruned_loss=0.04025, over 4865.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.03012, over 870330.39 frames.], batch size: 16, lr: 1.40e-04 +2022-05-08 14:21:55,826 INFO [train.py:715] (6/8) Epoch 16, batch 500, loss[loss=0.1299, simple_loss=0.2097, pruned_loss=0.0251, over 4799.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03047, over 893144.97 frames.], batch size: 24, lr: 1.40e-04 +2022-05-08 14:22:33,537 INFO [train.py:715] (6/8) Epoch 16, batch 550, loss[loss=0.1233, simple_loss=0.1984, pruned_loss=0.02406, over 4966.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.03062, over 910031.75 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:23:12,214 INFO [train.py:715] (6/8) Epoch 16, batch 600, loss[loss=0.1405, simple_loss=0.2144, pruned_loss=0.03328, over 4707.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2082, pruned_loss=0.03063, over 923574.28 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:23:50,876 INFO [train.py:715] (6/8) Epoch 16, batch 650, loss[loss=0.135, simple_loss=0.197, pruned_loss=0.03646, over 4930.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2078, pruned_loss=0.03034, over 933789.64 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:24:28,547 INFO [train.py:715] (6/8) Epoch 16, batch 700, loss[loss=0.1568, simple_loss=0.2179, pruned_loss=0.04781, over 4885.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.0307, over 942806.39 frames.], batch size: 32, lr: 1.40e-04 +2022-05-08 14:25:06,447 INFO [train.py:715] (6/8) Epoch 16, batch 750, loss[loss=0.1172, simple_loss=0.2007, pruned_loss=0.01681, over 4754.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03058, over 949552.15 frames.], batch size: 19, lr: 1.40e-04 +2022-05-08 14:25:45,233 INFO [train.py:715] (6/8) Epoch 16, batch 800, 
loss[loss=0.1451, simple_loss=0.2264, pruned_loss=0.03195, over 4898.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03025, over 954994.98 frames.], batch size: 22, lr: 1.40e-04 +2022-05-08 14:26:23,532 INFO [train.py:715] (6/8) Epoch 16, batch 850, loss[loss=0.1199, simple_loss=0.186, pruned_loss=0.02688, over 4850.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03047, over 958862.56 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 14:27:01,575 INFO [train.py:715] (6/8) Epoch 16, batch 900, loss[loss=0.1397, simple_loss=0.2134, pruned_loss=0.03298, over 4922.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03031, over 962137.58 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:27:39,692 INFO [train.py:715] (6/8) Epoch 16, batch 950, loss[loss=0.1236, simple_loss=0.1904, pruned_loss=0.02839, over 4825.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03054, over 964214.11 frames.], batch size: 13, lr: 1.40e-04 +2022-05-08 14:28:18,127 INFO [train.py:715] (6/8) Epoch 16, batch 1000, loss[loss=0.1509, simple_loss=0.2218, pruned_loss=0.03996, over 4869.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03031, over 966379.76 frames.], batch size: 16, lr: 1.40e-04 +2022-05-08 14:28:55,785 INFO [train.py:715] (6/8) Epoch 16, batch 1050, loss[loss=0.1221, simple_loss=0.1894, pruned_loss=0.02737, over 4891.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03026, over 967235.11 frames.], batch size: 22, lr: 1.40e-04 +2022-05-08 14:29:33,185 INFO [train.py:715] (6/8) Epoch 16, batch 1100, loss[loss=0.1171, simple_loss=0.1893, pruned_loss=0.02244, over 4827.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03069, over 969113.98 frames.], batch size: 27, lr: 1.40e-04 +2022-05-08 14:30:11,814 INFO [train.py:715] (6/8) Epoch 16, batch 1150, loss[loss=0.1125, simple_loss=0.1982, pruned_loss=0.01336, over 4816.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02969, over 970468.73 frames.], batch size: 25, lr: 1.40e-04 +2022-05-08 14:30:49,882 INFO [train.py:715] (6/8) Epoch 16, batch 1200, loss[loss=0.1417, simple_loss=0.2068, pruned_loss=0.03832, over 4870.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02971, over 970682.14 frames.], batch size: 32, lr: 1.40e-04 +2022-05-08 14:31:27,247 INFO [train.py:715] (6/8) Epoch 16, batch 1250, loss[loss=0.148, simple_loss=0.2172, pruned_loss=0.0394, over 4915.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2067, pruned_loss=0.02984, over 970885.50 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:32:05,204 INFO [train.py:715] (6/8) Epoch 16, batch 1300, loss[loss=0.1193, simple_loss=0.1867, pruned_loss=0.02598, over 4846.00 frames.], tot_loss[loss=0.1338, simple_loss=0.207, pruned_loss=0.0303, over 970746.85 frames.], batch size: 34, lr: 1.40e-04 +2022-05-08 14:32:43,365 INFO [train.py:715] (6/8) Epoch 16, batch 1350, loss[loss=0.1179, simple_loss=0.1959, pruned_loss=0.01998, over 4819.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2066, pruned_loss=0.0302, over 971529.96 frames.], batch size: 26, lr: 1.40e-04 +2022-05-08 14:33:21,096 INFO [train.py:715] (6/8) Epoch 16, batch 1400, loss[loss=0.1221, simple_loss=0.1975, pruned_loss=0.02341, over 4985.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2067, pruned_loss=0.03007, over 971324.88 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:33:59,220 INFO [train.py:715] (6/8) Epoch 16, batch 1450, loss[loss=0.1482, 
simple_loss=0.2199, pruned_loss=0.03826, over 4871.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2069, pruned_loss=0.02986, over 971211.20 frames.], batch size: 30, lr: 1.40e-04 +2022-05-08 14:34:37,200 INFO [train.py:715] (6/8) Epoch 16, batch 1500, loss[loss=0.1151, simple_loss=0.1906, pruned_loss=0.01981, over 4982.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2073, pruned_loss=0.03009, over 971970.46 frames.], batch size: 14, lr: 1.40e-04 +2022-05-08 14:35:14,921 INFO [train.py:715] (6/8) Epoch 16, batch 1550, loss[loss=0.1212, simple_loss=0.2027, pruned_loss=0.01984, over 4808.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2071, pruned_loss=0.03004, over 971684.63 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:35:52,771 INFO [train.py:715] (6/8) Epoch 16, batch 1600, loss[loss=0.1351, simple_loss=0.2205, pruned_loss=0.02487, over 4899.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02952, over 971401.60 frames.], batch size: 19, lr: 1.40e-04 +2022-05-08 14:36:30,168 INFO [train.py:715] (6/8) Epoch 16, batch 1650, loss[loss=0.1311, simple_loss=0.2137, pruned_loss=0.02424, over 4828.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02953, over 970805.96 frames.], batch size: 26, lr: 1.40e-04 +2022-05-08 14:37:07,995 INFO [train.py:715] (6/8) Epoch 16, batch 1700, loss[loss=0.1242, simple_loss=0.2022, pruned_loss=0.02312, over 4813.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02939, over 971243.16 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:37:46,153 INFO [train.py:715] (6/8) Epoch 16, batch 1750, loss[loss=0.1364, simple_loss=0.207, pruned_loss=0.03288, over 4868.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.0301, over 971148.08 frames.], batch size: 34, lr: 1.40e-04 +2022-05-08 14:38:24,068 INFO [train.py:715] (6/8) Epoch 16, batch 1800, loss[loss=0.131, simple_loss=0.2087, pruned_loss=0.02663, over 4934.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03025, over 971412.66 frames.], batch size: 23, lr: 1.40e-04 +2022-05-08 14:39:02,376 INFO [train.py:715] (6/8) Epoch 16, batch 1850, loss[loss=0.1138, simple_loss=0.1915, pruned_loss=0.018, over 4779.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03019, over 970981.00 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:39:41,004 INFO [train.py:715] (6/8) Epoch 16, batch 1900, loss[loss=0.1348, simple_loss=0.1971, pruned_loss=0.03629, over 4785.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02999, over 971392.55 frames.], batch size: 14, lr: 1.40e-04 +2022-05-08 14:40:18,874 INFO [train.py:715] (6/8) Epoch 16, batch 1950, loss[loss=0.1329, simple_loss=0.2008, pruned_loss=0.03246, over 4859.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02983, over 972069.22 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 14:40:57,047 INFO [train.py:715] (6/8) Epoch 16, batch 2000, loss[loss=0.1167, simple_loss=0.1921, pruned_loss=0.02067, over 4918.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02971, over 971963.12 frames.], batch size: 29, lr: 1.40e-04 +2022-05-08 14:41:35,847 INFO [train.py:715] (6/8) Epoch 16, batch 2050, loss[loss=0.1319, simple_loss=0.2079, pruned_loss=0.0279, over 4707.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02975, over 971279.95 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:42:14,590 INFO [train.py:715] (6/8) Epoch 16, batch 2100, loss[loss=0.1524, simple_loss=0.2241, 
pruned_loss=0.04033, over 4799.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.0299, over 971245.00 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:42:52,444 INFO [train.py:715] (6/8) Epoch 16, batch 2150, loss[loss=0.152, simple_loss=0.2323, pruned_loss=0.03584, over 4923.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02978, over 971471.48 frames.], batch size: 23, lr: 1.40e-04 +2022-05-08 14:43:31,574 INFO [train.py:715] (6/8) Epoch 16, batch 2200, loss[loss=0.1158, simple_loss=0.1821, pruned_loss=0.02475, over 4783.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02975, over 970941.83 frames.], batch size: 14, lr: 1.40e-04 +2022-05-08 14:44:09,851 INFO [train.py:715] (6/8) Epoch 16, batch 2250, loss[loss=0.1349, simple_loss=0.2098, pruned_loss=0.03004, over 4799.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.02964, over 970363.53 frames.], batch size: 25, lr: 1.40e-04 +2022-05-08 14:44:47,485 INFO [train.py:715] (6/8) Epoch 16, batch 2300, loss[loss=0.1215, simple_loss=0.1992, pruned_loss=0.02194, over 4990.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02989, over 970641.04 frames.], batch size: 24, lr: 1.40e-04 +2022-05-08 14:45:25,054 INFO [train.py:715] (6/8) Epoch 16, batch 2350, loss[loss=0.135, simple_loss=0.2121, pruned_loss=0.02897, over 4822.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02993, over 970607.21 frames.], batch size: 25, lr: 1.40e-04 +2022-05-08 14:46:03,346 INFO [train.py:715] (6/8) Epoch 16, batch 2400, loss[loss=0.1313, simple_loss=0.2141, pruned_loss=0.02425, over 4987.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02977, over 971409.26 frames.], batch size: 25, lr: 1.40e-04 +2022-05-08 14:46:41,420 INFO [train.py:715] (6/8) Epoch 16, batch 2450, loss[loss=0.1275, simple_loss=0.2083, pruned_loss=0.02337, over 4849.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02946, over 971451.57 frames.], batch size: 32, lr: 1.40e-04 +2022-05-08 14:47:18,881 INFO [train.py:715] (6/8) Epoch 16, batch 2500, loss[loss=0.1504, simple_loss=0.2424, pruned_loss=0.02921, over 4770.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02969, over 971440.65 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:47:57,274 INFO [train.py:715] (6/8) Epoch 16, batch 2550, loss[loss=0.129, simple_loss=0.1968, pruned_loss=0.03062, over 4844.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02958, over 971896.29 frames.], batch size: 13, lr: 1.40e-04 +2022-05-08 14:48:35,429 INFO [train.py:715] (6/8) Epoch 16, batch 2600, loss[loss=0.1165, simple_loss=0.1892, pruned_loss=0.02192, over 4760.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02983, over 971839.19 frames.], batch size: 12, lr: 1.40e-04 +2022-05-08 14:49:13,162 INFO [train.py:715] (6/8) Epoch 16, batch 2650, loss[loss=0.1434, simple_loss=0.2204, pruned_loss=0.03324, over 4869.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02949, over 972633.52 frames.], batch size: 22, lr: 1.40e-04 +2022-05-08 14:49:51,048 INFO [train.py:715] (6/8) Epoch 16, batch 2700, loss[loss=0.1365, simple_loss=0.2066, pruned_loss=0.03318, over 4705.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02927, over 972462.52 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:50:29,659 INFO [train.py:715] (6/8) Epoch 16, batch 2750, loss[loss=0.1358, simple_loss=0.2145, pruned_loss=0.02858, 
over 4817.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02967, over 972231.57 frames.], batch size: 27, lr: 1.40e-04 +2022-05-08 14:51:08,569 INFO [train.py:715] (6/8) Epoch 16, batch 2800, loss[loss=0.1201, simple_loss=0.1929, pruned_loss=0.02363, over 4976.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02997, over 974241.53 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:51:46,964 INFO [train.py:715] (6/8) Epoch 16, batch 2850, loss[loss=0.1356, simple_loss=0.2086, pruned_loss=0.03132, over 4819.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02979, over 973751.92 frames.], batch size: 27, lr: 1.40e-04 +2022-05-08 14:52:25,016 INFO [train.py:715] (6/8) Epoch 16, batch 2900, loss[loss=0.1106, simple_loss=0.1803, pruned_loss=0.02045, over 4970.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02971, over 973811.61 frames.], batch size: 24, lr: 1.40e-04 +2022-05-08 14:53:03,791 INFO [train.py:715] (6/8) Epoch 16, batch 2950, loss[loss=0.1424, simple_loss=0.2192, pruned_loss=0.0328, over 4904.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2074, pruned_loss=0.03015, over 972616.86 frames.], batch size: 29, lr: 1.40e-04 +2022-05-08 14:53:41,773 INFO [train.py:715] (6/8) Epoch 16, batch 3000, loss[loss=0.1237, simple_loss=0.1873, pruned_loss=0.03009, over 4773.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2073, pruned_loss=0.03021, over 972624.42 frames.], batch size: 12, lr: 1.40e-04 +2022-05-08 14:53:41,774 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 14:53:51,190 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.105, simple_loss=0.1885, pruned_loss=0.01074, over 914524.00 frames. +2022-05-08 14:54:29,008 INFO [train.py:715] (6/8) Epoch 16, batch 3050, loss[loss=0.1037, simple_loss=0.1659, pruned_loss=0.02079, over 4807.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03026, over 972465.30 frames.], batch size: 13, lr: 1.40e-04 +2022-05-08 14:55:09,458 INFO [train.py:715] (6/8) Epoch 16, batch 3100, loss[loss=0.1223, simple_loss=0.1961, pruned_loss=0.02429, over 4933.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02956, over 973286.38 frames.], batch size: 29, lr: 1.40e-04 +2022-05-08 14:55:47,850 INFO [train.py:715] (6/8) Epoch 16, batch 3150, loss[loss=0.1436, simple_loss=0.2216, pruned_loss=0.03286, over 4868.00 frames.], tot_loss[loss=0.134, simple_loss=0.2076, pruned_loss=0.03018, over 972687.89 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 14:56:26,009 INFO [train.py:715] (6/8) Epoch 16, batch 3200, loss[loss=0.1259, simple_loss=0.2068, pruned_loss=0.02249, over 4965.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03016, over 972385.59 frames.], batch size: 24, lr: 1.40e-04 +2022-05-08 14:57:04,240 INFO [train.py:715] (6/8) Epoch 16, batch 3250, loss[loss=0.1446, simple_loss=0.2246, pruned_loss=0.03227, over 4855.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03006, over 972091.95 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 14:57:42,063 INFO [train.py:715] (6/8) Epoch 16, batch 3300, loss[loss=0.1209, simple_loss=0.1949, pruned_loss=0.02349, over 4694.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02949, over 971555.70 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:58:20,052 INFO [train.py:715] (6/8) Epoch 16, batch 3350, loss[loss=0.1074, simple_loss=0.1796, pruned_loss=0.01757, over 4823.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, 
pruned_loss=0.02937, over 972127.73 frames.], batch size: 26, lr: 1.40e-04 +2022-05-08 14:58:57,931 INFO [train.py:715] (6/8) Epoch 16, batch 3400, loss[loss=0.1158, simple_loss=0.1965, pruned_loss=0.01751, over 4950.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02924, over 972950.23 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:59:35,865 INFO [train.py:715] (6/8) Epoch 16, batch 3450, loss[loss=0.128, simple_loss=0.2024, pruned_loss=0.02684, over 4780.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02941, over 972727.27 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 15:00:13,954 INFO [train.py:715] (6/8) Epoch 16, batch 3500, loss[loss=0.125, simple_loss=0.2022, pruned_loss=0.02394, over 4799.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02931, over 972334.41 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 15:00:51,756 INFO [train.py:715] (6/8) Epoch 16, batch 3550, loss[loss=0.1378, simple_loss=0.2095, pruned_loss=0.03308, over 4815.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02947, over 971929.28 frames.], batch size: 26, lr: 1.40e-04 +2022-05-08 15:01:30,179 INFO [train.py:715] (6/8) Epoch 16, batch 3600, loss[loss=0.1332, simple_loss=0.2155, pruned_loss=0.02545, over 4816.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02884, over 972324.47 frames.], batch size: 26, lr: 1.40e-04 +2022-05-08 15:02:07,898 INFO [train.py:715] (6/8) Epoch 16, batch 3650, loss[loss=0.1477, simple_loss=0.2274, pruned_loss=0.03402, over 4810.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02904, over 972470.84 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 15:02:46,542 INFO [train.py:715] (6/8) Epoch 16, batch 3700, loss[loss=0.121, simple_loss=0.1983, pruned_loss=0.02186, over 4944.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02863, over 972603.77 frames.], batch size: 24, lr: 1.40e-04 +2022-05-08 15:03:25,028 INFO [train.py:715] (6/8) Epoch 16, batch 3750, loss[loss=0.1308, simple_loss=0.2065, pruned_loss=0.02759, over 4799.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.029, over 972367.10 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 15:04:03,390 INFO [train.py:715] (6/8) Epoch 16, batch 3800, loss[loss=0.1119, simple_loss=0.193, pruned_loss=0.01542, over 4951.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2055, pruned_loss=0.0286, over 973038.05 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 15:04:42,255 INFO [train.py:715] (6/8) Epoch 16, batch 3850, loss[loss=0.1611, simple_loss=0.2365, pruned_loss=0.04286, over 4807.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02878, over 972723.13 frames.], batch size: 27, lr: 1.40e-04 +2022-05-08 15:05:21,015 INFO [train.py:715] (6/8) Epoch 16, batch 3900, loss[loss=0.1074, simple_loss=0.1828, pruned_loss=0.01603, over 4844.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02907, over 972191.67 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:05:58,860 INFO [train.py:715] (6/8) Epoch 16, batch 3950, loss[loss=0.1858, simple_loss=0.2502, pruned_loss=0.06066, over 4895.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02946, over 972016.21 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 15:06:36,787 INFO [train.py:715] (6/8) Epoch 16, batch 4000, loss[loss=0.144, simple_loss=0.2278, pruned_loss=0.0301, over 4850.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.0299, over 
973042.25 frames.], batch size: 20, lr: 1.39e-04 +2022-05-08 15:07:14,744 INFO [train.py:715] (6/8) Epoch 16, batch 4050, loss[loss=0.1331, simple_loss=0.2115, pruned_loss=0.02735, over 4901.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03021, over 972380.96 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:07:52,148 INFO [train.py:715] (6/8) Epoch 16, batch 4100, loss[loss=0.1191, simple_loss=0.1781, pruned_loss=0.03012, over 4815.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2091, pruned_loss=0.03024, over 972211.35 frames.], batch size: 12, lr: 1.39e-04 +2022-05-08 15:08:29,798 INFO [train.py:715] (6/8) Epoch 16, batch 4150, loss[loss=0.1255, simple_loss=0.1993, pruned_loss=0.02583, over 4881.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.03005, over 972295.16 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 15:09:07,465 INFO [train.py:715] (6/8) Epoch 16, batch 4200, loss[loss=0.1331, simple_loss=0.2043, pruned_loss=0.03096, over 4984.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.02995, over 972599.39 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:09:45,633 INFO [train.py:715] (6/8) Epoch 16, batch 4250, loss[loss=0.1271, simple_loss=0.2061, pruned_loss=0.02408, over 4975.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03006, over 972869.93 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 15:10:23,340 INFO [train.py:715] (6/8) Epoch 16, batch 4300, loss[loss=0.1265, simple_loss=0.2085, pruned_loss=0.02221, over 4933.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02965, over 972846.33 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 15:11:01,196 INFO [train.py:715] (6/8) Epoch 16, batch 4350, loss[loss=0.1552, simple_loss=0.2219, pruned_loss=0.04422, over 4695.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2094, pruned_loss=0.03057, over 973506.55 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:11:39,306 INFO [train.py:715] (6/8) Epoch 16, batch 4400, loss[loss=0.1178, simple_loss=0.2018, pruned_loss=0.01696, over 4967.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03016, over 972530.97 frames.], batch size: 28, lr: 1.39e-04 +2022-05-08 15:12:17,135 INFO [train.py:715] (6/8) Epoch 16, batch 4450, loss[loss=0.143, simple_loss=0.2169, pruned_loss=0.03457, over 4870.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03057, over 972049.11 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 15:12:54,752 INFO [train.py:715] (6/8) Epoch 16, batch 4500, loss[loss=0.1198, simple_loss=0.1986, pruned_loss=0.02053, over 4785.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2092, pruned_loss=0.03027, over 972471.06 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 15:13:32,862 INFO [train.py:715] (6/8) Epoch 16, batch 4550, loss[loss=0.1589, simple_loss=0.2305, pruned_loss=0.04359, over 4823.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02978, over 972263.83 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:14:11,253 INFO [train.py:715] (6/8) Epoch 16, batch 4600, loss[loss=0.1319, simple_loss=0.1976, pruned_loss=0.03311, over 4702.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02981, over 971500.00 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:14:49,231 INFO [train.py:715] (6/8) Epoch 16, batch 4650, loss[loss=0.1336, simple_loss=0.207, pruned_loss=0.03015, over 4864.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03, over 970698.21 frames.], 
batch size: 20, lr: 1.39e-04 +2022-05-08 15:15:27,647 INFO [train.py:715] (6/8) Epoch 16, batch 4700, loss[loss=0.143, simple_loss=0.2056, pruned_loss=0.0402, over 4749.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2072, pruned_loss=0.02975, over 970981.00 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 15:16:06,242 INFO [train.py:715] (6/8) Epoch 16, batch 4750, loss[loss=0.1279, simple_loss=0.2068, pruned_loss=0.02448, over 4802.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02979, over 971989.72 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 15:16:44,846 INFO [train.py:715] (6/8) Epoch 16, batch 4800, loss[loss=0.1218, simple_loss=0.206, pruned_loss=0.0188, over 4765.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02984, over 972145.55 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 15:17:23,124 INFO [train.py:715] (6/8) Epoch 16, batch 4850, loss[loss=0.1426, simple_loss=0.222, pruned_loss=0.0316, over 4825.00 frames.], tot_loss[loss=0.1334, simple_loss=0.207, pruned_loss=0.02991, over 972224.13 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 15:18:01,825 INFO [train.py:715] (6/8) Epoch 16, batch 4900, loss[loss=0.1428, simple_loss=0.2214, pruned_loss=0.03204, over 4790.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2068, pruned_loss=0.02989, over 972355.45 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:18:40,698 INFO [train.py:715] (6/8) Epoch 16, batch 4950, loss[loss=0.1628, simple_loss=0.2332, pruned_loss=0.04624, over 4845.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2072, pruned_loss=0.02982, over 972584.14 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:19:18,942 INFO [train.py:715] (6/8) Epoch 16, batch 5000, loss[loss=0.1527, simple_loss=0.2245, pruned_loss=0.04041, over 4700.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02983, over 972161.07 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:19:57,139 INFO [train.py:715] (6/8) Epoch 16, batch 5050, loss[loss=0.1145, simple_loss=0.1871, pruned_loss=0.02098, over 4797.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2071, pruned_loss=0.03003, over 972222.27 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:20:35,447 INFO [train.py:715] (6/8) Epoch 16, batch 5100, loss[loss=0.1339, simple_loss=0.2134, pruned_loss=0.02721, over 4854.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03008, over 972082.82 frames.], batch size: 34, lr: 1.39e-04 +2022-05-08 15:21:13,349 INFO [train.py:715] (6/8) Epoch 16, batch 5150, loss[loss=0.1672, simple_loss=0.2437, pruned_loss=0.04536, over 4959.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02976, over 972880.77 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:21:50,908 INFO [train.py:715] (6/8) Epoch 16, batch 5200, loss[loss=0.141, simple_loss=0.2215, pruned_loss=0.0302, over 4870.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02935, over 972861.40 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 15:22:28,867 INFO [train.py:715] (6/8) Epoch 16, batch 5250, loss[loss=0.115, simple_loss=0.1956, pruned_loss=0.01723, over 4911.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02942, over 973512.87 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 15:23:07,105 INFO [train.py:715] (6/8) Epoch 16, batch 5300, loss[loss=0.1275, simple_loss=0.1965, pruned_loss=0.02924, over 4783.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02928, over 973198.95 frames.], batch size: 17, lr: 1.39e-04 
+2022-05-08 15:23:45,229 INFO [train.py:715] (6/8) Epoch 16, batch 5350, loss[loss=0.119, simple_loss=0.1975, pruned_loss=0.02025, over 4991.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02953, over 972439.39 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 15:24:23,037 INFO [train.py:715] (6/8) Epoch 16, batch 5400, loss[loss=0.1379, simple_loss=0.2075, pruned_loss=0.03416, over 4910.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.0295, over 973149.79 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:25:00,888 INFO [train.py:715] (6/8) Epoch 16, batch 5450, loss[loss=0.1293, simple_loss=0.2087, pruned_loss=0.02491, over 4874.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02997, over 972863.64 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 15:25:38,706 INFO [train.py:715] (6/8) Epoch 16, batch 5500, loss[loss=0.1735, simple_loss=0.2471, pruned_loss=0.04998, over 4865.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02981, over 972297.97 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:26:16,326 INFO [train.py:715] (6/8) Epoch 16, batch 5550, loss[loss=0.1144, simple_loss=0.1912, pruned_loss=0.01884, over 4939.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02999, over 972707.66 frames.], batch size: 29, lr: 1.39e-04 +2022-05-08 15:26:54,076 INFO [train.py:715] (6/8) Epoch 16, batch 5600, loss[loss=0.1213, simple_loss=0.2003, pruned_loss=0.02116, over 4810.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02934, over 972636.52 frames.], batch size: 26, lr: 1.39e-04 +2022-05-08 15:27:32,730 INFO [train.py:715] (6/8) Epoch 16, batch 5650, loss[loss=0.1345, simple_loss=0.2142, pruned_loss=0.0274, over 4753.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02948, over 972276.60 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 15:28:10,548 INFO [train.py:715] (6/8) Epoch 16, batch 5700, loss[loss=0.1097, simple_loss=0.1847, pruned_loss=0.01734, over 4798.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.0293, over 972080.49 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 15:28:48,369 INFO [train.py:715] (6/8) Epoch 16, batch 5750, loss[loss=0.1242, simple_loss=0.1959, pruned_loss=0.02619, over 4921.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02988, over 971390.25 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 15:29:26,212 INFO [train.py:715] (6/8) Epoch 16, batch 5800, loss[loss=0.1123, simple_loss=0.1875, pruned_loss=0.0185, over 4871.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2068, pruned_loss=0.02968, over 971578.81 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:30:04,476 INFO [train.py:715] (6/8) Epoch 16, batch 5850, loss[loss=0.1386, simple_loss=0.2111, pruned_loss=0.03309, over 4977.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2069, pruned_loss=0.0301, over 971846.84 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:30:42,017 INFO [train.py:715] (6/8) Epoch 16, batch 5900, loss[loss=0.1126, simple_loss=0.1936, pruned_loss=0.01583, over 4935.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.03001, over 971954.25 frames.], batch size: 29, lr: 1.39e-04 +2022-05-08 15:31:19,662 INFO [train.py:715] (6/8) Epoch 16, batch 5950, loss[loss=0.1409, simple_loss=0.2159, pruned_loss=0.03293, over 4905.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02997, over 972511.64 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 15:31:58,427 
INFO [train.py:715] (6/8) Epoch 16, batch 6000, loss[loss=0.1261, simple_loss=0.1958, pruned_loss=0.02818, over 4855.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02979, over 971799.96 frames.], batch size: 20, lr: 1.39e-04 +2022-05-08 15:31:58,428 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 15:32:07,946 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.105, simple_loss=0.1885, pruned_loss=0.01082, over 914524.00 frames. +2022-05-08 15:32:46,979 INFO [train.py:715] (6/8) Epoch 16, batch 6050, loss[loss=0.1653, simple_loss=0.2522, pruned_loss=0.0392, over 4884.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03004, over 971692.42 frames.], batch size: 20, lr: 1.39e-04 +2022-05-08 15:33:25,023 INFO [train.py:715] (6/8) Epoch 16, batch 6100, loss[loss=0.1427, simple_loss=0.2068, pruned_loss=0.03935, over 4878.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02981, over 971105.62 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:34:02,794 INFO [train.py:715] (6/8) Epoch 16, batch 6150, loss[loss=0.1194, simple_loss=0.1982, pruned_loss=0.02036, over 4972.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2084, pruned_loss=0.02969, over 971629.97 frames.], batch size: 28, lr: 1.39e-04 +2022-05-08 15:34:40,935 INFO [train.py:715] (6/8) Epoch 16, batch 6200, loss[loss=0.1445, simple_loss=0.2109, pruned_loss=0.03907, over 4707.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02958, over 971662.85 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:35:19,472 INFO [train.py:715] (6/8) Epoch 16, batch 6250, loss[loss=0.1316, simple_loss=0.1996, pruned_loss=0.0318, over 4803.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02997, over 971975.38 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 15:35:57,116 INFO [train.py:715] (6/8) Epoch 16, batch 6300, loss[loss=0.1383, simple_loss=0.2247, pruned_loss=0.02592, over 4909.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.0301, over 972057.39 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:36:34,887 INFO [train.py:715] (6/8) Epoch 16, batch 6350, loss[loss=0.1354, simple_loss=0.2215, pruned_loss=0.0247, over 4807.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.0295, over 971890.52 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 15:37:13,403 INFO [train.py:715] (6/8) Epoch 16, batch 6400, loss[loss=0.1239, simple_loss=0.2056, pruned_loss=0.02114, over 4812.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02913, over 972147.76 frames.], batch size: 27, lr: 1.39e-04 +2022-05-08 15:37:51,669 INFO [train.py:715] (6/8) Epoch 16, batch 6450, loss[loss=0.1797, simple_loss=0.2505, pruned_loss=0.0545, over 4696.00 frames.], tot_loss[loss=0.134, simple_loss=0.2086, pruned_loss=0.02973, over 972242.86 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:38:29,442 INFO [train.py:715] (6/8) Epoch 16, batch 6500, loss[loss=0.1234, simple_loss=0.2019, pruned_loss=0.02241, over 4743.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02952, over 972043.75 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:39:07,581 INFO [train.py:715] (6/8) Epoch 16, batch 6550, loss[loss=0.136, simple_loss=0.2043, pruned_loss=0.03383, over 4962.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02982, over 972425.47 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 15:39:46,028 INFO [train.py:715] (6/8) Epoch 16, batch 6600, loss[loss=0.1569, 
simple_loss=0.2396, pruned_loss=0.03709, over 4961.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02975, over 972505.86 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:40:23,829 INFO [train.py:715] (6/8) Epoch 16, batch 6650, loss[loss=0.1208, simple_loss=0.196, pruned_loss=0.02277, over 4859.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02973, over 972158.05 frames.], batch size: 20, lr: 1.39e-04 +2022-05-08 15:41:01,686 INFO [train.py:715] (6/8) Epoch 16, batch 6700, loss[loss=0.1369, simple_loss=0.2091, pruned_loss=0.03229, over 4805.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02968, over 971654.06 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 15:41:39,712 INFO [train.py:715] (6/8) Epoch 16, batch 6750, loss[loss=0.1274, simple_loss=0.2058, pruned_loss=0.02451, over 4951.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2092, pruned_loss=0.03015, over 971826.47 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 15:42:17,830 INFO [train.py:715] (6/8) Epoch 16, batch 6800, loss[loss=0.1177, simple_loss=0.1875, pruned_loss=0.02391, over 4900.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2091, pruned_loss=0.0302, over 971803.36 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:42:54,812 INFO [train.py:715] (6/8) Epoch 16, batch 6850, loss[loss=0.1484, simple_loss=0.2278, pruned_loss=0.03446, over 4758.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2091, pruned_loss=0.03021, over 971850.73 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 15:43:32,597 INFO [train.py:715] (6/8) Epoch 16, batch 6900, loss[loss=0.1094, simple_loss=0.1785, pruned_loss=0.02019, over 4833.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.03, over 971135.71 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:44:10,713 INFO [train.py:715] (6/8) Epoch 16, batch 6950, loss[loss=0.1411, simple_loss=0.2162, pruned_loss=0.03298, over 4936.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03021, over 971355.13 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 15:44:48,420 INFO [train.py:715] (6/8) Epoch 16, batch 7000, loss[loss=0.1147, simple_loss=0.1909, pruned_loss=0.01926, over 4820.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03, over 971822.16 frames.], batch size: 26, lr: 1.39e-04 +2022-05-08 15:45:26,357 INFO [train.py:715] (6/8) Epoch 16, batch 7050, loss[loss=0.1549, simple_loss=0.2303, pruned_loss=0.0398, over 4788.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2066, pruned_loss=0.02953, over 971430.96 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 15:46:04,192 INFO [train.py:715] (6/8) Epoch 16, batch 7100, loss[loss=0.1459, simple_loss=0.2211, pruned_loss=0.0354, over 4916.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2063, pruned_loss=0.02951, over 972208.32 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:46:42,647 INFO [train.py:715] (6/8) Epoch 16, batch 7150, loss[loss=0.1078, simple_loss=0.1764, pruned_loss=0.01963, over 4786.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02949, over 971947.48 frames.], batch size: 12, lr: 1.39e-04 +2022-05-08 15:47:19,960 INFO [train.py:715] (6/8) Epoch 16, batch 7200, loss[loss=0.1293, simple_loss=0.2101, pruned_loss=0.0243, over 4771.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02938, over 971740.60 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:47:57,928 INFO [train.py:715] (6/8) Epoch 16, batch 7250, loss[loss=0.1724, simple_loss=0.2453, 
pruned_loss=0.04974, over 4846.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.03003, over 971454.21 frames.], batch size: 20, lr: 1.39e-04 +2022-05-08 15:48:36,997 INFO [train.py:715] (6/8) Epoch 16, batch 7300, loss[loss=0.1567, simple_loss=0.2092, pruned_loss=0.05212, over 4977.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03068, over 971424.62 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 15:49:15,790 INFO [train.py:715] (6/8) Epoch 16, batch 7350, loss[loss=0.1242, simple_loss=0.1911, pruned_loss=0.02863, over 4936.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.0301, over 972333.98 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 15:49:55,233 INFO [train.py:715] (6/8) Epoch 16, batch 7400, loss[loss=0.1373, simple_loss=0.2055, pruned_loss=0.03459, over 4855.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03073, over 972040.38 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 15:50:34,949 INFO [train.py:715] (6/8) Epoch 16, batch 7450, loss[loss=0.1492, simple_loss=0.2079, pruned_loss=0.04526, over 4873.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03024, over 971966.13 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 15:51:14,633 INFO [train.py:715] (6/8) Epoch 16, batch 7500, loss[loss=0.1373, simple_loss=0.2203, pruned_loss=0.02717, over 4988.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03023, over 971611.32 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 15:51:53,675 INFO [train.py:715] (6/8) Epoch 16, batch 7550, loss[loss=0.1205, simple_loss=0.1962, pruned_loss=0.02242, over 4848.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03037, over 970882.87 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 15:52:33,693 INFO [train.py:715] (6/8) Epoch 16, batch 7600, loss[loss=0.1412, simple_loss=0.2185, pruned_loss=0.03199, over 4952.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02997, over 971893.43 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 15:53:14,075 INFO [train.py:715] (6/8) Epoch 16, batch 7650, loss[loss=0.1555, simple_loss=0.2349, pruned_loss=0.03802, over 4958.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03041, over 972218.23 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:53:54,207 INFO [train.py:715] (6/8) Epoch 16, batch 7700, loss[loss=0.1622, simple_loss=0.2413, pruned_loss=0.04152, over 4790.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03016, over 972278.27 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 15:54:33,725 INFO [train.py:715] (6/8) Epoch 16, batch 7750, loss[loss=0.1538, simple_loss=0.2319, pruned_loss=0.03786, over 4816.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.0302, over 972521.46 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 15:55:13,914 INFO [train.py:715] (6/8) Epoch 16, batch 7800, loss[loss=0.1504, simple_loss=0.2249, pruned_loss=0.03798, over 4962.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02991, over 973054.61 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 15:55:54,760 INFO [train.py:715] (6/8) Epoch 16, batch 7850, loss[loss=0.1517, simple_loss=0.2264, pruned_loss=0.03846, over 4914.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02959, over 972809.36 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 15:56:34,169 INFO [train.py:715] (6/8) Epoch 16, batch 7900, loss[loss=0.1464, simple_loss=0.2143, pruned_loss=0.03926, 
over 4973.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02958, over 973187.61 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 15:57:14,050 INFO [train.py:715] (6/8) Epoch 16, batch 7950, loss[loss=0.1467, simple_loss=0.2027, pruned_loss=0.04536, over 4854.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02937, over 972636.93 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 15:57:54,555 INFO [train.py:715] (6/8) Epoch 16, batch 8000, loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03053, over 4882.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03023, over 972562.15 frames.], batch size: 30, lr: 1.39e-04 +2022-05-08 15:58:34,600 INFO [train.py:715] (6/8) Epoch 16, batch 8050, loss[loss=0.1225, simple_loss=0.2006, pruned_loss=0.02218, over 4836.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03016, over 972743.77 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 15:59:14,243 INFO [train.py:715] (6/8) Epoch 16, batch 8100, loss[loss=0.1346, simple_loss=0.2075, pruned_loss=0.03083, over 4768.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03007, over 972691.48 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 15:59:54,670 INFO [train.py:715] (6/8) Epoch 16, batch 8150, loss[loss=0.134, simple_loss=0.2092, pruned_loss=0.02937, over 4919.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02948, over 972386.19 frames.], batch size: 29, lr: 1.39e-04 +2022-05-08 16:00:35,742 INFO [train.py:715] (6/8) Epoch 16, batch 8200, loss[loss=0.1342, simple_loss=0.1972, pruned_loss=0.03557, over 4748.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02938, over 972082.03 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:01:15,830 INFO [train.py:715] (6/8) Epoch 16, batch 8250, loss[loss=0.1376, simple_loss=0.2177, pruned_loss=0.02874, over 4983.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02954, over 971517.32 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 16:01:55,578 INFO [train.py:715] (6/8) Epoch 16, batch 8300, loss[loss=0.1231, simple_loss=0.2057, pruned_loss=0.02027, over 4812.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02921, over 971985.74 frames.], batch size: 27, lr: 1.39e-04 +2022-05-08 16:02:36,300 INFO [train.py:715] (6/8) Epoch 16, batch 8350, loss[loss=0.1562, simple_loss=0.2263, pruned_loss=0.04308, over 4938.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02983, over 971833.15 frames.], batch size: 29, lr: 1.39e-04 +2022-05-08 16:03:16,605 INFO [train.py:715] (6/8) Epoch 16, batch 8400, loss[loss=0.1171, simple_loss=0.1923, pruned_loss=0.02094, over 4766.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02979, over 972015.67 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 16:03:55,139 INFO [train.py:715] (6/8) Epoch 16, batch 8450, loss[loss=0.1211, simple_loss=0.1944, pruned_loss=0.02391, over 4915.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02991, over 971810.43 frames.], batch size: 29, lr: 1.39e-04 +2022-05-08 16:04:34,541 INFO [train.py:715] (6/8) Epoch 16, batch 8500, loss[loss=0.1649, simple_loss=0.2386, pruned_loss=0.04558, over 4749.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03025, over 972680.99 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:05:13,259 INFO [train.py:715] (6/8) Epoch 16, batch 8550, loss[loss=0.1319, simple_loss=0.2132, pruned_loss=0.02532, over 4906.00 frames.], 
tot_loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.03004, over 973513.67 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 16:05:51,577 INFO [train.py:715] (6/8) Epoch 16, batch 8600, loss[loss=0.1115, simple_loss=0.1874, pruned_loss=0.01781, over 4790.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02984, over 974200.93 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 16:06:29,596 INFO [train.py:715] (6/8) Epoch 16, batch 8650, loss[loss=0.1338, simple_loss=0.211, pruned_loss=0.02825, over 4916.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02947, over 974697.63 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 16:07:08,667 INFO [train.py:715] (6/8) Epoch 16, batch 8700, loss[loss=0.1528, simple_loss=0.2247, pruned_loss=0.04043, over 4879.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02977, over 974439.98 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 16:07:47,717 INFO [train.py:715] (6/8) Epoch 16, batch 8750, loss[loss=0.121, simple_loss=0.1993, pruned_loss=0.02134, over 4790.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2078, pruned_loss=0.0303, over 974890.19 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 16:08:26,279 INFO [train.py:715] (6/8) Epoch 16, batch 8800, loss[loss=0.1451, simple_loss=0.2171, pruned_loss=0.03658, over 4784.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03015, over 974120.17 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:09:04,967 INFO [train.py:715] (6/8) Epoch 16, batch 8850, loss[loss=0.1341, simple_loss=0.1975, pruned_loss=0.03539, over 4803.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03015, over 972972.97 frames.], batch size: 13, lr: 1.39e-04 +2022-05-08 16:09:44,464 INFO [train.py:715] (6/8) Epoch 16, batch 8900, loss[loss=0.1224, simple_loss=0.2025, pruned_loss=0.02113, over 4809.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2071, pruned_loss=0.02992, over 972853.61 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:10:22,904 INFO [train.py:715] (6/8) Epoch 16, batch 8950, loss[loss=0.1436, simple_loss=0.2194, pruned_loss=0.03385, over 4870.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2069, pruned_loss=0.02988, over 972926.14 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 16:11:01,136 INFO [train.py:715] (6/8) Epoch 16, batch 9000, loss[loss=0.1559, simple_loss=0.2296, pruned_loss=0.04115, over 4938.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02954, over 972292.06 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:11:01,137 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 16:11:23,894 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.105, simple_loss=0.1884, pruned_loss=0.01076, over 914524.00 frames. 
+2022-05-08 16:12:02,817 INFO [train.py:715] (6/8) Epoch 16, batch 9050, loss[loss=0.1148, simple_loss=0.1844, pruned_loss=0.02254, over 4888.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2066, pruned_loss=0.02935, over 971539.15 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:12:41,945 INFO [train.py:715] (6/8) Epoch 16, batch 9100, loss[loss=0.148, simple_loss=0.2204, pruned_loss=0.0378, over 4862.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.0296, over 972470.21 frames.], batch size: 20, lr: 1.39e-04 +2022-05-08 16:13:20,954 INFO [train.py:715] (6/8) Epoch 16, batch 9150, loss[loss=0.1434, simple_loss=0.2187, pruned_loss=0.03403, over 4977.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2074, pruned_loss=0.03006, over 972767.22 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:13:58,476 INFO [train.py:715] (6/8) Epoch 16, batch 9200, loss[loss=0.1152, simple_loss=0.1846, pruned_loss=0.02292, over 4833.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03008, over 971901.68 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:14:37,128 INFO [train.py:715] (6/8) Epoch 16, batch 9250, loss[loss=0.1354, simple_loss=0.215, pruned_loss=0.02794, over 4883.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03028, over 972627.01 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 16:15:16,082 INFO [train.py:715] (6/8) Epoch 16, batch 9300, loss[loss=0.164, simple_loss=0.2309, pruned_loss=0.04857, over 4972.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2094, pruned_loss=0.03057, over 972910.92 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 16:15:54,778 INFO [train.py:715] (6/8) Epoch 16, batch 9350, loss[loss=0.1116, simple_loss=0.19, pruned_loss=0.01655, over 4906.00 frames.], tot_loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.02976, over 972542.27 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 16:16:33,101 INFO [train.py:715] (6/8) Epoch 16, batch 9400, loss[loss=0.1305, simple_loss=0.2147, pruned_loss=0.02318, over 4971.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02968, over 973276.99 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 16:17:11,620 INFO [train.py:715] (6/8) Epoch 16, batch 9450, loss[loss=0.1139, simple_loss=0.1916, pruned_loss=0.01807, over 4807.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2084, pruned_loss=0.02977, over 972538.04 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 16:17:50,526 INFO [train.py:715] (6/8) Epoch 16, batch 9500, loss[loss=0.1527, simple_loss=0.2174, pruned_loss=0.04397, over 4890.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2089, pruned_loss=0.03027, over 972172.35 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:18:28,784 INFO [train.py:715] (6/8) Epoch 16, batch 9550, loss[loss=0.1588, simple_loss=0.2175, pruned_loss=0.05003, over 4846.00 frames.], tot_loss[loss=0.135, simple_loss=0.2093, pruned_loss=0.03035, over 972597.50 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 16:19:08,108 INFO [train.py:715] (6/8) Epoch 16, batch 9600, loss[loss=0.1349, simple_loss=0.1966, pruned_loss=0.03655, over 4966.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03036, over 971852.42 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 16:19:47,972 INFO [train.py:715] (6/8) Epoch 16, batch 9650, loss[loss=0.1541, simple_loss=0.2329, pruned_loss=0.03761, over 4905.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02989, over 971336.65 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:20:27,615 
INFO [train.py:715] (6/8) Epoch 16, batch 9700, loss[loss=0.147, simple_loss=0.2223, pruned_loss=0.03582, over 4803.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03046, over 971118.28 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 16:21:08,032 INFO [train.py:715] (6/8) Epoch 16, batch 9750, loss[loss=0.1399, simple_loss=0.231, pruned_loss=0.02436, over 4794.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03013, over 970848.50 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:21:49,081 INFO [train.py:715] (6/8) Epoch 16, batch 9800, loss[loss=0.121, simple_loss=0.1999, pruned_loss=0.02105, over 4873.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02955, over 970440.95 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:22:29,509 INFO [train.py:715] (6/8) Epoch 16, batch 9850, loss[loss=0.1422, simple_loss=0.2064, pruned_loss=0.03899, over 4950.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02958, over 971196.43 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:23:09,300 INFO [train.py:715] (6/8) Epoch 16, batch 9900, loss[loss=0.1262, simple_loss=0.2001, pruned_loss=0.02612, over 4786.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02937, over 971547.15 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:23:49,479 INFO [train.py:715] (6/8) Epoch 16, batch 9950, loss[loss=0.1225, simple_loss=0.1968, pruned_loss=0.02404, over 4963.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02979, over 971515.99 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 16:24:30,475 INFO [train.py:715] (6/8) Epoch 16, batch 10000, loss[loss=0.1319, simple_loss=0.2183, pruned_loss=0.02272, over 4899.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02974, over 971251.23 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:25:09,383 INFO [train.py:715] (6/8) Epoch 16, batch 10050, loss[loss=0.1392, simple_loss=0.2127, pruned_loss=0.03283, over 4767.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02952, over 971526.46 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:25:49,616 INFO [train.py:715] (6/8) Epoch 16, batch 10100, loss[loss=0.1435, simple_loss=0.2231, pruned_loss=0.032, over 4769.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02921, over 971828.56 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:26:30,390 INFO [train.py:715] (6/8) Epoch 16, batch 10150, loss[loss=0.1626, simple_loss=0.2295, pruned_loss=0.04787, over 4780.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.0292, over 971171.64 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:27:10,600 INFO [train.py:715] (6/8) Epoch 16, batch 10200, loss[loss=0.1539, simple_loss=0.2183, pruned_loss=0.04479, over 4940.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02925, over 971215.79 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:27:49,592 INFO [train.py:715] (6/8) Epoch 16, batch 10250, loss[loss=0.1118, simple_loss=0.185, pruned_loss=0.0193, over 4914.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02938, over 971572.27 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 16:28:29,499 INFO [train.py:715] (6/8) Epoch 16, batch 10300, loss[loss=0.1294, simple_loss=0.2012, pruned_loss=0.0288, over 4764.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02972, over 972727.67 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:29:09,154 INFO [train.py:715] 
(6/8) Epoch 16, batch 10350, loss[loss=0.1253, simple_loss=0.1955, pruned_loss=0.02754, over 4969.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02976, over 972625.14 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 16:29:47,478 INFO [train.py:715] (6/8) Epoch 16, batch 10400, loss[loss=0.1348, simple_loss=0.2138, pruned_loss=0.02794, over 4870.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03028, over 972272.97 frames.], batch size: 20, lr: 1.39e-04 +2022-05-08 16:30:26,271 INFO [train.py:715] (6/8) Epoch 16, batch 10450, loss[loss=0.1288, simple_loss=0.2112, pruned_loss=0.02315, over 4986.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.02999, over 972178.50 frames.], batch size: 28, lr: 1.39e-04 +2022-05-08 16:31:05,205 INFO [train.py:715] (6/8) Epoch 16, batch 10500, loss[loss=0.1303, simple_loss=0.2082, pruned_loss=0.02619, over 4821.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02961, over 971991.48 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 16:31:44,644 INFO [train.py:715] (6/8) Epoch 16, batch 10550, loss[loss=0.113, simple_loss=0.1822, pruned_loss=0.02189, over 4968.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02987, over 972722.15 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 16:32:22,613 INFO [train.py:715] (6/8) Epoch 16, batch 10600, loss[loss=0.1329, simple_loss=0.2055, pruned_loss=0.03013, over 4836.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02986, over 973178.78 frames.], batch size: 30, lr: 1.39e-04 +2022-05-08 16:33:01,319 INFO [train.py:715] (6/8) Epoch 16, batch 10650, loss[loss=0.1305, simple_loss=0.1913, pruned_loss=0.03484, over 4831.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02963, over 972681.98 frames.], batch size: 30, lr: 1.39e-04 +2022-05-08 16:33:40,768 INFO [train.py:715] (6/8) Epoch 16, batch 10700, loss[loss=0.134, simple_loss=0.2095, pruned_loss=0.02927, over 4900.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02998, over 972575.21 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:34:19,618 INFO [train.py:715] (6/8) Epoch 16, batch 10750, loss[loss=0.1177, simple_loss=0.1946, pruned_loss=0.02045, over 4807.00 frames.], tot_loss[loss=0.134, simple_loss=0.2085, pruned_loss=0.02975, over 972750.69 frames.], batch size: 26, lr: 1.39e-04 +2022-05-08 16:34:58,517 INFO [train.py:715] (6/8) Epoch 16, batch 10800, loss[loss=0.1391, simple_loss=0.21, pruned_loss=0.03406, over 4805.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.02997, over 973392.98 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 16:35:37,683 INFO [train.py:715] (6/8) Epoch 16, batch 10850, loss[loss=0.1205, simple_loss=0.1987, pruned_loss=0.02109, over 4867.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.0293, over 974085.35 frames.], batch size: 20, lr: 1.39e-04 +2022-05-08 16:36:17,340 INFO [train.py:715] (6/8) Epoch 16, batch 10900, loss[loss=0.1132, simple_loss=0.1882, pruned_loss=0.01912, over 4892.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02977, over 974256.45 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 16:36:55,567 INFO [train.py:715] (6/8) Epoch 16, batch 10950, loss[loss=0.1384, simple_loss=0.2222, pruned_loss=0.02733, over 4959.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2088, pruned_loss=0.02989, over 973065.70 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:37:34,537 INFO [train.py:715] (6/8) Epoch 
16, batch 11000, loss[loss=0.1164, simple_loss=0.1913, pruned_loss=0.0207, over 4963.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2084, pruned_loss=0.02959, over 972596.19 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:38:13,990 INFO [train.py:715] (6/8) Epoch 16, batch 11050, loss[loss=0.1422, simple_loss=0.2077, pruned_loss=0.03834, over 4979.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.02997, over 972684.59 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 16:38:55,233 INFO [train.py:715] (6/8) Epoch 16, batch 11100, loss[loss=0.1193, simple_loss=0.192, pruned_loss=0.02325, over 4947.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2084, pruned_loss=0.02963, over 971804.74 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:39:33,665 INFO [train.py:715] (6/8) Epoch 16, batch 11150, loss[loss=0.126, simple_loss=0.1901, pruned_loss=0.03092, over 4879.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02919, over 971870.44 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 16:40:12,913 INFO [train.py:715] (6/8) Epoch 16, batch 11200, loss[loss=0.1106, simple_loss=0.186, pruned_loss=0.01758, over 4985.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02944, over 971605.04 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 16:40:51,684 INFO [train.py:715] (6/8) Epoch 16, batch 11250, loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.0286, over 4902.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.0297, over 972538.07 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:41:29,875 INFO [train.py:715] (6/8) Epoch 16, batch 11300, loss[loss=0.1533, simple_loss=0.2241, pruned_loss=0.04126, over 4934.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02934, over 973113.70 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 16:42:08,150 INFO [train.py:715] (6/8) Epoch 16, batch 11350, loss[loss=0.139, simple_loss=0.2153, pruned_loss=0.03141, over 4984.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.0293, over 973528.56 frames.], batch size: 28, lr: 1.39e-04 +2022-05-08 16:42:47,128 INFO [train.py:715] (6/8) Epoch 16, batch 11400, loss[loss=0.114, simple_loss=0.1929, pruned_loss=0.01752, over 4784.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02882, over 973626.66 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 16:43:25,156 INFO [train.py:715] (6/8) Epoch 16, batch 11450, loss[loss=0.1348, simple_loss=0.2174, pruned_loss=0.02611, over 4832.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.02929, over 973738.60 frames.], batch size: 26, lr: 1.39e-04 +2022-05-08 16:44:03,088 INFO [train.py:715] (6/8) Epoch 16, batch 11500, loss[loss=0.1227, simple_loss=0.1929, pruned_loss=0.02623, over 4856.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02884, over 973874.88 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 16:44:41,779 INFO [train.py:715] (6/8) Epoch 16, batch 11550, loss[loss=0.127, simple_loss=0.1992, pruned_loss=0.02742, over 4910.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02878, over 973054.02 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:45:20,367 INFO [train.py:715] (6/8) Epoch 16, batch 11600, loss[loss=0.1302, simple_loss=0.1998, pruned_loss=0.03031, over 4931.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02845, over 973644.78 frames.], batch size: 29, lr: 1.39e-04 +2022-05-08 16:45:57,961 INFO [train.py:715] (6/8) Epoch 16, batch 11650, 
loss[loss=0.1454, simple_loss=0.2299, pruned_loss=0.03039, over 4887.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02874, over 972821.17 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 16:46:36,440 INFO [train.py:715] (6/8) Epoch 16, batch 11700, loss[loss=0.1252, simple_loss=0.2026, pruned_loss=0.02394, over 4930.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02897, over 972834.16 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 16:47:15,521 INFO [train.py:715] (6/8) Epoch 16, batch 11750, loss[loss=0.135, simple_loss=0.2083, pruned_loss=0.03091, over 4808.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02936, over 973077.28 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:47:53,672 INFO [train.py:715] (6/8) Epoch 16, batch 11800, loss[loss=0.1287, simple_loss=0.1971, pruned_loss=0.0301, over 4914.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02941, over 973226.96 frames.], batch size: 29, lr: 1.39e-04 +2022-05-08 16:48:31,492 INFO [train.py:715] (6/8) Epoch 16, batch 11850, loss[loss=0.1491, simple_loss=0.2212, pruned_loss=0.03854, over 4991.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02934, over 973155.67 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:49:10,174 INFO [train.py:715] (6/8) Epoch 16, batch 11900, loss[loss=0.1341, simple_loss=0.2202, pruned_loss=0.02399, over 4967.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02901, over 972982.64 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 16:49:48,595 INFO [train.py:715] (6/8) Epoch 16, batch 11950, loss[loss=0.1198, simple_loss=0.1958, pruned_loss=0.02185, over 4748.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2066, pruned_loss=0.02946, over 972403.01 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:50:26,419 INFO [train.py:715] (6/8) Epoch 16, batch 12000, loss[loss=0.1127, simple_loss=0.1914, pruned_loss=0.01704, over 4965.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2058, pruned_loss=0.02884, over 971929.40 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 16:50:26,420 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 16:50:37,201 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.1049, simple_loss=0.1884, pruned_loss=0.01072, over 914524.00 frames. 
+2022-05-08 16:51:16,048 INFO [train.py:715] (6/8) Epoch 16, batch 12050, loss[loss=0.1418, simple_loss=0.2069, pruned_loss=0.03833, over 4754.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02854, over 972478.54 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 16:51:55,270 INFO [train.py:715] (6/8) Epoch 16, batch 12100, loss[loss=0.1637, simple_loss=0.2399, pruned_loss=0.0437, over 4974.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02892, over 971967.00 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 16:52:34,707 INFO [train.py:715] (6/8) Epoch 16, batch 12150, loss[loss=0.1061, simple_loss=0.1827, pruned_loss=0.01475, over 4835.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02887, over 971565.09 frames.], batch size: 26, lr: 1.38e-04 +2022-05-08 16:53:12,372 INFO [train.py:715] (6/8) Epoch 16, batch 12200, loss[loss=0.1281, simple_loss=0.2027, pruned_loss=0.02675, over 4950.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02914, over 971312.81 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 16:53:50,653 INFO [train.py:715] (6/8) Epoch 16, batch 12250, loss[loss=0.137, simple_loss=0.2117, pruned_loss=0.03114, over 4817.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02881, over 970899.13 frames.], batch size: 26, lr: 1.38e-04 +2022-05-08 16:54:29,706 INFO [train.py:715] (6/8) Epoch 16, batch 12300, loss[loss=0.1117, simple_loss=0.1854, pruned_loss=0.01903, over 4760.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02873, over 972089.96 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 16:55:08,786 INFO [train.py:715] (6/8) Epoch 16, batch 12350, loss[loss=0.1291, simple_loss=0.2125, pruned_loss=0.0229, over 4803.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02894, over 972209.92 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 16:55:47,047 INFO [train.py:715] (6/8) Epoch 16, batch 12400, loss[loss=0.1422, simple_loss=0.223, pruned_loss=0.03072, over 4771.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02895, over 971905.79 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 16:56:26,128 INFO [train.py:715] (6/8) Epoch 16, batch 12450, loss[loss=0.138, simple_loss=0.2063, pruned_loss=0.03483, over 4862.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02921, over 971649.98 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 16:57:05,989 INFO [train.py:715] (6/8) Epoch 16, batch 12500, loss[loss=0.1445, simple_loss=0.2119, pruned_loss=0.03853, over 4893.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02918, over 972655.38 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 16:57:44,595 INFO [train.py:715] (6/8) Epoch 16, batch 12550, loss[loss=0.1408, simple_loss=0.214, pruned_loss=0.03379, over 4952.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02949, over 972481.20 frames.], batch size: 39, lr: 1.38e-04 +2022-05-08 16:58:23,182 INFO [train.py:715] (6/8) Epoch 16, batch 12600, loss[loss=0.1315, simple_loss=0.2036, pruned_loss=0.02971, over 4859.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02919, over 972532.33 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 16:59:01,904 INFO [train.py:715] (6/8) Epoch 16, batch 12650, loss[loss=0.1356, simple_loss=0.2142, pruned_loss=0.0285, over 4792.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2084, pruned_loss=0.02972, over 972639.67 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 
16:59:40,535 INFO [train.py:715] (6/8) Epoch 16, batch 12700, loss[loss=0.1133, simple_loss=0.1951, pruned_loss=0.01578, over 4986.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.0294, over 972846.08 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 17:00:18,084 INFO [train.py:715] (6/8) Epoch 16, batch 12750, loss[loss=0.1577, simple_loss=0.2274, pruned_loss=0.04402, over 4880.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02958, over 972741.13 frames.], batch size: 39, lr: 1.38e-04 +2022-05-08 17:00:57,722 INFO [train.py:715] (6/8) Epoch 16, batch 12800, loss[loss=0.1428, simple_loss=0.2204, pruned_loss=0.03256, over 4766.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02975, over 972282.31 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:01:36,700 INFO [train.py:715] (6/8) Epoch 16, batch 12850, loss[loss=0.1328, simple_loss=0.1956, pruned_loss=0.03502, over 4939.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02953, over 973150.64 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:02:15,062 INFO [train.py:715] (6/8) Epoch 16, batch 12900, loss[loss=0.1225, simple_loss=0.1878, pruned_loss=0.02859, over 4981.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02993, over 972047.70 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:02:53,775 INFO [train.py:715] (6/8) Epoch 16, batch 12950, loss[loss=0.1792, simple_loss=0.2539, pruned_loss=0.05221, over 4833.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02967, over 971432.00 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:03:32,794 INFO [train.py:715] (6/8) Epoch 16, batch 13000, loss[loss=0.137, simple_loss=0.2139, pruned_loss=0.02999, over 4780.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02957, over 971279.44 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:04:11,297 INFO [train.py:715] (6/8) Epoch 16, batch 13050, loss[loss=0.1253, simple_loss=0.1838, pruned_loss=0.03338, over 4845.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02988, over 970962.15 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 17:04:49,818 INFO [train.py:715] (6/8) Epoch 16, batch 13100, loss[loss=0.1254, simple_loss=0.2026, pruned_loss=0.02415, over 4800.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02974, over 970722.60 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 17:05:28,966 INFO [train.py:715] (6/8) Epoch 16, batch 13150, loss[loss=0.1164, simple_loss=0.1982, pruned_loss=0.01731, over 4755.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02942, over 970913.31 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:06:08,089 INFO [train.py:715] (6/8) Epoch 16, batch 13200, loss[loss=0.127, simple_loss=0.2058, pruned_loss=0.02409, over 4766.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02917, over 971424.90 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:06:46,170 INFO [train.py:715] (6/8) Epoch 16, batch 13250, loss[loss=0.1277, simple_loss=0.1976, pruned_loss=0.02893, over 4782.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02925, over 971263.57 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 17:07:25,022 INFO [train.py:715] (6/8) Epoch 16, batch 13300, loss[loss=0.1308, simple_loss=0.2048, pruned_loss=0.02842, over 4864.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02923, over 972180.03 frames.], batch size: 20, lr: 1.38e-04 +2022-05-08 
17:08:04,376 INFO [train.py:715] (6/8) Epoch 16, batch 13350, loss[loss=0.1181, simple_loss=0.1933, pruned_loss=0.02148, over 4920.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02899, over 972461.84 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:08:42,683 INFO [train.py:715] (6/8) Epoch 16, batch 13400, loss[loss=0.1542, simple_loss=0.2177, pruned_loss=0.04539, over 4867.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02953, over 972028.04 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 17:09:21,146 INFO [train.py:715] (6/8) Epoch 16, batch 13450, loss[loss=0.1462, simple_loss=0.2162, pruned_loss=0.03812, over 4820.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02967, over 972425.19 frames.], batch size: 27, lr: 1.38e-04 +2022-05-08 17:10:00,909 INFO [train.py:715] (6/8) Epoch 16, batch 13500, loss[loss=0.1307, simple_loss=0.1957, pruned_loss=0.03282, over 4815.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02972, over 973089.50 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 17:10:39,257 INFO [train.py:715] (6/8) Epoch 16, batch 13550, loss[loss=0.1022, simple_loss=0.1795, pruned_loss=0.01243, over 4817.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03003, over 972413.27 frames.], batch size: 27, lr: 1.38e-04 +2022-05-08 17:11:17,370 INFO [train.py:715] (6/8) Epoch 16, batch 13600, loss[loss=0.135, simple_loss=0.2117, pruned_loss=0.02914, over 4909.00 frames.], tot_loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.02981, over 973147.95 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:11:56,202 INFO [train.py:715] (6/8) Epoch 16, batch 13650, loss[loss=0.1426, simple_loss=0.2213, pruned_loss=0.032, over 4756.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.0299, over 972899.76 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:12:35,124 INFO [train.py:715] (6/8) Epoch 16, batch 13700, loss[loss=0.1586, simple_loss=0.2176, pruned_loss=0.04982, over 4873.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03014, over 973258.91 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 17:13:13,516 INFO [train.py:715] (6/8) Epoch 16, batch 13750, loss[loss=0.1456, simple_loss=0.2294, pruned_loss=0.03089, over 4748.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03, over 973057.55 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:13:52,022 INFO [train.py:715] (6/8) Epoch 16, batch 13800, loss[loss=0.1297, simple_loss=0.2118, pruned_loss=0.02376, over 4789.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02986, over 972867.08 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:14:30,649 INFO [train.py:715] (6/8) Epoch 16, batch 13850, loss[loss=0.1128, simple_loss=0.1855, pruned_loss=0.02002, over 4757.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02991, over 972828.66 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:15:08,621 INFO [train.py:715] (6/8) Epoch 16, batch 13900, loss[loss=0.1565, simple_loss=0.2358, pruned_loss=0.03858, over 4961.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02942, over 973812.86 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:15:46,307 INFO [train.py:715] (6/8) Epoch 16, batch 13950, loss[loss=0.1492, simple_loss=0.2259, pruned_loss=0.03628, over 4873.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.0292, over 973402.67 frames.], batch size: 22, lr: 1.38e-04 +2022-05-08 17:16:24,660 
INFO [train.py:715] (6/8) Epoch 16, batch 14000, loss[loss=0.1167, simple_loss=0.1952, pruned_loss=0.01913, over 4955.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02943, over 972972.74 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 17:17:03,281 INFO [train.py:715] (6/8) Epoch 16, batch 14050, loss[loss=0.15, simple_loss=0.2167, pruned_loss=0.04158, over 4772.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02956, over 972753.58 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:17:41,056 INFO [train.py:715] (6/8) Epoch 16, batch 14100, loss[loss=0.1161, simple_loss=0.1973, pruned_loss=0.0174, over 4951.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02973, over 971583.09 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 17:18:18,775 INFO [train.py:715] (6/8) Epoch 16, batch 14150, loss[loss=0.1308, simple_loss=0.2088, pruned_loss=0.02647, over 4801.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02957, over 972112.26 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 17:18:57,331 INFO [train.py:715] (6/8) Epoch 16, batch 14200, loss[loss=0.1173, simple_loss=0.1885, pruned_loss=0.02302, over 4854.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02959, over 971943.93 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 17:19:36,022 INFO [train.py:715] (6/8) Epoch 16, batch 14250, loss[loss=0.1535, simple_loss=0.2274, pruned_loss=0.03979, over 4787.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02974, over 973109.42 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:20:14,644 INFO [train.py:715] (6/8) Epoch 16, batch 14300, loss[loss=0.1234, simple_loss=0.2011, pruned_loss=0.02282, over 4837.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02981, over 973007.18 frames.], batch size: 26, lr: 1.38e-04 +2022-05-08 17:20:53,346 INFO [train.py:715] (6/8) Epoch 16, batch 14350, loss[loss=0.1464, simple_loss=0.2231, pruned_loss=0.0349, over 4914.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.02964, over 973637.50 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:21:32,539 INFO [train.py:715] (6/8) Epoch 16, batch 14400, loss[loss=0.1356, simple_loss=0.2107, pruned_loss=0.03025, over 4756.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02998, over 972023.32 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:22:10,275 INFO [train.py:715] (6/8) Epoch 16, batch 14450, loss[loss=0.1247, simple_loss=0.2003, pruned_loss=0.02462, over 4932.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02967, over 972297.89 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:22:49,107 INFO [train.py:715] (6/8) Epoch 16, batch 14500, loss[loss=0.1623, simple_loss=0.2328, pruned_loss=0.04596, over 4840.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02992, over 972388.48 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 17:23:28,041 INFO [train.py:715] (6/8) Epoch 16, batch 14550, loss[loss=0.1103, simple_loss=0.1867, pruned_loss=0.01691, over 4843.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02948, over 973234.46 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 17:24:06,708 INFO [train.py:715] (6/8) Epoch 16, batch 14600, loss[loss=0.1271, simple_loss=0.1978, pruned_loss=0.02821, over 4822.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02936, over 973300.92 frames.], batch size: 27, lr: 1.38e-04 +2022-05-08 17:24:44,978 INFO 
[train.py:715] (6/8) Epoch 16, batch 14650, loss[loss=0.1398, simple_loss=0.2165, pruned_loss=0.03151, over 4907.00 frames.], tot_loss[loss=0.133, simple_loss=0.2077, pruned_loss=0.02912, over 972894.89 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:25:23,557 INFO [train.py:715] (6/8) Epoch 16, batch 14700, loss[loss=0.1486, simple_loss=0.2238, pruned_loss=0.03663, over 4883.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2074, pruned_loss=0.02888, over 973174.85 frames.], batch size: 22, lr: 1.38e-04 +2022-05-08 17:26:02,854 INFO [train.py:715] (6/8) Epoch 16, batch 14750, loss[loss=0.1552, simple_loss=0.2368, pruned_loss=0.03685, over 4927.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02889, over 972488.44 frames.], batch size: 23, lr: 1.38e-04 +2022-05-08 17:26:40,648 INFO [train.py:715] (6/8) Epoch 16, batch 14800, loss[loss=0.136, simple_loss=0.2124, pruned_loss=0.02978, over 4884.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02929, over 972508.73 frames.], batch size: 22, lr: 1.38e-04 +2022-05-08 17:27:19,714 INFO [train.py:715] (6/8) Epoch 16, batch 14850, loss[loss=0.1439, simple_loss=0.2246, pruned_loss=0.0316, over 4893.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02984, over 972650.92 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:27:58,621 INFO [train.py:715] (6/8) Epoch 16, batch 14900, loss[loss=0.143, simple_loss=0.2056, pruned_loss=0.04024, over 4764.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03011, over 972961.37 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:28:37,057 INFO [train.py:715] (6/8) Epoch 16, batch 14950, loss[loss=0.1427, simple_loss=0.2038, pruned_loss=0.04073, over 4836.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03042, over 973030.28 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 17:29:16,115 INFO [train.py:715] (6/8) Epoch 16, batch 15000, loss[loss=0.1397, simple_loss=0.2218, pruned_loss=0.02876, over 4947.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03025, over 972529.68 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 17:29:16,115 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 17:29:25,726 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.1049, simple_loss=0.1884, pruned_loss=0.01069, over 914524.00 frames. 
+2022-05-08 17:30:03,998 INFO [train.py:715] (6/8) Epoch 16, batch 15050, loss[loss=0.1458, simple_loss=0.2237, pruned_loss=0.03391, over 4824.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03037, over 973202.55 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:30:42,064 INFO [train.py:715] (6/8) Epoch 16, batch 15100, loss[loss=0.1483, simple_loss=0.217, pruned_loss=0.03978, over 4693.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03006, over 972672.81 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:31:20,869 INFO [train.py:715] (6/8) Epoch 16, batch 15150, loss[loss=0.1329, simple_loss=0.2115, pruned_loss=0.0272, over 4917.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03005, over 972410.13 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 17:31:58,565 INFO [train.py:715] (6/8) Epoch 16, batch 15200, loss[loss=0.1398, simple_loss=0.2271, pruned_loss=0.0263, over 4785.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02975, over 972518.83 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:32:36,117 INFO [train.py:715] (6/8) Epoch 16, batch 15250, loss[loss=0.1499, simple_loss=0.2269, pruned_loss=0.03644, over 4785.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02971, over 972663.09 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:33:14,309 INFO [train.py:715] (6/8) Epoch 16, batch 15300, loss[loss=0.1383, simple_loss=0.2063, pruned_loss=0.03515, over 4831.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02951, over 972676.38 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 17:33:52,456 INFO [train.py:715] (6/8) Epoch 16, batch 15350, loss[loss=0.1377, simple_loss=0.2184, pruned_loss=0.02845, over 4980.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.0296, over 973639.34 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 17:34:30,725 INFO [train.py:715] (6/8) Epoch 16, batch 15400, loss[loss=0.1345, simple_loss=0.2155, pruned_loss=0.02677, over 4989.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02931, over 973842.93 frames.], batch size: 20, lr: 1.38e-04 +2022-05-08 17:35:08,737 INFO [train.py:715] (6/8) Epoch 16, batch 15450, loss[loss=0.1374, simple_loss=0.2082, pruned_loss=0.03328, over 4876.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02913, over 973172.48 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 17:35:47,168 INFO [train.py:715] (6/8) Epoch 16, batch 15500, loss[loss=0.125, simple_loss=0.1947, pruned_loss=0.02764, over 4929.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02892, over 973118.62 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 17:36:24,797 INFO [train.py:715] (6/8) Epoch 16, batch 15550, loss[loss=0.1071, simple_loss=0.1886, pruned_loss=0.01279, over 4962.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.0285, over 974370.83 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 17:37:02,464 INFO [train.py:715] (6/8) Epoch 16, batch 15600, loss[loss=0.1081, simple_loss=0.1735, pruned_loss=0.02137, over 4851.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02879, over 974224.90 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 17:37:41,081 INFO [train.py:715] (6/8) Epoch 16, batch 15650, loss[loss=0.1364, simple_loss=0.2003, pruned_loss=0.03623, over 4979.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02894, over 973527.18 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 
17:38:19,115 INFO [train.py:715] (6/8) Epoch 16, batch 15700, loss[loss=0.1341, simple_loss=0.2097, pruned_loss=0.02924, over 4885.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02944, over 973631.32 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:38:56,849 INFO [train.py:715] (6/8) Epoch 16, batch 15750, loss[loss=0.1331, simple_loss=0.2099, pruned_loss=0.02816, over 4975.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02969, over 972507.94 frames.], batch size: 35, lr: 1.38e-04 +2022-05-08 17:39:34,739 INFO [train.py:715] (6/8) Epoch 16, batch 15800, loss[loss=0.1412, simple_loss=0.2116, pruned_loss=0.0354, over 4838.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02895, over 972595.12 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 17:40:13,083 INFO [train.py:715] (6/8) Epoch 16, batch 15850, loss[loss=0.1181, simple_loss=0.197, pruned_loss=0.01962, over 4986.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02921, over 973689.98 frames.], batch size: 26, lr: 1.38e-04 +2022-05-08 17:40:50,713 INFO [train.py:715] (6/8) Epoch 16, batch 15900, loss[loss=0.1362, simple_loss=0.216, pruned_loss=0.02826, over 4944.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02936, over 974192.49 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 17:41:28,315 INFO [train.py:715] (6/8) Epoch 16, batch 15950, loss[loss=0.1472, simple_loss=0.222, pruned_loss=0.03625, over 4958.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02988, over 973944.25 frames.], batch size: 39, lr: 1.38e-04 +2022-05-08 17:42:06,730 INFO [train.py:715] (6/8) Epoch 16, batch 16000, loss[loss=0.1029, simple_loss=0.1772, pruned_loss=0.01428, over 4783.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02995, over 973027.74 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 17:42:44,834 INFO [train.py:715] (6/8) Epoch 16, batch 16050, loss[loss=0.148, simple_loss=0.2326, pruned_loss=0.03172, over 4809.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03003, over 974102.32 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 17:43:22,459 INFO [train.py:715] (6/8) Epoch 16, batch 16100, loss[loss=0.1286, simple_loss=0.2039, pruned_loss=0.02664, over 4953.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02946, over 973302.54 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 17:43:59,972 INFO [train.py:715] (6/8) Epoch 16, batch 16150, loss[loss=0.1175, simple_loss=0.1992, pruned_loss=0.01789, over 4943.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02893, over 973301.29 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 17:44:38,353 INFO [train.py:715] (6/8) Epoch 16, batch 16200, loss[loss=0.1377, simple_loss=0.2094, pruned_loss=0.03306, over 4858.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.0291, over 972564.55 frames.], batch size: 20, lr: 1.38e-04 +2022-05-08 17:45:15,917 INFO [train.py:715] (6/8) Epoch 16, batch 16250, loss[loss=0.1092, simple_loss=0.1772, pruned_loss=0.02063, over 4943.00 frames.], tot_loss[loss=0.1334, simple_loss=0.208, pruned_loss=0.02945, over 972384.09 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 17:45:53,548 INFO [train.py:715] (6/8) Epoch 16, batch 16300, loss[loss=0.1804, simple_loss=0.239, pruned_loss=0.06086, over 4850.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03017, over 971891.78 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:46:31,874 INFO 
[train.py:715] (6/8) Epoch 16, batch 16350, loss[loss=0.1225, simple_loss=0.2033, pruned_loss=0.02088, over 4970.00 frames.], tot_loss[loss=0.135, simple_loss=0.2093, pruned_loss=0.03038, over 972850.33 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:47:10,533 INFO [train.py:715] (6/8) Epoch 16, batch 16400, loss[loss=0.1314, simple_loss=0.1967, pruned_loss=0.03303, over 4830.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2097, pruned_loss=0.03066, over 972869.92 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 17:47:47,570 INFO [train.py:715] (6/8) Epoch 16, batch 16450, loss[loss=0.1552, simple_loss=0.2244, pruned_loss=0.04302, over 4953.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2101, pruned_loss=0.03085, over 973356.26 frames.], batch size: 39, lr: 1.38e-04 +2022-05-08 17:48:25,520 INFO [train.py:715] (6/8) Epoch 16, batch 16500, loss[loss=0.1219, simple_loss=0.1942, pruned_loss=0.02477, over 4986.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2099, pruned_loss=0.03061, over 973182.76 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 17:49:04,091 INFO [train.py:715] (6/8) Epoch 16, batch 16550, loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.0292, over 4752.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03074, over 971931.91 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:49:41,515 INFO [train.py:715] (6/8) Epoch 16, batch 16600, loss[loss=0.1201, simple_loss=0.1945, pruned_loss=0.02283, over 4830.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03052, over 970777.19 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 17:50:19,531 INFO [train.py:715] (6/8) Epoch 16, batch 16650, loss[loss=0.1498, simple_loss=0.2351, pruned_loss=0.03231, over 4951.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03072, over 971470.91 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:50:57,794 INFO [train.py:715] (6/8) Epoch 16, batch 16700, loss[loss=0.1398, simple_loss=0.2124, pruned_loss=0.03362, over 4786.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2094, pruned_loss=0.03048, over 971730.93 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:51:35,937 INFO [train.py:715] (6/8) Epoch 16, batch 16750, loss[loss=0.1287, simple_loss=0.203, pruned_loss=0.02721, over 4786.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03029, over 971307.94 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:52:13,453 INFO [train.py:715] (6/8) Epoch 16, batch 16800, loss[loss=0.1228, simple_loss=0.1969, pruned_loss=0.0244, over 4741.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.0306, over 971685.27 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:52:51,537 INFO [train.py:715] (6/8) Epoch 16, batch 16850, loss[loss=0.1189, simple_loss=0.1949, pruned_loss=0.02143, over 4937.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02962, over 971643.91 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 17:53:30,000 INFO [train.py:715] (6/8) Epoch 16, batch 16900, loss[loss=0.1455, simple_loss=0.2166, pruned_loss=0.03719, over 4968.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03061, over 971352.76 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:54:07,592 INFO [train.py:715] (6/8) Epoch 16, batch 16950, loss[loss=0.1239, simple_loss=0.2006, pruned_loss=0.02355, over 4777.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2089, pruned_loss=0.03025, over 971418.11 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:54:45,477 INFO 
[train.py:715] (6/8) Epoch 16, batch 17000, loss[loss=0.1567, simple_loss=0.2233, pruned_loss=0.04509, over 4909.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2091, pruned_loss=0.03017, over 971727.58 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:55:23,672 INFO [train.py:715] (6/8) Epoch 16, batch 17050, loss[loss=0.126, simple_loss=0.2056, pruned_loss=0.02322, over 4968.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03004, over 972811.32 frames.], batch size: 28, lr: 1.38e-04 +2022-05-08 17:56:02,257 INFO [train.py:715] (6/8) Epoch 16, batch 17100, loss[loss=0.147, simple_loss=0.229, pruned_loss=0.03252, over 4844.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03046, over 971868.98 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:56:39,332 INFO [train.py:715] (6/8) Epoch 16, batch 17150, loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.02965, over 4916.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.0302, over 972172.19 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:57:17,464 INFO [train.py:715] (6/8) Epoch 16, batch 17200, loss[loss=0.1395, simple_loss=0.2139, pruned_loss=0.0325, over 4962.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2092, pruned_loss=0.03024, over 972055.02 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 17:57:56,360 INFO [train.py:715] (6/8) Epoch 16, batch 17250, loss[loss=0.1309, simple_loss=0.2091, pruned_loss=0.02629, over 4844.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2087, pruned_loss=0.02991, over 971211.78 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 17:58:33,736 INFO [train.py:715] (6/8) Epoch 16, batch 17300, loss[loss=0.1247, simple_loss=0.2055, pruned_loss=0.02194, over 4805.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03004, over 970781.22 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 17:59:11,266 INFO [train.py:715] (6/8) Epoch 16, batch 17350, loss[loss=0.114, simple_loss=0.1874, pruned_loss=0.02026, over 4803.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02982, over 971652.13 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 17:59:49,075 INFO [train.py:715] (6/8) Epoch 16, batch 17400, loss[loss=0.1308, simple_loss=0.2052, pruned_loss=0.02821, over 4777.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02999, over 971714.76 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:00:27,743 INFO [train.py:715] (6/8) Epoch 16, batch 17450, loss[loss=0.121, simple_loss=0.1993, pruned_loss=0.02135, over 4774.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2072, pruned_loss=0.0297, over 971210.02 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 18:01:04,505 INFO [train.py:715] (6/8) Epoch 16, batch 17500, loss[loss=0.1125, simple_loss=0.1935, pruned_loss=0.01578, over 4766.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02955, over 971249.29 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 18:01:42,654 INFO [train.py:715] (6/8) Epoch 16, batch 17550, loss[loss=0.1129, simple_loss=0.1863, pruned_loss=0.0197, over 4875.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02965, over 969925.06 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 18:02:21,340 INFO [train.py:715] (6/8) Epoch 16, batch 17600, loss[loss=0.1322, simple_loss=0.2055, pruned_loss=0.02949, over 4949.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02969, over 970217.55 frames.], batch size: 39, lr: 1.38e-04 +2022-05-08 18:02:58,692 INFO [train.py:715] 
(6/8) Epoch 16, batch 17650, loss[loss=0.1033, simple_loss=0.183, pruned_loss=0.01179, over 4765.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2087, pruned_loss=0.02999, over 970055.36 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:03:36,635 INFO [train.py:715] (6/8) Epoch 16, batch 17700, loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02874, over 4768.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2093, pruned_loss=0.03009, over 970491.56 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 18:04:15,003 INFO [train.py:715] (6/8) Epoch 16, batch 17750, loss[loss=0.1431, simple_loss=0.2247, pruned_loss=0.03073, over 4797.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.03011, over 971298.72 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 18:04:53,068 INFO [train.py:715] (6/8) Epoch 16, batch 17800, loss[loss=0.1543, simple_loss=0.229, pruned_loss=0.03983, over 4859.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02991, over 971870.51 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 18:05:30,277 INFO [train.py:715] (6/8) Epoch 16, batch 17850, loss[loss=0.1528, simple_loss=0.2159, pruned_loss=0.04484, over 4864.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02983, over 971597.50 frames.], batch size: 38, lr: 1.38e-04 +2022-05-08 18:06:08,449 INFO [train.py:715] (6/8) Epoch 16, batch 17900, loss[loss=0.1525, simple_loss=0.2196, pruned_loss=0.04272, over 4750.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03004, over 972306.72 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 18:06:46,890 INFO [train.py:715] (6/8) Epoch 16, batch 17950, loss[loss=0.1257, simple_loss=0.1998, pruned_loss=0.02584, over 4988.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03042, over 973058.94 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 18:07:24,271 INFO [train.py:715] (6/8) Epoch 16, batch 18000, loss[loss=0.1242, simple_loss=0.2072, pruned_loss=0.02064, over 4925.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.03008, over 973498.46 frames.], batch size: 23, lr: 1.38e-04 +2022-05-08 18:07:24,272 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 18:07:33,812 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.105, simple_loss=0.1884, pruned_loss=0.01082, over 914524.00 frames. 
+2022-05-08 18:08:11,766 INFO [train.py:715] (6/8) Epoch 16, batch 18050, loss[loss=0.1294, simple_loss=0.2011, pruned_loss=0.02889, over 4747.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03004, over 973775.90 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 18:08:50,178 INFO [train.py:715] (6/8) Epoch 16, batch 18100, loss[loss=0.126, simple_loss=0.2048, pruned_loss=0.02359, over 4949.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.03069, over 973431.73 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 18:09:28,823 INFO [train.py:715] (6/8) Epoch 16, batch 18150, loss[loss=0.1182, simple_loss=0.2021, pruned_loss=0.01715, over 4991.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03024, over 974040.87 frames.], batch size: 20, lr: 1.38e-04 +2022-05-08 18:10:07,472 INFO [train.py:715] (6/8) Epoch 16, batch 18200, loss[loss=0.1641, simple_loss=0.2361, pruned_loss=0.04608, over 4909.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03028, over 974037.03 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:10:45,083 INFO [train.py:715] (6/8) Epoch 16, batch 18250, loss[loss=0.1497, simple_loss=0.2314, pruned_loss=0.03402, over 4897.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03038, over 974235.18 frames.], batch size: 22, lr: 1.38e-04 +2022-05-08 18:11:23,861 INFO [train.py:715] (6/8) Epoch 16, batch 18300, loss[loss=0.1571, simple_loss=0.2164, pruned_loss=0.04891, over 4647.00 frames.], tot_loss[loss=0.135, simple_loss=0.2093, pruned_loss=0.03032, over 972589.33 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 18:12:02,963 INFO [train.py:715] (6/8) Epoch 16, batch 18350, loss[loss=0.1283, simple_loss=0.2021, pruned_loss=0.02723, over 4973.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2091, pruned_loss=0.03012, over 971740.28 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 18:12:40,735 INFO [train.py:715] (6/8) Epoch 16, batch 18400, loss[loss=0.1568, simple_loss=0.2362, pruned_loss=0.03873, over 4868.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2092, pruned_loss=0.03006, over 971555.50 frames.], batch size: 22, lr: 1.38e-04 +2022-05-08 18:13:19,258 INFO [train.py:715] (6/8) Epoch 16, batch 18450, loss[loss=0.08567, simple_loss=0.1553, pruned_loss=0.008028, over 4741.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2087, pruned_loss=0.02999, over 973010.04 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 18:13:57,866 INFO [train.py:715] (6/8) Epoch 16, batch 18500, loss[loss=0.1403, simple_loss=0.2187, pruned_loss=0.03094, over 4843.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02969, over 973556.59 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 18:14:36,370 INFO [train.py:715] (6/8) Epoch 16, batch 18550, loss[loss=0.1594, simple_loss=0.2292, pruned_loss=0.04475, over 4735.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.0299, over 972332.43 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 18:15:13,855 INFO [train.py:715] (6/8) Epoch 16, batch 18600, loss[loss=0.1315, simple_loss=0.2004, pruned_loss=0.03128, over 4889.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02989, over 972294.78 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 18:15:52,137 INFO [train.py:715] (6/8) Epoch 16, batch 18650, loss[loss=0.1139, simple_loss=0.1905, pruned_loss=0.01868, over 4942.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02951, over 972338.42 frames.], batch size: 29, lr: 1.38e-04 
+2022-05-08 18:16:30,640 INFO [train.py:715] (6/8) Epoch 16, batch 18700, loss[loss=0.1259, simple_loss=0.19, pruned_loss=0.03094, over 4777.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02948, over 972583.47 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:17:08,138 INFO [train.py:715] (6/8) Epoch 16, batch 18750, loss[loss=0.1299, simple_loss=0.2042, pruned_loss=0.02781, over 4864.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02975, over 972967.98 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 18:17:45,512 INFO [train.py:715] (6/8) Epoch 16, batch 18800, loss[loss=0.1244, simple_loss=0.192, pruned_loss=0.02837, over 4976.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02976, over 973326.83 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 18:18:23,821 INFO [train.py:715] (6/8) Epoch 16, batch 18850, loss[loss=0.1516, simple_loss=0.2285, pruned_loss=0.03738, over 4933.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2071, pruned_loss=0.02991, over 972970.71 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:19:02,090 INFO [train.py:715] (6/8) Epoch 16, batch 18900, loss[loss=0.1219, simple_loss=0.1925, pruned_loss=0.02568, over 4827.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03018, over 972871.42 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 18:19:39,522 INFO [train.py:715] (6/8) Epoch 16, batch 18950, loss[loss=0.1136, simple_loss=0.1849, pruned_loss=0.02112, over 4812.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.0299, over 972665.37 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 18:20:17,358 INFO [train.py:715] (6/8) Epoch 16, batch 19000, loss[loss=0.1626, simple_loss=0.2367, pruned_loss=0.04426, over 4734.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.0301, over 972346.45 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 18:20:55,964 INFO [train.py:715] (6/8) Epoch 16, batch 19050, loss[loss=0.1431, simple_loss=0.2075, pruned_loss=0.03932, over 4834.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03006, over 972115.29 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 18:21:36,430 INFO [train.py:715] (6/8) Epoch 16, batch 19100, loss[loss=0.1231, simple_loss=0.199, pruned_loss=0.02358, over 4991.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02992, over 972861.20 frames.], batch size: 20, lr: 1.38e-04 +2022-05-08 18:22:14,088 INFO [train.py:715] (6/8) Epoch 16, batch 19150, loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02908, over 4986.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2068, pruned_loss=0.02973, over 973096.21 frames.], batch size: 31, lr: 1.38e-04 +2022-05-08 18:22:52,369 INFO [train.py:715] (6/8) Epoch 16, batch 19200, loss[loss=0.125, simple_loss=0.1972, pruned_loss=0.02641, over 4966.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2067, pruned_loss=0.03, over 972874.71 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 18:23:31,020 INFO [train.py:715] (6/8) Epoch 16, batch 19250, loss[loss=0.1381, simple_loss=0.2231, pruned_loss=0.02651, over 4971.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02978, over 973361.55 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 18:24:08,553 INFO [train.py:715] (6/8) Epoch 16, batch 19300, loss[loss=0.1244, simple_loss=0.198, pruned_loss=0.02541, over 4863.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02981, over 973742.91 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 
18:24:46,583 INFO [train.py:715] (6/8) Epoch 16, batch 19350, loss[loss=0.1991, simple_loss=0.2618, pruned_loss=0.06814, over 4880.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03023, over 974169.83 frames.], batch size: 38, lr: 1.38e-04 +2022-05-08 18:25:25,226 INFO [train.py:715] (6/8) Epoch 16, batch 19400, loss[loss=0.144, simple_loss=0.2244, pruned_loss=0.0318, over 4829.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03054, over 974388.68 frames.], batch size: 27, lr: 1.38e-04 +2022-05-08 18:26:03,260 INFO [train.py:715] (6/8) Epoch 16, batch 19450, loss[loss=0.1356, simple_loss=0.2058, pruned_loss=0.03266, over 4842.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03014, over 974673.15 frames.], batch size: 26, lr: 1.38e-04 +2022-05-08 18:26:40,797 INFO [train.py:715] (6/8) Epoch 16, batch 19500, loss[loss=0.1588, simple_loss=0.2177, pruned_loss=0.04995, over 4791.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03002, over 973402.31 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 18:27:18,956 INFO [train.py:715] (6/8) Epoch 16, batch 19550, loss[loss=0.1314, simple_loss=0.2081, pruned_loss=0.02732, over 4784.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03006, over 972906.27 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:27:57,191 INFO [train.py:715] (6/8) Epoch 16, batch 19600, loss[loss=0.1249, simple_loss=0.2084, pruned_loss=0.02071, over 4935.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2073, pruned_loss=0.0299, over 972711.30 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 18:28:34,599 INFO [train.py:715] (6/8) Epoch 16, batch 19650, loss[loss=0.1167, simple_loss=0.1969, pruned_loss=0.0182, over 4867.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2073, pruned_loss=0.03003, over 972654.41 frames.], batch size: 20, lr: 1.38e-04 +2022-05-08 18:29:12,874 INFO [train.py:715] (6/8) Epoch 16, batch 19700, loss[loss=0.1529, simple_loss=0.224, pruned_loss=0.04085, over 4784.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03007, over 972626.68 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:29:51,092 INFO [train.py:715] (6/8) Epoch 16, batch 19750, loss[loss=0.1125, simple_loss=0.1933, pruned_loss=0.01584, over 4817.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03009, over 972387.90 frames.], batch size: 26, lr: 1.38e-04 +2022-05-08 18:30:28,917 INFO [train.py:715] (6/8) Epoch 16, batch 19800, loss[loss=0.146, simple_loss=0.2184, pruned_loss=0.03682, over 4907.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03003, over 972148.23 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 18:31:06,636 INFO [train.py:715] (6/8) Epoch 16, batch 19850, loss[loss=0.1663, simple_loss=0.2286, pruned_loss=0.05195, over 4859.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03005, over 972634.68 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 18:31:44,940 INFO [train.py:715] (6/8) Epoch 16, batch 19900, loss[loss=0.1562, simple_loss=0.2207, pruned_loss=0.04581, over 4952.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2079, pruned_loss=0.03054, over 972291.85 frames.], batch size: 35, lr: 1.38e-04 +2022-05-08 18:32:22,972 INFO [train.py:715] (6/8) Epoch 16, batch 19950, loss[loss=0.1231, simple_loss=0.2113, pruned_loss=0.01746, over 4911.00 frames.], tot_loss[loss=0.1344, simple_loss=0.208, pruned_loss=0.0304, over 973365.01 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 18:33:00,612 INFO 
[train.py:715] (6/8) Epoch 16, batch 20000, loss[loss=0.1427, simple_loss=0.215, pruned_loss=0.03521, over 4770.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.03007, over 973245.96 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:33:38,890 INFO [train.py:715] (6/8) Epoch 16, batch 20050, loss[loss=0.1368, simple_loss=0.2116, pruned_loss=0.03097, over 4824.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02925, over 973535.59 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 18:34:17,305 INFO [train.py:715] (6/8) Epoch 16, batch 20100, loss[loss=0.13, simple_loss=0.2035, pruned_loss=0.02829, over 4858.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02902, over 973061.66 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 18:34:54,670 INFO [train.py:715] (6/8) Epoch 16, batch 20150, loss[loss=0.1387, simple_loss=0.2183, pruned_loss=0.02951, over 4901.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2061, pruned_loss=0.02909, over 972956.47 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 18:35:32,576 INFO [train.py:715] (6/8) Epoch 16, batch 20200, loss[loss=0.1567, simple_loss=0.2236, pruned_loss=0.04486, over 4697.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02946, over 972427.44 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 18:36:10,895 INFO [train.py:715] (6/8) Epoch 16, batch 20250, loss[loss=0.1354, simple_loss=0.2122, pruned_loss=0.02931, over 4813.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.0292, over 971856.98 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 18:36:49,189 INFO [train.py:715] (6/8) Epoch 16, batch 20300, loss[loss=0.1173, simple_loss=0.1817, pruned_loss=0.0264, over 4770.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02933, over 972127.91 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 18:37:27,016 INFO [train.py:715] (6/8) Epoch 16, batch 20350, loss[loss=0.1383, simple_loss=0.2275, pruned_loss=0.02453, over 4781.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02902, over 972636.32 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 18:38:05,173 INFO [train.py:715] (6/8) Epoch 16, batch 20400, loss[loss=0.1779, simple_loss=0.2473, pruned_loss=0.05428, over 4914.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02922, over 971686.08 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 18:38:43,165 INFO [train.py:715] (6/8) Epoch 16, batch 20450, loss[loss=0.1522, simple_loss=0.216, pruned_loss=0.04423, over 4778.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02904, over 971577.01 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 18:39:21,069 INFO [train.py:715] (6/8) Epoch 16, batch 20500, loss[loss=0.1337, simple_loss=0.2128, pruned_loss=0.02724, over 4919.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02913, over 971108.63 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 18:39:58,715 INFO [train.py:715] (6/8) Epoch 16, batch 20550, loss[loss=0.1123, simple_loss=0.193, pruned_loss=0.0158, over 4989.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.02908, over 971143.44 frames.], batch size: 28, lr: 1.37e-04 +2022-05-08 18:40:37,506 INFO [train.py:715] (6/8) Epoch 16, batch 20600, loss[loss=0.1272, simple_loss=0.1952, pruned_loss=0.02962, over 4792.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02935, over 971919.96 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 18:41:15,472 INFO [train.py:715] 
(6/8) Epoch 16, batch 20650, loss[loss=0.1144, simple_loss=0.1848, pruned_loss=0.02203, over 4824.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2065, pruned_loss=0.0295, over 971639.69 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 18:41:52,931 INFO [train.py:715] (6/8) Epoch 16, batch 20700, loss[loss=0.1273, simple_loss=0.1949, pruned_loss=0.02984, over 4858.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02935, over 971440.93 frames.], batch size: 32, lr: 1.37e-04 +2022-05-08 18:42:31,438 INFO [train.py:715] (6/8) Epoch 16, batch 20750, loss[loss=0.1159, simple_loss=0.1967, pruned_loss=0.01758, over 4934.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02915, over 971914.57 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 18:43:09,456 INFO [train.py:715] (6/8) Epoch 16, batch 20800, loss[loss=0.1267, simple_loss=0.1958, pruned_loss=0.02884, over 4819.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.0291, over 971780.21 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 18:43:47,988 INFO [train.py:715] (6/8) Epoch 16, batch 20850, loss[loss=0.1179, simple_loss=0.1949, pruned_loss=0.02042, over 4958.00 frames.], tot_loss[loss=0.1321, simple_loss=0.206, pruned_loss=0.02908, over 972326.44 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 18:44:25,970 INFO [train.py:715] (6/8) Epoch 16, batch 20900, loss[loss=0.138, simple_loss=0.2084, pruned_loss=0.03379, over 4942.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.0291, over 971936.23 frames.], batch size: 29, lr: 1.37e-04 +2022-05-08 18:45:05,239 INFO [train.py:715] (6/8) Epoch 16, batch 20950, loss[loss=0.1264, simple_loss=0.1935, pruned_loss=0.02962, over 4909.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02866, over 971241.16 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 18:45:43,429 INFO [train.py:715] (6/8) Epoch 16, batch 21000, loss[loss=0.1385, simple_loss=0.2112, pruned_loss=0.03286, over 4946.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2061, pruned_loss=0.02907, over 971248.32 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 18:45:43,430 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 18:45:53,029 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.1047, simple_loss=0.1882, pruned_loss=0.0106, over 914524.00 frames. 
+2022-05-08 18:46:31,927 INFO [train.py:715] (6/8) Epoch 16, batch 21050, loss[loss=0.1285, simple_loss=0.2117, pruned_loss=0.02268, over 4909.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.029, over 971973.07 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 18:47:10,489 INFO [train.py:715] (6/8) Epoch 16, batch 21100, loss[loss=0.1152, simple_loss=0.188, pruned_loss=0.02122, over 4696.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02944, over 971541.80 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 18:47:49,086 INFO [train.py:715] (6/8) Epoch 16, batch 21150, loss[loss=0.1329, simple_loss=0.2155, pruned_loss=0.02519, over 4899.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02922, over 972733.25 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 18:48:27,805 INFO [train.py:715] (6/8) Epoch 16, batch 21200, loss[loss=0.114, simple_loss=0.1884, pruned_loss=0.01982, over 4964.00 frames.], tot_loss[loss=0.133, simple_loss=0.2078, pruned_loss=0.02912, over 972048.72 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 18:49:06,864 INFO [train.py:715] (6/8) Epoch 16, batch 21250, loss[loss=0.1537, simple_loss=0.2336, pruned_loss=0.03693, over 4902.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2077, pruned_loss=0.02898, over 972482.34 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 18:49:44,934 INFO [train.py:715] (6/8) Epoch 16, batch 21300, loss[loss=0.1571, simple_loss=0.2214, pruned_loss=0.0464, over 4868.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2079, pruned_loss=0.02916, over 972573.20 frames.], batch size: 30, lr: 1.37e-04 +2022-05-08 18:50:23,537 INFO [train.py:715] (6/8) Epoch 16, batch 21350, loss[loss=0.1171, simple_loss=0.176, pruned_loss=0.02906, over 4839.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2079, pruned_loss=0.02939, over 973372.88 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 18:51:01,536 INFO [train.py:715] (6/8) Epoch 16, batch 21400, loss[loss=0.1486, simple_loss=0.2168, pruned_loss=0.04016, over 4983.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02951, over 973337.06 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 18:51:39,054 INFO [train.py:715] (6/8) Epoch 16, batch 21450, loss[loss=0.1786, simple_loss=0.2415, pruned_loss=0.05784, over 4977.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2089, pruned_loss=0.02978, over 972324.78 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 18:52:17,450 INFO [train.py:715] (6/8) Epoch 16, batch 21500, loss[loss=0.1229, simple_loss=0.1905, pruned_loss=0.02768, over 4864.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2088, pruned_loss=0.02976, over 972849.52 frames.], batch size: 32, lr: 1.37e-04 +2022-05-08 18:52:55,411 INFO [train.py:715] (6/8) Epoch 16, batch 21550, loss[loss=0.1365, simple_loss=0.2126, pruned_loss=0.03021, over 4776.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.03005, over 972783.34 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 18:53:33,003 INFO [train.py:715] (6/8) Epoch 16, batch 21600, loss[loss=0.125, simple_loss=0.1976, pruned_loss=0.02614, over 4794.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02998, over 973789.73 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 18:54:11,337 INFO [train.py:715] (6/8) Epoch 16, batch 21650, loss[loss=0.1116, simple_loss=0.1864, pruned_loss=0.01835, over 4837.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02962, over 973121.77 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 
18:54:49,121 INFO [train.py:715] (6/8) Epoch 16, batch 21700, loss[loss=0.1405, simple_loss=0.2214, pruned_loss=0.02983, over 4988.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02957, over 972873.20 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 18:55:27,325 INFO [train.py:715] (6/8) Epoch 16, batch 21750, loss[loss=0.1292, simple_loss=0.2015, pruned_loss=0.0284, over 4894.00 frames.], tot_loss[loss=0.133, simple_loss=0.2067, pruned_loss=0.02966, over 972410.38 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 18:56:04,817 INFO [train.py:715] (6/8) Epoch 16, batch 21800, loss[loss=0.117, simple_loss=0.1898, pruned_loss=0.02212, over 4786.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.02993, over 972010.08 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 18:56:42,921 INFO [train.py:715] (6/8) Epoch 16, batch 21850, loss[loss=0.1229, simple_loss=0.2032, pruned_loss=0.02127, over 4811.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02947, over 972474.29 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 18:57:20,564 INFO [train.py:715] (6/8) Epoch 16, batch 21900, loss[loss=0.135, simple_loss=0.1996, pruned_loss=0.03527, over 4984.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02959, over 972464.36 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 18:57:57,978 INFO [train.py:715] (6/8) Epoch 16, batch 21950, loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03218, over 4738.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02979, over 972812.41 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 18:58:36,386 INFO [train.py:715] (6/8) Epoch 16, batch 22000, loss[loss=0.1361, simple_loss=0.214, pruned_loss=0.02914, over 4870.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2063, pruned_loss=0.02962, over 973356.74 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 18:59:13,998 INFO [train.py:715] (6/8) Epoch 16, batch 22050, loss[loss=0.127, simple_loss=0.2006, pruned_loss=0.0267, over 4988.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.0296, over 973406.94 frames.], batch size: 28, lr: 1.37e-04 +2022-05-08 18:59:52,237 INFO [train.py:715] (6/8) Epoch 16, batch 22100, loss[loss=0.1413, simple_loss=0.2206, pruned_loss=0.03106, over 4824.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02988, over 972829.77 frames.], batch size: 27, lr: 1.37e-04 +2022-05-08 19:00:29,951 INFO [train.py:715] (6/8) Epoch 16, batch 22150, loss[loss=0.1155, simple_loss=0.1884, pruned_loss=0.02129, over 4929.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02971, over 973079.15 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 19:01:08,386 INFO [train.py:715] (6/8) Epoch 16, batch 22200, loss[loss=0.1212, simple_loss=0.2008, pruned_loss=0.02078, over 4968.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02942, over 973039.79 frames.], batch size: 28, lr: 1.37e-04 +2022-05-08 19:01:46,151 INFO [train.py:715] (6/8) Epoch 16, batch 22250, loss[loss=0.1498, simple_loss=0.2271, pruned_loss=0.03624, over 4797.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02997, over 973522.44 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:02:24,236 INFO [train.py:715] (6/8) Epoch 16, batch 22300, loss[loss=0.1552, simple_loss=0.2292, pruned_loss=0.04057, over 4836.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2092, pruned_loss=0.02998, over 973082.13 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:03:02,794 
INFO [train.py:715] (6/8) Epoch 16, batch 22350, loss[loss=0.1095, simple_loss=0.1793, pruned_loss=0.01989, over 4754.00 frames.], tot_loss[loss=0.135, simple_loss=0.2094, pruned_loss=0.03027, over 972821.93 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 19:03:40,842 INFO [train.py:715] (6/8) Epoch 16, batch 22400, loss[loss=0.117, simple_loss=0.1939, pruned_loss=0.02, over 4862.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03058, over 972159.97 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 19:04:19,200 INFO [train.py:715] (6/8) Epoch 16, batch 22450, loss[loss=0.1406, simple_loss=0.2214, pruned_loss=0.02996, over 4881.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03031, over 972424.17 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 19:04:57,328 INFO [train.py:715] (6/8) Epoch 16, batch 22500, loss[loss=0.1545, simple_loss=0.2226, pruned_loss=0.0432, over 4820.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03003, over 972581.79 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 19:05:35,512 INFO [train.py:715] (6/8) Epoch 16, batch 22550, loss[loss=0.1454, simple_loss=0.2228, pruned_loss=0.03397, over 4780.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02991, over 972507.97 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:06:13,252 INFO [train.py:715] (6/8) Epoch 16, batch 22600, loss[loss=0.134, simple_loss=0.2033, pruned_loss=0.03236, over 4878.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03045, over 972835.98 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:06:50,940 INFO [train.py:715] (6/8) Epoch 16, batch 22650, loss[loss=0.1094, simple_loss=0.1893, pruned_loss=0.0147, over 4934.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03021, over 972668.31 frames.], batch size: 29, lr: 1.37e-04 +2022-05-08 19:07:29,634 INFO [train.py:715] (6/8) Epoch 16, batch 22700, loss[loss=0.1279, simple_loss=0.2081, pruned_loss=0.02389, over 4973.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03025, over 972466.18 frames.], batch size: 28, lr: 1.37e-04 +2022-05-08 19:08:07,678 INFO [train.py:715] (6/8) Epoch 16, batch 22750, loss[loss=0.1091, simple_loss=0.1857, pruned_loss=0.01627, over 4804.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02983, over 972769.90 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:08:45,789 INFO [train.py:715] (6/8) Epoch 16, batch 22800, loss[loss=0.1171, simple_loss=0.1956, pruned_loss=0.01931, over 4780.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02929, over 972440.27 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:09:23,698 INFO [train.py:715] (6/8) Epoch 16, batch 22850, loss[loss=0.131, simple_loss=0.1932, pruned_loss=0.03444, over 4776.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03012, over 972649.49 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:10:01,847 INFO [train.py:715] (6/8) Epoch 16, batch 22900, loss[loss=0.1226, simple_loss=0.1997, pruned_loss=0.02274, over 4920.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03007, over 972897.47 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:10:39,883 INFO [train.py:715] (6/8) Epoch 16, batch 22950, loss[loss=0.1379, simple_loss=0.2104, pruned_loss=0.03268, over 4778.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02986, over 972792.58 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:11:17,831 INFO 
[train.py:715] (6/8) Epoch 16, batch 23000, loss[loss=0.1294, simple_loss=0.2042, pruned_loss=0.02728, over 4792.00 frames.], tot_loss[loss=0.133, simple_loss=0.2068, pruned_loss=0.02955, over 972315.56 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 19:11:56,368 INFO [train.py:715] (6/8) Epoch 16, batch 23050, loss[loss=0.1199, simple_loss=0.1988, pruned_loss=0.02052, over 4867.00 frames.], tot_loss[loss=0.1322, simple_loss=0.206, pruned_loss=0.02918, over 971659.42 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 19:12:34,516 INFO [train.py:715] (6/8) Epoch 16, batch 23100, loss[loss=0.1292, simple_loss=0.2051, pruned_loss=0.02666, over 4856.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02959, over 971598.88 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:13:12,450 INFO [train.py:715] (6/8) Epoch 16, batch 23150, loss[loss=0.1398, simple_loss=0.2169, pruned_loss=0.03139, over 4984.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02928, over 972089.11 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:13:50,196 INFO [train.py:715] (6/8) Epoch 16, batch 23200, loss[loss=0.1569, simple_loss=0.2332, pruned_loss=0.04028, over 4789.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02932, over 973417.53 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 19:14:28,511 INFO [train.py:715] (6/8) Epoch 16, batch 23250, loss[loss=0.1389, simple_loss=0.2154, pruned_loss=0.0312, over 4713.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02941, over 973543.15 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:15:06,176 INFO [train.py:715] (6/8) Epoch 16, batch 23300, loss[loss=0.1454, simple_loss=0.2231, pruned_loss=0.03382, over 4867.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2083, pruned_loss=0.02962, over 973356.59 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 19:15:44,247 INFO [train.py:715] (6/8) Epoch 16, batch 23350, loss[loss=0.1253, simple_loss=0.1948, pruned_loss=0.0279, over 4794.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2085, pruned_loss=0.02986, over 973047.64 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 19:16:21,894 INFO [train.py:715] (6/8) Epoch 16, batch 23400, loss[loss=0.1464, simple_loss=0.2185, pruned_loss=0.03719, over 4883.00 frames.], tot_loss[loss=0.135, simple_loss=0.2096, pruned_loss=0.03024, over 972650.81 frames.], batch size: 32, lr: 1.37e-04 +2022-05-08 19:16:59,784 INFO [train.py:715] (6/8) Epoch 16, batch 23450, loss[loss=0.1145, simple_loss=0.1919, pruned_loss=0.0186, over 4735.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2081, pruned_loss=0.0294, over 971630.13 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 19:17:37,689 INFO [train.py:715] (6/8) Epoch 16, batch 23500, loss[loss=0.1136, simple_loss=0.1956, pruned_loss=0.01578, over 4759.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02967, over 971577.89 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 19:18:15,672 INFO [train.py:715] (6/8) Epoch 16, batch 23550, loss[loss=0.1598, simple_loss=0.2243, pruned_loss=0.0476, over 4842.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02985, over 970777.42 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:18:54,222 INFO [train.py:715] (6/8) Epoch 16, batch 23600, loss[loss=0.1478, simple_loss=0.2272, pruned_loss=0.03425, over 4923.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02976, over 971371.69 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 19:19:31,588 INFO [train.py:715] 
(6/8) Epoch 16, batch 23650, loss[loss=0.1352, simple_loss=0.1997, pruned_loss=0.03534, over 4972.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02932, over 972546.82 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:20:09,501 INFO [train.py:715] (6/8) Epoch 16, batch 23700, loss[loss=0.1519, simple_loss=0.2215, pruned_loss=0.04117, over 4959.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02882, over 972605.41 frames.], batch size: 39, lr: 1.37e-04 +2022-05-08 19:20:47,876 INFO [train.py:715] (6/8) Epoch 16, batch 23750, loss[loss=0.1546, simple_loss=0.2298, pruned_loss=0.03967, over 4827.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02908, over 972949.53 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:21:25,949 INFO [train.py:715] (6/8) Epoch 16, batch 23800, loss[loss=0.1171, simple_loss=0.1813, pruned_loss=0.02647, over 4830.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02919, over 972364.48 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:22:04,205 INFO [train.py:715] (6/8) Epoch 16, batch 23850, loss[loss=0.1428, simple_loss=0.2196, pruned_loss=0.033, over 4973.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02903, over 972343.41 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 19:22:42,141 INFO [train.py:715] (6/8) Epoch 16, batch 23900, loss[loss=0.1195, simple_loss=0.2012, pruned_loss=0.01885, over 4870.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02909, over 971789.28 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 19:23:20,418 INFO [train.py:715] (6/8) Epoch 16, batch 23950, loss[loss=0.1369, simple_loss=0.2183, pruned_loss=0.02779, over 4909.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02922, over 971565.55 frames.], batch size: 39, lr: 1.37e-04 +2022-05-08 19:23:57,817 INFO [train.py:715] (6/8) Epoch 16, batch 24000, loss[loss=0.1178, simple_loss=0.202, pruned_loss=0.01682, over 4847.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.0288, over 972047.11 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:23:57,818 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 19:24:07,635 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.1049, simple_loss=0.1883, pruned_loss=0.01074, over 914524.00 frames. 
+2022-05-08 19:24:46,402 INFO [train.py:715] (6/8) Epoch 16, batch 24050, loss[loss=0.1435, simple_loss=0.2223, pruned_loss=0.03234, over 4876.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02982, over 971801.41 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:25:24,728 INFO [train.py:715] (6/8) Epoch 16, batch 24100, loss[loss=0.1312, simple_loss=0.2028, pruned_loss=0.02984, over 4934.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02945, over 971742.46 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:26:03,113 INFO [train.py:715] (6/8) Epoch 16, batch 24150, loss[loss=0.1291, simple_loss=0.209, pruned_loss=0.02464, over 4788.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02983, over 972486.99 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:26:40,868 INFO [train.py:715] (6/8) Epoch 16, batch 24200, loss[loss=0.1158, simple_loss=0.1892, pruned_loss=0.0212, over 4902.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02938, over 973604.46 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:27:19,227 INFO [train.py:715] (6/8) Epoch 16, batch 24250, loss[loss=0.1329, simple_loss=0.2148, pruned_loss=0.02549, over 4775.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02983, over 972580.04 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:27:57,173 INFO [train.py:715] (6/8) Epoch 16, batch 24300, loss[loss=0.1011, simple_loss=0.1759, pruned_loss=0.01317, over 4862.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02953, over 972928.09 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 19:28:35,671 INFO [train.py:715] (6/8) Epoch 16, batch 24350, loss[loss=0.1599, simple_loss=0.2272, pruned_loss=0.04632, over 4959.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02924, over 972517.39 frames.], batch size: 35, lr: 1.37e-04 +2022-05-08 19:29:13,224 INFO [train.py:715] (6/8) Epoch 16, batch 24400, loss[loss=0.12, simple_loss=0.2055, pruned_loss=0.01723, over 4788.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02892, over 971808.68 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 19:29:50,783 INFO [train.py:715] (6/8) Epoch 16, batch 24450, loss[loss=0.1146, simple_loss=0.1877, pruned_loss=0.02078, over 4646.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2073, pruned_loss=0.02909, over 971988.48 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:30:28,699 INFO [train.py:715] (6/8) Epoch 16, batch 24500, loss[loss=0.1351, simple_loss=0.2119, pruned_loss=0.02917, over 4945.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02889, over 971744.93 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 19:31:06,549 INFO [train.py:715] (6/8) Epoch 16, batch 24550, loss[loss=0.1366, simple_loss=0.2107, pruned_loss=0.03126, over 4924.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.0295, over 972180.77 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:31:43,993 INFO [train.py:715] (6/8) Epoch 16, batch 24600, loss[loss=0.138, simple_loss=0.2009, pruned_loss=0.03754, over 4765.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03002, over 972603.14 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:32:21,348 INFO [train.py:715] (6/8) Epoch 16, batch 24650, loss[loss=0.1225, simple_loss=0.1937, pruned_loss=0.02562, over 4949.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03011, over 971483.41 frames.], batch size: 35, lr: 1.37e-04 +2022-05-08 
19:32:59,495 INFO [train.py:715] (6/8) Epoch 16, batch 24700, loss[loss=0.1279, simple_loss=0.2126, pruned_loss=0.02163, over 4907.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03011, over 972049.81 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:33:37,067 INFO [train.py:715] (6/8) Epoch 16, batch 24750, loss[loss=0.119, simple_loss=0.1967, pruned_loss=0.02068, over 4901.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02995, over 972872.53 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:34:14,866 INFO [train.py:715] (6/8) Epoch 16, batch 24800, loss[loss=0.1369, simple_loss=0.2121, pruned_loss=0.03091, over 4926.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2083, pruned_loss=0.02979, over 973134.50 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:34:52,611 INFO [train.py:715] (6/8) Epoch 16, batch 24850, loss[loss=0.1406, simple_loss=0.2158, pruned_loss=0.0327, over 4760.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03011, over 973516.81 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:35:30,368 INFO [train.py:715] (6/8) Epoch 16, batch 24900, loss[loss=0.1459, simple_loss=0.2211, pruned_loss=0.03535, over 4988.00 frames.], tot_loss[loss=0.1346, simple_loss=0.209, pruned_loss=0.0301, over 973615.83 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 19:36:08,065 INFO [train.py:715] (6/8) Epoch 16, batch 24950, loss[loss=0.1534, simple_loss=0.2282, pruned_loss=0.03933, over 4814.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02948, over 973289.72 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 19:36:45,484 INFO [train.py:715] (6/8) Epoch 16, batch 25000, loss[loss=0.1106, simple_loss=0.187, pruned_loss=0.0171, over 4751.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02952, over 973761.55 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:37:23,736 INFO [train.py:715] (6/8) Epoch 16, batch 25050, loss[loss=0.1333, simple_loss=0.2093, pruned_loss=0.0286, over 4816.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02974, over 973665.42 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 19:38:02,500 INFO [train.py:715] (6/8) Epoch 16, batch 25100, loss[loss=0.1205, simple_loss=0.2084, pruned_loss=0.01634, over 4826.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2084, pruned_loss=0.02963, over 972823.66 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 19:38:40,220 INFO [train.py:715] (6/8) Epoch 16, batch 25150, loss[loss=0.1451, simple_loss=0.2202, pruned_loss=0.03495, over 4900.00 frames.], tot_loss[loss=0.1331, simple_loss=0.208, pruned_loss=0.02915, over 972989.78 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:39:18,058 INFO [train.py:715] (6/8) Epoch 16, batch 25200, loss[loss=0.1269, simple_loss=0.2033, pruned_loss=0.02529, over 4962.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2081, pruned_loss=0.02941, over 972893.27 frames.], batch size: 35, lr: 1.37e-04 +2022-05-08 19:39:56,033 INFO [train.py:715] (6/8) Epoch 16, batch 25250, loss[loss=0.1239, simple_loss=0.1924, pruned_loss=0.02767, over 4862.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2084, pruned_loss=0.02931, over 972431.38 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 19:40:33,645 INFO [train.py:715] (6/8) Epoch 16, batch 25300, loss[loss=0.1162, simple_loss=0.1926, pruned_loss=0.01989, over 4793.00 frames.], tot_loss[loss=0.134, simple_loss=0.2088, pruned_loss=0.02957, over 972090.06 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:41:10,910 
INFO [train.py:715] (6/8) Epoch 16, batch 25350, loss[loss=0.1382, simple_loss=0.2127, pruned_loss=0.03187, over 4777.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2085, pruned_loss=0.02988, over 971121.84 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:41:49,014 INFO [train.py:715] (6/8) Epoch 16, batch 25400, loss[loss=0.1203, simple_loss=0.1937, pruned_loss=0.02347, over 4895.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.03002, over 971536.74 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:42:27,347 INFO [train.py:715] (6/8) Epoch 16, batch 25450, loss[loss=0.1231, simple_loss=0.1961, pruned_loss=0.02499, over 4871.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2084, pruned_loss=0.02969, over 971206.67 frames.], batch size: 38, lr: 1.37e-04 +2022-05-08 19:43:04,842 INFO [train.py:715] (6/8) Epoch 16, batch 25500, loss[loss=0.1583, simple_loss=0.2417, pruned_loss=0.03743, over 4641.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02947, over 971414.19 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:43:42,833 INFO [train.py:715] (6/8) Epoch 16, batch 25550, loss[loss=0.165, simple_loss=0.237, pruned_loss=0.04649, over 4943.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.0297, over 971466.86 frames.], batch size: 35, lr: 1.37e-04 +2022-05-08 19:44:21,342 INFO [train.py:715] (6/8) Epoch 16, batch 25600, loss[loss=0.1268, simple_loss=0.2057, pruned_loss=0.02391, over 4650.00 frames.], tot_loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.02977, over 970934.78 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:45:00,126 INFO [train.py:715] (6/8) Epoch 16, batch 25650, loss[loss=0.1322, simple_loss=0.2017, pruned_loss=0.03135, over 4741.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2093, pruned_loss=0.02996, over 971157.31 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 19:45:38,354 INFO [train.py:715] (6/8) Epoch 16, batch 25700, loss[loss=0.1302, simple_loss=0.212, pruned_loss=0.0242, over 4935.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02955, over 971594.90 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:46:16,988 INFO [train.py:715] (6/8) Epoch 16, batch 25750, loss[loss=0.1518, simple_loss=0.2214, pruned_loss=0.04115, over 4948.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.0304, over 972283.80 frames.], batch size: 39, lr: 1.37e-04 +2022-05-08 19:46:55,623 INFO [train.py:715] (6/8) Epoch 16, batch 25800, loss[loss=0.1477, simple_loss=0.2347, pruned_loss=0.03034, over 4922.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03026, over 972067.42 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:47:34,228 INFO [train.py:715] (6/8) Epoch 16, batch 25850, loss[loss=0.1189, simple_loss=0.1856, pruned_loss=0.02603, over 4794.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03042, over 971065.96 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 19:48:13,044 INFO [train.py:715] (6/8) Epoch 16, batch 25900, loss[loss=0.1192, simple_loss=0.1984, pruned_loss=0.02001, over 4949.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03045, over 971546.39 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 19:48:52,466 INFO [train.py:715] (6/8) Epoch 16, batch 25950, loss[loss=0.1392, simple_loss=0.213, pruned_loss=0.03271, over 4782.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03046, over 971327.84 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 19:49:32,203 INFO 
[train.py:715] (6/8) Epoch 16, batch 26000, loss[loss=0.144, simple_loss=0.2163, pruned_loss=0.03583, over 4986.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03077, over 971778.80 frames.], batch size: 31, lr: 1.37e-04 +2022-05-08 19:50:11,560 INFO [train.py:715] (6/8) Epoch 16, batch 26050, loss[loss=0.1522, simple_loss=0.2181, pruned_loss=0.04314, over 4818.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03055, over 971382.95 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 19:50:50,794 INFO [train.py:715] (6/8) Epoch 16, batch 26100, loss[loss=0.1325, simple_loss=0.2, pruned_loss=0.03256, over 4814.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2079, pruned_loss=0.03047, over 971014.02 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 19:51:30,060 INFO [train.py:715] (6/8) Epoch 16, batch 26150, loss[loss=0.1419, simple_loss=0.2115, pruned_loss=0.03616, over 4787.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2076, pruned_loss=0.0304, over 971100.42 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:52:08,697 INFO [train.py:715] (6/8) Epoch 16, batch 26200, loss[loss=0.1318, simple_loss=0.204, pruned_loss=0.02986, over 4783.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03008, over 971303.47 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 19:52:48,170 INFO [train.py:715] (6/8) Epoch 16, batch 26250, loss[loss=0.1233, simple_loss=0.2015, pruned_loss=0.02253, over 4950.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02986, over 971013.07 frames.], batch size: 29, lr: 1.37e-04 +2022-05-08 19:53:27,327 INFO [train.py:715] (6/8) Epoch 16, batch 26300, loss[loss=0.09477, simple_loss=0.162, pruned_loss=0.01375, over 4801.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03014, over 971870.53 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 19:54:06,989 INFO [train.py:715] (6/8) Epoch 16, batch 26350, loss[loss=0.1183, simple_loss=0.1982, pruned_loss=0.01919, over 4868.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2091, pruned_loss=0.03039, over 970785.47 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:54:46,282 INFO [train.py:715] (6/8) Epoch 16, batch 26400, loss[loss=0.1218, simple_loss=0.1928, pruned_loss=0.02542, over 4737.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03016, over 970896.62 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 19:55:26,165 INFO [train.py:715] (6/8) Epoch 16, batch 26450, loss[loss=0.1242, simple_loss=0.197, pruned_loss=0.02569, over 4805.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02944, over 970351.65 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:56:05,124 INFO [train.py:715] (6/8) Epoch 16, batch 26500, loss[loss=0.1184, simple_loss=0.1912, pruned_loss=0.02281, over 4836.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02911, over 971562.26 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 19:56:44,031 INFO [train.py:715] (6/8) Epoch 16, batch 26550, loss[loss=0.1794, simple_loss=0.2455, pruned_loss=0.05668, over 4978.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02939, over 971701.60 frames.], batch size: 39, lr: 1.37e-04 +2022-05-08 19:57:23,102 INFO [train.py:715] (6/8) Epoch 16, batch 26600, loss[loss=0.1312, simple_loss=0.2043, pruned_loss=0.02902, over 4954.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02956, over 971612.98 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 19:58:02,096 INFO [train.py:715] 
(6/8) Epoch 16, batch 26650, loss[loss=0.1642, simple_loss=0.2426, pruned_loss=0.0429, over 4845.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02949, over 970496.30 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 19:58:41,426 INFO [train.py:715] (6/8) Epoch 16, batch 26700, loss[loss=0.1168, simple_loss=0.1959, pruned_loss=0.01888, over 4806.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02985, over 971312.10 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 19:59:20,664 INFO [train.py:715] (6/8) Epoch 16, batch 26750, loss[loss=0.1405, simple_loss=0.2163, pruned_loss=0.03235, over 4935.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03018, over 971962.74 frames.], batch size: 29, lr: 1.37e-04 +2022-05-08 20:00:00,472 INFO [train.py:715] (6/8) Epoch 16, batch 26800, loss[loss=0.136, simple_loss=0.2104, pruned_loss=0.03076, over 4928.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02984, over 972034.96 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 20:00:39,343 INFO [train.py:715] (6/8) Epoch 16, batch 26850, loss[loss=0.1112, simple_loss=0.1913, pruned_loss=0.01558, over 4811.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02992, over 971507.77 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 20:01:18,825 INFO [train.py:715] (6/8) Epoch 16, batch 26900, loss[loss=0.1532, simple_loss=0.2292, pruned_loss=0.03861, over 4920.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.03012, over 972463.92 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 20:01:58,327 INFO [train.py:715] (6/8) Epoch 16, batch 26950, loss[loss=0.1429, simple_loss=0.2103, pruned_loss=0.03772, over 4823.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02969, over 972719.14 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 20:02:37,507 INFO [train.py:715] (6/8) Epoch 16, batch 27000, loss[loss=0.1034, simple_loss=0.1747, pruned_loss=0.01602, over 4743.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02967, over 972316.08 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 20:02:37,508 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 20:02:47,199 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.1048, simple_loss=0.1883, pruned_loss=0.01067, over 914524.00 frames. 
+2022-05-08 20:03:26,294 INFO [train.py:715] (6/8) Epoch 16, batch 27050, loss[loss=0.1155, simple_loss=0.1899, pruned_loss=0.02056, over 4847.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02927, over 971908.76 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 20:04:08,233 INFO [train.py:715] (6/8) Epoch 16, batch 27100, loss[loss=0.1359, simple_loss=0.2164, pruned_loss=0.02772, over 4713.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02946, over 971107.33 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:04:47,172 INFO [train.py:715] (6/8) Epoch 16, batch 27150, loss[loss=0.1397, simple_loss=0.2107, pruned_loss=0.03431, over 4900.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.0296, over 971942.48 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 20:05:26,601 INFO [train.py:715] (6/8) Epoch 16, batch 27200, loss[loss=0.1072, simple_loss=0.1905, pruned_loss=0.01197, over 4831.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02941, over 972647.22 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 20:06:05,790 INFO [train.py:715] (6/8) Epoch 16, batch 27250, loss[loss=0.1452, simple_loss=0.2349, pruned_loss=0.02775, over 4987.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02918, over 972870.29 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 20:06:45,177 INFO [train.py:715] (6/8) Epoch 16, batch 27300, loss[loss=0.1585, simple_loss=0.2267, pruned_loss=0.04517, over 4784.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02944, over 973397.92 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 20:07:24,244 INFO [train.py:715] (6/8) Epoch 16, batch 27350, loss[loss=0.1051, simple_loss=0.1776, pruned_loss=0.01632, over 4981.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02949, over 972265.87 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 20:08:03,611 INFO [train.py:715] (6/8) Epoch 16, batch 27400, loss[loss=0.1286, simple_loss=0.2091, pruned_loss=0.02411, over 4780.00 frames.], tot_loss[loss=0.1334, simple_loss=0.208, pruned_loss=0.02942, over 971439.79 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 20:08:42,916 INFO [train.py:715] (6/8) Epoch 16, batch 27450, loss[loss=0.1572, simple_loss=0.2239, pruned_loss=0.04531, over 4636.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02951, over 972062.54 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 20:09:21,910 INFO [train.py:715] (6/8) Epoch 16, batch 27500, loss[loss=0.1928, simple_loss=0.2586, pruned_loss=0.06345, over 4699.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2089, pruned_loss=0.0299, over 971988.88 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:10:01,275 INFO [train.py:715] (6/8) Epoch 16, batch 27550, loss[loss=0.1543, simple_loss=0.2293, pruned_loss=0.0396, over 4747.00 frames.], tot_loss[loss=0.1344, simple_loss=0.209, pruned_loss=0.02988, over 971164.80 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 20:10:41,118 INFO [train.py:715] (6/8) Epoch 16, batch 27600, loss[loss=0.1328, simple_loss=0.2109, pruned_loss=0.02734, over 4835.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2091, pruned_loss=0.02994, over 971548.27 frames.], batch size: 30, lr: 1.37e-04 +2022-05-08 20:11:20,146 INFO [train.py:715] (6/8) Epoch 16, batch 27650, loss[loss=0.106, simple_loss=0.1762, pruned_loss=0.01788, over 4979.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2081, pruned_loss=0.0295, over 971464.62 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 
20:11:59,673 INFO [train.py:715] (6/8) Epoch 16, batch 27700, loss[loss=0.1514, simple_loss=0.2324, pruned_loss=0.0352, over 4816.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2078, pruned_loss=0.02921, over 971170.23 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:12:38,981 INFO [train.py:715] (6/8) Epoch 16, batch 27750, loss[loss=0.1276, simple_loss=0.1938, pruned_loss=0.03069, over 4854.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2082, pruned_loss=0.02939, over 972460.63 frames.], batch size: 30, lr: 1.37e-04 +2022-05-08 20:13:18,196 INFO [train.py:715] (6/8) Epoch 16, batch 27800, loss[loss=0.1416, simple_loss=0.2189, pruned_loss=0.03213, over 4768.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02991, over 971903.20 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 20:13:57,554 INFO [train.py:715] (6/8) Epoch 16, batch 27850, loss[loss=0.1678, simple_loss=0.2353, pruned_loss=0.05012, over 4691.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2092, pruned_loss=0.03048, over 972065.86 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:14:37,006 INFO [train.py:715] (6/8) Epoch 16, batch 27900, loss[loss=0.1373, simple_loss=0.2097, pruned_loss=0.03241, over 4765.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.03057, over 971615.42 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 20:15:16,666 INFO [train.py:715] (6/8) Epoch 16, batch 27950, loss[loss=0.1171, simple_loss=0.1907, pruned_loss=0.02173, over 4972.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03031, over 972200.74 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 20:15:55,979 INFO [train.py:715] (6/8) Epoch 16, batch 28000, loss[loss=0.1246, simple_loss=0.2013, pruned_loss=0.02398, over 4814.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03, over 973113.16 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 20:16:35,540 INFO [train.py:715] (6/8) Epoch 16, batch 28050, loss[loss=0.1126, simple_loss=0.1912, pruned_loss=0.01705, over 4783.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03018, over 972517.84 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 20:17:15,205 INFO [train.py:715] (6/8) Epoch 16, batch 28100, loss[loss=0.1262, simple_loss=0.1992, pruned_loss=0.0266, over 4842.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02964, over 972388.59 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 20:17:54,189 INFO [train.py:715] (6/8) Epoch 16, batch 28150, loss[loss=0.128, simple_loss=0.1965, pruned_loss=0.02975, over 4644.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02981, over 971803.40 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 20:18:33,942 INFO [train.py:715] (6/8) Epoch 16, batch 28200, loss[loss=0.1168, simple_loss=0.2066, pruned_loss=0.01351, over 4883.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02929, over 971101.49 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 20:19:13,273 INFO [train.py:715] (6/8) Epoch 16, batch 28250, loss[loss=0.1505, simple_loss=0.2176, pruned_loss=0.04171, over 4879.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02933, over 971134.04 frames.], batch size: 39, lr: 1.37e-04 +2022-05-08 20:19:51,892 INFO [train.py:715] (6/8) Epoch 16, batch 28300, loss[loss=0.1331, simple_loss=0.1983, pruned_loss=0.03392, over 4910.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02908, over 972121.17 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 20:20:31,606 
INFO [train.py:715] (6/8) Epoch 16, batch 28350, loss[loss=0.1697, simple_loss=0.2375, pruned_loss=0.05097, over 4751.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02939, over 971831.04 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 20:21:11,559 INFO [train.py:715] (6/8) Epoch 16, batch 28400, loss[loss=0.1273, simple_loss=0.2095, pruned_loss=0.02259, over 4862.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02935, over 972504.82 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 20:21:51,021 INFO [train.py:715] (6/8) Epoch 16, batch 28450, loss[loss=0.1483, simple_loss=0.2197, pruned_loss=0.03844, over 4962.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02951, over 972902.88 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:22:29,726 INFO [train.py:715] (6/8) Epoch 16, batch 28500, loss[loss=0.1601, simple_loss=0.2386, pruned_loss=0.04079, over 4756.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02952, over 972254.33 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 20:23:09,888 INFO [train.py:715] (6/8) Epoch 16, batch 28550, loss[loss=0.1257, simple_loss=0.2009, pruned_loss=0.02528, over 4980.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02953, over 973015.95 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 20:23:49,362 INFO [train.py:715] (6/8) Epoch 16, batch 28600, loss[loss=0.1248, simple_loss=0.2018, pruned_loss=0.02394, over 4947.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02941, over 973534.76 frames.], batch size: 29, lr: 1.37e-04 +2022-05-08 20:24:28,948 INFO [train.py:715] (6/8) Epoch 16, batch 28650, loss[loss=0.1109, simple_loss=0.1874, pruned_loss=0.01719, over 4788.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02922, over 972849.54 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 20:25:08,098 INFO [train.py:715] (6/8) Epoch 16, batch 28700, loss[loss=0.1281, simple_loss=0.1982, pruned_loss=0.02896, over 4681.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02924, over 972408.44 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:25:47,662 INFO [train.py:715] (6/8) Epoch 16, batch 28750, loss[loss=0.12, simple_loss=0.2029, pruned_loss=0.01851, over 4868.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02958, over 972499.28 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 20:26:27,378 INFO [train.py:715] (6/8) Epoch 16, batch 28800, loss[loss=0.1206, simple_loss=0.1928, pruned_loss=0.02417, over 4913.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2073, pruned_loss=0.02907, over 973045.65 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:27:06,560 INFO [train.py:715] (6/8) Epoch 16, batch 28850, loss[loss=0.1343, simple_loss=0.205, pruned_loss=0.03182, over 4743.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02934, over 973073.91 frames.], batch size: 12, lr: 1.36e-04 +2022-05-08 20:27:46,356 INFO [train.py:715] (6/8) Epoch 16, batch 28900, loss[loss=0.1203, simple_loss=0.1953, pruned_loss=0.02266, over 4815.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.029, over 972262.14 frames.], batch size: 26, lr: 1.36e-04 +2022-05-08 20:28:25,940 INFO [train.py:715] (6/8) Epoch 16, batch 28950, loss[loss=0.1294, simple_loss=0.202, pruned_loss=0.02839, over 4753.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.02925, over 972506.05 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 20:29:05,872 INFO 
[train.py:715] (6/8) Epoch 16, batch 29000, loss[loss=0.1335, simple_loss=0.2055, pruned_loss=0.03075, over 4759.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02933, over 971570.03 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 20:29:45,343 INFO [train.py:715] (6/8) Epoch 16, batch 29050, loss[loss=0.1298, simple_loss=0.2098, pruned_loss=0.02492, over 4968.00 frames.], tot_loss[loss=0.1333, simple_loss=0.208, pruned_loss=0.02928, over 972643.89 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 20:30:25,179 INFO [train.py:715] (6/8) Epoch 16, batch 29100, loss[loss=0.1324, simple_loss=0.2162, pruned_loss=0.02434, over 4884.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2078, pruned_loss=0.02926, over 972838.75 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 20:31:06,240 INFO [train.py:715] (6/8) Epoch 16, batch 29150, loss[loss=0.1318, simple_loss=0.2018, pruned_loss=0.03094, over 4794.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2079, pruned_loss=0.02916, over 972350.04 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 20:31:46,270 INFO [train.py:715] (6/8) Epoch 16, batch 29200, loss[loss=0.1355, simple_loss=0.2048, pruned_loss=0.03311, over 4743.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2084, pruned_loss=0.02955, over 971958.11 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 20:32:27,446 INFO [train.py:715] (6/8) Epoch 16, batch 29250, loss[loss=0.1379, simple_loss=0.2179, pruned_loss=0.02896, over 4920.00 frames.], tot_loss[loss=0.133, simple_loss=0.2077, pruned_loss=0.02918, over 972191.21 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 20:33:08,441 INFO [train.py:715] (6/8) Epoch 16, batch 29300, loss[loss=0.1416, simple_loss=0.2146, pruned_loss=0.03433, over 4810.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2074, pruned_loss=0.02897, over 971670.99 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 20:33:49,841 INFO [train.py:715] (6/8) Epoch 16, batch 29350, loss[loss=0.1603, simple_loss=0.2401, pruned_loss=0.04026, over 4976.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2085, pruned_loss=0.02965, over 972613.71 frames.], batch size: 35, lr: 1.36e-04 +2022-05-08 20:34:30,966 INFO [train.py:715] (6/8) Epoch 16, batch 29400, loss[loss=0.1251, simple_loss=0.1971, pruned_loss=0.02655, over 4705.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2089, pruned_loss=0.03002, over 972839.90 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:35:12,719 INFO [train.py:715] (6/8) Epoch 16, batch 29450, loss[loss=0.1046, simple_loss=0.1761, pruned_loss=0.01655, over 4784.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2083, pruned_loss=0.02973, over 972235.98 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 20:35:54,218 INFO [train.py:715] (6/8) Epoch 16, batch 29500, loss[loss=0.1249, simple_loss=0.1956, pruned_loss=0.02713, over 4793.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02977, over 973502.62 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:36:36,038 INFO [train.py:715] (6/8) Epoch 16, batch 29550, loss[loss=0.1331, simple_loss=0.2018, pruned_loss=0.03223, over 4936.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02987, over 973360.64 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 20:37:17,263 INFO [train.py:715] (6/8) Epoch 16, batch 29600, loss[loss=0.1208, simple_loss=0.1992, pruned_loss=0.02122, over 4868.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02972, over 973217.75 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 20:37:59,052 INFO 
[train.py:715] (6/8) Epoch 16, batch 29650, loss[loss=0.1243, simple_loss=0.2087, pruned_loss=0.01999, over 4775.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.02998, over 972451.41 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 20:38:40,543 INFO [train.py:715] (6/8) Epoch 16, batch 29700, loss[loss=0.1508, simple_loss=0.2245, pruned_loss=0.03852, over 4891.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2065, pruned_loss=0.02969, over 972520.13 frames.], batch size: 38, lr: 1.36e-04 +2022-05-08 20:39:21,786 INFO [train.py:715] (6/8) Epoch 16, batch 29750, loss[loss=0.1475, simple_loss=0.2309, pruned_loss=0.03207, over 4888.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2068, pruned_loss=0.02992, over 971482.81 frames.], batch size: 22, lr: 1.36e-04 +2022-05-08 20:40:02,894 INFO [train.py:715] (6/8) Epoch 16, batch 29800, loss[loss=0.1444, simple_loss=0.2167, pruned_loss=0.03603, over 4923.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2066, pruned_loss=0.02996, over 971488.35 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:40:44,781 INFO [train.py:715] (6/8) Epoch 16, batch 29850, loss[loss=0.1222, simple_loss=0.1952, pruned_loss=0.02463, over 4766.00 frames.], tot_loss[loss=0.1336, simple_loss=0.207, pruned_loss=0.0301, over 972295.44 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 20:41:26,355 INFO [train.py:715] (6/8) Epoch 16, batch 29900, loss[loss=0.1385, simple_loss=0.2109, pruned_loss=0.03305, over 4917.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2072, pruned_loss=0.03, over 972568.12 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:42:07,630 INFO [train.py:715] (6/8) Epoch 16, batch 29950, loss[loss=0.1202, simple_loss=0.1973, pruned_loss=0.02157, over 4935.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03002, over 972649.36 frames.], batch size: 29, lr: 1.36e-04 +2022-05-08 20:42:50,229 INFO [train.py:715] (6/8) Epoch 16, batch 30000, loss[loss=0.1401, simple_loss=0.2089, pruned_loss=0.03572, over 4842.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2078, pruned_loss=0.03028, over 972835.66 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:42:50,230 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 20:43:01,793 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.1047, simple_loss=0.1883, pruned_loss=0.01058, over 914524.00 frames. 
+2022-05-08 20:43:44,296 INFO [train.py:715] (6/8) Epoch 16, batch 30050, loss[loss=0.1682, simple_loss=0.2424, pruned_loss=0.04704, over 4942.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02991, over 972840.75 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 20:44:26,135 INFO [train.py:715] (6/8) Epoch 16, batch 30100, loss[loss=0.1414, simple_loss=0.2199, pruned_loss=0.03147, over 4872.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02961, over 973650.21 frames.], batch size: 32, lr: 1.36e-04 +2022-05-08 20:45:06,925 INFO [train.py:715] (6/8) Epoch 16, batch 30150, loss[loss=0.1321, simple_loss=0.2102, pruned_loss=0.02699, over 4871.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.02968, over 973749.25 frames.], batch size: 22, lr: 1.36e-04 +2022-05-08 20:45:48,621 INFO [train.py:715] (6/8) Epoch 16, batch 30200, loss[loss=0.1387, simple_loss=0.2227, pruned_loss=0.02736, over 4800.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02966, over 973363.35 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 20:46:29,880 INFO [train.py:715] (6/8) Epoch 16, batch 30250, loss[loss=0.09346, simple_loss=0.1659, pruned_loss=0.01053, over 4754.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02947, over 972736.12 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 20:47:09,984 INFO [train.py:715] (6/8) Epoch 16, batch 30300, loss[loss=0.1315, simple_loss=0.2103, pruned_loss=0.02633, over 4848.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.02998, over 973215.00 frames.], batch size: 30, lr: 1.36e-04 +2022-05-08 20:47:50,185 INFO [train.py:715] (6/8) Epoch 16, batch 30350, loss[loss=0.1459, simple_loss=0.2238, pruned_loss=0.03403, over 4812.00 frames.], tot_loss[loss=0.1334, simple_loss=0.207, pruned_loss=0.02985, over 973322.96 frames.], batch size: 27, lr: 1.36e-04 +2022-05-08 20:48:30,614 INFO [train.py:715] (6/8) Epoch 16, batch 30400, loss[loss=0.1414, simple_loss=0.2132, pruned_loss=0.03479, over 4812.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02976, over 972701.28 frames.], batch size: 26, lr: 1.36e-04 +2022-05-08 20:49:10,252 INFO [train.py:715] (6/8) Epoch 16, batch 30450, loss[loss=0.1055, simple_loss=0.1882, pruned_loss=0.01137, over 4814.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2069, pruned_loss=0.02968, over 973073.33 frames.], batch size: 13, lr: 1.36e-04 +2022-05-08 20:49:49,410 INFO [train.py:715] (6/8) Epoch 16, batch 30500, loss[loss=0.1436, simple_loss=0.2116, pruned_loss=0.03784, over 4795.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2066, pruned_loss=0.0296, over 972931.65 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:50:29,355 INFO [train.py:715] (6/8) Epoch 16, batch 30550, loss[loss=0.1339, simple_loss=0.2006, pruned_loss=0.0336, over 4901.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2071, pruned_loss=0.02989, over 972564.16 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 20:51:09,820 INFO [train.py:715] (6/8) Epoch 16, batch 30600, loss[loss=0.1365, simple_loss=0.2087, pruned_loss=0.03214, over 4758.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02986, over 972381.96 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 20:51:49,045 INFO [train.py:715] (6/8) Epoch 16, batch 30650, loss[loss=0.1431, simple_loss=0.217, pruned_loss=0.03457, over 4911.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.0298, over 972507.03 frames.], batch size: 39, lr: 1.36e-04 
+2022-05-08 20:52:28,791 INFO [train.py:715] (6/8) Epoch 16, batch 30700, loss[loss=0.1553, simple_loss=0.2355, pruned_loss=0.03753, over 4697.00 frames.], tot_loss[loss=0.133, simple_loss=0.2064, pruned_loss=0.02976, over 972111.99 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:53:10,032 INFO [train.py:715] (6/8) Epoch 16, batch 30750, loss[loss=0.1181, simple_loss=0.1855, pruned_loss=0.02534, over 4767.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2061, pruned_loss=0.02935, over 972373.97 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 20:53:49,623 INFO [train.py:715] (6/8) Epoch 16, batch 30800, loss[loss=0.1342, simple_loss=0.2199, pruned_loss=0.02418, over 4879.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2062, pruned_loss=0.02935, over 972284.30 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 20:54:28,449 INFO [train.py:715] (6/8) Epoch 16, batch 30850, loss[loss=0.1057, simple_loss=0.1731, pruned_loss=0.01913, over 4816.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2054, pruned_loss=0.02898, over 972431.14 frames.], batch size: 13, lr: 1.36e-04 +2022-05-08 20:55:08,444 INFO [train.py:715] (6/8) Epoch 16, batch 30900, loss[loss=0.1685, simple_loss=0.2371, pruned_loss=0.04993, over 4939.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2061, pruned_loss=0.02921, over 972798.39 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 20:55:47,876 INFO [train.py:715] (6/8) Epoch 16, batch 30950, loss[loss=0.1085, simple_loss=0.1855, pruned_loss=0.01578, over 4926.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2066, pruned_loss=0.02953, over 973419.27 frames.], batch size: 29, lr: 1.36e-04 +2022-05-08 20:56:26,906 INFO [train.py:715] (6/8) Epoch 16, batch 31000, loss[loss=0.1271, simple_loss=0.1917, pruned_loss=0.03129, over 4783.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2069, pruned_loss=0.02975, over 972065.60 frames.], batch size: 12, lr: 1.36e-04 +2022-05-08 20:57:06,088 INFO [train.py:715] (6/8) Epoch 16, batch 31050, loss[loss=0.1099, simple_loss=0.1886, pruned_loss=0.0156, over 4810.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2072, pruned_loss=0.02974, over 972502.06 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 20:57:45,842 INFO [train.py:715] (6/8) Epoch 16, batch 31100, loss[loss=0.1359, simple_loss=0.2105, pruned_loss=0.03066, over 4976.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2073, pruned_loss=0.03002, over 972101.85 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 20:58:25,692 INFO [train.py:715] (6/8) Epoch 16, batch 31150, loss[loss=0.1921, simple_loss=0.2697, pruned_loss=0.05723, over 4953.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2094, pruned_loss=0.03046, over 972741.63 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:59:04,398 INFO [train.py:715] (6/8) Epoch 16, batch 31200, loss[loss=0.1315, simple_loss=0.2071, pruned_loss=0.02797, over 4845.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03033, over 972568.12 frames.], batch size: 30, lr: 1.36e-04 +2022-05-08 20:59:44,069 INFO [train.py:715] (6/8) Epoch 16, batch 31250, loss[loss=0.1264, simple_loss=0.2057, pruned_loss=0.02349, over 4693.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02994, over 971852.47 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:00:23,631 INFO [train.py:715] (6/8) Epoch 16, batch 31300, loss[loss=0.119, simple_loss=0.1965, pruned_loss=0.02074, over 4930.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02959, over 972336.24 frames.], batch size: 29, lr: 1.36e-04 
+2022-05-08 21:01:03,211 INFO [train.py:715] (6/8) Epoch 16, batch 31350, loss[loss=0.1238, simple_loss=0.1997, pruned_loss=0.02393, over 4971.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2077, pruned_loss=0.02939, over 972701.93 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 21:01:42,656 INFO [train.py:715] (6/8) Epoch 16, batch 31400, loss[loss=0.12, simple_loss=0.2048, pruned_loss=0.01762, over 4926.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02962, over 973250.94 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:02:22,726 INFO [train.py:715] (6/8) Epoch 16, batch 31450, loss[loss=0.122, simple_loss=0.1959, pruned_loss=0.02401, over 4652.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02952, over 973083.98 frames.], batch size: 13, lr: 1.36e-04 +2022-05-08 21:03:01,700 INFO [train.py:715] (6/8) Epoch 16, batch 31500, loss[loss=0.135, simple_loss=0.2118, pruned_loss=0.02909, over 4911.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.02943, over 972698.77 frames.], batch size: 29, lr: 1.36e-04 +2022-05-08 21:03:40,544 INFO [train.py:715] (6/8) Epoch 16, batch 31550, loss[loss=0.1263, simple_loss=0.1933, pruned_loss=0.02959, over 4789.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02929, over 973043.93 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 21:04:19,808 INFO [train.py:715] (6/8) Epoch 16, batch 31600, loss[loss=0.1134, simple_loss=0.1854, pruned_loss=0.02071, over 4856.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02882, over 972550.69 frames.], batch size: 30, lr: 1.36e-04 +2022-05-08 21:04:58,920 INFO [train.py:715] (6/8) Epoch 16, batch 31650, loss[loss=0.1511, simple_loss=0.225, pruned_loss=0.03862, over 4980.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02914, over 972863.93 frames.], batch size: 28, lr: 1.36e-04 +2022-05-08 21:05:37,858 INFO [train.py:715] (6/8) Epoch 16, batch 31700, loss[loss=0.1453, simple_loss=0.227, pruned_loss=0.03178, over 4775.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02922, over 973192.82 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:06:17,107 INFO [train.py:715] (6/8) Epoch 16, batch 31750, loss[loss=0.1234, simple_loss=0.2085, pruned_loss=0.01912, over 4830.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02885, over 972343.43 frames.], batch size: 25, lr: 1.36e-04 +2022-05-08 21:06:56,945 INFO [train.py:715] (6/8) Epoch 16, batch 31800, loss[loss=0.1225, simple_loss=0.1995, pruned_loss=0.02274, over 4964.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.0291, over 972616.57 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 21:07:36,898 INFO [train.py:715] (6/8) Epoch 16, batch 31850, loss[loss=0.1413, simple_loss=0.2184, pruned_loss=0.03208, over 4857.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.029, over 972183.33 frames.], batch size: 20, lr: 1.36e-04 +2022-05-08 21:08:15,870 INFO [train.py:715] (6/8) Epoch 16, batch 31900, loss[loss=0.1305, simple_loss=0.2033, pruned_loss=0.02885, over 4948.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02994, over 972780.05 frames.], batch size: 23, lr: 1.36e-04 +2022-05-08 21:08:55,064 INFO [train.py:715] (6/8) Epoch 16, batch 31950, loss[loss=0.146, simple_loss=0.2175, pruned_loss=0.03727, over 4874.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2072, pruned_loss=0.02993, over 973215.88 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 
21:09:34,442 INFO [train.py:715] (6/8) Epoch 16, batch 32000, loss[loss=0.1201, simple_loss=0.1967, pruned_loss=0.02174, over 4772.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2073, pruned_loss=0.02993, over 973342.70 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:10:13,549 INFO [train.py:715] (6/8) Epoch 16, batch 32050, loss[loss=0.1305, simple_loss=0.1991, pruned_loss=0.03089, over 4915.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02979, over 972508.15 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 21:10:53,100 INFO [train.py:715] (6/8) Epoch 16, batch 32100, loss[loss=0.1138, simple_loss=0.1912, pruned_loss=0.01822, over 4912.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02968, over 972676.56 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 21:11:32,624 INFO [train.py:715] (6/8) Epoch 16, batch 32150, loss[loss=0.1189, simple_loss=0.2026, pruned_loss=0.01766, over 4930.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02929, over 973261.39 frames.], batch size: 23, lr: 1.36e-04 +2022-05-08 21:12:12,706 INFO [train.py:715] (6/8) Epoch 16, batch 32200, loss[loss=0.1556, simple_loss=0.2393, pruned_loss=0.03592, over 4877.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.02912, over 972663.98 frames.], batch size: 22, lr: 1.36e-04 +2022-05-08 21:12:51,837 INFO [train.py:715] (6/8) Epoch 16, batch 32250, loss[loss=0.1394, simple_loss=0.2193, pruned_loss=0.0298, over 4807.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02924, over 972430.73 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:13:31,321 INFO [train.py:715] (6/8) Epoch 16, batch 32300, loss[loss=0.1569, simple_loss=0.2333, pruned_loss=0.04022, over 4934.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02895, over 973202.19 frames.], batch size: 23, lr: 1.36e-04 +2022-05-08 21:14:11,345 INFO [train.py:715] (6/8) Epoch 16, batch 32350, loss[loss=0.126, simple_loss=0.2063, pruned_loss=0.02283, over 4923.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.02938, over 972814.25 frames.], batch size: 23, lr: 1.36e-04 +2022-05-08 21:14:50,973 INFO [train.py:715] (6/8) Epoch 16, batch 32400, loss[loss=0.1206, simple_loss=0.1924, pruned_loss=0.02438, over 4916.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03, over 972813.60 frames.], batch size: 29, lr: 1.36e-04 +2022-05-08 21:15:29,999 INFO [train.py:715] (6/8) Epoch 16, batch 32450, loss[loss=0.1078, simple_loss=0.1884, pruned_loss=0.01361, over 4935.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02982, over 972301.20 frames.], batch size: 23, lr: 1.36e-04 +2022-05-08 21:16:10,036 INFO [train.py:715] (6/8) Epoch 16, batch 32500, loss[loss=0.1447, simple_loss=0.2151, pruned_loss=0.03714, over 4775.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02998, over 972775.03 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:16:49,326 INFO [train.py:715] (6/8) Epoch 16, batch 32550, loss[loss=0.1336, simple_loss=0.2024, pruned_loss=0.03238, over 4960.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.02996, over 973969.96 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 21:17:28,290 INFO [train.py:715] (6/8) Epoch 16, batch 32600, loss[loss=0.1472, simple_loss=0.2245, pruned_loss=0.03495, over 4980.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03016, over 973338.49 frames.], batch size: 28, lr: 1.36e-04 +2022-05-08 21:18:07,203 
INFO [train.py:715] (6/8) Epoch 16, batch 32650, loss[loss=0.1169, simple_loss=0.198, pruned_loss=0.01789, over 4983.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03007, over 972726.89 frames.], batch size: 28, lr: 1.36e-04 +2022-05-08 21:18:46,397 INFO [train.py:715] (6/8) Epoch 16, batch 32700, loss[loss=0.1549, simple_loss=0.2258, pruned_loss=0.04198, over 4745.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.03005, over 972264.20 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:19:25,740 INFO [train.py:715] (6/8) Epoch 16, batch 32750, loss[loss=0.1227, simple_loss=0.1941, pruned_loss=0.02564, over 4799.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03031, over 971674.04 frames.], batch size: 25, lr: 1.36e-04 +2022-05-08 21:20:05,464 INFO [train.py:715] (6/8) Epoch 16, batch 32800, loss[loss=0.1239, simple_loss=0.1962, pruned_loss=0.0258, over 4859.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.03015, over 972005.95 frames.], batch size: 20, lr: 1.36e-04 +2022-05-08 21:20:44,826 INFO [train.py:715] (6/8) Epoch 16, batch 32850, loss[loss=0.1009, simple_loss=0.17, pruned_loss=0.01593, over 4875.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02996, over 972336.99 frames.], batch size: 22, lr: 1.36e-04 +2022-05-08 21:21:24,455 INFO [train.py:715] (6/8) Epoch 16, batch 32900, loss[loss=0.129, simple_loss=0.1997, pruned_loss=0.02913, over 4950.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03004, over 973249.71 frames.], batch size: 35, lr: 1.36e-04 +2022-05-08 21:22:03,429 INFO [train.py:715] (6/8) Epoch 16, batch 32950, loss[loss=0.1161, simple_loss=0.188, pruned_loss=0.02209, over 4918.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03003, over 972374.64 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 21:22:42,573 INFO [train.py:715] (6/8) Epoch 16, batch 33000, loss[loss=0.1332, simple_loss=0.2055, pruned_loss=0.03044, over 4701.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02987, over 972555.63 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:22:42,574 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 21:22:55,770 INFO [train.py:742] (6/8) Epoch 16, validation: loss=0.105, simple_loss=0.1884, pruned_loss=0.01078, over 914524.00 frames. 
+2022-05-08 21:23:35,557 INFO [train.py:715] (6/8) Epoch 16, batch 33050, loss[loss=0.1513, simple_loss=0.2287, pruned_loss=0.03695, over 4913.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02958, over 972173.41 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:24:14,885 INFO [train.py:715] (6/8) Epoch 16, batch 33100, loss[loss=0.1208, simple_loss=0.1882, pruned_loss=0.02673, over 4799.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02944, over 972020.87 frames.], batch size: 12, lr: 1.36e-04 +2022-05-08 21:24:54,262 INFO [train.py:715] (6/8) Epoch 16, batch 33150, loss[loss=0.1277, simple_loss=0.1923, pruned_loss=0.03152, over 4957.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02938, over 972448.07 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:25:34,118 INFO [train.py:715] (6/8) Epoch 16, batch 33200, loss[loss=0.1392, simple_loss=0.2005, pruned_loss=0.03895, over 4899.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02925, over 972057.06 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:26:13,848 INFO [train.py:715] (6/8) Epoch 16, batch 33250, loss[loss=0.131, simple_loss=0.2101, pruned_loss=0.02596, over 4953.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02964, over 972102.76 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 21:26:53,438 INFO [train.py:715] (6/8) Epoch 16, batch 33300, loss[loss=0.1196, simple_loss=0.2011, pruned_loss=0.01901, over 4879.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02913, over 972606.91 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:27:32,751 INFO [train.py:715] (6/8) Epoch 16, batch 33350, loss[loss=0.1375, simple_loss=0.2205, pruned_loss=0.02725, over 4784.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02924, over 972898.47 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 21:28:12,219 INFO [train.py:715] (6/8) Epoch 16, batch 33400, loss[loss=0.1437, simple_loss=0.2174, pruned_loss=0.03496, over 4767.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02971, over 973457.31 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 21:28:51,455 INFO [train.py:715] (6/8) Epoch 16, batch 33450, loss[loss=0.1023, simple_loss=0.174, pruned_loss=0.0153, over 4793.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03013, over 973255.43 frames.], batch size: 12, lr: 1.36e-04 +2022-05-08 21:29:30,507 INFO [train.py:715] (6/8) Epoch 16, batch 33500, loss[loss=0.1336, simple_loss=0.2141, pruned_loss=0.02658, over 4946.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02993, over 973159.56 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:30:09,461 INFO [train.py:715] (6/8) Epoch 16, batch 33550, loss[loss=0.15, simple_loss=0.221, pruned_loss=0.03944, over 4704.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03016, over 972498.47 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:30:49,230 INFO [train.py:715] (6/8) Epoch 16, batch 33600, loss[loss=0.146, simple_loss=0.2087, pruned_loss=0.04168, over 4895.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03009, over 972346.00 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 21:31:28,204 INFO [train.py:715] (6/8) Epoch 16, batch 33650, loss[loss=0.1297, simple_loss=0.2063, pruned_loss=0.02656, over 4744.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2086, pruned_loss=0.02977, over 972436.69 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 
21:32:07,847 INFO [train.py:715] (6/8) Epoch 16, batch 33700, loss[loss=0.1221, simple_loss=0.1956, pruned_loss=0.0243, over 4894.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02993, over 971983.95 frames.], batch size: 22, lr: 1.36e-04 +2022-05-08 21:32:46,797 INFO [train.py:715] (6/8) Epoch 16, batch 33750, loss[loss=0.1278, simple_loss=0.2048, pruned_loss=0.0254, over 4754.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.0302, over 972050.59 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 21:33:25,790 INFO [train.py:715] (6/8) Epoch 16, batch 33800, loss[loss=0.1219, simple_loss=0.192, pruned_loss=0.02592, over 4748.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03002, over 971923.00 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:34:05,046 INFO [train.py:715] (6/8) Epoch 16, batch 33850, loss[loss=0.1517, simple_loss=0.2263, pruned_loss=0.03859, over 4966.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03004, over 972571.67 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:34:44,272 INFO [train.py:715] (6/8) Epoch 16, batch 33900, loss[loss=0.143, simple_loss=0.2103, pruned_loss=0.03787, over 4870.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02986, over 973293.92 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:35:24,623 INFO [train.py:715] (6/8) Epoch 16, batch 33950, loss[loss=0.142, simple_loss=0.2381, pruned_loss=0.02298, over 4829.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02962, over 973792.47 frames.], batch size: 26, lr: 1.36e-04 +2022-05-08 21:36:03,137 INFO [train.py:715] (6/8) Epoch 16, batch 34000, loss[loss=0.1458, simple_loss=0.2227, pruned_loss=0.03451, over 4975.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02918, over 974763.07 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:36:43,151 INFO [train.py:715] (6/8) Epoch 16, batch 34050, loss[loss=0.1026, simple_loss=0.1706, pruned_loss=0.01728, over 4836.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02915, over 974057.73 frames.], batch size: 12, lr: 1.36e-04 +2022-05-08 21:37:22,554 INFO [train.py:715] (6/8) Epoch 16, batch 34100, loss[loss=0.1359, simple_loss=0.2126, pruned_loss=0.02955, over 4892.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02931, over 973513.39 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 21:38:01,720 INFO [train.py:715] (6/8) Epoch 16, batch 34150, loss[loss=0.1214, simple_loss=0.2077, pruned_loss=0.01755, over 4983.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02958, over 973639.39 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 21:38:41,100 INFO [train.py:715] (6/8) Epoch 16, batch 34200, loss[loss=0.1648, simple_loss=0.2332, pruned_loss=0.04815, over 4922.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02937, over 973792.54 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 21:39:20,453 INFO [train.py:715] (6/8) Epoch 16, batch 34250, loss[loss=0.115, simple_loss=0.1864, pruned_loss=0.02182, over 4888.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02953, over 972464.08 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:40:00,520 INFO [train.py:715] (6/8) Epoch 16, batch 34300, loss[loss=0.1415, simple_loss=0.206, pruned_loss=0.03853, over 4806.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2067, pruned_loss=0.02975, over 971809.78 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:40:39,497 
INFO [train.py:715] (6/8) Epoch 16, batch 34350, loss[loss=0.1216, simple_loss=0.1999, pruned_loss=0.02161, over 4971.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02979, over 971571.66 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 21:41:18,848 INFO [train.py:715] (6/8) Epoch 16, batch 34400, loss[loss=0.1401, simple_loss=0.2127, pruned_loss=0.03375, over 4839.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02967, over 971845.09 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:41:58,452 INFO [train.py:715] (6/8) Epoch 16, batch 34450, loss[loss=0.1092, simple_loss=0.1825, pruned_loss=0.01794, over 4979.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03006, over 971928.03 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 21:42:37,729 INFO [train.py:715] (6/8) Epoch 16, batch 34500, loss[loss=0.1438, simple_loss=0.2148, pruned_loss=0.03641, over 4968.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02953, over 972982.08 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 21:43:17,125 INFO [train.py:715] (6/8) Epoch 16, batch 34550, loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02939, over 4925.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02947, over 973110.07 frames.], batch size: 23, lr: 1.36e-04 +2022-05-08 21:43:56,246 INFO [train.py:715] (6/8) Epoch 16, batch 34600, loss[loss=0.1212, simple_loss=0.1989, pruned_loss=0.02175, over 4818.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02947, over 972897.31 frames.], batch size: 26, lr: 1.36e-04 +2022-05-08 21:44:36,201 INFO [train.py:715] (6/8) Epoch 16, batch 34650, loss[loss=0.1464, simple_loss=0.2219, pruned_loss=0.03542, over 4925.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02964, over 973723.66 frames.], batch size: 23, lr: 1.36e-04 +2022-05-08 21:45:15,694 INFO [train.py:715] (6/8) Epoch 16, batch 34700, loss[loss=0.118, simple_loss=0.2003, pruned_loss=0.01783, over 4891.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02926, over 973294.54 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:45:54,806 INFO [train.py:715] (6/8) Epoch 16, batch 34750, loss[loss=0.1336, simple_loss=0.212, pruned_loss=0.02764, over 4834.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2078, pruned_loss=0.02918, over 973813.16 frames.], batch size: 30, lr: 1.36e-04 +2022-05-08 21:46:32,021 INFO [train.py:715] (6/8) Epoch 16, batch 34800, loss[loss=0.1712, simple_loss=0.2407, pruned_loss=0.05087, over 4926.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02946, over 972957.35 frames.], batch size: 23, lr: 1.36e-04 +2022-05-08 21:47:23,863 INFO [train.py:715] (6/8) Epoch 17, batch 0, loss[loss=0.1475, simple_loss=0.2236, pruned_loss=0.03569, over 4814.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2236, pruned_loss=0.03569, over 4814.00 frames.], batch size: 27, lr: 1.32e-04 +2022-05-08 21:48:03,327 INFO [train.py:715] (6/8) Epoch 17, batch 50, loss[loss=0.1567, simple_loss=0.2257, pruned_loss=0.04387, over 4972.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03014, over 219686.82 frames.], batch size: 24, lr: 1.32e-04 +2022-05-08 21:48:44,387 INFO [train.py:715] (6/8) Epoch 17, batch 100, loss[loss=0.1323, simple_loss=0.2045, pruned_loss=0.03008, over 4939.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2057, pruned_loss=0.02896, over 387031.94 frames.], batch size: 39, lr: 1.32e-04 +2022-05-08 21:49:25,329 INFO [train.py:715] 
(6/8) Epoch 17, batch 150, loss[loss=0.1326, simple_loss=0.2091, pruned_loss=0.02804, over 4925.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02893, over 517580.05 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 21:50:06,398 INFO [train.py:715] (6/8) Epoch 17, batch 200, loss[loss=0.1545, simple_loss=0.2366, pruned_loss=0.03622, over 4808.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2058, pruned_loss=0.02916, over 618751.04 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 21:50:49,386 INFO [train.py:715] (6/8) Epoch 17, batch 250, loss[loss=0.158, simple_loss=0.2248, pruned_loss=0.04559, over 4847.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2059, pruned_loss=0.02935, over 696742.48 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 21:51:30,988 INFO [train.py:715] (6/8) Epoch 17, batch 300, loss[loss=0.1227, simple_loss=0.1954, pruned_loss=0.02496, over 4860.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.02984, over 757572.07 frames.], batch size: 13, lr: 1.32e-04 +2022-05-08 21:52:11,854 INFO [train.py:715] (6/8) Epoch 17, batch 350, loss[loss=0.1437, simple_loss=0.2201, pruned_loss=0.03371, over 4773.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02988, over 804731.41 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 21:52:52,794 INFO [train.py:715] (6/8) Epoch 17, batch 400, loss[loss=0.1407, simple_loss=0.2069, pruned_loss=0.03721, over 4746.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03001, over 841988.63 frames.], batch size: 16, lr: 1.32e-04 +2022-05-08 21:53:33,711 INFO [train.py:715] (6/8) Epoch 17, batch 450, loss[loss=0.1093, simple_loss=0.1818, pruned_loss=0.01834, over 4867.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02935, over 870889.25 frames.], batch size: 32, lr: 1.32e-04 +2022-05-08 21:54:14,773 INFO [train.py:715] (6/8) Epoch 17, batch 500, loss[loss=0.1355, simple_loss=0.2121, pruned_loss=0.02946, over 4924.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02921, over 892799.12 frames.], batch size: 29, lr: 1.32e-04 +2022-05-08 21:54:56,781 INFO [train.py:715] (6/8) Epoch 17, batch 550, loss[loss=0.1446, simple_loss=0.2192, pruned_loss=0.03497, over 4693.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02928, over 909827.13 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 21:55:37,899 INFO [train.py:715] (6/8) Epoch 17, batch 600, loss[loss=0.1247, simple_loss=0.2071, pruned_loss=0.02112, over 4810.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.0292, over 923865.53 frames.], batch size: 26, lr: 1.32e-04 +2022-05-08 21:56:20,088 INFO [train.py:715] (6/8) Epoch 17, batch 650, loss[loss=0.1163, simple_loss=0.1926, pruned_loss=0.02002, over 4922.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02946, over 934435.15 frames.], batch size: 29, lr: 1.32e-04 +2022-05-08 21:57:01,710 INFO [train.py:715] (6/8) Epoch 17, batch 700, loss[loss=0.1168, simple_loss=0.1956, pruned_loss=0.01907, over 4794.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02961, over 942419.38 frames.], batch size: 24, lr: 1.32e-04 +2022-05-08 21:57:42,599 INFO [train.py:715] (6/8) Epoch 17, batch 750, loss[loss=0.1075, simple_loss=0.1762, pruned_loss=0.01939, over 4791.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.0299, over 948753.64 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 21:58:23,378 INFO [train.py:715] (6/8) Epoch 17, batch 800, 
loss[loss=0.1303, simple_loss=0.2052, pruned_loss=0.02766, over 4775.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.0297, over 953540.22 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 21:59:03,981 INFO [train.py:715] (6/8) Epoch 17, batch 850, loss[loss=0.1276, simple_loss=0.2058, pruned_loss=0.02469, over 4913.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02979, over 957010.49 frames.], batch size: 29, lr: 1.32e-04 +2022-05-08 21:59:45,380 INFO [train.py:715] (6/8) Epoch 17, batch 900, loss[loss=0.1448, simple_loss=0.2152, pruned_loss=0.0372, over 4832.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.0303, over 960733.48 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:00:26,264 INFO [train.py:715] (6/8) Epoch 17, batch 950, loss[loss=0.1427, simple_loss=0.2202, pruned_loss=0.03258, over 4902.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.0299, over 963568.55 frames.], batch size: 29, lr: 1.32e-04 +2022-05-08 22:01:07,646 INFO [train.py:715] (6/8) Epoch 17, batch 1000, loss[loss=0.1317, simple_loss=0.2117, pruned_loss=0.0258, over 4821.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02984, over 965961.79 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:01:48,891 INFO [train.py:715] (6/8) Epoch 17, batch 1050, loss[loss=0.1202, simple_loss=0.2048, pruned_loss=0.01782, over 4942.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2074, pruned_loss=0.03007, over 966717.85 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:02:29,886 INFO [train.py:715] (6/8) Epoch 17, batch 1100, loss[loss=0.1007, simple_loss=0.1661, pruned_loss=0.01762, over 4773.00 frames.], tot_loss[loss=0.1345, simple_loss=0.208, pruned_loss=0.03055, over 967524.63 frames.], batch size: 12, lr: 1.32e-04 +2022-05-08 22:03:10,396 INFO [train.py:715] (6/8) Epoch 17, batch 1150, loss[loss=0.152, simple_loss=0.2268, pruned_loss=0.03861, over 4841.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03047, over 969004.81 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:03:51,862 INFO [train.py:715] (6/8) Epoch 17, batch 1200, loss[loss=0.1167, simple_loss=0.1905, pruned_loss=0.02146, over 4910.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03023, over 969804.13 frames.], batch size: 17, lr: 1.32e-04 +2022-05-08 22:04:32,829 INFO [train.py:715] (6/8) Epoch 17, batch 1250, loss[loss=0.1324, simple_loss=0.2077, pruned_loss=0.02861, over 4883.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2081, pruned_loss=0.03046, over 970687.44 frames.], batch size: 19, lr: 1.32e-04 +2022-05-08 22:05:13,886 INFO [train.py:715] (6/8) Epoch 17, batch 1300, loss[loss=0.1439, simple_loss=0.2186, pruned_loss=0.03456, over 4796.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02997, over 971894.53 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 22:05:55,247 INFO [train.py:715] (6/8) Epoch 17, batch 1350, loss[loss=0.1387, simple_loss=0.2126, pruned_loss=0.03242, over 4856.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.0297, over 972043.71 frames.], batch size: 20, lr: 1.32e-04 +2022-05-08 22:06:36,198 INFO [train.py:715] (6/8) Epoch 17, batch 1400, loss[loss=0.1085, simple_loss=0.1934, pruned_loss=0.01177, over 4941.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.0294, over 972660.76 frames.], batch size: 29, lr: 1.32e-04 +2022-05-08 22:07:16,803 INFO [train.py:715] (6/8) Epoch 17, batch 1450, loss[loss=0.1564, 
simple_loss=0.2373, pruned_loss=0.03777, over 4784.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02971, over 973209.08 frames.], batch size: 17, lr: 1.32e-04 +2022-05-08 22:07:57,515 INFO [train.py:715] (6/8) Epoch 17, batch 1500, loss[loss=0.1173, simple_loss=0.1979, pruned_loss=0.01835, over 4978.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.0296, over 972831.08 frames.], batch size: 28, lr: 1.32e-04 +2022-05-08 22:08:39,012 INFO [train.py:715] (6/8) Epoch 17, batch 1550, loss[loss=0.113, simple_loss=0.1966, pruned_loss=0.0147, over 4823.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2088, pruned_loss=0.03, over 972708.02 frames.], batch size: 26, lr: 1.32e-04 +2022-05-08 22:09:20,357 INFO [train.py:715] (6/8) Epoch 17, batch 1600, loss[loss=0.109, simple_loss=0.1895, pruned_loss=0.01429, over 4823.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02976, over 971428.74 frames.], batch size: 13, lr: 1.32e-04 +2022-05-08 22:10:01,029 INFO [train.py:715] (6/8) Epoch 17, batch 1650, loss[loss=0.123, simple_loss=0.202, pruned_loss=0.02196, over 4947.00 frames.], tot_loss[loss=0.1342, simple_loss=0.209, pruned_loss=0.02971, over 972484.20 frames.], batch size: 29, lr: 1.32e-04 +2022-05-08 22:10:42,370 INFO [train.py:715] (6/8) Epoch 17, batch 1700, loss[loss=0.1204, simple_loss=0.1961, pruned_loss=0.02235, over 4951.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2081, pruned_loss=0.02933, over 973383.13 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:11:23,611 INFO [train.py:715] (6/8) Epoch 17, batch 1750, loss[loss=0.1271, simple_loss=0.2031, pruned_loss=0.02558, over 4987.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2084, pruned_loss=0.02933, over 972955.14 frames.], batch size: 28, lr: 1.32e-04 +2022-05-08 22:12:04,541 INFO [train.py:715] (6/8) Epoch 17, batch 1800, loss[loss=0.1312, simple_loss=0.2031, pruned_loss=0.02959, over 4960.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02979, over 972930.79 frames.], batch size: 28, lr: 1.32e-04 +2022-05-08 22:12:45,639 INFO [train.py:715] (6/8) Epoch 17, batch 1850, loss[loss=0.1203, simple_loss=0.1908, pruned_loss=0.02487, over 4860.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02996, over 972700.88 frames.], batch size: 20, lr: 1.32e-04 +2022-05-08 22:13:27,333 INFO [train.py:715] (6/8) Epoch 17, batch 1900, loss[loss=0.1036, simple_loss=0.1783, pruned_loss=0.0145, over 4817.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02948, over 973624.50 frames.], batch size: 27, lr: 1.32e-04 +2022-05-08 22:14:08,485 INFO [train.py:715] (6/8) Epoch 17, batch 1950, loss[loss=0.1366, simple_loss=0.2072, pruned_loss=0.03303, over 4940.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02944, over 973742.40 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:14:49,363 INFO [train.py:715] (6/8) Epoch 17, batch 2000, loss[loss=0.1533, simple_loss=0.221, pruned_loss=0.04282, over 4978.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2066, pruned_loss=0.02935, over 973749.73 frames.], batch size: 39, lr: 1.32e-04 +2022-05-08 22:15:30,345 INFO [train.py:715] (6/8) Epoch 17, batch 2050, loss[loss=0.1289, simple_loss=0.1996, pruned_loss=0.02909, over 4864.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02915, over 973923.62 frames.], batch size: 30, lr: 1.32e-04 +2022-05-08 22:16:11,452 INFO [train.py:715] (6/8) Epoch 17, batch 2100, loss[loss=0.1121, simple_loss=0.1853, 
pruned_loss=0.01947, over 4852.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02898, over 973149.02 frames.], batch size: 13, lr: 1.32e-04 +2022-05-08 22:16:52,789 INFO [train.py:715] (6/8) Epoch 17, batch 2150, loss[loss=0.1573, simple_loss=0.2369, pruned_loss=0.03887, over 4740.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02927, over 973207.54 frames.], batch size: 16, lr: 1.32e-04 +2022-05-08 22:17:34,187 INFO [train.py:715] (6/8) Epoch 17, batch 2200, loss[loss=0.1543, simple_loss=0.2308, pruned_loss=0.03888, over 4869.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02962, over 972174.07 frames.], batch size: 32, lr: 1.32e-04 +2022-05-08 22:18:15,309 INFO [train.py:715] (6/8) Epoch 17, batch 2250, loss[loss=0.1399, simple_loss=0.2147, pruned_loss=0.03253, over 4937.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02929, over 972531.84 frames.], batch size: 23, lr: 1.32e-04 +2022-05-08 22:18:56,047 INFO [train.py:715] (6/8) Epoch 17, batch 2300, loss[loss=0.1329, simple_loss=0.201, pruned_loss=0.03236, over 4831.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02913, over 972457.58 frames.], batch size: 30, lr: 1.32e-04 +2022-05-08 22:19:36,481 INFO [train.py:715] (6/8) Epoch 17, batch 2350, loss[loss=0.1206, simple_loss=0.1927, pruned_loss=0.0242, over 4853.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02837, over 972109.69 frames.], batch size: 13, lr: 1.32e-04 +2022-05-08 22:20:17,319 INFO [train.py:715] (6/8) Epoch 17, batch 2400, loss[loss=0.1346, simple_loss=0.2004, pruned_loss=0.03435, over 4843.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2053, pruned_loss=0.02853, over 970857.93 frames.], batch size: 32, lr: 1.32e-04 +2022-05-08 22:20:58,233 INFO [train.py:715] (6/8) Epoch 17, batch 2450, loss[loss=0.1266, simple_loss=0.2017, pruned_loss=0.02579, over 4948.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.02836, over 970651.03 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:21:39,041 INFO [train.py:715] (6/8) Epoch 17, batch 2500, loss[loss=0.1591, simple_loss=0.2454, pruned_loss=0.03642, over 4763.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02882, over 971054.60 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 22:22:20,004 INFO [train.py:715] (6/8) Epoch 17, batch 2550, loss[loss=0.1247, simple_loss=0.1929, pruned_loss=0.02828, over 4769.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.0289, over 971952.15 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 22:23:00,963 INFO [train.py:715] (6/8) Epoch 17, batch 2600, loss[loss=0.1175, simple_loss=0.1993, pruned_loss=0.0178, over 4982.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02851, over 972792.49 frames.], batch size: 28, lr: 1.32e-04 +2022-05-08 22:23:42,203 INFO [train.py:715] (6/8) Epoch 17, batch 2650, loss[loss=0.1411, simple_loss=0.2045, pruned_loss=0.03887, over 4962.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02854, over 973505.09 frames.], batch size: 35, lr: 1.32e-04 +2022-05-08 22:24:22,847 INFO [train.py:715] (6/8) Epoch 17, batch 2700, loss[loss=0.1155, simple_loss=0.1984, pruned_loss=0.01629, over 4960.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.0289, over 973714.97 frames.], batch size: 39, lr: 1.32e-04 +2022-05-08 22:25:04,064 INFO [train.py:715] (6/8) Epoch 17, batch 2750, loss[loss=0.1328, simple_loss=0.2028, pruned_loss=0.03135, over 
4839.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02844, over 972966.17 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:25:44,580 INFO [train.py:715] (6/8) Epoch 17, batch 2800, loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02976, over 4994.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02835, over 973332.89 frames.], batch size: 24, lr: 1.32e-04 +2022-05-08 22:26:25,559 INFO [train.py:715] (6/8) Epoch 17, batch 2850, loss[loss=0.1606, simple_loss=0.2271, pruned_loss=0.047, over 4734.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.0288, over 972838.00 frames.], batch size: 16, lr: 1.32e-04 +2022-05-08 22:27:06,304 INFO [train.py:715] (6/8) Epoch 17, batch 2900, loss[loss=0.135, simple_loss=0.2188, pruned_loss=0.02563, over 4975.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02932, over 972946.60 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:27:47,270 INFO [train.py:715] (6/8) Epoch 17, batch 2950, loss[loss=0.132, simple_loss=0.2112, pruned_loss=0.0264, over 4929.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.0292, over 971967.20 frames.], batch size: 39, lr: 1.32e-04 +2022-05-08 22:28:28,419 INFO [train.py:715] (6/8) Epoch 17, batch 3000, loss[loss=0.1264, simple_loss=0.2, pruned_loss=0.02641, over 4936.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02932, over 971848.86 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:28:28,420 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 22:28:43,493 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1047, simple_loss=0.1882, pruned_loss=0.01063, over 914524.00 frames. +2022-05-08 22:29:24,684 INFO [train.py:715] (6/8) Epoch 17, batch 3050, loss[loss=0.1279, simple_loss=0.1964, pruned_loss=0.02968, over 4810.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02935, over 971992.55 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 22:30:05,287 INFO [train.py:715] (6/8) Epoch 17, batch 3100, loss[loss=0.1309, simple_loss=0.2164, pruned_loss=0.02269, over 4892.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02884, over 972381.57 frames.], batch size: 22, lr: 1.32e-04 +2022-05-08 22:30:46,409 INFO [train.py:715] (6/8) Epoch 17, batch 3150, loss[loss=0.1362, simple_loss=0.207, pruned_loss=0.03268, over 4987.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02929, over 972873.95 frames.], batch size: 16, lr: 1.32e-04 +2022-05-08 22:31:26,342 INFO [train.py:715] (6/8) Epoch 17, batch 3200, loss[loss=0.117, simple_loss=0.1847, pruned_loss=0.02467, over 4850.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02925, over 972843.48 frames.], batch size: 32, lr: 1.32e-04 +2022-05-08 22:32:07,676 INFO [train.py:715] (6/8) Epoch 17, batch 3250, loss[loss=0.1681, simple_loss=0.2345, pruned_loss=0.0509, over 4814.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02938, over 973563.50 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:32:47,739 INFO [train.py:715] (6/8) Epoch 17, batch 3300, loss[loss=0.1644, simple_loss=0.238, pruned_loss=0.04541, over 4773.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02931, over 973664.49 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 22:33:28,439 INFO [train.py:715] (6/8) Epoch 17, batch 3350, loss[loss=0.122, simple_loss=0.1997, pruned_loss=0.02216, over 4804.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2082, pruned_loss=0.02953, 
over 973535.95 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:34:09,187 INFO [train.py:715] (6/8) Epoch 17, batch 3400, loss[loss=0.1433, simple_loss=0.2071, pruned_loss=0.03971, over 4963.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03005, over 973947.68 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:34:50,593 INFO [train.py:715] (6/8) Epoch 17, batch 3450, loss[loss=0.1268, simple_loss=0.2017, pruned_loss=0.02596, over 4980.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2083, pruned_loss=0.02977, over 973372.13 frames.], batch size: 28, lr: 1.32e-04 +2022-05-08 22:35:30,945 INFO [train.py:715] (6/8) Epoch 17, batch 3500, loss[loss=0.1546, simple_loss=0.2364, pruned_loss=0.03641, over 4976.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02947, over 974118.25 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:36:11,166 INFO [train.py:715] (6/8) Epoch 17, batch 3550, loss[loss=0.134, simple_loss=0.2169, pruned_loss=0.02553, over 4922.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02918, over 972440.63 frames.], batch size: 23, lr: 1.32e-04 +2022-05-08 22:36:52,132 INFO [train.py:715] (6/8) Epoch 17, batch 3600, loss[loss=0.1331, simple_loss=0.2098, pruned_loss=0.0282, over 4819.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02907, over 973153.58 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:37:31,756 INFO [train.py:715] (6/8) Epoch 17, batch 3650, loss[loss=0.1656, simple_loss=0.2364, pruned_loss=0.04743, over 4899.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02887, over 972902.45 frames.], batch size: 22, lr: 1.32e-04 +2022-05-08 22:38:11,920 INFO [train.py:715] (6/8) Epoch 17, batch 3700, loss[loss=0.115, simple_loss=0.1925, pruned_loss=0.01878, over 4754.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2059, pruned_loss=0.0289, over 973320.57 frames.], batch size: 19, lr: 1.32e-04 +2022-05-08 22:38:52,847 INFO [train.py:715] (6/8) Epoch 17, batch 3750, loss[loss=0.1045, simple_loss=0.174, pruned_loss=0.01752, over 4983.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02911, over 972657.17 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:39:33,614 INFO [train.py:715] (6/8) Epoch 17, batch 3800, loss[loss=0.1329, simple_loss=0.2104, pruned_loss=0.0277, over 4830.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02886, over 972542.24 frames.], batch size: 26, lr: 1.32e-04 +2022-05-08 22:40:14,220 INFO [train.py:715] (6/8) Epoch 17, batch 3850, loss[loss=0.1228, simple_loss=0.1978, pruned_loss=0.02389, over 4985.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02933, over 972138.44 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 22:40:54,281 INFO [train.py:715] (6/8) Epoch 17, batch 3900, loss[loss=0.1274, simple_loss=0.2029, pruned_loss=0.02591, over 4873.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02904, over 971925.34 frames.], batch size: 16, lr: 1.32e-04 +2022-05-08 22:41:35,763 INFO [train.py:715] (6/8) Epoch 17, batch 3950, loss[loss=0.1608, simple_loss=0.2267, pruned_loss=0.0474, over 4826.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.0296, over 971455.30 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:42:15,634 INFO [train.py:715] (6/8) Epoch 17, batch 4000, loss[loss=0.1261, simple_loss=0.2009, pruned_loss=0.02564, over 4853.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02971, over 970900.11 frames.], 
batch size: 20, lr: 1.32e-04 +2022-05-08 22:42:56,133 INFO [train.py:715] (6/8) Epoch 17, batch 4050, loss[loss=0.1233, simple_loss=0.2005, pruned_loss=0.0231, over 4816.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02961, over 971186.71 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:43:36,613 INFO [train.py:715] (6/8) Epoch 17, batch 4100, loss[loss=0.1104, simple_loss=0.1828, pruned_loss=0.01898, over 4962.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02949, over 971855.44 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:44:17,670 INFO [train.py:715] (6/8) Epoch 17, batch 4150, loss[loss=0.1282, simple_loss=0.2081, pruned_loss=0.02416, over 4938.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02968, over 971371.04 frames.], batch size: 23, lr: 1.32e-04 +2022-05-08 22:44:56,910 INFO [train.py:715] (6/8) Epoch 17, batch 4200, loss[loss=0.1578, simple_loss=0.2201, pruned_loss=0.04776, over 4867.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02957, over 971064.98 frames.], batch size: 32, lr: 1.32e-04 +2022-05-08 22:45:36,946 INFO [train.py:715] (6/8) Epoch 17, batch 4250, loss[loss=0.1647, simple_loss=0.2363, pruned_loss=0.04655, over 4772.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02977, over 971399.55 frames.], batch size: 16, lr: 1.32e-04 +2022-05-08 22:46:18,120 INFO [train.py:715] (6/8) Epoch 17, batch 4300, loss[loss=0.1438, simple_loss=0.2205, pruned_loss=0.03349, over 4967.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02926, over 971882.17 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 22:46:58,170 INFO [train.py:715] (6/8) Epoch 17, batch 4350, loss[loss=0.1226, simple_loss=0.1982, pruned_loss=0.02351, over 4803.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2087, pruned_loss=0.02961, over 972363.33 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 22:47:38,042 INFO [train.py:715] (6/8) Epoch 17, batch 4400, loss[loss=0.1541, simple_loss=0.2224, pruned_loss=0.04286, over 4742.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2084, pruned_loss=0.02951, over 973301.29 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 22:48:18,898 INFO [train.py:715] (6/8) Epoch 17, batch 4450, loss[loss=0.1545, simple_loss=0.2264, pruned_loss=0.04128, over 4870.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02959, over 972822.13 frames.], batch size: 32, lr: 1.31e-04 +2022-05-08 22:48:59,888 INFO [train.py:715] (6/8) Epoch 17, batch 4500, loss[loss=0.1228, simple_loss=0.195, pruned_loss=0.02526, over 4797.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.0296, over 972458.65 frames.], batch size: 12, lr: 1.31e-04 +2022-05-08 22:49:39,765 INFO [train.py:715] (6/8) Epoch 17, batch 4550, loss[loss=0.1256, simple_loss=0.1976, pruned_loss=0.02687, over 4846.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02867, over 973062.82 frames.], batch size: 32, lr: 1.31e-04 +2022-05-08 22:50:20,201 INFO [train.py:715] (6/8) Epoch 17, batch 4600, loss[loss=0.145, simple_loss=0.2284, pruned_loss=0.03083, over 4881.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02902, over 973029.80 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 22:51:01,214 INFO [train.py:715] (6/8) Epoch 17, batch 4650, loss[loss=0.1104, simple_loss=0.1817, pruned_loss=0.0196, over 4855.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02945, over 972915.86 frames.], batch size: 30, lr: 
1.31e-04 +2022-05-08 22:51:41,126 INFO [train.py:715] (6/8) Epoch 17, batch 4700, loss[loss=0.1262, simple_loss=0.2092, pruned_loss=0.02158, over 4748.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02933, over 972294.06 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 22:52:21,069 INFO [train.py:715] (6/8) Epoch 17, batch 4750, loss[loss=0.1205, simple_loss=0.1984, pruned_loss=0.02127, over 4981.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02956, over 971946.29 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 22:53:02,045 INFO [train.py:715] (6/8) Epoch 17, batch 4800, loss[loss=0.1275, simple_loss=0.2079, pruned_loss=0.02354, over 4803.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02956, over 972135.70 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 22:53:42,803 INFO [train.py:715] (6/8) Epoch 17, batch 4850, loss[loss=0.1665, simple_loss=0.2496, pruned_loss=0.0417, over 4956.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2077, pruned_loss=0.02934, over 972087.96 frames.], batch size: 24, lr: 1.31e-04 +2022-05-08 22:54:22,670 INFO [train.py:715] (6/8) Epoch 17, batch 4900, loss[loss=0.1056, simple_loss=0.1882, pruned_loss=0.01155, over 4819.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02919, over 972025.88 frames.], batch size: 25, lr: 1.31e-04 +2022-05-08 22:55:03,103 INFO [train.py:715] (6/8) Epoch 17, batch 4950, loss[loss=0.1789, simple_loss=0.2521, pruned_loss=0.05291, over 4894.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2085, pruned_loss=0.02965, over 971529.91 frames.], batch size: 39, lr: 1.31e-04 +2022-05-08 22:55:44,142 INFO [train.py:715] (6/8) Epoch 17, batch 5000, loss[loss=0.1367, simple_loss=0.2148, pruned_loss=0.02932, over 4768.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02966, over 971038.92 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 22:56:24,631 INFO [train.py:715] (6/8) Epoch 17, batch 5050, loss[loss=0.1508, simple_loss=0.2345, pruned_loss=0.03353, over 4950.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2083, pruned_loss=0.02965, over 972633.19 frames.], batch size: 29, lr: 1.31e-04 +2022-05-08 22:57:04,197 INFO [train.py:715] (6/8) Epoch 17, batch 5100, loss[loss=0.1319, simple_loss=0.2068, pruned_loss=0.02845, over 4790.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02944, over 971713.70 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 22:57:44,986 INFO [train.py:715] (6/8) Epoch 17, batch 5150, loss[loss=0.1371, simple_loss=0.2174, pruned_loss=0.02841, over 4839.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2081, pruned_loss=0.02949, over 972131.30 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 22:58:26,134 INFO [train.py:715] (6/8) Epoch 17, batch 5200, loss[loss=0.1117, simple_loss=0.183, pruned_loss=0.02023, over 4853.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02926, over 972338.79 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 22:59:05,343 INFO [train.py:715] (6/8) Epoch 17, batch 5250, loss[loss=0.1414, simple_loss=0.2088, pruned_loss=0.03702, over 4692.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02991, over 972570.91 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 22:59:44,891 INFO [train.py:715] (6/8) Epoch 17, batch 5300, loss[loss=0.1319, simple_loss=0.2068, pruned_loss=0.0285, over 4791.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02947, over 972302.65 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 
23:00:25,452 INFO [train.py:715] (6/8) Epoch 17, batch 5350, loss[loss=0.1347, simple_loss=0.1942, pruned_loss=0.03765, over 4759.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02977, over 972445.60 frames.], batch size: 12, lr: 1.31e-04 +2022-05-08 23:01:06,241 INFO [train.py:715] (6/8) Epoch 17, batch 5400, loss[loss=0.1373, simple_loss=0.2053, pruned_loss=0.03468, over 4932.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02948, over 971719.99 frames.], batch size: 23, lr: 1.31e-04 +2022-05-08 23:01:45,349 INFO [train.py:715] (6/8) Epoch 17, batch 5450, loss[loss=0.1439, simple_loss=0.2119, pruned_loss=0.03792, over 4764.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.0296, over 971223.72 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:02:26,552 INFO [train.py:715] (6/8) Epoch 17, batch 5500, loss[loss=0.1503, simple_loss=0.221, pruned_loss=0.03985, over 4779.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02975, over 972126.12 frames.], batch size: 17, lr: 1.31e-04 +2022-05-08 23:03:07,882 INFO [train.py:715] (6/8) Epoch 17, batch 5550, loss[loss=0.1613, simple_loss=0.2262, pruned_loss=0.0482, over 4845.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2059, pruned_loss=0.02922, over 971542.29 frames.], batch size: 32, lr: 1.31e-04 +2022-05-08 23:03:46,995 INFO [train.py:715] (6/8) Epoch 17, batch 5600, loss[loss=0.1183, simple_loss=0.1957, pruned_loss=0.02049, over 4919.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2064, pruned_loss=0.02952, over 972182.43 frames.], batch size: 23, lr: 1.31e-04 +2022-05-08 23:04:27,254 INFO [train.py:715] (6/8) Epoch 17, batch 5650, loss[loss=0.1409, simple_loss=0.2204, pruned_loss=0.03069, over 4856.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02941, over 971850.27 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:05:08,288 INFO [train.py:715] (6/8) Epoch 17, batch 5700, loss[loss=0.1284, simple_loss=0.2047, pruned_loss=0.02605, over 4993.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02931, over 972706.10 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:05:48,477 INFO [train.py:715] (6/8) Epoch 17, batch 5750, loss[loss=0.1098, simple_loss=0.1828, pruned_loss=0.01842, over 4823.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02943, over 973485.44 frames.], batch size: 26, lr: 1.31e-04 +2022-05-08 23:06:27,754 INFO [train.py:715] (6/8) Epoch 17, batch 5800, loss[loss=0.1708, simple_loss=0.2409, pruned_loss=0.05035, over 4804.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.029, over 973140.27 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:07:08,774 INFO [train.py:715] (6/8) Epoch 17, batch 5850, loss[loss=0.1286, simple_loss=0.1996, pruned_loss=0.02882, over 4886.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02891, over 973194.32 frames.], batch size: 17, lr: 1.31e-04 +2022-05-08 23:07:49,084 INFO [train.py:715] (6/8) Epoch 17, batch 5900, loss[loss=0.1184, simple_loss=0.2032, pruned_loss=0.01683, over 4832.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.0286, over 972623.87 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:08:29,709 INFO [train.py:715] (6/8) Epoch 17, batch 5950, loss[loss=0.1345, simple_loss=0.2045, pruned_loss=0.03222, over 4780.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2057, pruned_loss=0.02873, over 972549.88 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:09:09,148 INFO 
[train.py:715] (6/8) Epoch 17, batch 6000, loss[loss=0.1421, simple_loss=0.2123, pruned_loss=0.036, over 4907.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02888, over 972630.99 frames.], batch size: 17, lr: 1.31e-04 +2022-05-08 23:09:09,149 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 23:09:23,454 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1047, simple_loss=0.1881, pruned_loss=0.01069, over 914524.00 frames. +2022-05-08 23:10:02,842 INFO [train.py:715] (6/8) Epoch 17, batch 6050, loss[loss=0.1268, simple_loss=0.2058, pruned_loss=0.02391, over 4920.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02908, over 973320.45 frames.], batch size: 17, lr: 1.31e-04 +2022-05-08 23:10:43,310 INFO [train.py:715] (6/8) Epoch 17, batch 6100, loss[loss=0.1228, simple_loss=0.1918, pruned_loss=0.02692, over 4979.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02908, over 973403.89 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 23:11:22,428 INFO [train.py:715] (6/8) Epoch 17, batch 6150, loss[loss=0.1356, simple_loss=0.2172, pruned_loss=0.02707, over 4757.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02892, over 973293.49 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:12:02,009 INFO [train.py:715] (6/8) Epoch 17, batch 6200, loss[loss=0.1183, simple_loss=0.1911, pruned_loss=0.02272, over 4825.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02922, over 972983.08 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:12:42,483 INFO [train.py:715] (6/8) Epoch 17, batch 6250, loss[loss=0.1788, simple_loss=0.2281, pruned_loss=0.06475, over 4832.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.0295, over 971174.76 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 23:13:22,267 INFO [train.py:715] (6/8) Epoch 17, batch 6300, loss[loss=0.1025, simple_loss=0.1785, pruned_loss=0.01326, over 4940.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02912, over 971628.37 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:14:01,684 INFO [train.py:715] (6/8) Epoch 17, batch 6350, loss[loss=0.1392, simple_loss=0.2149, pruned_loss=0.0318, over 4932.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2066, pruned_loss=0.02939, over 971980.15 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:14:41,502 INFO [train.py:715] (6/8) Epoch 17, batch 6400, loss[loss=0.1419, simple_loss=0.2164, pruned_loss=0.0337, over 4972.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02959, over 973326.71 frames.], batch size: 24, lr: 1.31e-04 +2022-05-08 23:15:21,779 INFO [train.py:715] (6/8) Epoch 17, batch 6450, loss[loss=0.1224, simple_loss=0.1967, pruned_loss=0.02402, over 4871.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02949, over 973815.93 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:16:01,145 INFO [train.py:715] (6/8) Epoch 17, batch 6500, loss[loss=0.1227, simple_loss=0.1874, pruned_loss=0.02905, over 4988.00 frames.], tot_loss[loss=0.133, simple_loss=0.2067, pruned_loss=0.02964, over 973511.51 frames.], batch size: 25, lr: 1.31e-04 +2022-05-08 23:16:40,480 INFO [train.py:715] (6/8) Epoch 17, batch 6550, loss[loss=0.1368, simple_loss=0.2124, pruned_loss=0.03058, over 4887.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.02998, over 973058.11 frames.], batch size: 22, lr: 1.31e-04 +2022-05-08 23:17:20,866 INFO [train.py:715] (6/8) Epoch 17, batch 6600, loss[loss=0.1225, 
simple_loss=0.1985, pruned_loss=0.02323, over 4805.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02955, over 972978.68 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:18:01,040 INFO [train.py:715] (6/8) Epoch 17, batch 6650, loss[loss=0.1429, simple_loss=0.22, pruned_loss=0.0329, over 4956.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02924, over 973010.52 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:18:40,501 INFO [train.py:715] (6/8) Epoch 17, batch 6700, loss[loss=0.1462, simple_loss=0.2148, pruned_loss=0.03882, over 4760.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02922, over 972795.47 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:19:20,731 INFO [train.py:715] (6/8) Epoch 17, batch 6750, loss[loss=0.1527, simple_loss=0.2185, pruned_loss=0.04348, over 4790.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02957, over 973125.01 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 23:20:00,499 INFO [train.py:715] (6/8) Epoch 17, batch 6800, loss[loss=0.141, simple_loss=0.2105, pruned_loss=0.03575, over 4813.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02903, over 972212.33 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:20:41,170 INFO [train.py:715] (6/8) Epoch 17, batch 6850, loss[loss=0.147, simple_loss=0.2203, pruned_loss=0.03683, over 4770.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02893, over 972038.52 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:21:20,242 INFO [train.py:715] (6/8) Epoch 17, batch 6900, loss[loss=0.1435, simple_loss=0.2236, pruned_loss=0.03175, over 4827.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2077, pruned_loss=0.02931, over 972259.81 frames.], batch size: 27, lr: 1.31e-04 +2022-05-08 23:22:00,933 INFO [train.py:715] (6/8) Epoch 17, batch 6950, loss[loss=0.1302, simple_loss=0.2088, pruned_loss=0.02582, over 4838.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02926, over 972256.23 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 23:22:40,664 INFO [train.py:715] (6/8) Epoch 17, batch 7000, loss[loss=0.1364, simple_loss=0.2137, pruned_loss=0.02955, over 4810.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2071, pruned_loss=0.02875, over 972894.96 frames.], batch size: 26, lr: 1.31e-04 +2022-05-08 23:23:20,254 INFO [train.py:715] (6/8) Epoch 17, batch 7050, loss[loss=0.1487, simple_loss=0.2268, pruned_loss=0.03534, over 4899.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02909, over 972233.69 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:24:00,504 INFO [train.py:715] (6/8) Epoch 17, batch 7100, loss[loss=0.132, simple_loss=0.2109, pruned_loss=0.02652, over 4914.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02884, over 971347.26 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:24:40,024 INFO [train.py:715] (6/8) Epoch 17, batch 7150, loss[loss=0.1141, simple_loss=0.1915, pruned_loss=0.01833, over 4800.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02863, over 970554.08 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:25:19,625 INFO [train.py:715] (6/8) Epoch 17, batch 7200, loss[loss=0.1019, simple_loss=0.1665, pruned_loss=0.0186, over 4848.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02869, over 970500.20 frames.], batch size: 12, lr: 1.31e-04 +2022-05-08 23:25:58,582 INFO [train.py:715] (6/8) Epoch 17, batch 7250, loss[loss=0.1284, simple_loss=0.2138, 
pruned_loss=0.02149, over 4911.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02845, over 970725.90 frames.], batch size: 29, lr: 1.31e-04 +2022-05-08 23:26:39,072 INFO [train.py:715] (6/8) Epoch 17, batch 7300, loss[loss=0.1381, simple_loss=0.2198, pruned_loss=0.02819, over 4913.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2065, pruned_loss=0.02839, over 970844.56 frames.], batch size: 39, lr: 1.31e-04 +2022-05-08 23:27:18,028 INFO [train.py:715] (6/8) Epoch 17, batch 7350, loss[loss=0.111, simple_loss=0.1878, pruned_loss=0.01716, over 4964.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2064, pruned_loss=0.02816, over 971303.04 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:27:56,381 INFO [train.py:715] (6/8) Epoch 17, batch 7400, loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03354, over 4787.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2065, pruned_loss=0.02828, over 971414.54 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:28:36,429 INFO [train.py:715] (6/8) Epoch 17, batch 7450, loss[loss=0.1356, simple_loss=0.2116, pruned_loss=0.02976, over 4961.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2064, pruned_loss=0.02826, over 972549.68 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 23:29:15,433 INFO [train.py:715] (6/8) Epoch 17, batch 7500, loss[loss=0.1273, simple_loss=0.2082, pruned_loss=0.0232, over 4829.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02841, over 972437.78 frames.], batch size: 26, lr: 1.31e-04 +2022-05-08 23:29:55,163 INFO [train.py:715] (6/8) Epoch 17, batch 7550, loss[loss=0.1159, simple_loss=0.1894, pruned_loss=0.02121, over 4687.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02836, over 972080.23 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:30:34,490 INFO [train.py:715] (6/8) Epoch 17, batch 7600, loss[loss=0.1445, simple_loss=0.2086, pruned_loss=0.04014, over 4829.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2053, pruned_loss=0.02847, over 972745.24 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 23:31:14,613 INFO [train.py:715] (6/8) Epoch 17, batch 7650, loss[loss=0.1429, simple_loss=0.2221, pruned_loss=0.0319, over 4813.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02825, over 972317.38 frames.], batch size: 25, lr: 1.31e-04 +2022-05-08 23:31:54,496 INFO [train.py:715] (6/8) Epoch 17, batch 7700, loss[loss=0.1369, simple_loss=0.2086, pruned_loss=0.03266, over 4828.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02839, over 972390.03 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 23:32:33,792 INFO [train.py:715] (6/8) Epoch 17, batch 7750, loss[loss=0.1303, simple_loss=0.2026, pruned_loss=0.02902, over 4712.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02843, over 972517.43 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:33:14,389 INFO [train.py:715] (6/8) Epoch 17, batch 7800, loss[loss=0.1202, simple_loss=0.197, pruned_loss=0.02176, over 4906.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2051, pruned_loss=0.0286, over 972056.67 frames.], batch size: 29, lr: 1.31e-04 +2022-05-08 23:33:54,604 INFO [train.py:715] (6/8) Epoch 17, batch 7850, loss[loss=0.1278, simple_loss=0.1962, pruned_loss=0.02969, over 4871.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2051, pruned_loss=0.02854, over 971684.96 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:34:34,851 INFO [train.py:715] (6/8) Epoch 17, batch 7900, loss[loss=0.1386, simple_loss=0.2074, pruned_loss=0.03485, over 
4788.00 frames.], tot_loss[loss=0.132, simple_loss=0.2058, pruned_loss=0.02913, over 971098.45 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:35:13,814 INFO [train.py:715] (6/8) Epoch 17, batch 7950, loss[loss=0.1331, simple_loss=0.198, pruned_loss=0.03409, over 4784.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.02982, over 972739.78 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 23:35:53,567 INFO [train.py:715] (6/8) Epoch 17, batch 8000, loss[loss=0.1443, simple_loss=0.2206, pruned_loss=0.03401, over 4971.00 frames.], tot_loss[loss=0.1321, simple_loss=0.206, pruned_loss=0.02914, over 973418.00 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:36:33,457 INFO [train.py:715] (6/8) Epoch 17, batch 8050, loss[loss=0.132, simple_loss=0.2124, pruned_loss=0.02582, over 4797.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.0288, over 973669.84 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:37:12,793 INFO [train.py:715] (6/8) Epoch 17, batch 8100, loss[loss=0.1363, simple_loss=0.2159, pruned_loss=0.02834, over 4703.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02884, over 972578.28 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:37:52,682 INFO [train.py:715] (6/8) Epoch 17, batch 8150, loss[loss=0.1177, simple_loss=0.1894, pruned_loss=0.02301, over 4877.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02882, over 972582.72 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:38:32,356 INFO [train.py:715] (6/8) Epoch 17, batch 8200, loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03002, over 4751.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2052, pruned_loss=0.02849, over 972972.83 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:39:14,690 INFO [train.py:715] (6/8) Epoch 17, batch 8250, loss[loss=0.1217, simple_loss=0.1981, pruned_loss=0.02266, over 4856.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2054, pruned_loss=0.02864, over 973586.81 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:39:53,905 INFO [train.py:715] (6/8) Epoch 17, batch 8300, loss[loss=0.1293, simple_loss=0.2105, pruned_loss=0.0241, over 4965.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02909, over 973403.32 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:40:33,621 INFO [train.py:715] (6/8) Epoch 17, batch 8350, loss[loss=0.1492, simple_loss=0.2213, pruned_loss=0.03849, over 4944.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02936, over 973489.70 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:41:13,217 INFO [train.py:715] (6/8) Epoch 17, batch 8400, loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02878, over 4851.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02988, over 973022.34 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 23:41:52,758 INFO [train.py:715] (6/8) Epoch 17, batch 8450, loss[loss=0.1283, simple_loss=0.2076, pruned_loss=0.02447, over 4883.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.0294, over 972567.95 frames.], batch size: 38, lr: 1.31e-04 +2022-05-08 23:42:32,328 INFO [train.py:715] (6/8) Epoch 17, batch 8500, loss[loss=0.1272, simple_loss=0.2082, pruned_loss=0.02312, over 4952.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02898, over 972892.23 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:43:12,140 INFO [train.py:715] (6/8) Epoch 17, batch 8550, loss[loss=0.1274, simple_loss=0.2153, pruned_loss=0.01975, over 4955.00 frames.], 
tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02915, over 972556.07 frames.], batch size: 24, lr: 1.31e-04 +2022-05-08 23:43:52,001 INFO [train.py:715] (6/8) Epoch 17, batch 8600, loss[loss=0.1408, simple_loss=0.2047, pruned_loss=0.03846, over 4663.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02921, over 972568.64 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 23:44:31,010 INFO [train.py:715] (6/8) Epoch 17, batch 8650, loss[loss=0.1567, simple_loss=0.2305, pruned_loss=0.04141, over 4912.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02874, over 971792.34 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:45:10,883 INFO [train.py:715] (6/8) Epoch 17, batch 8700, loss[loss=0.1484, simple_loss=0.2239, pruned_loss=0.03647, over 4867.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.0292, over 972383.80 frames.], batch size: 22, lr: 1.31e-04 +2022-05-08 23:45:50,315 INFO [train.py:715] (6/8) Epoch 17, batch 8750, loss[loss=0.1531, simple_loss=0.2139, pruned_loss=0.04609, over 4968.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02982, over 972828.40 frames.], batch size: 35, lr: 1.31e-04 +2022-05-08 23:46:29,857 INFO [train.py:715] (6/8) Epoch 17, batch 8800, loss[loss=0.1326, simple_loss=0.2051, pruned_loss=0.03004, over 4949.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02967, over 972848.82 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:47:09,589 INFO [train.py:715] (6/8) Epoch 17, batch 8850, loss[loss=0.1244, simple_loss=0.1995, pruned_loss=0.02459, over 4923.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02906, over 973439.69 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:47:48,798 INFO [train.py:715] (6/8) Epoch 17, batch 8900, loss[loss=0.1094, simple_loss=0.1801, pruned_loss=0.01933, over 4941.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02884, over 973169.49 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:48:28,445 INFO [train.py:715] (6/8) Epoch 17, batch 8950, loss[loss=0.1187, simple_loss=0.1844, pruned_loss=0.0265, over 4821.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2073, pruned_loss=0.02909, over 973677.41 frames.], batch size: 27, lr: 1.31e-04 +2022-05-08 23:49:07,470 INFO [train.py:715] (6/8) Epoch 17, batch 9000, loss[loss=0.1462, simple_loss=0.2159, pruned_loss=0.03819, over 4817.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02913, over 973219.75 frames.], batch size: 26, lr: 1.31e-04 +2022-05-08 23:49:07,471 INFO [train.py:733] (6/8) Computing validation loss +2022-05-08 23:49:17,246 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1048, simple_loss=0.1882, pruned_loss=0.01072, over 914524.00 frames. 
+2022-05-08 23:49:56,411 INFO [train.py:715] (6/8) Epoch 17, batch 9050, loss[loss=0.1698, simple_loss=0.2504, pruned_loss=0.04463, over 4862.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02928, over 973414.08 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 23:50:36,249 INFO [train.py:715] (6/8) Epoch 17, batch 9100, loss[loss=0.1182, simple_loss=0.2005, pruned_loss=0.01793, over 4953.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02937, over 973813.12 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:51:15,868 INFO [train.py:715] (6/8) Epoch 17, batch 9150, loss[loss=0.1237, simple_loss=0.2042, pruned_loss=0.02154, over 4784.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02927, over 974004.84 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:51:54,744 INFO [train.py:715] (6/8) Epoch 17, batch 9200, loss[loss=0.1497, simple_loss=0.227, pruned_loss=0.03623, over 4968.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.0294, over 973346.90 frames.], batch size: 35, lr: 1.31e-04 +2022-05-08 23:52:34,933 INFO [train.py:715] (6/8) Epoch 17, batch 9250, loss[loss=0.1141, simple_loss=0.1929, pruned_loss=0.01762, over 4898.00 frames.], tot_loss[loss=0.133, simple_loss=0.2076, pruned_loss=0.0292, over 971883.91 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:53:14,618 INFO [train.py:715] (6/8) Epoch 17, batch 9300, loss[loss=0.1105, simple_loss=0.185, pruned_loss=0.01799, over 4785.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02939, over 970841.23 frames.], batch size: 17, lr: 1.31e-04 +2022-05-08 23:53:53,953 INFO [train.py:715] (6/8) Epoch 17, batch 9350, loss[loss=0.1224, simple_loss=0.1948, pruned_loss=0.02504, over 4978.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02928, over 971352.86 frames.], batch size: 24, lr: 1.31e-04 +2022-05-08 23:54:33,279 INFO [train.py:715] (6/8) Epoch 17, batch 9400, loss[loss=0.1368, simple_loss=0.2124, pruned_loss=0.03061, over 4865.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02903, over 971580.88 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:55:13,717 INFO [train.py:715] (6/8) Epoch 17, batch 9450, loss[loss=0.1512, simple_loss=0.2215, pruned_loss=0.04042, over 4892.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02912, over 972354.61 frames.], batch size: 32, lr: 1.31e-04 +2022-05-08 23:55:53,692 INFO [train.py:715] (6/8) Epoch 17, batch 9500, loss[loss=0.1472, simple_loss=0.2225, pruned_loss=0.036, over 4926.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02933, over 971759.44 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:56:32,945 INFO [train.py:715] (6/8) Epoch 17, batch 9550, loss[loss=0.1248, simple_loss=0.2041, pruned_loss=0.02275, over 4986.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02872, over 971791.94 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 23:57:12,482 INFO [train.py:715] (6/8) Epoch 17, batch 9600, loss[loss=0.1296, simple_loss=0.2041, pruned_loss=0.02759, over 4852.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02948, over 971924.67 frames.], batch size: 34, lr: 1.31e-04 +2022-05-08 23:57:52,762 INFO [train.py:715] (6/8) Epoch 17, batch 9650, loss[loss=0.1243, simple_loss=0.2007, pruned_loss=0.02396, over 4797.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02909, over 971774.68 frames.], batch size: 25, lr: 1.31e-04 +2022-05-08 23:58:31,950 INFO 
[train.py:715] (6/8) Epoch 17, batch 9700, loss[loss=0.1263, simple_loss=0.2009, pruned_loss=0.02585, over 4968.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02913, over 971940.32 frames.], batch size: 24, lr: 1.31e-04 +2022-05-08 23:59:11,715 INFO [train.py:715] (6/8) Epoch 17, batch 9750, loss[loss=0.1518, simple_loss=0.2174, pruned_loss=0.04309, over 4834.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02901, over 971987.42 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 23:59:51,460 INFO [train.py:715] (6/8) Epoch 17, batch 9800, loss[loss=0.1303, simple_loss=0.2069, pruned_loss=0.02687, over 4784.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2081, pruned_loss=0.0294, over 971809.44 frames.], batch size: 14, lr: 1.31e-04 +2022-05-09 00:00:31,064 INFO [train.py:715] (6/8) Epoch 17, batch 9850, loss[loss=0.1369, simple_loss=0.2182, pruned_loss=0.02783, over 4925.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2084, pruned_loss=0.02949, over 972057.25 frames.], batch size: 39, lr: 1.31e-04 +2022-05-09 00:01:10,444 INFO [train.py:715] (6/8) Epoch 17, batch 9900, loss[loss=0.1227, simple_loss=0.2001, pruned_loss=0.02264, over 4916.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.02959, over 972460.85 frames.], batch size: 23, lr: 1.31e-04 +2022-05-09 00:01:49,850 INFO [train.py:715] (6/8) Epoch 17, batch 9950, loss[loss=0.1508, simple_loss=0.2273, pruned_loss=0.03718, over 4901.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.02961, over 971921.18 frames.], batch size: 39, lr: 1.31e-04 +2022-05-09 00:02:30,144 INFO [train.py:715] (6/8) Epoch 17, batch 10000, loss[loss=0.1324, simple_loss=0.2138, pruned_loss=0.02544, over 4942.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02918, over 971887.19 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:03:09,409 INFO [train.py:715] (6/8) Epoch 17, batch 10050, loss[loss=0.1224, simple_loss=0.1896, pruned_loss=0.02765, over 4699.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02954, over 971693.02 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:03:48,279 INFO [train.py:715] (6/8) Epoch 17, batch 10100, loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03278, over 4795.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02927, over 972052.94 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:04:27,595 INFO [train.py:715] (6/8) Epoch 17, batch 10150, loss[loss=0.1297, simple_loss=0.2095, pruned_loss=0.02498, over 4698.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02898, over 972152.42 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:05:06,927 INFO [train.py:715] (6/8) Epoch 17, batch 10200, loss[loss=0.1293, simple_loss=0.2053, pruned_loss=0.02665, over 4990.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02886, over 971988.99 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:05:44,871 INFO [train.py:715] (6/8) Epoch 17, batch 10250, loss[loss=0.1195, simple_loss=0.1894, pruned_loss=0.0248, over 4788.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02916, over 972679.28 frames.], batch size: 17, lr: 1.31e-04 +2022-05-09 00:06:24,650 INFO [train.py:715] (6/8) Epoch 17, batch 10300, loss[loss=0.1544, simple_loss=0.2243, pruned_loss=0.04224, over 4838.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02936, over 972641.69 frames.], batch size: 30, lr: 1.31e-04 +2022-05-09 00:07:04,577 INFO [train.py:715] 
(6/8) Epoch 17, batch 10350, loss[loss=0.1151, simple_loss=0.1942, pruned_loss=0.01802, over 4832.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02937, over 972394.76 frames.], batch size: 13, lr: 1.31e-04 +2022-05-09 00:07:43,246 INFO [train.py:715] (6/8) Epoch 17, batch 10400, loss[loss=0.1372, simple_loss=0.2154, pruned_loss=0.02948, over 4947.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02936, over 972647.65 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:08:22,348 INFO [train.py:715] (6/8) Epoch 17, batch 10450, loss[loss=0.1245, simple_loss=0.1864, pruned_loss=0.03129, over 4898.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02933, over 971470.19 frames.], batch size: 19, lr: 1.31e-04 +2022-05-09 00:09:02,377 INFO [train.py:715] (6/8) Epoch 17, batch 10500, loss[loss=0.1346, simple_loss=0.2249, pruned_loss=0.02212, over 4952.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02948, over 971873.97 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:09:41,416 INFO [train.py:715] (6/8) Epoch 17, batch 10550, loss[loss=0.1357, simple_loss=0.21, pruned_loss=0.03073, over 4869.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02913, over 971801.11 frames.], batch size: 20, lr: 1.31e-04 +2022-05-09 00:10:19,766 INFO [train.py:715] (6/8) Epoch 17, batch 10600, loss[loss=0.145, simple_loss=0.2195, pruned_loss=0.0352, over 4807.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02887, over 972056.73 frames.], batch size: 26, lr: 1.31e-04 +2022-05-09 00:10:59,068 INFO [train.py:715] (6/8) Epoch 17, batch 10650, loss[loss=0.1526, simple_loss=0.2343, pruned_loss=0.03544, over 4908.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2061, pruned_loss=0.02913, over 971996.05 frames.], batch size: 19, lr: 1.31e-04 +2022-05-09 00:11:38,594 INFO [train.py:715] (6/8) Epoch 17, batch 10700, loss[loss=0.1577, simple_loss=0.2364, pruned_loss=0.03949, over 4948.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02898, over 972780.11 frames.], batch size: 39, lr: 1.31e-04 +2022-05-09 00:12:17,262 INFO [train.py:715] (6/8) Epoch 17, batch 10750, loss[loss=0.123, simple_loss=0.2137, pruned_loss=0.01618, over 4801.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02876, over 972531.01 frames.], batch size: 25, lr: 1.31e-04 +2022-05-09 00:12:56,257 INFO [train.py:715] (6/8) Epoch 17, batch 10800, loss[loss=0.1204, simple_loss=0.1937, pruned_loss=0.02358, over 4796.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02889, over 972959.87 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:13:36,023 INFO [train.py:715] (6/8) Epoch 17, batch 10850, loss[loss=0.1207, simple_loss=0.1993, pruned_loss=0.02102, over 4960.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.029, over 973608.52 frames.], batch size: 14, lr: 1.31e-04 +2022-05-09 00:14:15,593 INFO [train.py:715] (6/8) Epoch 17, batch 10900, loss[loss=0.1221, simple_loss=0.1895, pruned_loss=0.02738, over 4860.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.0294, over 973460.83 frames.], batch size: 20, lr: 1.31e-04 +2022-05-09 00:14:53,779 INFO [train.py:715] (6/8) Epoch 17, batch 10950, loss[loss=0.1487, simple_loss=0.2208, pruned_loss=0.03834, over 4973.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02962, over 973547.04 frames.], batch size: 25, lr: 1.31e-04 +2022-05-09 00:15:33,875 INFO [train.py:715] (6/8) Epoch 17, 
batch 11000, loss[loss=0.1354, simple_loss=0.2136, pruned_loss=0.0286, over 4937.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2066, pruned_loss=0.02934, over 973395.82 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:16:13,751 INFO [train.py:715] (6/8) Epoch 17, batch 11050, loss[loss=0.125, simple_loss=0.2112, pruned_loss=0.01941, over 4972.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.0294, over 973040.59 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:16:52,425 INFO [train.py:715] (6/8) Epoch 17, batch 11100, loss[loss=0.1578, simple_loss=0.2315, pruned_loss=0.04211, over 4690.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02915, over 973089.12 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:17:31,484 INFO [train.py:715] (6/8) Epoch 17, batch 11150, loss[loss=0.1168, simple_loss=0.1865, pruned_loss=0.02352, over 4879.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02889, over 973774.20 frames.], batch size: 32, lr: 1.31e-04 +2022-05-09 00:18:11,488 INFO [train.py:715] (6/8) Epoch 17, batch 11200, loss[loss=0.1432, simple_loss=0.2093, pruned_loss=0.03859, over 4964.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02924, over 972564.93 frames.], batch size: 35, lr: 1.31e-04 +2022-05-09 00:18:51,598 INFO [train.py:715] (6/8) Epoch 17, batch 11250, loss[loss=0.1172, simple_loss=0.1951, pruned_loss=0.0197, over 4789.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02885, over 972932.14 frames.], batch size: 17, lr: 1.31e-04 +2022-05-09 00:19:29,837 INFO [train.py:715] (6/8) Epoch 17, batch 11300, loss[loss=0.1382, simple_loss=0.2065, pruned_loss=0.03491, over 4766.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2054, pruned_loss=0.02858, over 972684.37 frames.], batch size: 14, lr: 1.31e-04 +2022-05-09 00:20:09,306 INFO [train.py:715] (6/8) Epoch 17, batch 11350, loss[loss=0.1093, simple_loss=0.1867, pruned_loss=0.01592, over 4870.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02821, over 973376.06 frames.], batch size: 22, lr: 1.31e-04 +2022-05-09 00:20:49,489 INFO [train.py:715] (6/8) Epoch 17, batch 11400, loss[loss=0.1195, simple_loss=0.2105, pruned_loss=0.01425, over 4945.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2057, pruned_loss=0.0281, over 972482.03 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:21:28,503 INFO [train.py:715] (6/8) Epoch 17, batch 11450, loss[loss=0.1302, simple_loss=0.2002, pruned_loss=0.03016, over 4806.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02849, over 972359.61 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:22:07,513 INFO [train.py:715] (6/8) Epoch 17, batch 11500, loss[loss=0.1478, simple_loss=0.2331, pruned_loss=0.03131, over 4851.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2056, pruned_loss=0.02893, over 972418.62 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:22:47,222 INFO [train.py:715] (6/8) Epoch 17, batch 11550, loss[loss=0.101, simple_loss=0.1677, pruned_loss=0.0172, over 4774.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02904, over 972369.35 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:23:27,163 INFO [train.py:715] (6/8) Epoch 17, batch 11600, loss[loss=0.1154, simple_loss=0.1808, pruned_loss=0.02495, over 4806.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.0292, over 972264.66 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:24:05,130 INFO [train.py:715] (6/8) Epoch 17, batch 11650, 
loss[loss=0.1551, simple_loss=0.2306, pruned_loss=0.03979, over 4882.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02906, over 971818.86 frames.], batch size: 32, lr: 1.31e-04 +2022-05-09 00:24:44,956 INFO [train.py:715] (6/8) Epoch 17, batch 11700, loss[loss=0.1465, simple_loss=0.2233, pruned_loss=0.03483, over 4836.00 frames.], tot_loss[loss=0.1322, simple_loss=0.206, pruned_loss=0.02922, over 971568.08 frames.], batch size: 30, lr: 1.31e-04 +2022-05-09 00:25:24,937 INFO [train.py:715] (6/8) Epoch 17, batch 11750, loss[loss=0.1339, simple_loss=0.2059, pruned_loss=0.03096, over 4908.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02975, over 971745.34 frames.], batch size: 22, lr: 1.31e-04 +2022-05-09 00:26:03,882 INFO [train.py:715] (6/8) Epoch 17, batch 11800, loss[loss=0.1276, simple_loss=0.2076, pruned_loss=0.02375, over 4967.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2061, pruned_loss=0.02934, over 972092.48 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:26:42,877 INFO [train.py:715] (6/8) Epoch 17, batch 11850, loss[loss=0.1157, simple_loss=0.1934, pruned_loss=0.01898, over 4865.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02956, over 972842.08 frames.], batch size: 20, lr: 1.31e-04 +2022-05-09 00:27:22,152 INFO [train.py:715] (6/8) Epoch 17, batch 11900, loss[loss=0.1104, simple_loss=0.1856, pruned_loss=0.01764, over 4941.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.0291, over 972259.96 frames.], batch size: 39, lr: 1.31e-04 +2022-05-09 00:28:01,951 INFO [train.py:715] (6/8) Epoch 17, batch 11950, loss[loss=0.1327, simple_loss=0.2023, pruned_loss=0.03157, over 4913.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02903, over 972081.20 frames.], batch size: 19, lr: 1.31e-04 +2022-05-09 00:28:40,982 INFO [train.py:715] (6/8) Epoch 17, batch 12000, loss[loss=0.1286, simple_loss=0.2002, pruned_loss=0.02848, over 4832.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02892, over 972380.33 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:28:40,982 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 00:28:52,719 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1048, simple_loss=0.1882, pruned_loss=0.0107, over 914524.00 frames. 
+2022-05-09 00:29:31,842 INFO [train.py:715] (6/8) Epoch 17, batch 12050, loss[loss=0.1523, simple_loss=0.2206, pruned_loss=0.04199, over 4857.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02889, over 972376.00 frames.], batch size: 32, lr: 1.31e-04 +2022-05-09 00:30:10,921 INFO [train.py:715] (6/8) Epoch 17, batch 12100, loss[loss=0.12, simple_loss=0.1968, pruned_loss=0.02159, over 4924.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02919, over 972768.11 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:30:50,937 INFO [train.py:715] (6/8) Epoch 17, batch 12150, loss[loss=0.1346, simple_loss=0.2062, pruned_loss=0.03156, over 4926.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02946, over 971510.57 frames.], batch size: 39, lr: 1.31e-04 +2022-05-09 00:31:29,662 INFO [train.py:715] (6/8) Epoch 17, batch 12200, loss[loss=0.1396, simple_loss=0.2107, pruned_loss=0.03428, over 4778.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2061, pruned_loss=0.02952, over 970964.64 frames.], batch size: 17, lr: 1.31e-04 +2022-05-09 00:32:08,198 INFO [train.py:715] (6/8) Epoch 17, batch 12250, loss[loss=0.1384, simple_loss=0.2104, pruned_loss=0.03321, over 4918.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2066, pruned_loss=0.02948, over 970615.40 frames.], batch size: 39, lr: 1.31e-04 +2022-05-09 00:32:47,690 INFO [train.py:715] (6/8) Epoch 17, batch 12300, loss[loss=0.1229, simple_loss=0.1941, pruned_loss=0.02586, over 4975.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02932, over 970783.23 frames.], batch size: 35, lr: 1.31e-04 +2022-05-09 00:33:26,863 INFO [train.py:715] (6/8) Epoch 17, batch 12350, loss[loss=0.1138, simple_loss=0.1923, pruned_loss=0.0176, over 4871.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02944, over 971047.09 frames.], batch size: 22, lr: 1.31e-04 +2022-05-09 00:34:05,568 INFO [train.py:715] (6/8) Epoch 17, batch 12400, loss[loss=0.1299, simple_loss=0.2086, pruned_loss=0.02561, over 4827.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.0292, over 971642.83 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:34:44,617 INFO [train.py:715] (6/8) Epoch 17, batch 12450, loss[loss=0.1472, simple_loss=0.2223, pruned_loss=0.03599, over 4853.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02907, over 971891.45 frames.], batch size: 20, lr: 1.31e-04 +2022-05-09 00:35:24,998 INFO [train.py:715] (6/8) Epoch 17, batch 12500, loss[loss=0.1123, simple_loss=0.1896, pruned_loss=0.01745, over 4797.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.0288, over 971323.14 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:36:03,581 INFO [train.py:715] (6/8) Epoch 17, batch 12550, loss[loss=0.1356, simple_loss=0.2043, pruned_loss=0.03345, over 4939.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02875, over 970562.83 frames.], batch size: 23, lr: 1.31e-04 +2022-05-09 00:36:42,926 INFO [train.py:715] (6/8) Epoch 17, batch 12600, loss[loss=0.1318, simple_loss=0.2101, pruned_loss=0.02672, over 4806.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02882, over 970975.00 frames.], batch size: 24, lr: 1.31e-04 +2022-05-09 00:37:22,860 INFO [train.py:715] (6/8) Epoch 17, batch 12650, loss[loss=0.1463, simple_loss=0.2241, pruned_loss=0.0342, over 4868.00 frames.], tot_loss[loss=0.1321, simple_loss=0.206, pruned_loss=0.02911, over 971163.64 frames.], batch size: 20, lr: 1.31e-04 +2022-05-09 
00:38:02,855 INFO [train.py:715] (6/8) Epoch 17, batch 12700, loss[loss=0.1249, simple_loss=0.1945, pruned_loss=0.02769, over 4844.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.0289, over 971160.76 frames.], batch size: 30, lr: 1.31e-04 +2022-05-09 00:38:42,164 INFO [train.py:715] (6/8) Epoch 17, batch 12750, loss[loss=0.1392, simple_loss=0.213, pruned_loss=0.03271, over 4894.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02895, over 972073.65 frames.], batch size: 17, lr: 1.31e-04 +2022-05-09 00:39:20,964 INFO [train.py:715] (6/8) Epoch 17, batch 12800, loss[loss=0.1307, simple_loss=0.202, pruned_loss=0.02974, over 4754.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02864, over 972156.30 frames.], batch size: 19, lr: 1.31e-04 +2022-05-09 00:40:00,597 INFO [train.py:715] (6/8) Epoch 17, batch 12850, loss[loss=0.1703, simple_loss=0.2561, pruned_loss=0.04221, over 4875.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02885, over 972975.90 frames.], batch size: 22, lr: 1.31e-04 +2022-05-09 00:40:39,042 INFO [train.py:715] (6/8) Epoch 17, batch 12900, loss[loss=0.1408, simple_loss=0.2114, pruned_loss=0.03508, over 4981.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02933, over 973232.49 frames.], batch size: 14, lr: 1.31e-04 +2022-05-09 00:41:18,431 INFO [train.py:715] (6/8) Epoch 17, batch 12950, loss[loss=0.1377, simple_loss=0.2015, pruned_loss=0.03694, over 4838.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02928, over 973327.75 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:41:57,021 INFO [train.py:715] (6/8) Epoch 17, batch 13000, loss[loss=0.1248, simple_loss=0.1977, pruned_loss=0.02589, over 4847.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02919, over 973564.20 frames.], batch size: 32, lr: 1.31e-04 +2022-05-09 00:42:36,105 INFO [train.py:715] (6/8) Epoch 17, batch 13050, loss[loss=0.1335, simple_loss=0.2063, pruned_loss=0.03038, over 4952.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.0292, over 973269.94 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:43:15,229 INFO [train.py:715] (6/8) Epoch 17, batch 13100, loss[loss=0.1458, simple_loss=0.217, pruned_loss=0.03731, over 4913.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02945, over 972464.37 frames.], batch size: 17, lr: 1.31e-04 +2022-05-09 00:43:54,021 INFO [train.py:715] (6/8) Epoch 17, batch 13150, loss[loss=0.1285, simple_loss=0.1993, pruned_loss=0.02883, over 4887.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02982, over 972833.83 frames.], batch size: 22, lr: 1.31e-04 +2022-05-09 00:44:33,800 INFO [train.py:715] (6/8) Epoch 17, batch 13200, loss[loss=0.1069, simple_loss=0.1804, pruned_loss=0.01674, over 4832.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.0292, over 972628.99 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:45:12,325 INFO [train.py:715] (6/8) Epoch 17, batch 13250, loss[loss=0.1437, simple_loss=0.2168, pruned_loss=0.03533, over 4842.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02919, over 972304.52 frames.], batch size: 20, lr: 1.31e-04 +2022-05-09 00:45:51,625 INFO [train.py:715] (6/8) Epoch 17, batch 13300, loss[loss=0.1497, simple_loss=0.2238, pruned_loss=0.03782, over 4934.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02944, over 972589.75 frames.], batch size: 23, lr: 1.31e-04 +2022-05-09 00:46:30,713 
INFO [train.py:715] (6/8) Epoch 17, batch 13350, loss[loss=0.1275, simple_loss=0.1937, pruned_loss=0.03061, over 4764.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.0293, over 971867.99 frames.], batch size: 19, lr: 1.31e-04 +2022-05-09 00:47:09,946 INFO [train.py:715] (6/8) Epoch 17, batch 13400, loss[loss=0.117, simple_loss=0.1928, pruned_loss=0.02055, over 4759.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2066, pruned_loss=0.02948, over 970909.10 frames.], batch size: 19, lr: 1.31e-04 +2022-05-09 00:47:49,251 INFO [train.py:715] (6/8) Epoch 17, batch 13450, loss[loss=0.1596, simple_loss=0.231, pruned_loss=0.04408, over 4828.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2075, pruned_loss=0.03015, over 971342.09 frames.], batch size: 30, lr: 1.30e-04 +2022-05-09 00:48:27,714 INFO [train.py:715] (6/8) Epoch 17, batch 13500, loss[loss=0.1298, simple_loss=0.1931, pruned_loss=0.03327, over 4814.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2076, pruned_loss=0.03044, over 971706.03 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 00:49:07,423 INFO [train.py:715] (6/8) Epoch 17, batch 13550, loss[loss=0.126, simple_loss=0.1897, pruned_loss=0.03118, over 4840.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03034, over 972482.65 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 00:49:45,777 INFO [train.py:715] (6/8) Epoch 17, batch 13600, loss[loss=0.1427, simple_loss=0.2182, pruned_loss=0.03358, over 4778.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03033, over 972505.46 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 00:50:24,821 INFO [train.py:715] (6/8) Epoch 17, batch 13650, loss[loss=0.1385, simple_loss=0.2144, pruned_loss=0.03132, over 4947.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02981, over 972306.33 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 00:51:04,639 INFO [train.py:715] (6/8) Epoch 17, batch 13700, loss[loss=0.1355, simple_loss=0.1946, pruned_loss=0.03821, over 4783.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02969, over 971045.54 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 00:51:43,961 INFO [train.py:715] (6/8) Epoch 17, batch 13750, loss[loss=0.1364, simple_loss=0.2131, pruned_loss=0.02988, over 4988.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02968, over 971348.89 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 00:52:24,101 INFO [train.py:715] (6/8) Epoch 17, batch 13800, loss[loss=0.148, simple_loss=0.2297, pruned_loss=0.0332, over 4974.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02997, over 972113.94 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 00:53:03,512 INFO [train.py:715] (6/8) Epoch 17, batch 13850, loss[loss=0.1478, simple_loss=0.2185, pruned_loss=0.03849, over 4912.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02968, over 971883.50 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 00:53:43,339 INFO [train.py:715] (6/8) Epoch 17, batch 13900, loss[loss=0.135, simple_loss=0.1906, pruned_loss=0.03974, over 4949.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02958, over 972284.59 frames.], batch size: 35, lr: 1.30e-04 +2022-05-09 00:54:22,810 INFO [train.py:715] (6/8) Epoch 17, batch 13950, loss[loss=0.1211, simple_loss=0.1994, pruned_loss=0.02141, over 4789.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02943, over 972041.23 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 00:55:02,843 INFO 
[train.py:715] (6/8) Epoch 17, batch 14000, loss[loss=0.1258, simple_loss=0.1956, pruned_loss=0.02798, over 4847.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02941, over 972077.73 frames.], batch size: 30, lr: 1.30e-04 +2022-05-09 00:55:42,004 INFO [train.py:715] (6/8) Epoch 17, batch 14050, loss[loss=0.1221, simple_loss=0.1906, pruned_loss=0.02677, over 4843.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02953, over 972437.32 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 00:56:21,077 INFO [train.py:715] (6/8) Epoch 17, batch 14100, loss[loss=0.1192, simple_loss=0.1945, pruned_loss=0.02198, over 4821.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02945, over 972479.00 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 00:57:01,248 INFO [train.py:715] (6/8) Epoch 17, batch 14150, loss[loss=0.1457, simple_loss=0.2313, pruned_loss=0.0301, over 4710.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02957, over 972376.63 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 00:57:40,317 INFO [train.py:715] (6/8) Epoch 17, batch 14200, loss[loss=0.1327, simple_loss=0.2164, pruned_loss=0.02446, over 4818.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02904, over 972098.76 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 00:58:19,833 INFO [train.py:715] (6/8) Epoch 17, batch 14250, loss[loss=0.1216, simple_loss=0.1906, pruned_loss=0.02632, over 4889.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2065, pruned_loss=0.02945, over 972114.69 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 00:58:59,001 INFO [train.py:715] (6/8) Epoch 17, batch 14300, loss[loss=0.1415, simple_loss=0.2186, pruned_loss=0.03218, over 4688.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02917, over 971797.50 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 00:59:38,855 INFO [train.py:715] (6/8) Epoch 17, batch 14350, loss[loss=0.1329, simple_loss=0.2136, pruned_loss=0.02606, over 4914.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02898, over 971643.11 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 01:00:17,889 INFO [train.py:715] (6/8) Epoch 17, batch 14400, loss[loss=0.1421, simple_loss=0.2208, pruned_loss=0.03175, over 4816.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.0287, over 972708.51 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 01:00:56,580 INFO [train.py:715] (6/8) Epoch 17, batch 14450, loss[loss=0.1359, simple_loss=0.2038, pruned_loss=0.03398, over 4762.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2055, pruned_loss=0.02867, over 972996.79 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:01:36,315 INFO [train.py:715] (6/8) Epoch 17, batch 14500, loss[loss=0.1469, simple_loss=0.2173, pruned_loss=0.0382, over 4964.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02881, over 972997.03 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:02:15,675 INFO [train.py:715] (6/8) Epoch 17, batch 14550, loss[loss=0.1273, simple_loss=0.2199, pruned_loss=0.01739, over 4908.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02888, over 972447.49 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:02:54,149 INFO [train.py:715] (6/8) Epoch 17, batch 14600, loss[loss=0.1539, simple_loss=0.2119, pruned_loss=0.04799, over 4793.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02943, over 972282.28 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 01:03:33,789 INFO 
[train.py:715] (6/8) Epoch 17, batch 14650, loss[loss=0.1292, simple_loss=0.2065, pruned_loss=0.02591, over 4827.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02976, over 971449.14 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:04:13,442 INFO [train.py:715] (6/8) Epoch 17, batch 14700, loss[loss=0.1254, simple_loss=0.2035, pruned_loss=0.02372, over 4930.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2066, pruned_loss=0.02936, over 971209.25 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 01:04:52,654 INFO [train.py:715] (6/8) Epoch 17, batch 14750, loss[loss=0.1543, simple_loss=0.2356, pruned_loss=0.03649, over 4874.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02969, over 971286.75 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 01:05:31,538 INFO [train.py:715] (6/8) Epoch 17, batch 14800, loss[loss=0.1381, simple_loss=0.2148, pruned_loss=0.03066, over 4817.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02987, over 971700.04 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:06:11,602 INFO [train.py:715] (6/8) Epoch 17, batch 14850, loss[loss=0.127, simple_loss=0.1882, pruned_loss=0.03294, over 4813.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.0301, over 971202.99 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:06:50,384 INFO [train.py:715] (6/8) Epoch 17, batch 14900, loss[loss=0.1259, simple_loss=0.2126, pruned_loss=0.01954, over 4885.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03002, over 970366.16 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:07:29,348 INFO [train.py:715] (6/8) Epoch 17, batch 14950, loss[loss=0.1397, simple_loss=0.2195, pruned_loss=0.02996, over 4790.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03026, over 970386.66 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:08:09,016 INFO [train.py:715] (6/8) Epoch 17, batch 15000, loss[loss=0.1336, simple_loss=0.2175, pruned_loss=0.02483, over 4953.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03018, over 971800.06 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:08:09,016 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 01:08:19,082 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1046, simple_loss=0.1881, pruned_loss=0.01059, over 914524.00 frames. 
+2022-05-09 01:08:59,145 INFO [train.py:715] (6/8) Epoch 17, batch 15050, loss[loss=0.1316, simple_loss=0.2121, pruned_loss=0.02551, over 4850.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.0299, over 971486.58 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 01:09:38,652 INFO [train.py:715] (6/8) Epoch 17, batch 15100, loss[loss=0.1319, simple_loss=0.1977, pruned_loss=0.03307, over 4698.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02978, over 972251.52 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:10:17,578 INFO [train.py:715] (6/8) Epoch 17, batch 15150, loss[loss=0.1201, simple_loss=0.1937, pruned_loss=0.02325, over 4888.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02944, over 972618.75 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 01:10:56,613 INFO [train.py:715] (6/8) Epoch 17, batch 15200, loss[loss=0.1217, simple_loss=0.202, pruned_loss=0.02073, over 4984.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02898, over 972740.86 frames.], batch size: 28, lr: 1.30e-04 +2022-05-09 01:11:36,239 INFO [train.py:715] (6/8) Epoch 17, batch 15250, loss[loss=0.1256, simple_loss=0.1988, pruned_loss=0.02624, over 4955.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02937, over 973149.62 frames.], batch size: 35, lr: 1.30e-04 +2022-05-09 01:12:15,605 INFO [train.py:715] (6/8) Epoch 17, batch 15300, loss[loss=0.1341, simple_loss=0.2067, pruned_loss=0.03068, over 4963.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02911, over 973876.34 frames.], batch size: 35, lr: 1.30e-04 +2022-05-09 01:12:53,858 INFO [train.py:715] (6/8) Epoch 17, batch 15350, loss[loss=0.1195, simple_loss=0.1926, pruned_loss=0.02327, over 4767.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02905, over 973920.52 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:13:33,406 INFO [train.py:715] (6/8) Epoch 17, batch 15400, loss[loss=0.1277, simple_loss=0.2029, pruned_loss=0.02628, over 4836.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02929, over 973314.04 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 01:14:12,480 INFO [train.py:715] (6/8) Epoch 17, batch 15450, loss[loss=0.1313, simple_loss=0.1947, pruned_loss=0.03395, over 4690.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02925, over 973130.89 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:14:51,823 INFO [train.py:715] (6/8) Epoch 17, batch 15500, loss[loss=0.1163, simple_loss=0.1908, pruned_loss=0.02091, over 4874.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02952, over 972267.23 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:15:30,650 INFO [train.py:715] (6/8) Epoch 17, batch 15550, loss[loss=0.1293, simple_loss=0.19, pruned_loss=0.03428, over 4872.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02953, over 972252.89 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:16:10,372 INFO [train.py:715] (6/8) Epoch 17, batch 15600, loss[loss=0.1538, simple_loss=0.2189, pruned_loss=0.04433, over 4856.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.0296, over 972336.43 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 01:16:49,791 INFO [train.py:715] (6/8) Epoch 17, batch 15650, loss[loss=0.1453, simple_loss=0.2163, pruned_loss=0.03716, over 4915.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02905, over 971872.62 frames.], batch size: 18, lr: 1.30e-04 
+2022-05-09 01:17:27,920 INFO [train.py:715] (6/8) Epoch 17, batch 15700, loss[loss=0.1608, simple_loss=0.2322, pruned_loss=0.04474, over 4936.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.02932, over 971125.30 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 01:18:07,732 INFO [train.py:715] (6/8) Epoch 17, batch 15750, loss[loss=0.1211, simple_loss=0.1939, pruned_loss=0.02422, over 4810.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02957, over 970931.93 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:18:47,137 INFO [train.py:715] (6/8) Epoch 17, batch 15800, loss[loss=0.1441, simple_loss=0.2123, pruned_loss=0.038, over 4811.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.029, over 971138.51 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 01:19:26,086 INFO [train.py:715] (6/8) Epoch 17, batch 15850, loss[loss=0.1208, simple_loss=0.2022, pruned_loss=0.01972, over 4869.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02922, over 971442.28 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 01:20:04,721 INFO [train.py:715] (6/8) Epoch 17, batch 15900, loss[loss=0.1273, simple_loss=0.2087, pruned_loss=0.02291, over 4752.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02884, over 971582.94 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:20:44,136 INFO [train.py:715] (6/8) Epoch 17, batch 15950, loss[loss=0.1472, simple_loss=0.2231, pruned_loss=0.03564, over 4981.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02893, over 971423.76 frames.], batch size: 27, lr: 1.30e-04 +2022-05-09 01:21:23,632 INFO [train.py:715] (6/8) Epoch 17, batch 16000, loss[loss=0.1451, simple_loss=0.232, pruned_loss=0.02913, over 4926.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02888, over 971199.05 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 01:22:02,019 INFO [train.py:715] (6/8) Epoch 17, batch 16050, loss[loss=0.1258, simple_loss=0.1986, pruned_loss=0.02647, over 4874.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02911, over 971498.53 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 01:22:42,061 INFO [train.py:715] (6/8) Epoch 17, batch 16100, loss[loss=0.1427, simple_loss=0.2149, pruned_loss=0.03524, over 4983.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02925, over 971756.93 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 01:23:21,963 INFO [train.py:715] (6/8) Epoch 17, batch 16150, loss[loss=0.1172, simple_loss=0.1934, pruned_loss=0.02047, over 4964.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02938, over 971798.64 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 01:24:01,721 INFO [train.py:715] (6/8) Epoch 17, batch 16200, loss[loss=0.154, simple_loss=0.2299, pruned_loss=0.03902, over 4875.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02931, over 972463.88 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:24:43,149 INFO [train.py:715] (6/8) Epoch 17, batch 16250, loss[loss=0.109, simple_loss=0.1817, pruned_loss=0.01821, over 4779.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02909, over 973320.07 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 01:25:23,146 INFO [train.py:715] (6/8) Epoch 17, batch 16300, loss[loss=0.1209, simple_loss=0.196, pruned_loss=0.02291, over 4809.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2055, pruned_loss=0.02873, over 973494.45 frames.], batch size: 27, lr: 1.30e-04 +2022-05-09 
01:26:02,220 INFO [train.py:715] (6/8) Epoch 17, batch 16350, loss[loss=0.1277, simple_loss=0.2089, pruned_loss=0.02326, over 4810.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2057, pruned_loss=0.02885, over 972835.84 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 01:26:40,874 INFO [train.py:715] (6/8) Epoch 17, batch 16400, loss[loss=0.1068, simple_loss=0.1901, pruned_loss=0.0118, over 4936.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2054, pruned_loss=0.02861, over 972561.04 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 01:27:20,590 INFO [train.py:715] (6/8) Epoch 17, batch 16450, loss[loss=0.1411, simple_loss=0.2168, pruned_loss=0.03271, over 4974.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2063, pruned_loss=0.0292, over 972521.93 frames.], batch size: 31, lr: 1.30e-04 +2022-05-09 01:28:00,553 INFO [train.py:715] (6/8) Epoch 17, batch 16500, loss[loss=0.1341, simple_loss=0.1995, pruned_loss=0.03441, over 4950.00 frames.], tot_loss[loss=0.133, simple_loss=0.2068, pruned_loss=0.02963, over 972248.11 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:28:39,571 INFO [train.py:715] (6/8) Epoch 17, batch 16550, loss[loss=0.1552, simple_loss=0.2208, pruned_loss=0.04479, over 4689.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02982, over 971862.49 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:29:18,074 INFO [train.py:715] (6/8) Epoch 17, batch 16600, loss[loss=0.1343, simple_loss=0.216, pruned_loss=0.02631, over 4968.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02952, over 972614.16 frames.], batch size: 28, lr: 1.30e-04 +2022-05-09 01:29:58,265 INFO [train.py:715] (6/8) Epoch 17, batch 16650, loss[loss=0.1275, simple_loss=0.2052, pruned_loss=0.02484, over 4873.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02987, over 972405.34 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 01:30:38,048 INFO [train.py:715] (6/8) Epoch 17, batch 16700, loss[loss=0.155, simple_loss=0.2221, pruned_loss=0.04401, over 4746.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.0292, over 971672.91 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:31:16,489 INFO [train.py:715] (6/8) Epoch 17, batch 16750, loss[loss=0.1653, simple_loss=0.2477, pruned_loss=0.04142, over 4820.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02932, over 971028.19 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 01:31:56,313 INFO [train.py:715] (6/8) Epoch 17, batch 16800, loss[loss=0.123, simple_loss=0.1948, pruned_loss=0.02562, over 4852.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02923, over 971351.24 frames.], batch size: 30, lr: 1.30e-04 +2022-05-09 01:32:35,735 INFO [train.py:715] (6/8) Epoch 17, batch 16850, loss[loss=0.1305, simple_loss=0.2037, pruned_loss=0.0287, over 4747.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.0292, over 970649.82 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:33:15,644 INFO [train.py:715] (6/8) Epoch 17, batch 16900, loss[loss=0.1513, simple_loss=0.2153, pruned_loss=0.04362, over 4938.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02985, over 971443.95 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:33:53,871 INFO [train.py:715] (6/8) Epoch 17, batch 16950, loss[loss=0.1534, simple_loss=0.2258, pruned_loss=0.04054, over 4922.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02916, over 971666.73 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 01:34:33,424 
INFO [train.py:715] (6/8) Epoch 17, batch 17000, loss[loss=0.1347, simple_loss=0.196, pruned_loss=0.03671, over 4987.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02953, over 971974.71 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 01:35:12,912 INFO [train.py:715] (6/8) Epoch 17, batch 17050, loss[loss=0.1272, simple_loss=0.21, pruned_loss=0.0222, over 4798.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02964, over 971167.44 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 01:35:51,177 INFO [train.py:715] (6/8) Epoch 17, batch 17100, loss[loss=0.1227, simple_loss=0.1945, pruned_loss=0.02542, over 4929.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02977, over 971301.30 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 01:36:30,683 INFO [train.py:715] (6/8) Epoch 17, batch 17150, loss[loss=0.1283, simple_loss=0.2122, pruned_loss=0.02225, over 4868.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02883, over 971607.82 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 01:37:10,053 INFO [train.py:715] (6/8) Epoch 17, batch 17200, loss[loss=0.1511, simple_loss=0.2199, pruned_loss=0.04108, over 4800.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.0292, over 971673.66 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 01:37:48,557 INFO [train.py:715] (6/8) Epoch 17, batch 17250, loss[loss=0.1173, simple_loss=0.1911, pruned_loss=0.02172, over 4866.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02922, over 971622.65 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 01:38:26,822 INFO [train.py:715] (6/8) Epoch 17, batch 17300, loss[loss=0.1466, simple_loss=0.2206, pruned_loss=0.03625, over 4892.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02902, over 972122.45 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:39:06,139 INFO [train.py:715] (6/8) Epoch 17, batch 17350, loss[loss=0.1157, simple_loss=0.2032, pruned_loss=0.01411, over 4944.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02913, over 972062.92 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:39:45,339 INFO [train.py:715] (6/8) Epoch 17, batch 17400, loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02794, over 4851.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02891, over 971909.55 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 01:40:23,325 INFO [train.py:715] (6/8) Epoch 17, batch 17450, loss[loss=0.1178, simple_loss=0.1882, pruned_loss=0.02374, over 4726.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02927, over 971689.89 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 01:41:03,015 INFO [train.py:715] (6/8) Epoch 17, batch 17500, loss[loss=0.1326, simple_loss=0.2137, pruned_loss=0.02579, over 4764.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.0294, over 972600.15 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 01:41:42,137 INFO [train.py:715] (6/8) Epoch 17, batch 17550, loss[loss=0.1278, simple_loss=0.2018, pruned_loss=0.02695, over 4952.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02948, over 972146.71 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 01:42:20,894 INFO [train.py:715] (6/8) Epoch 17, batch 17600, loss[loss=0.1404, simple_loss=0.2253, pruned_loss=0.02774, over 4811.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02952, over 973392.50 frames.], batch size: 27, lr: 1.30e-04 +2022-05-09 01:42:59,404 INFO 
[train.py:715] (6/8) Epoch 17, batch 17650, loss[loss=0.0961, simple_loss=0.1652, pruned_loss=0.0135, over 4816.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02931, over 972815.09 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 01:43:38,885 INFO [train.py:715] (6/8) Epoch 17, batch 17700, loss[loss=0.1286, simple_loss=0.1984, pruned_loss=0.02939, over 4796.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02895, over 972380.28 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:44:17,602 INFO [train.py:715] (6/8) Epoch 17, batch 17750, loss[loss=0.1314, simple_loss=0.2029, pruned_loss=0.02993, over 4820.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02845, over 973108.55 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:44:56,093 INFO [train.py:715] (6/8) Epoch 17, batch 17800, loss[loss=0.1113, simple_loss=0.1882, pruned_loss=0.01724, over 4991.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02898, over 973700.95 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 01:45:35,677 INFO [train.py:715] (6/8) Epoch 17, batch 17850, loss[loss=0.1804, simple_loss=0.2347, pruned_loss=0.06308, over 4964.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02928, over 973436.88 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 01:46:14,668 INFO [train.py:715] (6/8) Epoch 17, batch 17900, loss[loss=0.1083, simple_loss=0.1814, pruned_loss=0.0176, over 4922.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02917, over 973158.57 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 01:46:54,033 INFO [train.py:715] (6/8) Epoch 17, batch 17950, loss[loss=0.1491, simple_loss=0.2247, pruned_loss=0.03676, over 4913.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02916, over 972129.62 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:47:32,277 INFO [train.py:715] (6/8) Epoch 17, batch 18000, loss[loss=0.1213, simple_loss=0.2028, pruned_loss=0.01985, over 4856.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2065, pruned_loss=0.02941, over 971333.71 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:47:32,278 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 01:47:42,061 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1047, simple_loss=0.1881, pruned_loss=0.01066, over 914524.00 frames. 
+2022-05-09 01:48:20,787 INFO [train.py:715] (6/8) Epoch 17, batch 18050, loss[loss=0.1192, simple_loss=0.2008, pruned_loss=0.0188, over 4809.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02921, over 972581.01 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 01:49:00,415 INFO [train.py:715] (6/8) Epoch 17, batch 18100, loss[loss=0.1319, simple_loss=0.2106, pruned_loss=0.02658, over 4740.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02907, over 971470.06 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:49:39,800 INFO [train.py:715] (6/8) Epoch 17, batch 18150, loss[loss=0.1478, simple_loss=0.2143, pruned_loss=0.04068, over 4749.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2074, pruned_loss=0.02899, over 971451.29 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:50:17,780 INFO [train.py:715] (6/8) Epoch 17, batch 18200, loss[loss=0.1415, simple_loss=0.2153, pruned_loss=0.03381, over 4846.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2077, pruned_loss=0.02939, over 971709.64 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:50:57,535 INFO [train.py:715] (6/8) Epoch 17, batch 18250, loss[loss=0.1453, simple_loss=0.2254, pruned_loss=0.0326, over 4754.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02949, over 972513.35 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 01:51:37,066 INFO [train.py:715] (6/8) Epoch 17, batch 18300, loss[loss=0.1389, simple_loss=0.2267, pruned_loss=0.02553, over 4822.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02938, over 972207.40 frames.], batch size: 27, lr: 1.30e-04 +2022-05-09 01:52:15,577 INFO [train.py:715] (6/8) Epoch 17, batch 18350, loss[loss=0.1381, simple_loss=0.1926, pruned_loss=0.04178, over 4822.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02961, over 972454.48 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:52:55,004 INFO [train.py:715] (6/8) Epoch 17, batch 18400, loss[loss=0.1559, simple_loss=0.2324, pruned_loss=0.03977, over 4745.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.03001, over 972323.41 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:53:33,901 INFO [train.py:715] (6/8) Epoch 17, batch 18450, loss[loss=0.1322, simple_loss=0.2057, pruned_loss=0.02934, over 4653.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03013, over 971864.59 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:54:13,137 INFO [train.py:715] (6/8) Epoch 17, batch 18500, loss[loss=0.1638, simple_loss=0.2308, pruned_loss=0.04844, over 4938.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02969, over 971873.55 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 01:54:51,418 INFO [train.py:715] (6/8) Epoch 17, batch 18550, loss[loss=0.1763, simple_loss=0.2355, pruned_loss=0.05856, over 4881.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02952, over 973111.47 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:55:30,376 INFO [train.py:715] (6/8) Epoch 17, batch 18600, loss[loss=0.1087, simple_loss=0.1834, pruned_loss=0.01697, over 4970.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02934, over 973302.41 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 01:56:09,534 INFO [train.py:715] (6/8) Epoch 17, batch 18650, loss[loss=0.1134, simple_loss=0.1903, pruned_loss=0.0182, over 4929.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02936, over 973173.24 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 
01:56:47,380 INFO [train.py:715] (6/8) Epoch 17, batch 18700, loss[loss=0.1625, simple_loss=0.2212, pruned_loss=0.05191, over 4780.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2081, pruned_loss=0.02957, over 971959.76 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 01:57:27,057 INFO [train.py:715] (6/8) Epoch 17, batch 18750, loss[loss=0.1197, simple_loss=0.1934, pruned_loss=0.02299, over 4949.00 frames.], tot_loss[loss=0.1334, simple_loss=0.208, pruned_loss=0.02942, over 971768.96 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:58:06,646 INFO [train.py:715] (6/8) Epoch 17, batch 18800, loss[loss=0.1534, simple_loss=0.2387, pruned_loss=0.03411, over 4966.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02899, over 971874.16 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 01:58:45,353 INFO [train.py:715] (6/8) Epoch 17, batch 18850, loss[loss=0.1235, simple_loss=0.1951, pruned_loss=0.02594, over 4831.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02914, over 972014.22 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:59:23,458 INFO [train.py:715] (6/8) Epoch 17, batch 18900, loss[loss=0.1419, simple_loss=0.2298, pruned_loss=0.02707, over 4837.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02932, over 971552.98 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 02:00:02,554 INFO [train.py:715] (6/8) Epoch 17, batch 18950, loss[loss=0.113, simple_loss=0.1851, pruned_loss=0.02045, over 4782.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2077, pruned_loss=0.02935, over 971947.75 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:00:41,838 INFO [train.py:715] (6/8) Epoch 17, batch 19000, loss[loss=0.1336, simple_loss=0.2155, pruned_loss=0.02586, over 4917.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2081, pruned_loss=0.02944, over 972207.33 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:01:20,329 INFO [train.py:715] (6/8) Epoch 17, batch 19050, loss[loss=0.1423, simple_loss=0.2117, pruned_loss=0.03639, over 4917.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2085, pruned_loss=0.02922, over 972831.20 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 02:01:59,758 INFO [train.py:715] (6/8) Epoch 17, batch 19100, loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03307, over 4940.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2085, pruned_loss=0.02949, over 972980.48 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 02:02:38,889 INFO [train.py:715] (6/8) Epoch 17, batch 19150, loss[loss=0.1016, simple_loss=0.1779, pruned_loss=0.01268, over 4692.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02924, over 972671.75 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:03:17,330 INFO [train.py:715] (6/8) Epoch 17, batch 19200, loss[loss=0.1409, simple_loss=0.2152, pruned_loss=0.03326, over 4939.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02884, over 973411.62 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 02:03:56,162 INFO [train.py:715] (6/8) Epoch 17, batch 19250, loss[loss=0.1262, simple_loss=0.1989, pruned_loss=0.02676, over 4688.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02889, over 972327.72 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:04:35,742 INFO [train.py:715] (6/8) Epoch 17, batch 19300, loss[loss=0.1167, simple_loss=0.193, pruned_loss=0.02022, over 4980.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2073, pruned_loss=0.02905, over 972041.31 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 
02:05:15,466 INFO [train.py:715] (6/8) Epoch 17, batch 19350, loss[loss=0.1273, simple_loss=0.1989, pruned_loss=0.02783, over 4808.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02892, over 971133.17 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 02:05:54,630 INFO [train.py:715] (6/8) Epoch 17, batch 19400, loss[loss=0.1479, simple_loss=0.2274, pruned_loss=0.03418, over 4937.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02885, over 970987.71 frames.], batch size: 38, lr: 1.30e-04 +2022-05-09 02:06:34,196 INFO [train.py:715] (6/8) Epoch 17, batch 19450, loss[loss=0.1253, simple_loss=0.195, pruned_loss=0.02778, over 4873.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02902, over 972302.70 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 02:07:13,759 INFO [train.py:715] (6/8) Epoch 17, batch 19500, loss[loss=0.1215, simple_loss=0.198, pruned_loss=0.02249, over 4883.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2056, pruned_loss=0.02884, over 972396.96 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 02:07:53,348 INFO [train.py:715] (6/8) Epoch 17, batch 19550, loss[loss=0.136, simple_loss=0.2041, pruned_loss=0.03399, over 4989.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2055, pruned_loss=0.02887, over 972272.70 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:08:31,626 INFO [train.py:715] (6/8) Epoch 17, batch 19600, loss[loss=0.152, simple_loss=0.2228, pruned_loss=0.04064, over 4893.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02883, over 972925.57 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 02:09:11,590 INFO [train.py:715] (6/8) Epoch 17, batch 19650, loss[loss=0.1347, simple_loss=0.2022, pruned_loss=0.03361, over 4828.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02889, over 972804.44 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:09:51,455 INFO [train.py:715] (6/8) Epoch 17, batch 19700, loss[loss=0.1352, simple_loss=0.2068, pruned_loss=0.03181, over 4964.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2063, pruned_loss=0.02953, over 972681.58 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 02:10:30,065 INFO [train.py:715] (6/8) Epoch 17, batch 19750, loss[loss=0.1282, simple_loss=0.1978, pruned_loss=0.0293, over 4837.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03018, over 972722.88 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 02:11:09,373 INFO [train.py:715] (6/8) Epoch 17, batch 19800, loss[loss=0.1255, simple_loss=0.193, pruned_loss=0.02902, over 4906.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03013, over 971798.08 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:11:47,964 INFO [train.py:715] (6/8) Epoch 17, batch 19850, loss[loss=0.1109, simple_loss=0.1959, pruned_loss=0.01298, over 4931.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03019, over 971852.65 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 02:12:26,934 INFO [train.py:715] (6/8) Epoch 17, batch 19900, loss[loss=0.09596, simple_loss=0.168, pruned_loss=0.01198, over 4862.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02999, over 971104.05 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 02:13:05,192 INFO [train.py:715] (6/8) Epoch 17, batch 19950, loss[loss=0.09711, simple_loss=0.1681, pruned_loss=0.01305, over 4962.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02968, over 972444.92 frames.], batch size: 35, lr: 1.30e-04 +2022-05-09 
02:13:44,434 INFO [train.py:715] (6/8) Epoch 17, batch 20000, loss[loss=0.1559, simple_loss=0.2248, pruned_loss=0.04349, over 4690.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02942, over 972508.23 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:14:24,054 INFO [train.py:715] (6/8) Epoch 17, batch 20050, loss[loss=0.1318, simple_loss=0.2032, pruned_loss=0.03024, over 4832.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02909, over 972843.71 frames.], batch size: 30, lr: 1.30e-04 +2022-05-09 02:15:03,205 INFO [train.py:715] (6/8) Epoch 17, batch 20100, loss[loss=0.1557, simple_loss=0.2286, pruned_loss=0.04145, over 4892.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02959, over 971598.11 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:15:42,011 INFO [train.py:715] (6/8) Epoch 17, batch 20150, loss[loss=0.1199, simple_loss=0.1911, pruned_loss=0.02437, over 4990.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.0294, over 972225.95 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 02:16:22,284 INFO [train.py:715] (6/8) Epoch 17, batch 20200, loss[loss=0.1227, simple_loss=0.1984, pruned_loss=0.02343, over 4820.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02975, over 972126.74 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 02:17:02,715 INFO [train.py:715] (6/8) Epoch 17, batch 20250, loss[loss=0.1384, simple_loss=0.2145, pruned_loss=0.03115, over 4689.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02937, over 972024.07 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:17:40,779 INFO [train.py:715] (6/8) Epoch 17, batch 20300, loss[loss=0.1238, simple_loss=0.1927, pruned_loss=0.02744, over 4889.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.02931, over 972830.72 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 02:18:20,528 INFO [train.py:715] (6/8) Epoch 17, batch 20350, loss[loss=0.1828, simple_loss=0.2407, pruned_loss=0.06241, over 4965.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02951, over 972308.62 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:19:00,639 INFO [train.py:715] (6/8) Epoch 17, batch 20400, loss[loss=0.1145, simple_loss=0.1839, pruned_loss=0.02257, over 4763.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02929, over 972458.55 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 02:19:39,226 INFO [train.py:715] (6/8) Epoch 17, batch 20450, loss[loss=0.1293, simple_loss=0.2018, pruned_loss=0.02837, over 4836.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.0292, over 971914.21 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 02:20:17,929 INFO [train.py:715] (6/8) Epoch 17, batch 20500, loss[loss=0.1463, simple_loss=0.2243, pruned_loss=0.03417, over 4937.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2066, pruned_loss=0.0294, over 972046.27 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 02:20:57,780 INFO [train.py:715] (6/8) Epoch 17, batch 20550, loss[loss=0.1295, simple_loss=0.2, pruned_loss=0.02952, over 4951.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2067, pruned_loss=0.02985, over 971892.58 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 02:21:36,917 INFO [train.py:715] (6/8) Epoch 17, batch 20600, loss[loss=0.09673, simple_loss=0.1706, pruned_loss=0.01142, over 4861.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2069, pruned_loss=0.02967, over 972515.95 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 02:22:15,103 
INFO [train.py:715] (6/8) Epoch 17, batch 20650, loss[loss=0.1359, simple_loss=0.2236, pruned_loss=0.02404, over 4963.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2073, pruned_loss=0.03006, over 973417.64 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 02:22:54,074 INFO [train.py:715] (6/8) Epoch 17, batch 20700, loss[loss=0.1245, simple_loss=0.2035, pruned_loss=0.02274, over 4902.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02972, over 973786.27 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:23:33,732 INFO [train.py:715] (6/8) Epoch 17, batch 20750, loss[loss=0.1118, simple_loss=0.1902, pruned_loss=0.01667, over 4781.00 frames.], tot_loss[loss=0.134, simple_loss=0.2076, pruned_loss=0.03022, over 973358.52 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 02:24:12,680 INFO [train.py:715] (6/8) Epoch 17, batch 20800, loss[loss=0.1238, simple_loss=0.2002, pruned_loss=0.02369, over 4904.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03024, over 973047.28 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:24:51,255 INFO [train.py:715] (6/8) Epoch 17, batch 20850, loss[loss=0.1078, simple_loss=0.1773, pruned_loss=0.01911, over 4790.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2069, pruned_loss=0.02978, over 973055.64 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 02:25:30,266 INFO [train.py:715] (6/8) Epoch 17, batch 20900, loss[loss=0.1099, simple_loss=0.1781, pruned_loss=0.02084, over 4815.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2056, pruned_loss=0.02948, over 971838.41 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 02:26:10,249 INFO [train.py:715] (6/8) Epoch 17, batch 20950, loss[loss=0.1413, simple_loss=0.2228, pruned_loss=0.02987, over 4849.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2057, pruned_loss=0.02945, over 972496.37 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 02:26:48,270 INFO [train.py:715] (6/8) Epoch 17, batch 21000, loss[loss=0.1106, simple_loss=0.1909, pruned_loss=0.01512, over 4981.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2052, pruned_loss=0.02918, over 972624.44 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 02:26:48,271 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 02:27:00,912 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1049, simple_loss=0.1882, pruned_loss=0.01077, over 914524.00 frames. 
+2022-05-09 02:27:38,933 INFO [train.py:715] (6/8) Epoch 17, batch 21050, loss[loss=0.1146, simple_loss=0.1904, pruned_loss=0.01933, over 4775.00 frames.], tot_loss[loss=0.132, simple_loss=0.2055, pruned_loss=0.02918, over 973029.29 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 02:28:18,323 INFO [train.py:715] (6/8) Epoch 17, batch 21100, loss[loss=0.1153, simple_loss=0.1938, pruned_loss=0.01839, over 4979.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2053, pruned_loss=0.02903, over 973063.49 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 02:28:58,387 INFO [train.py:715] (6/8) Epoch 17, batch 21150, loss[loss=0.1401, simple_loss=0.2141, pruned_loss=0.03303, over 4667.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2058, pruned_loss=0.02906, over 972777.27 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 02:29:37,032 INFO [train.py:715] (6/8) Epoch 17, batch 21200, loss[loss=0.1328, simple_loss=0.2029, pruned_loss=0.03136, over 4876.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2063, pruned_loss=0.02923, over 973033.97 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 02:30:15,715 INFO [train.py:715] (6/8) Epoch 17, batch 21250, loss[loss=0.1225, simple_loss=0.188, pruned_loss=0.02848, over 4775.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02871, over 971542.18 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:30:55,581 INFO [train.py:715] (6/8) Epoch 17, batch 21300, loss[loss=0.1236, simple_loss=0.2042, pruned_loss=0.02153, over 4759.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02847, over 972503.51 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:31:35,369 INFO [train.py:715] (6/8) Epoch 17, batch 21350, loss[loss=0.1644, simple_loss=0.2443, pruned_loss=0.04221, over 4909.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02878, over 972720.72 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 02:32:13,593 INFO [train.py:715] (6/8) Epoch 17, batch 21400, loss[loss=0.1692, simple_loss=0.2355, pruned_loss=0.05141, over 4773.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02895, over 973179.04 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 02:32:53,764 INFO [train.py:715] (6/8) Epoch 17, batch 21450, loss[loss=0.1332, simple_loss=0.217, pruned_loss=0.02474, over 4832.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.0294, over 973258.40 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 02:33:33,555 INFO [train.py:715] (6/8) Epoch 17, batch 21500, loss[loss=0.1405, simple_loss=0.2185, pruned_loss=0.03121, over 4858.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.0299, over 972226.50 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 02:34:12,050 INFO [train.py:715] (6/8) Epoch 17, batch 21550, loss[loss=0.1557, simple_loss=0.2099, pruned_loss=0.05075, over 4901.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02946, over 972534.71 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 02:34:51,492 INFO [train.py:715] (6/8) Epoch 17, batch 21600, loss[loss=0.137, simple_loss=0.2157, pruned_loss=0.02913, over 4970.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02931, over 972616.85 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 02:35:31,961 INFO [train.py:715] (6/8) Epoch 17, batch 21650, loss[loss=0.1112, simple_loss=0.1905, pruned_loss=0.01594, over 4821.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02921, over 972578.32 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 
02:36:11,046 INFO [train.py:715] (6/8) Epoch 17, batch 21700, loss[loss=0.1207, simple_loss=0.1919, pruned_loss=0.02477, over 4809.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02897, over 972746.43 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 02:36:49,697 INFO [train.py:715] (6/8) Epoch 17, batch 21750, loss[loss=0.1666, simple_loss=0.2265, pruned_loss=0.05336, over 4764.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02915, over 972242.03 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:37:29,252 INFO [train.py:715] (6/8) Epoch 17, batch 21800, loss[loss=0.1291, simple_loss=0.2072, pruned_loss=0.02552, over 4985.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02897, over 972423.63 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 02:38:08,217 INFO [train.py:715] (6/8) Epoch 17, batch 21850, loss[loss=0.1172, simple_loss=0.1849, pruned_loss=0.02475, over 4887.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02912, over 972498.32 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:38:47,463 INFO [train.py:715] (6/8) Epoch 17, batch 21900, loss[loss=0.1386, simple_loss=0.2063, pruned_loss=0.03545, over 4781.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.02911, over 971811.01 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 02:39:25,954 INFO [train.py:715] (6/8) Epoch 17, batch 21950, loss[loss=0.1293, simple_loss=0.2049, pruned_loss=0.02688, over 4804.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02886, over 972360.90 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 02:40:05,674 INFO [train.py:715] (6/8) Epoch 17, batch 22000, loss[loss=0.1465, simple_loss=0.2106, pruned_loss=0.04122, over 4791.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02873, over 972487.30 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 02:40:45,440 INFO [train.py:715] (6/8) Epoch 17, batch 22050, loss[loss=0.1173, simple_loss=0.1886, pruned_loss=0.02296, over 4981.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02917, over 972798.65 frames.], batch size: 28, lr: 1.30e-04 +2022-05-09 02:41:23,868 INFO [train.py:715] (6/8) Epoch 17, batch 22100, loss[loss=0.1204, simple_loss=0.2005, pruned_loss=0.02015, over 4834.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.0287, over 972637.10 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 02:42:03,599 INFO [train.py:715] (6/8) Epoch 17, batch 22150, loss[loss=0.1311, simple_loss=0.203, pruned_loss=0.02963, over 4905.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02875, over 972342.44 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:42:43,489 INFO [train.py:715] (6/8) Epoch 17, batch 22200, loss[loss=0.1202, simple_loss=0.1951, pruned_loss=0.0226, over 4991.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02903, over 972380.72 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 02:43:22,390 INFO [train.py:715] (6/8) Epoch 17, batch 22250, loss[loss=0.1299, simple_loss=0.2119, pruned_loss=0.02401, over 4842.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02941, over 973441.93 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 02:44:01,345 INFO [train.py:715] (6/8) Epoch 17, batch 22300, loss[loss=0.1354, simple_loss=0.2087, pruned_loss=0.03102, over 4956.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02975, over 973277.59 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 
02:44:41,267 INFO [train.py:715] (6/8) Epoch 17, batch 22350, loss[loss=0.1204, simple_loss=0.2022, pruned_loss=0.01928, over 4860.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02956, over 972343.23 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 02:45:20,840 INFO [train.py:715] (6/8) Epoch 17, batch 22400, loss[loss=0.1175, simple_loss=0.1873, pruned_loss=0.02386, over 4837.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02976, over 971894.30 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 02:45:59,655 INFO [train.py:715] (6/8) Epoch 17, batch 22450, loss[loss=0.1315, simple_loss=0.212, pruned_loss=0.02552, over 4973.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.0297, over 971956.17 frames.], batch size: 35, lr: 1.30e-04 +2022-05-09 02:46:38,626 INFO [train.py:715] (6/8) Epoch 17, batch 22500, loss[loss=0.142, simple_loss=0.217, pruned_loss=0.03349, over 4699.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02903, over 971954.14 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:47:18,396 INFO [train.py:715] (6/8) Epoch 17, batch 22550, loss[loss=0.104, simple_loss=0.1858, pruned_loss=0.01109, over 4910.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02884, over 972181.54 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 02:47:56,725 INFO [train.py:715] (6/8) Epoch 17, batch 22600, loss[loss=0.1343, simple_loss=0.2196, pruned_loss=0.02447, over 4921.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02857, over 972470.32 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:48:36,270 INFO [train.py:715] (6/8) Epoch 17, batch 22650, loss[loss=0.1317, simple_loss=0.1958, pruned_loss=0.03383, over 4837.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02869, over 972663.92 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:49:15,735 INFO [train.py:715] (6/8) Epoch 17, batch 22700, loss[loss=0.09807, simple_loss=0.1726, pruned_loss=0.01177, over 4759.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02897, over 972086.86 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 02:49:54,665 INFO [train.py:715] (6/8) Epoch 17, batch 22750, loss[loss=0.1453, simple_loss=0.2144, pruned_loss=0.03808, over 4794.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02896, over 972513.35 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 02:50:33,051 INFO [train.py:715] (6/8) Epoch 17, batch 22800, loss[loss=0.1758, simple_loss=0.2454, pruned_loss=0.05317, over 4903.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02935, over 972967.18 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:51:12,445 INFO [train.py:715] (6/8) Epoch 17, batch 22850, loss[loss=0.1395, simple_loss=0.2227, pruned_loss=0.02815, over 4894.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02898, over 972809.00 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 02:51:52,339 INFO [train.py:715] (6/8) Epoch 17, batch 22900, loss[loss=0.1327, simple_loss=0.2063, pruned_loss=0.02954, over 4870.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02898, over 972520.40 frames.], batch size: 20, lr: 1.29e-04 +2022-05-09 02:52:30,190 INFO [train.py:715] (6/8) Epoch 17, batch 22950, loss[loss=0.1215, simple_loss=0.1983, pruned_loss=0.02238, over 4863.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02892, over 972634.59 frames.], batch size: 20, lr: 1.29e-04 +2022-05-09 
02:53:10,087 INFO [train.py:715] (6/8) Epoch 17, batch 23000, loss[loss=0.1242, simple_loss=0.1922, pruned_loss=0.02808, over 4788.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.0293, over 972551.24 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 02:53:50,345 INFO [train.py:715] (6/8) Epoch 17, batch 23050, loss[loss=0.127, simple_loss=0.2098, pruned_loss=0.02211, over 4881.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03003, over 971871.64 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 02:54:29,513 INFO [train.py:715] (6/8) Epoch 17, batch 23100, loss[loss=0.1316, simple_loss=0.2157, pruned_loss=0.02374, over 4917.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02966, over 971700.44 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 02:55:07,925 INFO [train.py:715] (6/8) Epoch 17, batch 23150, loss[loss=0.1127, simple_loss=0.1827, pruned_loss=0.02141, over 4870.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02949, over 971326.06 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 02:55:47,706 INFO [train.py:715] (6/8) Epoch 17, batch 23200, loss[loss=0.1445, simple_loss=0.2103, pruned_loss=0.03934, over 4916.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02953, over 972374.19 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 02:56:27,725 INFO [train.py:715] (6/8) Epoch 17, batch 23250, loss[loss=0.1367, simple_loss=0.2023, pruned_loss=0.03549, over 4848.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2063, pruned_loss=0.02947, over 972704.56 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 02:57:05,642 INFO [train.py:715] (6/8) Epoch 17, batch 23300, loss[loss=0.1149, simple_loss=0.1855, pruned_loss=0.02211, over 4880.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02958, over 972591.42 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 02:57:44,994 INFO [train.py:715] (6/8) Epoch 17, batch 23350, loss[loss=0.1282, simple_loss=0.209, pruned_loss=0.02371, over 4720.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02935, over 971088.75 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 02:58:25,104 INFO [train.py:715] (6/8) Epoch 17, batch 23400, loss[loss=0.1182, simple_loss=0.1982, pruned_loss=0.01909, over 4881.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.0284, over 972259.24 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 02:59:03,872 INFO [train.py:715] (6/8) Epoch 17, batch 23450, loss[loss=0.1355, simple_loss=0.2018, pruned_loss=0.03456, over 4960.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02865, over 972507.53 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 02:59:42,964 INFO [train.py:715] (6/8) Epoch 17, batch 23500, loss[loss=0.1298, simple_loss=0.2061, pruned_loss=0.02677, over 4945.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.0284, over 973011.62 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 03:00:22,281 INFO [train.py:715] (6/8) Epoch 17, batch 23550, loss[loss=0.1353, simple_loss=0.2104, pruned_loss=0.03008, over 4760.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.0284, over 971999.88 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:01:01,970 INFO [train.py:715] (6/8) Epoch 17, batch 23600, loss[loss=0.116, simple_loss=0.193, pruned_loss=0.0195, over 4954.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02829, over 972009.62 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 03:01:40,310 INFO 
[train.py:715] (6/8) Epoch 17, batch 23650, loss[loss=0.09973, simple_loss=0.1788, pruned_loss=0.01033, over 4986.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.0285, over 971343.37 frames.], batch size: 27, lr: 1.29e-04 +2022-05-09 03:02:19,923 INFO [train.py:715] (6/8) Epoch 17, batch 23700, loss[loss=0.1161, simple_loss=0.1918, pruned_loss=0.02018, over 4819.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02822, over 971278.66 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 03:02:59,510 INFO [train.py:715] (6/8) Epoch 17, batch 23750, loss[loss=0.1229, simple_loss=0.2033, pruned_loss=0.02127, over 4789.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02838, over 971552.27 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:03:38,389 INFO [train.py:715] (6/8) Epoch 17, batch 23800, loss[loss=0.109, simple_loss=0.1906, pruned_loss=0.01372, over 4803.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02857, over 971159.32 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 03:04:16,665 INFO [train.py:715] (6/8) Epoch 17, batch 23850, loss[loss=0.1542, simple_loss=0.226, pruned_loss=0.04124, over 4944.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2072, pruned_loss=0.02879, over 971666.04 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 03:04:56,709 INFO [train.py:715] (6/8) Epoch 17, batch 23900, loss[loss=0.1167, simple_loss=0.2001, pruned_loss=0.01671, over 4926.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02917, over 971741.93 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 03:05:35,890 INFO [train.py:715] (6/8) Epoch 17, batch 23950, loss[loss=0.1169, simple_loss=0.1992, pruned_loss=0.0173, over 4907.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.02956, over 971987.44 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:06:14,200 INFO [train.py:715] (6/8) Epoch 17, batch 24000, loss[loss=0.1251, simple_loss=0.2018, pruned_loss=0.02424, over 4958.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2091, pruned_loss=0.03039, over 972927.39 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 03:06:14,201 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 03:06:24,068 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1047, simple_loss=0.1881, pruned_loss=0.01067, over 914524.00 frames. 
+2022-05-09 03:07:02,583 INFO [train.py:715] (6/8) Epoch 17, batch 24050, loss[loss=0.1198, simple_loss=0.1975, pruned_loss=0.02105, over 4833.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.0298, over 973287.41 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:07:41,977 INFO [train.py:715] (6/8) Epoch 17, batch 24100, loss[loss=0.1288, simple_loss=0.2061, pruned_loss=0.02569, over 4755.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02986, over 973072.19 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:08:22,153 INFO [train.py:715] (6/8) Epoch 17, batch 24150, loss[loss=0.1418, simple_loss=0.208, pruned_loss=0.03778, over 4788.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2065, pruned_loss=0.02955, over 972559.54 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:09:00,904 INFO [train.py:715] (6/8) Epoch 17, batch 24200, loss[loss=0.1173, simple_loss=0.1928, pruned_loss=0.02092, over 4929.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2069, pruned_loss=0.02966, over 972510.59 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 03:09:42,457 INFO [train.py:715] (6/8) Epoch 17, batch 24250, loss[loss=0.1522, simple_loss=0.2194, pruned_loss=0.04244, over 4960.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2059, pruned_loss=0.0291, over 972484.64 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 03:10:23,064 INFO [train.py:715] (6/8) Epoch 17, batch 24300, loss[loss=0.1512, simple_loss=0.2256, pruned_loss=0.03837, over 4984.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.0296, over 973243.07 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 03:11:02,616 INFO [train.py:715] (6/8) Epoch 17, batch 24350, loss[loss=0.1381, simple_loss=0.2095, pruned_loss=0.03338, over 4775.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02938, over 972290.64 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:11:41,995 INFO [train.py:715] (6/8) Epoch 17, batch 24400, loss[loss=0.1603, simple_loss=0.2308, pruned_loss=0.04495, over 4961.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.02986, over 971144.22 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 03:12:21,144 INFO [train.py:715] (6/8) Epoch 17, batch 24450, loss[loss=0.1377, simple_loss=0.2111, pruned_loss=0.03212, over 4928.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2066, pruned_loss=0.02982, over 971148.89 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 03:13:01,332 INFO [train.py:715] (6/8) Epoch 17, batch 24500, loss[loss=0.1105, simple_loss=0.1967, pruned_loss=0.01215, over 4822.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2061, pruned_loss=0.02938, over 970372.64 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 03:13:40,455 INFO [train.py:715] (6/8) Epoch 17, batch 24550, loss[loss=0.1532, simple_loss=0.2286, pruned_loss=0.03888, over 4827.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2063, pruned_loss=0.02924, over 970393.68 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 03:14:19,286 INFO [train.py:715] (6/8) Epoch 17, batch 24600, loss[loss=0.1337, simple_loss=0.2057, pruned_loss=0.03087, over 4810.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02898, over 971096.04 frames.], batch size: 27, lr: 1.29e-04 +2022-05-09 03:14:59,441 INFO [train.py:715] (6/8) Epoch 17, batch 24650, loss[loss=0.1177, simple_loss=0.1988, pruned_loss=0.01827, over 4976.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02918, over 971268.86 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 
03:15:39,741 INFO [train.py:715] (6/8) Epoch 17, batch 24700, loss[loss=0.129, simple_loss=0.1953, pruned_loss=0.03133, over 4840.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.02918, over 972553.63 frames.], batch size: 12, lr: 1.29e-04 +2022-05-09 03:16:18,264 INFO [train.py:715] (6/8) Epoch 17, batch 24750, loss[loss=0.1327, simple_loss=0.2035, pruned_loss=0.03094, over 4687.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.02915, over 972541.72 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:16:58,108 INFO [train.py:715] (6/8) Epoch 17, batch 24800, loss[loss=0.1362, simple_loss=0.2108, pruned_loss=0.03077, over 4901.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02926, over 972760.10 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 03:17:37,956 INFO [train.py:715] (6/8) Epoch 17, batch 24850, loss[loss=0.1151, simple_loss=0.1826, pruned_loss=0.02376, over 4938.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02902, over 973221.32 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 03:18:17,584 INFO [train.py:715] (6/8) Epoch 17, batch 24900, loss[loss=0.146, simple_loss=0.2109, pruned_loss=0.04061, over 4966.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02884, over 973705.30 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 03:18:56,118 INFO [train.py:715] (6/8) Epoch 17, batch 24950, loss[loss=0.1219, simple_loss=0.1942, pruned_loss=0.02484, over 4778.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02886, over 973177.13 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:19:35,620 INFO [train.py:715] (6/8) Epoch 17, batch 25000, loss[loss=0.1441, simple_loss=0.2317, pruned_loss=0.0282, over 4879.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02911, over 972906.54 frames.], batch size: 38, lr: 1.29e-04 +2022-05-09 03:20:14,003 INFO [train.py:715] (6/8) Epoch 17, batch 25050, loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03034, over 4885.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02912, over 972873.56 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 03:20:53,001 INFO [train.py:715] (6/8) Epoch 17, batch 25100, loss[loss=0.1278, simple_loss=0.2024, pruned_loss=0.02664, over 4717.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02898, over 973137.24 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:21:32,982 INFO [train.py:715] (6/8) Epoch 17, batch 25150, loss[loss=0.1183, simple_loss=0.1883, pruned_loss=0.02417, over 4826.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02917, over 972617.10 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 03:22:12,877 INFO [train.py:715] (6/8) Epoch 17, batch 25200, loss[loss=0.1253, simple_loss=0.2111, pruned_loss=0.01979, over 4942.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02946, over 972845.85 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 03:22:51,919 INFO [train.py:715] (6/8) Epoch 17, batch 25250, loss[loss=0.1296, simple_loss=0.2067, pruned_loss=0.0262, over 4814.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02918, over 972105.95 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 03:23:31,040 INFO [train.py:715] (6/8) Epoch 17, batch 25300, loss[loss=0.1306, simple_loss=0.2037, pruned_loss=0.02872, over 4700.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2058, pruned_loss=0.02876, over 972820.15 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 
03:24:11,043 INFO [train.py:715] (6/8) Epoch 17, batch 25350, loss[loss=0.1342, simple_loss=0.2025, pruned_loss=0.03289, over 4979.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02886, over 972785.57 frames.], batch size: 27, lr: 1.29e-04 +2022-05-09 03:24:49,789 INFO [train.py:715] (6/8) Epoch 17, batch 25400, loss[loss=0.1616, simple_loss=0.2428, pruned_loss=0.04015, over 4847.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02862, over 973760.23 frames.], batch size: 34, lr: 1.29e-04 +2022-05-09 03:25:28,942 INFO [train.py:715] (6/8) Epoch 17, batch 25450, loss[loss=0.1277, simple_loss=0.2033, pruned_loss=0.02608, over 4791.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02852, over 972822.19 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:26:08,066 INFO [train.py:715] (6/8) Epoch 17, batch 25500, loss[loss=0.1388, simple_loss=0.2028, pruned_loss=0.03737, over 4772.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02858, over 972581.56 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:26:47,844 INFO [train.py:715] (6/8) Epoch 17, batch 25550, loss[loss=0.1505, simple_loss=0.2288, pruned_loss=0.03613, over 4876.00 frames.], tot_loss[loss=0.133, simple_loss=0.2077, pruned_loss=0.02912, over 972121.88 frames.], batch size: 38, lr: 1.29e-04 +2022-05-09 03:27:26,917 INFO [train.py:715] (6/8) Epoch 17, batch 25600, loss[loss=0.1423, simple_loss=0.2141, pruned_loss=0.0352, over 4745.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2081, pruned_loss=0.02928, over 972188.99 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:28:05,428 INFO [train.py:715] (6/8) Epoch 17, batch 25650, loss[loss=0.122, simple_loss=0.1942, pruned_loss=0.02491, over 4839.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2082, pruned_loss=0.02927, over 972502.11 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 03:28:45,200 INFO [train.py:715] (6/8) Epoch 17, batch 25700, loss[loss=0.1345, simple_loss=0.2057, pruned_loss=0.03165, over 4900.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02941, over 972771.61 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:29:24,289 INFO [train.py:715] (6/8) Epoch 17, batch 25750, loss[loss=0.139, simple_loss=0.2216, pruned_loss=0.02819, over 4965.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02936, over 972825.63 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 03:30:03,686 INFO [train.py:715] (6/8) Epoch 17, batch 25800, loss[loss=0.1193, simple_loss=0.1931, pruned_loss=0.02273, over 4879.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02941, over 973353.82 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 03:30:43,166 INFO [train.py:715] (6/8) Epoch 17, batch 25850, loss[loss=0.1092, simple_loss=0.1803, pruned_loss=0.019, over 4778.00 frames.], tot_loss[loss=0.1333, simple_loss=0.208, pruned_loss=0.02929, over 973498.80 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:31:22,528 INFO [train.py:715] (6/8) Epoch 17, batch 25900, loss[loss=0.1007, simple_loss=0.1675, pruned_loss=0.01695, over 4969.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02888, over 973653.16 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:32:01,053 INFO [train.py:715] (6/8) Epoch 17, batch 25950, loss[loss=0.122, simple_loss=0.2028, pruned_loss=0.02058, over 4799.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2081, pruned_loss=0.02959, over 973353.29 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 03:32:39,483 
INFO [train.py:715] (6/8) Epoch 17, batch 26000, loss[loss=0.1166, simple_loss=0.1965, pruned_loss=0.01833, over 4979.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02982, over 973759.70 frames.], batch size: 28, lr: 1.29e-04 +2022-05-09 03:33:19,125 INFO [train.py:715] (6/8) Epoch 17, batch 26050, loss[loss=0.1528, simple_loss=0.2318, pruned_loss=0.03684, over 4823.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2068, pruned_loss=0.02975, over 972909.57 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 03:33:57,731 INFO [train.py:715] (6/8) Epoch 17, batch 26100, loss[loss=0.1227, simple_loss=0.2049, pruned_loss=0.02022, over 4929.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2065, pruned_loss=0.02945, over 972667.99 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 03:34:37,127 INFO [train.py:715] (6/8) Epoch 17, batch 26150, loss[loss=0.1251, simple_loss=0.2034, pruned_loss=0.02337, over 4983.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02909, over 973772.65 frames.], batch size: 28, lr: 1.29e-04 +2022-05-09 03:35:16,508 INFO [train.py:715] (6/8) Epoch 17, batch 26200, loss[loss=0.1429, simple_loss=0.2218, pruned_loss=0.03202, over 4830.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02921, over 973659.91 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 03:35:56,482 INFO [train.py:715] (6/8) Epoch 17, batch 26250, loss[loss=0.1487, simple_loss=0.2146, pruned_loss=0.04139, over 4806.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02889, over 973187.93 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:36:35,148 INFO [train.py:715] (6/8) Epoch 17, batch 26300, loss[loss=0.1369, simple_loss=0.2036, pruned_loss=0.03509, over 4900.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02946, over 974442.78 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:37:13,925 INFO [train.py:715] (6/8) Epoch 17, batch 26350, loss[loss=0.1599, simple_loss=0.2227, pruned_loss=0.04852, over 4981.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02908, over 973872.16 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 03:37:53,869 INFO [train.py:715] (6/8) Epoch 17, batch 26400, loss[loss=0.1189, simple_loss=0.1892, pruned_loss=0.0243, over 4731.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02963, over 973034.03 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:38:32,580 INFO [train.py:715] (6/8) Epoch 17, batch 26450, loss[loss=0.1283, simple_loss=0.2016, pruned_loss=0.02746, over 4947.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02933, over 972801.06 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 03:39:11,789 INFO [train.py:715] (6/8) Epoch 17, batch 26500, loss[loss=0.1279, simple_loss=0.2046, pruned_loss=0.02554, over 4752.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02895, over 972851.67 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:39:51,011 INFO [train.py:715] (6/8) Epoch 17, batch 26550, loss[loss=0.1399, simple_loss=0.2031, pruned_loss=0.03832, over 4774.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.0285, over 972663.17 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:40:29,945 INFO [train.py:715] (6/8) Epoch 17, batch 26600, loss[loss=0.1328, simple_loss=0.2131, pruned_loss=0.02624, over 4863.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02867, over 973745.10 frames.], batch size: 20, lr: 1.29e-04 +2022-05-09 03:41:08,354 INFO 
[train.py:715] (6/8) Epoch 17, batch 26650, loss[loss=0.1961, simple_loss=0.2528, pruned_loss=0.06964, over 4652.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02885, over 972852.54 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 03:41:47,385 INFO [train.py:715] (6/8) Epoch 17, batch 26700, loss[loss=0.1373, simple_loss=0.2131, pruned_loss=0.03074, over 4902.00 frames.], tot_loss[loss=0.132, simple_loss=0.2068, pruned_loss=0.02857, over 972715.42 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:42:26,808 INFO [train.py:715] (6/8) Epoch 17, batch 26750, loss[loss=0.147, simple_loss=0.2307, pruned_loss=0.03163, over 4760.00 frames.], tot_loss[loss=0.132, simple_loss=0.2071, pruned_loss=0.02844, over 973391.38 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:43:05,136 INFO [train.py:715] (6/8) Epoch 17, batch 26800, loss[loss=0.1362, simple_loss=0.2154, pruned_loss=0.02852, over 4825.00 frames.], tot_loss[loss=0.1319, simple_loss=0.207, pruned_loss=0.0284, over 973624.52 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:43:43,933 INFO [train.py:715] (6/8) Epoch 17, batch 26850, loss[loss=0.1117, simple_loss=0.183, pruned_loss=0.02018, over 4914.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2075, pruned_loss=0.0288, over 972642.65 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:44:23,816 INFO [train.py:715] (6/8) Epoch 17, batch 26900, loss[loss=0.1282, simple_loss=0.1996, pruned_loss=0.0284, over 4926.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02856, over 973375.49 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 03:45:02,981 INFO [train.py:715] (6/8) Epoch 17, batch 26950, loss[loss=0.1244, simple_loss=0.2066, pruned_loss=0.02109, over 4785.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02862, over 973177.75 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:45:41,693 INFO [train.py:715] (6/8) Epoch 17, batch 27000, loss[loss=0.1462, simple_loss=0.2045, pruned_loss=0.044, over 4882.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02928, over 972317.26 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:45:41,694 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 03:45:51,483 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1047, simple_loss=0.188, pruned_loss=0.0107, over 914524.00 frames. 
+2022-05-09 03:46:30,447 INFO [train.py:715] (6/8) Epoch 17, batch 27050, loss[loss=0.1062, simple_loss=0.1786, pruned_loss=0.01683, over 4821.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02935, over 972839.10 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 03:47:09,963 INFO [train.py:715] (6/8) Epoch 17, batch 27100, loss[loss=0.1183, simple_loss=0.1879, pruned_loss=0.02437, over 4769.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02935, over 972170.77 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:47:49,464 INFO [train.py:715] (6/8) Epoch 17, batch 27150, loss[loss=0.1575, simple_loss=0.2291, pruned_loss=0.04301, over 4765.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2089, pruned_loss=0.02969, over 972440.39 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:48:27,668 INFO [train.py:715] (6/8) Epoch 17, batch 27200, loss[loss=0.1368, simple_loss=0.2209, pruned_loss=0.02633, over 4907.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02988, over 973384.01 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:49:06,449 INFO [train.py:715] (6/8) Epoch 17, batch 27250, loss[loss=0.1161, simple_loss=0.1829, pruned_loss=0.02466, over 4850.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02964, over 973244.66 frames.], batch size: 32, lr: 1.29e-04 +2022-05-09 03:49:46,077 INFO [train.py:715] (6/8) Epoch 17, batch 27300, loss[loss=0.1437, simple_loss=0.2155, pruned_loss=0.03592, over 4969.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2082, pruned_loss=0.02946, over 972608.46 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:50:25,176 INFO [train.py:715] (6/8) Epoch 17, batch 27350, loss[loss=0.1326, simple_loss=0.2102, pruned_loss=0.02744, over 4840.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02964, over 973779.66 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 03:51:04,614 INFO [train.py:715] (6/8) Epoch 17, batch 27400, loss[loss=0.1378, simple_loss=0.218, pruned_loss=0.02885, over 4866.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02986, over 973379.16 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 03:51:43,504 INFO [train.py:715] (6/8) Epoch 17, batch 27450, loss[loss=0.1234, simple_loss=0.1975, pruned_loss=0.02466, over 4972.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02992, over 973178.27 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:52:23,157 INFO [train.py:715] (6/8) Epoch 17, batch 27500, loss[loss=0.1145, simple_loss=0.1977, pruned_loss=0.01564, over 4902.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02938, over 972815.56 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:53:01,815 INFO [train.py:715] (6/8) Epoch 17, batch 27550, loss[loss=0.1309, simple_loss=0.2107, pruned_loss=0.02555, over 4904.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02917, over 972331.40 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:53:40,308 INFO [train.py:715] (6/8) Epoch 17, batch 27600, loss[loss=0.1441, simple_loss=0.2185, pruned_loss=0.03489, over 4969.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02902, over 972499.44 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 03:54:19,261 INFO [train.py:715] (6/8) Epoch 17, batch 27650, loss[loss=0.1274, simple_loss=0.2034, pruned_loss=0.02572, over 4783.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2071, pruned_loss=0.02876, over 972676.01 frames.], batch size: 17, lr: 1.29e-04 
+2022-05-09 03:54:57,854 INFO [train.py:715] (6/8) Epoch 17, batch 27700, loss[loss=0.1483, simple_loss=0.2202, pruned_loss=0.03822, over 4778.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2073, pruned_loss=0.02909, over 972483.88 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:55:37,182 INFO [train.py:715] (6/8) Epoch 17, batch 27750, loss[loss=0.1279, simple_loss=0.2089, pruned_loss=0.02349, over 4745.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02895, over 972398.35 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:56:16,916 INFO [train.py:715] (6/8) Epoch 17, batch 27800, loss[loss=0.1393, simple_loss=0.2119, pruned_loss=0.03335, over 4800.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02895, over 972347.35 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 03:56:57,484 INFO [train.py:715] (6/8) Epoch 17, batch 27850, loss[loss=0.141, simple_loss=0.2293, pruned_loss=0.02637, over 4910.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.02908, over 970728.43 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 03:57:37,280 INFO [train.py:715] (6/8) Epoch 17, batch 27900, loss[loss=0.1399, simple_loss=0.2067, pruned_loss=0.03661, over 4922.00 frames.], tot_loss[loss=0.1322, simple_loss=0.207, pruned_loss=0.02873, over 971600.89 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:58:16,557 INFO [train.py:715] (6/8) Epoch 17, batch 27950, loss[loss=0.1426, simple_loss=0.2063, pruned_loss=0.0394, over 4770.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02918, over 971680.48 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:58:56,517 INFO [train.py:715] (6/8) Epoch 17, batch 28000, loss[loss=0.1543, simple_loss=0.2207, pruned_loss=0.04395, over 4894.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2082, pruned_loss=0.02949, over 971819.50 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:59:36,523 INFO [train.py:715] (6/8) Epoch 17, batch 28050, loss[loss=0.1207, simple_loss=0.2066, pruned_loss=0.01734, over 4946.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02954, over 972959.96 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 04:00:15,256 INFO [train.py:715] (6/8) Epoch 17, batch 28100, loss[loss=0.1429, simple_loss=0.2119, pruned_loss=0.03695, over 4975.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02953, over 973071.78 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:00:54,617 INFO [train.py:715] (6/8) Epoch 17, batch 28150, loss[loss=0.1447, simple_loss=0.2109, pruned_loss=0.0392, over 4871.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02952, over 972981.30 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:01:33,616 INFO [train.py:715] (6/8) Epoch 17, batch 28200, loss[loss=0.1487, simple_loss=0.2153, pruned_loss=0.04107, over 4859.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02945, over 973241.43 frames.], batch size: 32, lr: 1.29e-04 +2022-05-09 04:02:12,005 INFO [train.py:715] (6/8) Epoch 17, batch 28250, loss[loss=0.1078, simple_loss=0.1766, pruned_loss=0.01947, over 4799.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02936, over 972953.93 frames.], batch size: 12, lr: 1.29e-04 +2022-05-09 04:02:50,450 INFO [train.py:715] (6/8) Epoch 17, batch 28300, loss[loss=0.1309, simple_loss=0.2157, pruned_loss=0.02301, over 4686.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.0292, over 973490.50 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 
04:03:29,619 INFO [train.py:715] (6/8) Epoch 17, batch 28350, loss[loss=0.1338, simple_loss=0.209, pruned_loss=0.02927, over 4983.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02962, over 973051.08 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 04:04:09,198 INFO [train.py:715] (6/8) Epoch 17, batch 28400, loss[loss=0.1334, simple_loss=0.2106, pruned_loss=0.02813, over 4783.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02942, over 973424.57 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:04:48,215 INFO [train.py:715] (6/8) Epoch 17, batch 28450, loss[loss=0.1365, simple_loss=0.2181, pruned_loss=0.02748, over 4801.00 frames.], tot_loss[loss=0.1333, simple_loss=0.208, pruned_loss=0.02932, over 973473.89 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:05:26,447 INFO [train.py:715] (6/8) Epoch 17, batch 28500, loss[loss=0.1258, simple_loss=0.2, pruned_loss=0.02575, over 4969.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2083, pruned_loss=0.02932, over 972891.94 frames.], batch size: 28, lr: 1.29e-04 +2022-05-09 04:06:06,478 INFO [train.py:715] (6/8) Epoch 17, batch 28550, loss[loss=0.1434, simple_loss=0.2077, pruned_loss=0.03952, over 4745.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02948, over 973259.92 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:06:45,104 INFO [train.py:715] (6/8) Epoch 17, batch 28600, loss[loss=0.1204, simple_loss=0.1933, pruned_loss=0.02375, over 4971.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02928, over 972782.97 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 04:07:23,875 INFO [train.py:715] (6/8) Epoch 17, batch 28650, loss[loss=0.1277, simple_loss=0.201, pruned_loss=0.02721, over 4907.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02872, over 972120.61 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:08:02,259 INFO [train.py:715] (6/8) Epoch 17, batch 28700, loss[loss=0.1297, simple_loss=0.2058, pruned_loss=0.02676, over 4952.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02886, over 971757.22 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:08:41,580 INFO [train.py:715] (6/8) Epoch 17, batch 28750, loss[loss=0.1372, simple_loss=0.2074, pruned_loss=0.03351, over 4839.00 frames.], tot_loss[loss=0.132, simple_loss=0.207, pruned_loss=0.02851, over 972442.62 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 04:09:20,212 INFO [train.py:715] (6/8) Epoch 17, batch 28800, loss[loss=0.1174, simple_loss=0.1904, pruned_loss=0.0222, over 4821.00 frames.], tot_loss[loss=0.133, simple_loss=0.2078, pruned_loss=0.0291, over 972157.18 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 04:09:58,910 INFO [train.py:715] (6/8) Epoch 17, batch 28850, loss[loss=0.0999, simple_loss=0.1712, pruned_loss=0.01427, over 4976.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2077, pruned_loss=0.02888, over 972687.02 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 04:10:37,994 INFO [train.py:715] (6/8) Epoch 17, batch 28900, loss[loss=0.1337, simple_loss=0.2071, pruned_loss=0.03008, over 4928.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2082, pruned_loss=0.02915, over 972690.86 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 04:11:16,522 INFO [train.py:715] (6/8) Epoch 17, batch 28950, loss[loss=0.137, simple_loss=0.2093, pruned_loss=0.03228, over 4965.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2085, pruned_loss=0.02941, over 972638.57 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 04:11:54,928 INFO 
[train.py:715] (6/8) Epoch 17, batch 29000, loss[loss=0.131, simple_loss=0.2077, pruned_loss=0.02717, over 4779.00 frames.], tot_loss[loss=0.1329, simple_loss=0.208, pruned_loss=0.02889, over 972409.74 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 04:12:33,662 INFO [train.py:715] (6/8) Epoch 17, batch 29050, loss[loss=0.1312, simple_loss=0.2078, pruned_loss=0.02736, over 4812.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2084, pruned_loss=0.02944, over 973213.33 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:13:13,015 INFO [train.py:715] (6/8) Epoch 17, batch 29100, loss[loss=0.1562, simple_loss=0.2287, pruned_loss=0.04182, over 4944.00 frames.], tot_loss[loss=0.1334, simple_loss=0.208, pruned_loss=0.02934, over 973492.95 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 04:13:51,912 INFO [train.py:715] (6/8) Epoch 17, batch 29150, loss[loss=0.1471, simple_loss=0.223, pruned_loss=0.03564, over 4964.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02969, over 973754.25 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 04:14:30,019 INFO [train.py:715] (6/8) Epoch 17, batch 29200, loss[loss=0.1178, simple_loss=0.1905, pruned_loss=0.0226, over 4891.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02962, over 973309.57 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 04:15:09,527 INFO [train.py:715] (6/8) Epoch 17, batch 29250, loss[loss=0.1075, simple_loss=0.1711, pruned_loss=0.02198, over 4825.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02978, over 973306.52 frames.], batch size: 12, lr: 1.29e-04 +2022-05-09 04:15:49,149 INFO [train.py:715] (6/8) Epoch 17, batch 29300, loss[loss=0.1235, simple_loss=0.2026, pruned_loss=0.02216, over 4833.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.03, over 973320.24 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 04:16:27,575 INFO [train.py:715] (6/8) Epoch 17, batch 29350, loss[loss=0.1211, simple_loss=0.1883, pruned_loss=0.02702, over 4791.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02961, over 972852.93 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 04:17:06,162 INFO [train.py:715] (6/8) Epoch 17, batch 29400, loss[loss=0.1759, simple_loss=0.2472, pruned_loss=0.05235, over 4766.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02966, over 972960.52 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:17:45,842 INFO [train.py:715] (6/8) Epoch 17, batch 29450, loss[loss=0.1252, simple_loss=0.1958, pruned_loss=0.02726, over 4972.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02936, over 972828.84 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 04:18:24,966 INFO [train.py:715] (6/8) Epoch 17, batch 29500, loss[loss=0.1621, simple_loss=0.2323, pruned_loss=0.04594, over 4930.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02996, over 973002.70 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 04:19:03,886 INFO [train.py:715] (6/8) Epoch 17, batch 29550, loss[loss=0.1393, simple_loss=0.2118, pruned_loss=0.0334, over 4931.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02936, over 972754.85 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:19:43,170 INFO [train.py:715] (6/8) Epoch 17, batch 29600, loss[loss=0.1213, simple_loss=0.1958, pruned_loss=0.02339, over 4850.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.0293, over 972825.08 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 04:20:22,745 INFO [train.py:715] 
(6/8) Epoch 17, batch 29650, loss[loss=0.1347, simple_loss=0.208, pruned_loss=0.03071, over 4815.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2066, pruned_loss=0.02953, over 972542.97 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 04:21:01,521 INFO [train.py:715] (6/8) Epoch 17, batch 29700, loss[loss=0.1121, simple_loss=0.182, pruned_loss=0.02114, over 4729.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2063, pruned_loss=0.02951, over 972184.45 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:21:40,468 INFO [train.py:715] (6/8) Epoch 17, batch 29750, loss[loss=0.1341, simple_loss=0.2016, pruned_loss=0.03334, over 4880.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2059, pruned_loss=0.02914, over 973079.56 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 04:22:20,624 INFO [train.py:715] (6/8) Epoch 17, batch 29800, loss[loss=0.1388, simple_loss=0.2182, pruned_loss=0.02973, over 4926.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02912, over 973263.20 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:22:59,619 INFO [train.py:715] (6/8) Epoch 17, batch 29850, loss[loss=0.1105, simple_loss=0.1818, pruned_loss=0.01963, over 4821.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02939, over 972740.14 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 04:23:38,915 INFO [train.py:715] (6/8) Epoch 17, batch 29900, loss[loss=0.1184, simple_loss=0.1913, pruned_loss=0.02277, over 4789.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2062, pruned_loss=0.02922, over 972060.33 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 04:24:18,624 INFO [train.py:715] (6/8) Epoch 17, batch 29950, loss[loss=0.1255, simple_loss=0.2138, pruned_loss=0.01854, over 4941.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02917, over 972536.60 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 04:24:58,029 INFO [train.py:715] (6/8) Epoch 17, batch 30000, loss[loss=0.125, simple_loss=0.193, pruned_loss=0.02851, over 4963.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.029, over 972525.53 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 04:24:58,030 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 04:25:08,261 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1047, simple_loss=0.188, pruned_loss=0.01065, over 914524.00 frames. 
+2022-05-09 04:25:48,091 INFO [train.py:715] (6/8) Epoch 17, batch 30050, loss[loss=0.1204, simple_loss=0.2088, pruned_loss=0.01603, over 4827.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02878, over 972890.76 frames.], batch size: 27, lr: 1.29e-04 +2022-05-09 04:26:27,729 INFO [train.py:715] (6/8) Epoch 17, batch 30100, loss[loss=0.124, simple_loss=0.2053, pruned_loss=0.0213, over 4881.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.0291, over 972359.82 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:27:06,814 INFO [train.py:715] (6/8) Epoch 17, batch 30150, loss[loss=0.1082, simple_loss=0.1744, pruned_loss=0.02096, over 4690.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02927, over 971932.55 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:27:46,313 INFO [train.py:715] (6/8) Epoch 17, batch 30200, loss[loss=0.1648, simple_loss=0.2413, pruned_loss=0.04412, over 4936.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02929, over 971705.33 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 04:28:25,426 INFO [train.py:715] (6/8) Epoch 17, batch 30250, loss[loss=0.1087, simple_loss=0.1824, pruned_loss=0.01751, over 4800.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02918, over 972384.41 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:29:04,423 INFO [train.py:715] (6/8) Epoch 17, batch 30300, loss[loss=0.1345, simple_loss=0.2062, pruned_loss=0.03141, over 4751.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02939, over 972967.96 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 04:29:44,189 INFO [train.py:715] (6/8) Epoch 17, batch 30350, loss[loss=0.1241, simple_loss=0.2025, pruned_loss=0.02282, over 4796.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2065, pruned_loss=0.02935, over 972223.06 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 04:30:23,370 INFO [train.py:715] (6/8) Epoch 17, batch 30400, loss[loss=0.1664, simple_loss=0.2304, pruned_loss=0.05118, over 4807.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02927, over 971619.49 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 04:31:02,094 INFO [train.py:715] (6/8) Epoch 17, batch 30450, loss[loss=0.1317, simple_loss=0.2135, pruned_loss=0.02493, over 4819.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02872, over 971971.63 frames.], batch size: 27, lr: 1.29e-04 +2022-05-09 04:31:41,826 INFO [train.py:715] (6/8) Epoch 17, batch 30500, loss[loss=0.1161, simple_loss=0.1959, pruned_loss=0.0181, over 4749.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02892, over 972382.78 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:32:21,640 INFO [train.py:715] (6/8) Epoch 17, batch 30550, loss[loss=0.1255, simple_loss=0.2045, pruned_loss=0.02321, over 4899.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02914, over 971809.27 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:33:01,425 INFO [train.py:715] (6/8) Epoch 17, batch 30600, loss[loss=0.1075, simple_loss=0.1806, pruned_loss=0.01721, over 4880.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2073, pruned_loss=0.02911, over 971989.77 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:33:40,319 INFO [train.py:715] (6/8) Epoch 17, batch 30650, loss[loss=0.132, simple_loss=0.2117, pruned_loss=0.02614, over 4805.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02922, over 971986.41 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 
04:34:20,061 INFO [train.py:715] (6/8) Epoch 17, batch 30700, loss[loss=0.1457, simple_loss=0.2245, pruned_loss=0.03346, over 4982.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02909, over 972197.35 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 04:34:59,088 INFO [train.py:715] (6/8) Epoch 17, batch 30750, loss[loss=0.1179, simple_loss=0.1944, pruned_loss=0.02069, over 4809.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02932, over 971800.56 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:35:38,918 INFO [train.py:715] (6/8) Epoch 17, batch 30800, loss[loss=0.1539, simple_loss=0.2378, pruned_loss=0.03499, over 4875.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02949, over 971964.20 frames.], batch size: 20, lr: 1.29e-04 +2022-05-09 04:36:18,142 INFO [train.py:715] (6/8) Epoch 17, batch 30850, loss[loss=0.1281, simple_loss=0.1999, pruned_loss=0.02815, over 4778.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02939, over 972343.67 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 04:36:58,384 INFO [train.py:715] (6/8) Epoch 17, batch 30900, loss[loss=0.1042, simple_loss=0.1681, pruned_loss=0.02019, over 4951.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.0287, over 971485.31 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 04:37:38,033 INFO [train.py:715] (6/8) Epoch 17, batch 30950, loss[loss=0.1402, simple_loss=0.2074, pruned_loss=0.03652, over 4987.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02907, over 971445.54 frames.], batch size: 31, lr: 1.29e-04 +2022-05-09 04:38:17,302 INFO [train.py:715] (6/8) Epoch 17, batch 31000, loss[loss=0.121, simple_loss=0.1891, pruned_loss=0.0264, over 4766.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02945, over 971318.29 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:38:57,028 INFO [train.py:715] (6/8) Epoch 17, batch 31050, loss[loss=0.112, simple_loss=0.1954, pruned_loss=0.01427, over 4804.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2053, pruned_loss=0.02851, over 971562.11 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 04:39:36,075 INFO [train.py:715] (6/8) Epoch 17, batch 31100, loss[loss=0.1297, simple_loss=0.2134, pruned_loss=0.02297, over 4749.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02966, over 972354.03 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:40:15,212 INFO [train.py:715] (6/8) Epoch 17, batch 31150, loss[loss=0.1413, simple_loss=0.2064, pruned_loss=0.03807, over 4828.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.0298, over 973414.33 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:40:54,505 INFO [train.py:715] (6/8) Epoch 17, batch 31200, loss[loss=0.1418, simple_loss=0.2127, pruned_loss=0.03542, over 4865.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02989, over 973078.60 frames.], batch size: 32, lr: 1.29e-04 +2022-05-09 04:41:34,599 INFO [train.py:715] (6/8) Epoch 17, batch 31250, loss[loss=0.1761, simple_loss=0.2512, pruned_loss=0.05051, over 4828.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2075, pruned_loss=0.03014, over 972700.62 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:42:13,896 INFO [train.py:715] (6/8) Epoch 17, batch 31300, loss[loss=0.1345, simple_loss=0.1994, pruned_loss=0.03485, over 4820.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2071, pruned_loss=0.03015, over 973002.74 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 
04:42:53,284 INFO [train.py:715] (6/8) Epoch 17, batch 31350, loss[loss=0.1232, simple_loss=0.1935, pruned_loss=0.0265, over 4936.00 frames.], tot_loss[loss=0.133, simple_loss=0.2068, pruned_loss=0.02966, over 973114.38 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 04:43:32,644 INFO [train.py:715] (6/8) Epoch 17, batch 31400, loss[loss=0.1418, simple_loss=0.2212, pruned_loss=0.0312, over 4969.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02931, over 973213.55 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 04:44:11,254 INFO [train.py:715] (6/8) Epoch 17, batch 31450, loss[loss=0.1347, simple_loss=0.2196, pruned_loss=0.02494, over 4926.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02949, over 973393.05 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:44:51,222 INFO [train.py:715] (6/8) Epoch 17, batch 31500, loss[loss=0.1223, simple_loss=0.1886, pruned_loss=0.02802, over 4767.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.0295, over 972918.63 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:45:29,942 INFO [train.py:715] (6/8) Epoch 17, batch 31550, loss[loss=0.1258, simple_loss=0.2108, pruned_loss=0.0204, over 4810.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2063, pruned_loss=0.02927, over 973753.99 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 04:46:09,497 INFO [train.py:715] (6/8) Epoch 17, batch 31600, loss[loss=0.1371, simple_loss=0.2103, pruned_loss=0.03194, over 4830.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2056, pruned_loss=0.02896, over 972966.40 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 04:46:48,902 INFO [train.py:715] (6/8) Epoch 17, batch 31650, loss[loss=0.1204, simple_loss=0.1972, pruned_loss=0.0218, over 4903.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02872, over 972514.16 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 04:47:28,186 INFO [train.py:715] (6/8) Epoch 17, batch 31700, loss[loss=0.148, simple_loss=0.236, pruned_loss=0.03002, over 4883.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02841, over 972123.93 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:48:07,939 INFO [train.py:715] (6/8) Epoch 17, batch 31750, loss[loss=0.1789, simple_loss=0.2585, pruned_loss=0.04967, over 4978.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02873, over 971823.20 frames.], batch size: 28, lr: 1.29e-04 +2022-05-09 04:48:47,180 INFO [train.py:715] (6/8) Epoch 17, batch 31800, loss[loss=0.1009, simple_loss=0.1798, pruned_loss=0.011, over 4790.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2063, pruned_loss=0.02816, over 970901.05 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 04:49:27,386 INFO [train.py:715] (6/8) Epoch 17, batch 31850, loss[loss=0.1445, simple_loss=0.2191, pruned_loss=0.03491, over 4795.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2068, pruned_loss=0.02847, over 970630.39 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 04:50:06,507 INFO [train.py:715] (6/8) Epoch 17, batch 31900, loss[loss=0.1456, simple_loss=0.221, pruned_loss=0.03511, over 4781.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2075, pruned_loss=0.02883, over 970513.96 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:50:45,991 INFO [train.py:715] (6/8) Epoch 17, batch 31950, loss[loss=0.1277, simple_loss=0.2115, pruned_loss=0.02191, over 4805.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2076, pruned_loss=0.02871, over 970698.52 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 04:51:25,764 INFO 
[train.py:715] (6/8) Epoch 17, batch 32000, loss[loss=0.1273, simple_loss=0.2073, pruned_loss=0.02366, over 4893.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2082, pruned_loss=0.02879, over 971350.24 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:52:04,649 INFO [train.py:715] (6/8) Epoch 17, batch 32050, loss[loss=0.1649, simple_loss=0.2349, pruned_loss=0.04744, over 4811.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2075, pruned_loss=0.02867, over 971363.19 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:52:44,372 INFO [train.py:715] (6/8) Epoch 17, batch 32100, loss[loss=0.1213, simple_loss=0.2001, pruned_loss=0.02124, over 4884.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2075, pruned_loss=0.02873, over 971367.83 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:53:23,406 INFO [train.py:715] (6/8) Epoch 17, batch 32150, loss[loss=0.1449, simple_loss=0.2127, pruned_loss=0.03851, over 4835.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02854, over 971347.39 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 04:54:02,776 INFO [train.py:715] (6/8) Epoch 17, batch 32200, loss[loss=0.141, simple_loss=0.2221, pruned_loss=0.02993, over 4922.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02861, over 972717.52 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:54:45,064 INFO [train.py:715] (6/8) Epoch 17, batch 32250, loss[loss=0.1377, simple_loss=0.2091, pruned_loss=0.03317, over 4936.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02843, over 971843.67 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 04:55:24,423 INFO [train.py:715] (6/8) Epoch 17, batch 32300, loss[loss=0.1072, simple_loss=0.1843, pruned_loss=0.01506, over 4866.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2057, pruned_loss=0.02794, over 971990.88 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:56:04,329 INFO [train.py:715] (6/8) Epoch 17, batch 32350, loss[loss=0.152, simple_loss=0.2194, pruned_loss=0.0423, over 4854.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2062, pruned_loss=0.02813, over 972965.93 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 04:56:43,385 INFO [train.py:715] (6/8) Epoch 17, batch 32400, loss[loss=0.1142, simple_loss=0.1989, pruned_loss=0.01479, over 4929.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02814, over 972829.97 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:57:22,535 INFO [train.py:715] (6/8) Epoch 17, batch 32450, loss[loss=0.1607, simple_loss=0.2401, pruned_loss=0.04063, over 4950.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02856, over 973378.07 frames.], batch size: 21, lr: 1.28e-04 +2022-05-09 04:58:02,575 INFO [train.py:715] (6/8) Epoch 17, batch 32500, loss[loss=0.1166, simple_loss=0.1939, pruned_loss=0.01965, over 4909.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02864, over 973464.31 frames.], batch size: 29, lr: 1.28e-04 +2022-05-09 04:58:41,973 INFO [train.py:715] (6/8) Epoch 17, batch 32550, loss[loss=0.134, simple_loss=0.2027, pruned_loss=0.03267, over 4901.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02874, over 973456.27 frames.], batch size: 17, lr: 1.28e-04 +2022-05-09 04:59:21,562 INFO [train.py:715] (6/8) Epoch 17, batch 32600, loss[loss=0.1674, simple_loss=0.2414, pruned_loss=0.04675, over 4974.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02894, over 973367.27 frames.], batch size: 14, lr: 1.28e-04 +2022-05-09 05:00:01,071 INFO 
[train.py:715] (6/8) Epoch 17, batch 32650, loss[loss=0.1298, simple_loss=0.2025, pruned_loss=0.02854, over 4907.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02866, over 973277.09 frames.], batch size: 18, lr: 1.28e-04 +2022-05-09 05:00:39,810 INFO [train.py:715] (6/8) Epoch 17, batch 32700, loss[loss=0.1725, simple_loss=0.2322, pruned_loss=0.05637, over 4904.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02823, over 972665.12 frames.], batch size: 17, lr: 1.28e-04 +2022-05-09 05:01:19,992 INFO [train.py:715] (6/8) Epoch 17, batch 32750, loss[loss=0.1478, simple_loss=0.2132, pruned_loss=0.04122, over 4760.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02821, over 972498.27 frames.], batch size: 12, lr: 1.28e-04 +2022-05-09 05:01:59,337 INFO [train.py:715] (6/8) Epoch 17, batch 32800, loss[loss=0.1112, simple_loss=0.173, pruned_loss=0.02467, over 4867.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2049, pruned_loss=0.02779, over 973158.43 frames.], batch size: 13, lr: 1.28e-04 +2022-05-09 05:02:38,974 INFO [train.py:715] (6/8) Epoch 17, batch 32850, loss[loss=0.1105, simple_loss=0.1859, pruned_loss=0.01758, over 4825.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2043, pruned_loss=0.02771, over 972960.60 frames.], batch size: 26, lr: 1.28e-04 +2022-05-09 05:03:18,524 INFO [train.py:715] (6/8) Epoch 17, batch 32900, loss[loss=0.1232, simple_loss=0.1941, pruned_loss=0.0261, over 4920.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2041, pruned_loss=0.02763, over 973375.65 frames.], batch size: 29, lr: 1.28e-04 +2022-05-09 05:03:58,029 INFO [train.py:715] (6/8) Epoch 17, batch 32950, loss[loss=0.1476, simple_loss=0.2354, pruned_loss=0.02983, over 4784.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2048, pruned_loss=0.02797, over 973455.79 frames.], batch size: 14, lr: 1.28e-04 +2022-05-09 05:04:36,963 INFO [train.py:715] (6/8) Epoch 17, batch 33000, loss[loss=0.1429, simple_loss=0.2161, pruned_loss=0.03485, over 4973.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02839, over 972644.63 frames.], batch size: 35, lr: 1.28e-04 +2022-05-09 05:04:36,964 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 05:04:49,645 INFO [train.py:742] (6/8) Epoch 17, validation: loss=0.1049, simple_loss=0.1881, pruned_loss=0.0108, over 914524.00 frames. 
+2022-05-09 05:05:28,991 INFO [train.py:715] (6/8) Epoch 17, batch 33050, loss[loss=0.1146, simple_loss=0.193, pruned_loss=0.01806, over 4891.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02887, over 971822.80 frames.], batch size: 19, lr: 1.28e-04 +2022-05-09 05:06:08,150 INFO [train.py:715] (6/8) Epoch 17, batch 33100, loss[loss=0.1187, simple_loss=0.199, pruned_loss=0.01914, over 4983.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.0287, over 972774.04 frames.], batch size: 25, lr: 1.28e-04 +2022-05-09 05:06:47,451 INFO [train.py:715] (6/8) Epoch 17, batch 33150, loss[loss=0.1143, simple_loss=0.1869, pruned_loss=0.02087, over 4835.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02865, over 972971.19 frames.], batch size: 15, lr: 1.28e-04 +2022-05-09 05:07:27,187 INFO [train.py:715] (6/8) Epoch 17, batch 33200, loss[loss=0.1265, simple_loss=0.2029, pruned_loss=0.025, over 4757.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02889, over 972291.04 frames.], batch size: 16, lr: 1.28e-04 +2022-05-09 05:08:06,800 INFO [train.py:715] (6/8) Epoch 17, batch 33250, loss[loss=0.1223, simple_loss=0.2022, pruned_loss=0.02117, over 4911.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02933, over 972797.52 frames.], batch size: 17, lr: 1.28e-04 +2022-05-09 05:08:46,109 INFO [train.py:715] (6/8) Epoch 17, batch 33300, loss[loss=0.129, simple_loss=0.2039, pruned_loss=0.02709, over 4942.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2074, pruned_loss=0.02893, over 972458.53 frames.], batch size: 23, lr: 1.28e-04 +2022-05-09 05:09:25,688 INFO [train.py:715] (6/8) Epoch 17, batch 33350, loss[loss=0.1165, simple_loss=0.1962, pruned_loss=0.01846, over 4744.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2079, pruned_loss=0.02926, over 972361.58 frames.], batch size: 16, lr: 1.28e-04 +2022-05-09 05:10:05,502 INFO [train.py:715] (6/8) Epoch 17, batch 33400, loss[loss=0.1379, simple_loss=0.2055, pruned_loss=0.03519, over 4934.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.0293, over 972470.26 frames.], batch size: 29, lr: 1.28e-04 +2022-05-09 05:10:44,830 INFO [train.py:715] (6/8) Epoch 17, batch 33450, loss[loss=0.1324, simple_loss=0.2062, pruned_loss=0.02934, over 4941.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02923, over 972743.63 frames.], batch size: 35, lr: 1.28e-04 +2022-05-09 05:11:24,379 INFO [train.py:715] (6/8) Epoch 17, batch 33500, loss[loss=0.15, simple_loss=0.2389, pruned_loss=0.03051, over 4933.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02909, over 972665.90 frames.], batch size: 23, lr: 1.28e-04 +2022-05-09 05:12:04,592 INFO [train.py:715] (6/8) Epoch 17, batch 33550, loss[loss=0.1439, simple_loss=0.2231, pruned_loss=0.03234, over 4643.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02928, over 972551.98 frames.], batch size: 13, lr: 1.28e-04 +2022-05-09 05:12:44,750 INFO [train.py:715] (6/8) Epoch 17, batch 33600, loss[loss=0.1204, simple_loss=0.1965, pruned_loss=0.02218, over 4960.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02937, over 973161.02 frames.], batch size: 24, lr: 1.28e-04 +2022-05-09 05:13:23,725 INFO [train.py:715] (6/8) Epoch 17, batch 33650, loss[loss=0.1286, simple_loss=0.2023, pruned_loss=0.02748, over 4906.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02916, over 973666.04 frames.], batch size: 17, lr: 1.28e-04 +2022-05-09 
05:14:03,362 INFO [train.py:715] (6/8) Epoch 17, batch 33700, loss[loss=0.136, simple_loss=0.2016, pruned_loss=0.03516, over 4867.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02913, over 973990.28 frames.], batch size: 34, lr: 1.28e-04 +2022-05-09 05:14:42,582 INFO [train.py:715] (6/8) Epoch 17, batch 33750, loss[loss=0.1386, simple_loss=0.2052, pruned_loss=0.03597, over 4926.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02913, over 973199.03 frames.], batch size: 18, lr: 1.28e-04 +2022-05-09 05:15:21,395 INFO [train.py:715] (6/8) Epoch 17, batch 33800, loss[loss=0.1285, simple_loss=0.2051, pruned_loss=0.02597, over 4937.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02924, over 972599.47 frames.], batch size: 29, lr: 1.28e-04 +2022-05-09 05:16:01,532 INFO [train.py:715] (6/8) Epoch 17, batch 33850, loss[loss=0.1279, simple_loss=0.2045, pruned_loss=0.02569, over 4782.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02902, over 971419.82 frames.], batch size: 14, lr: 1.28e-04 +2022-05-09 05:16:41,837 INFO [train.py:715] (6/8) Epoch 17, batch 33900, loss[loss=0.1004, simple_loss=0.1681, pruned_loss=0.01634, over 4765.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02943, over 971805.68 frames.], batch size: 19, lr: 1.28e-04 +2022-05-09 05:17:21,091 INFO [train.py:715] (6/8) Epoch 17, batch 33950, loss[loss=0.1285, simple_loss=0.213, pruned_loss=0.02202, over 4975.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02899, over 972094.35 frames.], batch size: 15, lr: 1.28e-04 +2022-05-09 05:18:00,094 INFO [train.py:715] (6/8) Epoch 17, batch 34000, loss[loss=0.1439, simple_loss=0.2158, pruned_loss=0.03604, over 4749.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02911, over 971557.56 frames.], batch size: 16, lr: 1.28e-04 +2022-05-09 05:18:39,512 INFO [train.py:715] (6/8) Epoch 17, batch 34050, loss[loss=0.1282, simple_loss=0.2028, pruned_loss=0.02679, over 4866.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02897, over 971045.76 frames.], batch size: 16, lr: 1.28e-04 +2022-05-09 05:19:19,508 INFO [train.py:715] (6/8) Epoch 17, batch 34100, loss[loss=0.1491, simple_loss=0.2117, pruned_loss=0.04323, over 4834.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02925, over 971270.04 frames.], batch size: 15, lr: 1.28e-04 +2022-05-09 05:19:58,312 INFO [train.py:715] (6/8) Epoch 17, batch 34150, loss[loss=0.1716, simple_loss=0.2183, pruned_loss=0.06246, over 4763.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.02945, over 972758.13 frames.], batch size: 19, lr: 1.28e-04 +2022-05-09 05:20:37,456 INFO [train.py:715] (6/8) Epoch 17, batch 34200, loss[loss=0.1204, simple_loss=0.2021, pruned_loss=0.01935, over 4889.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02961, over 972562.29 frames.], batch size: 22, lr: 1.28e-04 +2022-05-09 05:21:16,561 INFO [train.py:715] (6/8) Epoch 17, batch 34250, loss[loss=0.1064, simple_loss=0.1828, pruned_loss=0.01506, over 4813.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2081, pruned_loss=0.02906, over 972837.28 frames.], batch size: 13, lr: 1.28e-04 +2022-05-09 05:21:55,282 INFO [train.py:715] (6/8) Epoch 17, batch 34300, loss[loss=0.1273, simple_loss=0.208, pruned_loss=0.02328, over 4912.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2082, pruned_loss=0.02915, over 974040.89 frames.], batch size: 39, lr: 1.28e-04 +2022-05-09 05:22:34,168 
INFO [train.py:715] (6/8) Epoch 17, batch 34350, loss[loss=0.1191, simple_loss=0.1935, pruned_loss=0.02238, over 4933.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2066, pruned_loss=0.02836, over 973652.38 frames.], batch size: 29, lr: 1.28e-04 +2022-05-09 05:23:13,528 INFO [train.py:715] (6/8) Epoch 17, batch 34400, loss[loss=0.1142, simple_loss=0.1951, pruned_loss=0.01663, over 4911.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02829, over 973983.94 frames.], batch size: 19, lr: 1.28e-04 +2022-05-09 05:23:52,517 INFO [train.py:715] (6/8) Epoch 17, batch 34450, loss[loss=0.1374, simple_loss=0.216, pruned_loss=0.02936, over 4801.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02839, over 973920.92 frames.], batch size: 24, lr: 1.28e-04 +2022-05-09 05:24:30,973 INFO [train.py:715] (6/8) Epoch 17, batch 34500, loss[loss=0.1335, simple_loss=0.2092, pruned_loss=0.02891, over 4649.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2058, pruned_loss=0.02882, over 972658.12 frames.], batch size: 13, lr: 1.28e-04 +2022-05-09 05:25:09,847 INFO [train.py:715] (6/8) Epoch 17, batch 34550, loss[loss=0.1245, simple_loss=0.202, pruned_loss=0.02346, over 4751.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02862, over 972841.56 frames.], batch size: 16, lr: 1.28e-04 +2022-05-09 05:25:48,999 INFO [train.py:715] (6/8) Epoch 17, batch 34600, loss[loss=0.1361, simple_loss=0.2139, pruned_loss=0.02911, over 4754.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02901, over 972960.09 frames.], batch size: 19, lr: 1.28e-04 +2022-05-09 05:26:27,696 INFO [train.py:715] (6/8) Epoch 17, batch 34650, loss[loss=0.1236, simple_loss=0.2053, pruned_loss=0.02089, over 4783.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02885, over 974114.15 frames.], batch size: 14, lr: 1.28e-04 +2022-05-09 05:27:06,965 INFO [train.py:715] (6/8) Epoch 17, batch 34700, loss[loss=0.1241, simple_loss=0.2025, pruned_loss=0.02285, over 4818.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02876, over 974049.22 frames.], batch size: 25, lr: 1.28e-04 +2022-05-09 05:27:45,528 INFO [train.py:715] (6/8) Epoch 17, batch 34750, loss[loss=0.1738, simple_loss=0.242, pruned_loss=0.05283, over 4934.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02892, over 973807.53 frames.], batch size: 21, lr: 1.28e-04 +2022-05-09 05:28:22,200 INFO [train.py:715] (6/8) Epoch 17, batch 34800, loss[loss=0.1445, simple_loss=0.2122, pruned_loss=0.03834, over 4779.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02925, over 972202.50 frames.], batch size: 12, lr: 1.28e-04 +2022-05-09 05:29:12,361 INFO [train.py:715] (6/8) Epoch 18, batch 0, loss[loss=0.1237, simple_loss=0.2034, pruned_loss=0.02198, over 4818.00 frames.], tot_loss[loss=0.1237, simple_loss=0.2034, pruned_loss=0.02198, over 4818.00 frames.], batch size: 26, lr: 1.25e-04 +2022-05-09 05:29:51,057 INFO [train.py:715] (6/8) Epoch 18, batch 50, loss[loss=0.126, simple_loss=0.1874, pruned_loss=0.03232, over 4918.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2065, pruned_loss=0.03067, over 219802.70 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 05:30:31,045 INFO [train.py:715] (6/8) Epoch 18, batch 100, loss[loss=0.132, simple_loss=0.2085, pruned_loss=0.02773, over 4811.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02894, over 387408.32 frames.], batch size: 25, lr: 1.25e-04 +2022-05-09 05:31:10,962 INFO [train.py:715] 
(6/8) Epoch 18, batch 150, loss[loss=0.1079, simple_loss=0.1827, pruned_loss=0.01655, over 4732.00 frames.], tot_loss[loss=0.1327, simple_loss=0.206, pruned_loss=0.02964, over 516841.89 frames.], batch size: 12, lr: 1.25e-04 +2022-05-09 05:31:50,267 INFO [train.py:715] (6/8) Epoch 18, batch 200, loss[loss=0.1375, simple_loss=0.2174, pruned_loss=0.02877, over 4930.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2063, pruned_loss=0.02935, over 618112.95 frames.], batch size: 23, lr: 1.25e-04 +2022-05-09 05:32:29,112 INFO [train.py:715] (6/8) Epoch 18, batch 250, loss[loss=0.1318, simple_loss=0.2116, pruned_loss=0.02602, over 4781.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02976, over 695962.59 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 05:33:08,585 INFO [train.py:715] (6/8) Epoch 18, batch 300, loss[loss=0.132, simple_loss=0.212, pruned_loss=0.02606, over 4798.00 frames.], tot_loss[loss=0.134, simple_loss=0.2076, pruned_loss=0.03026, over 757484.99 frames.], batch size: 21, lr: 1.25e-04 +2022-05-09 05:33:48,413 INFO [train.py:715] (6/8) Epoch 18, batch 350, loss[loss=0.2128, simple_loss=0.2942, pruned_loss=0.06572, over 4888.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02988, over 805020.53 frames.], batch size: 16, lr: 1.25e-04 +2022-05-09 05:34:27,374 INFO [train.py:715] (6/8) Epoch 18, batch 400, loss[loss=0.1298, simple_loss=0.2011, pruned_loss=0.02922, over 4698.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03028, over 841301.92 frames.], batch size: 15, lr: 1.25e-04 +2022-05-09 05:35:07,164 INFO [train.py:715] (6/8) Epoch 18, batch 450, loss[loss=0.1258, simple_loss=0.2078, pruned_loss=0.02189, over 4749.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03045, over 870653.22 frames.], batch size: 19, lr: 1.25e-04 +2022-05-09 05:35:47,346 INFO [train.py:715] (6/8) Epoch 18, batch 500, loss[loss=0.147, simple_loss=0.2166, pruned_loss=0.03868, over 4825.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03022, over 893068.33 frames.], batch size: 15, lr: 1.25e-04 +2022-05-09 05:36:27,095 INFO [train.py:715] (6/8) Epoch 18, batch 550, loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03086, over 4811.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02942, over 910101.33 frames.], batch size: 27, lr: 1.25e-04 +2022-05-09 05:37:06,108 INFO [train.py:715] (6/8) Epoch 18, batch 600, loss[loss=0.1562, simple_loss=0.2268, pruned_loss=0.04279, over 4771.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2081, pruned_loss=0.02957, over 923988.55 frames.], batch size: 14, lr: 1.25e-04 +2022-05-09 05:37:45,640 INFO [train.py:715] (6/8) Epoch 18, batch 650, loss[loss=0.1407, simple_loss=0.2067, pruned_loss=0.03735, over 4920.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02959, over 935228.99 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 05:38:25,480 INFO [train.py:715] (6/8) Epoch 18, batch 700, loss[loss=0.1161, simple_loss=0.1936, pruned_loss=0.01926, over 4792.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02894, over 943890.16 frames.], batch size: 24, lr: 1.25e-04 +2022-05-09 05:39:04,431 INFO [train.py:715] (6/8) Epoch 18, batch 750, loss[loss=0.1253, simple_loss=0.1901, pruned_loss=0.03027, over 4805.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02957, over 950493.50 frames.], batch size: 15, lr: 1.25e-04 +2022-05-09 05:39:43,257 INFO [train.py:715] (6/8) Epoch 18, batch 800, 
loss[loss=0.1109, simple_loss=0.1777, pruned_loss=0.02207, over 4766.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02947, over 956518.35 frames.], batch size: 19, lr: 1.25e-04 +2022-05-09 05:40:22,749 INFO [train.py:715] (6/8) Epoch 18, batch 850, loss[loss=0.1332, simple_loss=0.2055, pruned_loss=0.03048, over 4863.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2065, pruned_loss=0.02952, over 959314.79 frames.], batch size: 32, lr: 1.25e-04 +2022-05-09 05:41:02,307 INFO [train.py:715] (6/8) Epoch 18, batch 900, loss[loss=0.1416, simple_loss=0.2158, pruned_loss=0.03375, over 4786.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2065, pruned_loss=0.02956, over 962314.50 frames.], batch size: 24, lr: 1.25e-04 +2022-05-09 05:41:41,281 INFO [train.py:715] (6/8) Epoch 18, batch 950, loss[loss=0.1288, simple_loss=0.2059, pruned_loss=0.0258, over 4855.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2067, pruned_loss=0.02977, over 964477.15 frames.], batch size: 20, lr: 1.25e-04 +2022-05-09 05:42:20,890 INFO [train.py:715] (6/8) Epoch 18, batch 1000, loss[loss=0.1241, simple_loss=0.2019, pruned_loss=0.0231, over 4882.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2061, pruned_loss=0.02913, over 966526.86 frames.], batch size: 22, lr: 1.25e-04 +2022-05-09 05:43:00,532 INFO [train.py:715] (6/8) Epoch 18, batch 1050, loss[loss=0.1282, simple_loss=0.2163, pruned_loss=0.02006, over 4930.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02893, over 968656.22 frames.], batch size: 23, lr: 1.25e-04 +2022-05-09 05:43:39,935 INFO [train.py:715] (6/8) Epoch 18, batch 1100, loss[loss=0.1295, simple_loss=0.1964, pruned_loss=0.03128, over 4815.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02903, over 970030.19 frames.], batch size: 12, lr: 1.25e-04 +2022-05-09 05:44:18,728 INFO [train.py:715] (6/8) Epoch 18, batch 1150, loss[loss=0.17, simple_loss=0.2368, pruned_loss=0.05163, over 4689.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.029, over 970291.81 frames.], batch size: 15, lr: 1.25e-04 +2022-05-09 05:44:58,549 INFO [train.py:715] (6/8) Epoch 18, batch 1200, loss[loss=0.119, simple_loss=0.1941, pruned_loss=0.02199, over 4939.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2057, pruned_loss=0.02852, over 971431.06 frames.], batch size: 35, lr: 1.25e-04 +2022-05-09 05:45:38,524 INFO [train.py:715] (6/8) Epoch 18, batch 1250, loss[loss=0.1259, simple_loss=0.1988, pruned_loss=0.02651, over 4835.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02883, over 970602.01 frames.], batch size: 30, lr: 1.25e-04 +2022-05-09 05:46:17,555 INFO [train.py:715] (6/8) Epoch 18, batch 1300, loss[loss=0.1155, simple_loss=0.1947, pruned_loss=0.01813, over 4927.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02899, over 970910.30 frames.], batch size: 29, lr: 1.25e-04 +2022-05-09 05:46:56,375 INFO [train.py:715] (6/8) Epoch 18, batch 1350, loss[loss=0.1226, simple_loss=0.1991, pruned_loss=0.0231, over 4968.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02881, over 971112.68 frames.], batch size: 24, lr: 1.25e-04 +2022-05-09 05:47:35,781 INFO [train.py:715] (6/8) Epoch 18, batch 1400, loss[loss=0.1477, simple_loss=0.2172, pruned_loss=0.03909, over 4876.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2056, pruned_loss=0.02888, over 972019.36 frames.], batch size: 16, lr: 1.25e-04 +2022-05-09 05:48:15,009 INFO [train.py:715] (6/8) Epoch 18, batch 1450, loss[loss=0.1671, 
simple_loss=0.2205, pruned_loss=0.05684, over 4789.00 frames.], tot_loss[loss=0.132, simple_loss=0.2058, pruned_loss=0.02907, over 972441.71 frames.], batch size: 12, lr: 1.25e-04 +2022-05-09 05:48:53,405 INFO [train.py:715] (6/8) Epoch 18, batch 1500, loss[loss=0.1385, simple_loss=0.2065, pruned_loss=0.03525, over 4871.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02913, over 972443.20 frames.], batch size: 20, lr: 1.25e-04 +2022-05-09 05:49:32,908 INFO [train.py:715] (6/8) Epoch 18, batch 1550, loss[loss=0.1629, simple_loss=0.2229, pruned_loss=0.05146, over 4909.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02945, over 972391.21 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 05:50:12,328 INFO [train.py:715] (6/8) Epoch 18, batch 1600, loss[loss=0.1101, simple_loss=0.1834, pruned_loss=0.01836, over 4783.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02917, over 972032.81 frames.], batch size: 12, lr: 1.25e-04 +2022-05-09 05:50:51,524 INFO [train.py:715] (6/8) Epoch 18, batch 1650, loss[loss=0.1342, simple_loss=0.2118, pruned_loss=0.02831, over 4691.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2053, pruned_loss=0.02845, over 971993.68 frames.], batch size: 15, lr: 1.25e-04 +2022-05-09 05:51:30,470 INFO [train.py:715] (6/8) Epoch 18, batch 1700, loss[loss=0.1494, simple_loss=0.2321, pruned_loss=0.03335, over 4977.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02906, over 972200.16 frames.], batch size: 24, lr: 1.25e-04 +2022-05-09 05:52:09,886 INFO [train.py:715] (6/8) Epoch 18, batch 1750, loss[loss=0.1136, simple_loss=0.1962, pruned_loss=0.01553, over 4987.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2054, pruned_loss=0.02877, over 971771.42 frames.], batch size: 28, lr: 1.25e-04 +2022-05-09 05:52:49,172 INFO [train.py:715] (6/8) Epoch 18, batch 1800, loss[loss=0.1357, simple_loss=0.2127, pruned_loss=0.02932, over 4935.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2055, pruned_loss=0.02878, over 972395.29 frames.], batch size: 23, lr: 1.25e-04 +2022-05-09 05:53:27,456 INFO [train.py:715] (6/8) Epoch 18, batch 1850, loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02941, over 4892.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02913, over 972579.47 frames.], batch size: 22, lr: 1.25e-04 +2022-05-09 05:54:06,245 INFO [train.py:715] (6/8) Epoch 18, batch 1900, loss[loss=0.1028, simple_loss=0.1838, pruned_loss=0.01087, over 4979.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02904, over 972353.88 frames.], batch size: 26, lr: 1.25e-04 +2022-05-09 05:54:45,624 INFO [train.py:715] (6/8) Epoch 18, batch 1950, loss[loss=0.1208, simple_loss=0.1866, pruned_loss=0.0275, over 4861.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02917, over 972971.60 frames.], batch size: 13, lr: 1.25e-04 +2022-05-09 05:55:24,356 INFO [train.py:715] (6/8) Epoch 18, batch 2000, loss[loss=0.1435, simple_loss=0.2117, pruned_loss=0.03764, over 4795.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02916, over 972809.72 frames.], batch size: 14, lr: 1.25e-04 +2022-05-09 05:56:02,849 INFO [train.py:715] (6/8) Epoch 18, batch 2050, loss[loss=0.1146, simple_loss=0.1954, pruned_loss=0.01692, over 4781.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02903, over 972701.92 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 05:56:42,082 INFO [train.py:715] (6/8) Epoch 18, batch 2100, loss[loss=0.1425, simple_loss=0.2164, 
pruned_loss=0.03428, over 4923.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.02871, over 972571.62 frames.], batch size: 18, lr: 1.25e-04 +2022-05-09 05:57:21,527 INFO [train.py:715] (6/8) Epoch 18, batch 2150, loss[loss=0.1521, simple_loss=0.2177, pruned_loss=0.04327, over 4826.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02886, over 973433.42 frames.], batch size: 30, lr: 1.25e-04 +2022-05-09 05:57:59,834 INFO [train.py:715] (6/8) Epoch 18, batch 2200, loss[loss=0.1227, simple_loss=0.2004, pruned_loss=0.02245, over 4985.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02846, over 972787.85 frames.], batch size: 28, lr: 1.25e-04 +2022-05-09 05:58:39,483 INFO [train.py:715] (6/8) Epoch 18, batch 2250, loss[loss=0.09324, simple_loss=0.1589, pruned_loss=0.0138, over 4828.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2057, pruned_loss=0.02807, over 972652.61 frames.], batch size: 13, lr: 1.25e-04 +2022-05-09 05:59:18,831 INFO [train.py:715] (6/8) Epoch 18, batch 2300, loss[loss=0.1301, simple_loss=0.204, pruned_loss=0.02812, over 4821.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2051, pruned_loss=0.02799, over 972066.79 frames.], batch size: 13, lr: 1.25e-04 +2022-05-09 05:59:57,624 INFO [train.py:715] (6/8) Epoch 18, batch 2350, loss[loss=0.1206, simple_loss=0.195, pruned_loss=0.02308, over 4922.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2047, pruned_loss=0.02787, over 972127.91 frames.], batch size: 29, lr: 1.25e-04 +2022-05-09 06:00:36,234 INFO [train.py:715] (6/8) Epoch 18, batch 2400, loss[loss=0.1178, simple_loss=0.2, pruned_loss=0.01775, over 4785.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2047, pruned_loss=0.02825, over 972467.50 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 06:01:15,696 INFO [train.py:715] (6/8) Epoch 18, batch 2450, loss[loss=0.1435, simple_loss=0.2187, pruned_loss=0.03409, over 4979.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02833, over 972832.93 frames.], batch size: 25, lr: 1.25e-04 +2022-05-09 06:01:55,090 INFO [train.py:715] (6/8) Epoch 18, batch 2500, loss[loss=0.1451, simple_loss=0.2112, pruned_loss=0.03947, over 4745.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2054, pruned_loss=0.02851, over 972410.12 frames.], batch size: 16, lr: 1.25e-04 +2022-05-09 06:02:33,098 INFO [train.py:715] (6/8) Epoch 18, batch 2550, loss[loss=0.1543, simple_loss=0.2111, pruned_loss=0.04879, over 4897.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02892, over 972520.74 frames.], batch size: 19, lr: 1.25e-04 +2022-05-09 06:03:11,870 INFO [train.py:715] (6/8) Epoch 18, batch 2600, loss[loss=0.1261, simple_loss=0.1903, pruned_loss=0.031, over 4809.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02902, over 972209.35 frames.], batch size: 12, lr: 1.25e-04 +2022-05-09 06:03:51,793 INFO [train.py:715] (6/8) Epoch 18, batch 2650, loss[loss=0.1446, simple_loss=0.2197, pruned_loss=0.03471, over 4878.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02871, over 972740.95 frames.], batch size: 16, lr: 1.25e-04 +2022-05-09 06:04:30,530 INFO [train.py:715] (6/8) Epoch 18, batch 2700, loss[loss=0.1582, simple_loss=0.2178, pruned_loss=0.04934, over 4837.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02895, over 973222.90 frames.], batch size: 30, lr: 1.25e-04 +2022-05-09 06:05:08,892 INFO [train.py:715] (6/8) Epoch 18, batch 2750, loss[loss=0.1403, simple_loss=0.2107, pruned_loss=0.03495, over 
4954.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02911, over 972550.59 frames.], batch size: 14, lr: 1.25e-04 +2022-05-09 06:05:47,993 INFO [train.py:715] (6/8) Epoch 18, batch 2800, loss[loss=0.1591, simple_loss=0.2354, pruned_loss=0.04139, over 4967.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02939, over 972330.20 frames.], batch size: 35, lr: 1.25e-04 +2022-05-09 06:06:27,524 INFO [train.py:715] (6/8) Epoch 18, batch 2850, loss[loss=0.1521, simple_loss=0.2214, pruned_loss=0.04137, over 4866.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02918, over 972726.98 frames.], batch size: 16, lr: 1.25e-04 +2022-05-09 06:07:06,111 INFO [train.py:715] (6/8) Epoch 18, batch 2900, loss[loss=0.1463, simple_loss=0.2364, pruned_loss=0.0281, over 4961.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02962, over 972202.38 frames.], batch size: 24, lr: 1.25e-04 +2022-05-09 06:07:44,919 INFO [train.py:715] (6/8) Epoch 18, batch 2950, loss[loss=0.1081, simple_loss=0.1792, pruned_loss=0.01844, over 4815.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02897, over 972375.08 frames.], batch size: 27, lr: 1.25e-04 +2022-05-09 06:08:24,286 INFO [train.py:715] (6/8) Epoch 18, batch 3000, loss[loss=0.1544, simple_loss=0.224, pruned_loss=0.0424, over 4781.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.02906, over 972311.91 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 06:08:24,286 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 06:08:34,097 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1047, simple_loss=0.1881, pruned_loss=0.01065, over 914524.00 frames. +2022-05-09 06:09:14,108 INFO [train.py:715] (6/8) Epoch 18, batch 3050, loss[loss=0.1604, simple_loss=0.2355, pruned_loss=0.04262, over 4944.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02905, over 972326.50 frames.], batch size: 39, lr: 1.25e-04 +2022-05-09 06:09:52,628 INFO [train.py:715] (6/8) Epoch 18, batch 3100, loss[loss=0.148, simple_loss=0.2198, pruned_loss=0.03811, over 4952.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02916, over 972641.25 frames.], batch size: 39, lr: 1.25e-04 +2022-05-09 06:10:31,514 INFO [train.py:715] (6/8) Epoch 18, batch 3150, loss[loss=0.1387, simple_loss=0.2185, pruned_loss=0.02949, over 4917.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02924, over 972163.69 frames.], batch size: 19, lr: 1.25e-04 +2022-05-09 06:11:10,550 INFO [train.py:715] (6/8) Epoch 18, batch 3200, loss[loss=0.127, simple_loss=0.204, pruned_loss=0.02504, over 4908.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2066, pruned_loss=0.02947, over 972398.07 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:11:50,032 INFO [train.py:715] (6/8) Epoch 18, batch 3250, loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03411, over 4901.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2066, pruned_loss=0.02984, over 972283.31 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 06:12:28,194 INFO [train.py:715] (6/8) Epoch 18, batch 3300, loss[loss=0.1226, simple_loss=0.2074, pruned_loss=0.01891, over 4926.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02978, over 973073.28 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 06:13:07,652 INFO [train.py:715] (6/8) Epoch 18, batch 3350, loss[loss=0.191, simple_loss=0.2639, pruned_loss=0.05906, over 4801.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, 
pruned_loss=0.02988, over 972874.50 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 06:13:47,790 INFO [train.py:715] (6/8) Epoch 18, batch 3400, loss[loss=0.1493, simple_loss=0.2124, pruned_loss=0.04309, over 4943.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03035, over 973044.37 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:14:26,395 INFO [train.py:715] (6/8) Epoch 18, batch 3450, loss[loss=0.1219, simple_loss=0.2021, pruned_loss=0.02083, over 4786.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02997, over 971779.60 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:15:05,251 INFO [train.py:715] (6/8) Epoch 18, batch 3500, loss[loss=0.13, simple_loss=0.2023, pruned_loss=0.02888, over 4914.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03021, over 972740.58 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:15:45,337 INFO [train.py:715] (6/8) Epoch 18, batch 3550, loss[loss=0.117, simple_loss=0.1939, pruned_loss=0.02006, over 4956.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2072, pruned_loss=0.03009, over 971650.47 frames.], batch size: 39, lr: 1.24e-04 +2022-05-09 06:16:24,514 INFO [train.py:715] (6/8) Epoch 18, batch 3600, loss[loss=0.1603, simple_loss=0.2397, pruned_loss=0.04044, over 4774.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2067, pruned_loss=0.02998, over 971969.43 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:17:03,261 INFO [train.py:715] (6/8) Epoch 18, batch 3650, loss[loss=0.1157, simple_loss=0.1859, pruned_loss=0.0227, over 4945.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2059, pruned_loss=0.02932, over 972680.64 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 06:17:42,732 INFO [train.py:715] (6/8) Epoch 18, batch 3700, loss[loss=0.1135, simple_loss=0.184, pruned_loss=0.02154, over 4955.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2063, pruned_loss=0.0295, over 972761.47 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:18:22,002 INFO [train.py:715] (6/8) Epoch 18, batch 3750, loss[loss=0.1481, simple_loss=0.2254, pruned_loss=0.03538, over 4979.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02936, over 973510.12 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 06:18:59,955 INFO [train.py:715] (6/8) Epoch 18, batch 3800, loss[loss=0.1359, simple_loss=0.2067, pruned_loss=0.03253, over 4882.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02893, over 973888.74 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 06:19:39,334 INFO [train.py:715] (6/8) Epoch 18, batch 3850, loss[loss=0.1091, simple_loss=0.1847, pruned_loss=0.01668, over 4966.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02909, over 974299.98 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 06:20:19,351 INFO [train.py:715] (6/8) Epoch 18, batch 3900, loss[loss=0.1348, simple_loss=0.1981, pruned_loss=0.0357, over 4954.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2072, pruned_loss=0.02964, over 972907.64 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:20:57,825 INFO [train.py:715] (6/8) Epoch 18, batch 3950, loss[loss=0.1184, simple_loss=0.1823, pruned_loss=0.02727, over 4985.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02932, over 972824.29 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 06:21:37,243 INFO [train.py:715] (6/8) Epoch 18, batch 4000, loss[loss=0.1159, simple_loss=0.1984, pruned_loss=0.01675, over 4793.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02909, over 
971665.98 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:22:16,738 INFO [train.py:715] (6/8) Epoch 18, batch 4050, loss[loss=0.1346, simple_loss=0.2077, pruned_loss=0.03079, over 4905.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02925, over 971974.48 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 06:22:56,016 INFO [train.py:715] (6/8) Epoch 18, batch 4100, loss[loss=0.1108, simple_loss=0.1814, pruned_loss=0.02008, over 4813.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02955, over 972415.41 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 06:23:34,764 INFO [train.py:715] (6/8) Epoch 18, batch 4150, loss[loss=0.1323, simple_loss=0.216, pruned_loss=0.02429, over 4908.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02979, over 973066.24 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 06:24:14,201 INFO [train.py:715] (6/8) Epoch 18, batch 4200, loss[loss=0.1554, simple_loss=0.219, pruned_loss=0.04593, over 4934.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02908, over 973205.19 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 06:24:53,586 INFO [train.py:715] (6/8) Epoch 18, batch 4250, loss[loss=0.1462, simple_loss=0.2195, pruned_loss=0.03644, over 4936.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02899, over 972734.04 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 06:25:32,493 INFO [train.py:715] (6/8) Epoch 18, batch 4300, loss[loss=0.1093, simple_loss=0.1834, pruned_loss=0.01756, over 4799.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.0291, over 973058.23 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:26:12,602 INFO [train.py:715] (6/8) Epoch 18, batch 4350, loss[loss=0.145, simple_loss=0.209, pruned_loss=0.04047, over 4919.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02929, over 973487.03 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 06:26:52,062 INFO [train.py:715] (6/8) Epoch 18, batch 4400, loss[loss=0.1041, simple_loss=0.1815, pruned_loss=0.01339, over 4830.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02893, over 972115.67 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:27:31,552 INFO [train.py:715] (6/8) Epoch 18, batch 4450, loss[loss=0.1058, simple_loss=0.1795, pruned_loss=0.01604, over 4829.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02828, over 972180.33 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 06:28:09,905 INFO [train.py:715] (6/8) Epoch 18, batch 4500, loss[loss=0.1254, simple_loss=0.2068, pruned_loss=0.02195, over 4815.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.0284, over 973009.02 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:28:49,169 INFO [train.py:715] (6/8) Epoch 18, batch 4550, loss[loss=0.1623, simple_loss=0.235, pruned_loss=0.04486, over 4845.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2066, pruned_loss=0.02846, over 971984.95 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:29:29,020 INFO [train.py:715] (6/8) Epoch 18, batch 4600, loss[loss=0.1217, simple_loss=0.1939, pruned_loss=0.02471, over 4951.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02866, over 971895.71 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:30:07,897 INFO [train.py:715] (6/8) Epoch 18, batch 4650, loss[loss=0.14, simple_loss=0.212, pruned_loss=0.03398, over 4850.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02875, over 971796.77 frames.], batch size: 
32, lr: 1.24e-04 +2022-05-09 06:30:47,016 INFO [train.py:715] (6/8) Epoch 18, batch 4700, loss[loss=0.1129, simple_loss=0.1756, pruned_loss=0.02506, over 4904.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2059, pruned_loss=0.02913, over 971929.57 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:31:26,068 INFO [train.py:715] (6/8) Epoch 18, batch 4750, loss[loss=0.1202, simple_loss=0.1815, pruned_loss=0.02946, over 4848.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2064, pruned_loss=0.02967, over 972823.80 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 06:32:06,184 INFO [train.py:715] (6/8) Epoch 18, batch 4800, loss[loss=0.1204, simple_loss=0.1979, pruned_loss=0.02148, over 4695.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2066, pruned_loss=0.02988, over 972957.24 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:32:44,920 INFO [train.py:715] (6/8) Epoch 18, batch 4850, loss[loss=0.1628, simple_loss=0.2512, pruned_loss=0.03725, over 4916.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2068, pruned_loss=0.02977, over 973306.41 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 06:33:24,377 INFO [train.py:715] (6/8) Epoch 18, batch 4900, loss[loss=0.1316, simple_loss=0.2078, pruned_loss=0.02774, over 4804.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02928, over 972852.63 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:34:04,567 INFO [train.py:715] (6/8) Epoch 18, batch 4950, loss[loss=0.1288, simple_loss=0.2096, pruned_loss=0.02399, over 4831.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02914, over 973653.90 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 06:34:43,676 INFO [train.py:715] (6/8) Epoch 18, batch 5000, loss[loss=0.1426, simple_loss=0.2155, pruned_loss=0.03487, over 4773.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02945, over 972611.95 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:35:22,381 INFO [train.py:715] (6/8) Epoch 18, batch 5050, loss[loss=0.115, simple_loss=0.1894, pruned_loss=0.02031, over 4855.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02919, over 971900.12 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 06:36:01,532 INFO [train.py:715] (6/8) Epoch 18, batch 5100, loss[loss=0.1416, simple_loss=0.2171, pruned_loss=0.03306, over 4944.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02935, over 972566.70 frames.], batch size: 39, lr: 1.24e-04 +2022-05-09 06:36:41,089 INFO [train.py:715] (6/8) Epoch 18, batch 5150, loss[loss=0.1331, simple_loss=0.2113, pruned_loss=0.02749, over 4794.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02951, over 972775.84 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 06:37:19,654 INFO [train.py:715] (6/8) Epoch 18, batch 5200, loss[loss=0.09949, simple_loss=0.1767, pruned_loss=0.01115, over 4806.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02926, over 972622.72 frames.], batch size: 12, lr: 1.24e-04 +2022-05-09 06:37:59,022 INFO [train.py:715] (6/8) Epoch 18, batch 5250, loss[loss=0.1513, simple_loss=0.2203, pruned_loss=0.04111, over 4963.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02949, over 972916.09 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:38:38,941 INFO [train.py:715] (6/8) Epoch 18, batch 5300, loss[loss=0.1636, simple_loss=0.2381, pruned_loss=0.0445, over 4695.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02897, over 972484.16 frames.], batch size: 15, lr: 1.24e-04 
+2022-05-09 06:39:18,965 INFO [train.py:715] (6/8) Epoch 18, batch 5350, loss[loss=0.1164, simple_loss=0.1919, pruned_loss=0.02044, over 4843.00 frames.], tot_loss[loss=0.1323, simple_loss=0.207, pruned_loss=0.02877, over 972408.47 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 06:39:57,059 INFO [train.py:715] (6/8) Epoch 18, batch 5400, loss[loss=0.1428, simple_loss=0.2241, pruned_loss=0.03077, over 4816.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2073, pruned_loss=0.02869, over 973093.76 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 06:40:38,719 INFO [train.py:715] (6/8) Epoch 18, batch 5450, loss[loss=0.1165, simple_loss=0.1931, pruned_loss=0.01994, over 4764.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.0285, over 972540.61 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 06:41:19,098 INFO [train.py:715] (6/8) Epoch 18, batch 5500, loss[loss=0.1332, simple_loss=0.2085, pruned_loss=0.029, over 4804.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02903, over 973071.29 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 06:41:58,081 INFO [train.py:715] (6/8) Epoch 18, batch 5550, loss[loss=0.1207, simple_loss=0.1978, pruned_loss=0.02174, over 4738.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.0291, over 973157.31 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 06:42:36,886 INFO [train.py:715] (6/8) Epoch 18, batch 5600, loss[loss=0.1434, simple_loss=0.2168, pruned_loss=0.03499, over 4889.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02886, over 973063.70 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:43:15,927 INFO [train.py:715] (6/8) Epoch 18, batch 5650, loss[loss=0.1272, simple_loss=0.2002, pruned_loss=0.02708, over 4737.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02894, over 973335.46 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 06:43:55,547 INFO [train.py:715] (6/8) Epoch 18, batch 5700, loss[loss=0.1414, simple_loss=0.2196, pruned_loss=0.03164, over 4897.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02892, over 972828.66 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 06:44:33,669 INFO [train.py:715] (6/8) Epoch 18, batch 5750, loss[loss=0.1451, simple_loss=0.226, pruned_loss=0.03208, over 4905.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02942, over 972981.60 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 06:45:12,600 INFO [train.py:715] (6/8) Epoch 18, batch 5800, loss[loss=0.1311, simple_loss=0.2033, pruned_loss=0.02944, over 4842.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02889, over 972346.32 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 06:45:52,341 INFO [train.py:715] (6/8) Epoch 18, batch 5850, loss[loss=0.1127, simple_loss=0.1881, pruned_loss=0.01866, over 4842.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2071, pruned_loss=0.02855, over 972904.58 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 06:46:31,450 INFO [train.py:715] (6/8) Epoch 18, batch 5900, loss[loss=0.139, simple_loss=0.209, pruned_loss=0.0345, over 4851.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2072, pruned_loss=0.02883, over 972073.66 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:47:10,145 INFO [train.py:715] (6/8) Epoch 18, batch 5950, loss[loss=0.1625, simple_loss=0.2228, pruned_loss=0.05107, over 4971.00 frames.], tot_loss[loss=0.133, simple_loss=0.208, pruned_loss=0.02897, over 971617.53 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 06:47:49,550 INFO 
[train.py:715] (6/8) Epoch 18, batch 6000, loss[loss=0.1094, simple_loss=0.1794, pruned_loss=0.0197, over 4808.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2066, pruned_loss=0.02839, over 972270.92 frames.], batch size: 12, lr: 1.24e-04 +2022-05-09 06:47:49,551 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 06:47:59,475 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1047, simple_loss=0.188, pruned_loss=0.01075, over 914524.00 frames. +2022-05-09 06:48:39,113 INFO [train.py:715] (6/8) Epoch 18, batch 6050, loss[loss=0.1478, simple_loss=0.2232, pruned_loss=0.03619, over 4963.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2068, pruned_loss=0.02875, over 972347.03 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 06:49:18,302 INFO [train.py:715] (6/8) Epoch 18, batch 6100, loss[loss=0.1163, simple_loss=0.1917, pruned_loss=0.02047, over 4972.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02884, over 972259.96 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 06:49:56,630 INFO [train.py:715] (6/8) Epoch 18, batch 6150, loss[loss=0.146, simple_loss=0.2165, pruned_loss=0.03775, over 4796.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02939, over 972136.12 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:50:35,921 INFO [train.py:715] (6/8) Epoch 18, batch 6200, loss[loss=0.138, simple_loss=0.2242, pruned_loss=0.02596, over 4802.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02922, over 971143.72 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 06:51:15,501 INFO [train.py:715] (6/8) Epoch 18, batch 6250, loss[loss=0.1424, simple_loss=0.2153, pruned_loss=0.03473, over 4847.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02919, over 971050.83 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 06:51:54,533 INFO [train.py:715] (6/8) Epoch 18, batch 6300, loss[loss=0.1485, simple_loss=0.2118, pruned_loss=0.04262, over 4955.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02962, over 970529.48 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 06:52:33,705 INFO [train.py:715] (6/8) Epoch 18, batch 6350, loss[loss=0.1544, simple_loss=0.2376, pruned_loss=0.03559, over 4795.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02937, over 970846.92 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:53:12,893 INFO [train.py:715] (6/8) Epoch 18, batch 6400, loss[loss=0.1273, simple_loss=0.2086, pruned_loss=0.02303, over 4806.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.0287, over 971483.25 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 06:53:52,076 INFO [train.py:715] (6/8) Epoch 18, batch 6450, loss[loss=0.131, simple_loss=0.2082, pruned_loss=0.02691, over 4829.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2074, pruned_loss=0.02898, over 972155.27 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 06:54:30,359 INFO [train.py:715] (6/8) Epoch 18, batch 6500, loss[loss=0.1337, simple_loss=0.1904, pruned_loss=0.03849, over 4829.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.0288, over 971640.84 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 06:55:08,643 INFO [train.py:715] (6/8) Epoch 18, batch 6550, loss[loss=0.1441, simple_loss=0.2217, pruned_loss=0.03328, over 4883.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02921, over 972098.29 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 06:55:48,104 INFO [train.py:715] (6/8) Epoch 18, batch 6600, loss[loss=0.1481, 
simple_loss=0.2189, pruned_loss=0.03865, over 4910.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02914, over 972407.05 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:56:27,460 INFO [train.py:715] (6/8) Epoch 18, batch 6650, loss[loss=0.1471, simple_loss=0.2138, pruned_loss=0.04018, over 4903.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02917, over 972192.87 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:57:05,503 INFO [train.py:715] (6/8) Epoch 18, batch 6700, loss[loss=0.1654, simple_loss=0.2427, pruned_loss=0.044, over 4815.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02912, over 971784.98 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:57:44,487 INFO [train.py:715] (6/8) Epoch 18, batch 6750, loss[loss=0.1126, simple_loss=0.1841, pruned_loss=0.02053, over 4859.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02925, over 972386.61 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 06:58:23,847 INFO [train.py:715] (6/8) Epoch 18, batch 6800, loss[loss=0.1386, simple_loss=0.204, pruned_loss=0.03663, over 4987.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02865, over 972081.18 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 06:59:02,556 INFO [train.py:715] (6/8) Epoch 18, batch 6850, loss[loss=0.1225, simple_loss=0.2025, pruned_loss=0.02125, over 4835.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.0289, over 972077.09 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 06:59:40,724 INFO [train.py:715] (6/8) Epoch 18, batch 6900, loss[loss=0.1392, simple_loss=0.2191, pruned_loss=0.02962, over 4749.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.0293, over 971575.03 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 07:00:20,319 INFO [train.py:715] (6/8) Epoch 18, batch 6950, loss[loss=0.136, simple_loss=0.2206, pruned_loss=0.02571, over 4776.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02879, over 971496.55 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:00:59,042 INFO [train.py:715] (6/8) Epoch 18, batch 7000, loss[loss=0.1422, simple_loss=0.217, pruned_loss=0.03372, over 4882.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02873, over 971945.59 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 07:01:37,424 INFO [train.py:715] (6/8) Epoch 18, batch 7050, loss[loss=0.1456, simple_loss=0.2212, pruned_loss=0.03503, over 4839.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02902, over 971709.26 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 07:02:16,627 INFO [train.py:715] (6/8) Epoch 18, batch 7100, loss[loss=0.1472, simple_loss=0.2221, pruned_loss=0.03614, over 4970.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02872, over 971959.93 frames.], batch size: 39, lr: 1.24e-04 +2022-05-09 07:02:56,208 INFO [train.py:715] (6/8) Epoch 18, batch 7150, loss[loss=0.1402, simple_loss=0.2222, pruned_loss=0.0291, over 4937.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.0289, over 972470.78 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:03:34,832 INFO [train.py:715] (6/8) Epoch 18, batch 7200, loss[loss=0.108, simple_loss=0.1782, pruned_loss=0.01888, over 4834.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.02849, over 973172.45 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 07:04:13,067 INFO [train.py:715] (6/8) Epoch 18, batch 7250, loss[loss=0.1161, simple_loss=0.1868, 
pruned_loss=0.02268, over 4963.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2059, pruned_loss=0.0281, over 972047.92 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:04:52,164 INFO [train.py:715] (6/8) Epoch 18, batch 7300, loss[loss=0.1211, simple_loss=0.1881, pruned_loss=0.02701, over 4974.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02832, over 972164.27 frames.], batch size: 39, lr: 1.24e-04 +2022-05-09 07:05:31,288 INFO [train.py:715] (6/8) Epoch 18, batch 7350, loss[loss=0.1328, simple_loss=0.2103, pruned_loss=0.02763, over 4970.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02826, over 971882.50 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 07:06:09,361 INFO [train.py:715] (6/8) Epoch 18, batch 7400, loss[loss=0.1545, simple_loss=0.2283, pruned_loss=0.04041, over 4811.00 frames.], tot_loss[loss=0.1311, simple_loss=0.206, pruned_loss=0.02812, over 971649.06 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 07:06:48,515 INFO [train.py:715] (6/8) Epoch 18, batch 7450, loss[loss=0.1267, simple_loss=0.1834, pruned_loss=0.03496, over 4789.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02858, over 972209.67 frames.], batch size: 12, lr: 1.24e-04 +2022-05-09 07:07:27,763 INFO [train.py:715] (6/8) Epoch 18, batch 7500, loss[loss=0.1023, simple_loss=0.1737, pruned_loss=0.01549, over 4691.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02888, over 971832.70 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:08:05,374 INFO [train.py:715] (6/8) Epoch 18, batch 7550, loss[loss=0.1366, simple_loss=0.2134, pruned_loss=0.02997, over 4752.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02888, over 971847.54 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:08:43,910 INFO [train.py:715] (6/8) Epoch 18, batch 7600, loss[loss=0.1308, simple_loss=0.2054, pruned_loss=0.02806, over 4691.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02895, over 972126.19 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:09:23,642 INFO [train.py:715] (6/8) Epoch 18, batch 7650, loss[loss=0.1403, simple_loss=0.2216, pruned_loss=0.02955, over 4977.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02891, over 972660.32 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:10:02,922 INFO [train.py:715] (6/8) Epoch 18, batch 7700, loss[loss=0.1248, simple_loss=0.1984, pruned_loss=0.0256, over 4856.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02902, over 973273.51 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 07:10:41,613 INFO [train.py:715] (6/8) Epoch 18, batch 7750, loss[loss=0.152, simple_loss=0.2206, pruned_loss=0.04175, over 4833.00 frames.], tot_loss[loss=0.133, simple_loss=0.2076, pruned_loss=0.02922, over 972863.07 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 07:11:21,216 INFO [train.py:715] (6/8) Epoch 18, batch 7800, loss[loss=0.1296, simple_loss=0.2026, pruned_loss=0.02831, over 4817.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.0295, over 971921.23 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:12:01,093 INFO [train.py:715] (6/8) Epoch 18, batch 7850, loss[loss=0.1231, simple_loss=0.201, pruned_loss=0.02263, over 4762.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2073, pruned_loss=0.02898, over 971890.09 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:12:40,479 INFO [train.py:715] (6/8) Epoch 18, batch 7900, loss[loss=0.1271, simple_loss=0.2017, pruned_loss=0.02623, over 
4795.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2073, pruned_loss=0.0288, over 972356.21 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 07:13:19,679 INFO [train.py:715] (6/8) Epoch 18, batch 7950, loss[loss=0.1063, simple_loss=0.1822, pruned_loss=0.01523, over 4916.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.0292, over 971376.67 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:13:59,117 INFO [train.py:715] (6/8) Epoch 18, batch 8000, loss[loss=0.1171, simple_loss=0.1919, pruned_loss=0.02111, over 4819.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2077, pruned_loss=0.02926, over 971156.55 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:14:38,132 INFO [train.py:715] (6/8) Epoch 18, batch 8050, loss[loss=0.1486, simple_loss=0.2215, pruned_loss=0.03784, over 4855.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02947, over 971884.29 frames.], batch size: 38, lr: 1.24e-04 +2022-05-09 07:15:16,609 INFO [train.py:715] (6/8) Epoch 18, batch 8100, loss[loss=0.1178, simple_loss=0.1948, pruned_loss=0.02042, over 4989.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.02924, over 971785.21 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:15:55,251 INFO [train.py:715] (6/8) Epoch 18, batch 8150, loss[loss=0.1355, simple_loss=0.2181, pruned_loss=0.02643, over 4815.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02898, over 971699.95 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 07:16:34,312 INFO [train.py:715] (6/8) Epoch 18, batch 8200, loss[loss=0.1164, simple_loss=0.1982, pruned_loss=0.01728, over 4923.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02871, over 972194.58 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:17:12,930 INFO [train.py:715] (6/8) Epoch 18, batch 8250, loss[loss=0.1308, simple_loss=0.2132, pruned_loss=0.02417, over 4980.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2072, pruned_loss=0.02877, over 972070.29 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:17:51,222 INFO [train.py:715] (6/8) Epoch 18, batch 8300, loss[loss=0.1379, simple_loss=0.2044, pruned_loss=0.03568, over 4784.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02883, over 972379.11 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 07:18:31,301 INFO [train.py:715] (6/8) Epoch 18, batch 8350, loss[loss=0.1324, simple_loss=0.2098, pruned_loss=0.02752, over 4964.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02916, over 972362.06 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 07:19:10,481 INFO [train.py:715] (6/8) Epoch 18, batch 8400, loss[loss=0.1167, simple_loss=0.1942, pruned_loss=0.0196, over 4821.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02933, over 972582.62 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:19:48,921 INFO [train.py:715] (6/8) Epoch 18, batch 8450, loss[loss=0.1303, simple_loss=0.2097, pruned_loss=0.02546, over 4888.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02902, over 972257.78 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:20:28,159 INFO [train.py:715] (6/8) Epoch 18, batch 8500, loss[loss=0.1197, simple_loss=0.203, pruned_loss=0.0182, over 4968.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2049, pruned_loss=0.02844, over 972912.67 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 07:21:07,338 INFO [train.py:715] (6/8) Epoch 18, batch 8550, loss[loss=0.1191, simple_loss=0.1878, pruned_loss=0.02521, over 4881.00 frames.], 
tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02899, over 971788.53 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 07:21:46,040 INFO [train.py:715] (6/8) Epoch 18, batch 8600, loss[loss=0.1389, simple_loss=0.2179, pruned_loss=0.03, over 4823.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02906, over 972391.87 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 07:22:24,246 INFO [train.py:715] (6/8) Epoch 18, batch 8650, loss[loss=0.1291, simple_loss=0.2101, pruned_loss=0.02404, over 4811.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2056, pruned_loss=0.0287, over 971789.40 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 07:23:03,809 INFO [train.py:715] (6/8) Epoch 18, batch 8700, loss[loss=0.1318, simple_loss=0.1961, pruned_loss=0.03376, over 4822.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2049, pruned_loss=0.02878, over 970998.63 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:23:43,655 INFO [train.py:715] (6/8) Epoch 18, batch 8750, loss[loss=0.1124, simple_loss=0.1949, pruned_loss=0.01498, over 4826.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2051, pruned_loss=0.02876, over 970965.08 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 07:24:23,138 INFO [train.py:715] (6/8) Epoch 18, batch 8800, loss[loss=0.1472, simple_loss=0.2233, pruned_loss=0.03553, over 4761.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2048, pruned_loss=0.0287, over 971771.75 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:25:01,508 INFO [train.py:715] (6/8) Epoch 18, batch 8850, loss[loss=0.1111, simple_loss=0.1799, pruned_loss=0.02114, over 4701.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2046, pruned_loss=0.02855, over 972219.73 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:25:41,125 INFO [train.py:715] (6/8) Epoch 18, batch 8900, loss[loss=0.1463, simple_loss=0.2145, pruned_loss=0.03903, over 4899.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2052, pruned_loss=0.0288, over 972753.40 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:26:19,642 INFO [train.py:715] (6/8) Epoch 18, batch 8950, loss[loss=0.1337, simple_loss=0.2128, pruned_loss=0.02731, over 4766.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02883, over 972575.89 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 07:26:58,105 INFO [train.py:715] (6/8) Epoch 18, batch 9000, loss[loss=0.1177, simple_loss=0.1943, pruned_loss=0.02058, over 4802.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02874, over 971761.34 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:26:58,106 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 07:27:08,040 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1045, simple_loss=0.1879, pruned_loss=0.01057, over 914524.00 frames. 
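The loss figures in these entries appear to follow a fixed linear combination, loss ≈ 0.5 · simple_loss + pruned_loss (e.g. 0.5 · 0.1879 + 0.01057 ≈ 0.1045 for the validation entry just above). A minimal Python sanity check of that relation against values copied from this stretch of the log; the 0.5 weight is inferred from the numbers themselves, and the snippet is illustrative rather than part of train.py:

```python
# Check that the logged "loss" matches 0.5 * simple_loss + pruned_loss
# for a few (loss, simple_loss, pruned_loss) triples copied from the entries above.
examples = [
    (0.1045, 0.1879, 0.01057),  # Epoch 18 validation
    (0.1323, 0.2070, 0.02877),  # Epoch 18, batch 5350 (tot_loss)
    (0.1317, 0.2066, 0.02839),  # Epoch 18, batch 6000 (tot_loss)
]
for loss, simple_loss, pruned_loss in examples:
    recon = 0.5 * simple_loss + pruned_loss
    assert abs(recon - loss) < 5e-4, (loss, recon)
    print(f"logged loss={loss:.4f}  0.5*simple_loss+pruned_loss={recon:.4f}")
```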
+2022-05-09 07:27:46,935 INFO [train.py:715] (6/8) Epoch 18, batch 9050, loss[loss=0.1159, simple_loss=0.19, pruned_loss=0.02093, over 4736.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02882, over 971959.40 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 07:28:26,542 INFO [train.py:715] (6/8) Epoch 18, batch 9100, loss[loss=0.119, simple_loss=0.1938, pruned_loss=0.02208, over 4806.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02924, over 972743.63 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:29:05,675 INFO [train.py:715] (6/8) Epoch 18, batch 9150, loss[loss=0.1304, simple_loss=0.2056, pruned_loss=0.02764, over 4807.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2052, pruned_loss=0.02856, over 972447.00 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:29:43,364 INFO [train.py:715] (6/8) Epoch 18, batch 9200, loss[loss=0.1255, simple_loss=0.2049, pruned_loss=0.02311, over 4756.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2057, pruned_loss=0.02881, over 972371.39 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 07:30:22,563 INFO [train.py:715] (6/8) Epoch 18, batch 9250, loss[loss=0.1252, simple_loss=0.2068, pruned_loss=0.02177, over 4941.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.0291, over 972711.76 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 07:31:01,725 INFO [train.py:715] (6/8) Epoch 18, batch 9300, loss[loss=0.1212, simple_loss=0.1945, pruned_loss=0.0239, over 4871.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02893, over 972657.96 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 07:31:39,927 INFO [train.py:715] (6/8) Epoch 18, batch 9350, loss[loss=0.1286, simple_loss=0.2103, pruned_loss=0.02344, over 4984.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2056, pruned_loss=0.02857, over 972067.55 frames.], batch size: 28, lr: 1.24e-04 +2022-05-09 07:32:18,517 INFO [train.py:715] (6/8) Epoch 18, batch 9400, loss[loss=0.161, simple_loss=0.2226, pruned_loss=0.04975, over 4851.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02856, over 972598.96 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 07:32:58,079 INFO [train.py:715] (6/8) Epoch 18, batch 9450, loss[loss=0.1377, simple_loss=0.2186, pruned_loss=0.02844, over 4980.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02909, over 973074.17 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:33:36,483 INFO [train.py:715] (6/8) Epoch 18, batch 9500, loss[loss=0.1325, simple_loss=0.2117, pruned_loss=0.02662, over 4819.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.02926, over 972188.52 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 07:34:14,740 INFO [train.py:715] (6/8) Epoch 18, batch 9550, loss[loss=0.1157, simple_loss=0.1903, pruned_loss=0.02061, over 4932.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02881, over 972783.05 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:34:53,876 INFO [train.py:715] (6/8) Epoch 18, batch 9600, loss[loss=0.1257, simple_loss=0.206, pruned_loss=0.02265, over 4937.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.02836, over 972708.57 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 07:35:33,432 INFO [train.py:715] (6/8) Epoch 18, batch 9650, loss[loss=0.1161, simple_loss=0.1975, pruned_loss=0.01737, over 4813.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02835, over 972377.30 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 07:36:12,278 INFO 
[train.py:715] (6/8) Epoch 18, batch 9700, loss[loss=0.1406, simple_loss=0.2055, pruned_loss=0.03789, over 4976.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02885, over 972378.92 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 07:36:50,932 INFO [train.py:715] (6/8) Epoch 18, batch 9750, loss[loss=0.1295, simple_loss=0.2203, pruned_loss=0.01937, over 4982.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2059, pruned_loss=0.02793, over 972640.04 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:37:31,030 INFO [train.py:715] (6/8) Epoch 18, batch 9800, loss[loss=0.1167, simple_loss=0.1903, pruned_loss=0.02156, over 4797.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2061, pruned_loss=0.02817, over 972434.42 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:38:09,639 INFO [train.py:715] (6/8) Epoch 18, batch 9850, loss[loss=0.1208, simple_loss=0.1883, pruned_loss=0.02667, over 4702.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2061, pruned_loss=0.02826, over 972652.61 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:38:48,001 INFO [train.py:715] (6/8) Epoch 18, batch 9900, loss[loss=0.1312, simple_loss=0.2063, pruned_loss=0.02802, over 4989.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2057, pruned_loss=0.02807, over 972896.92 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 07:39:27,319 INFO [train.py:715] (6/8) Epoch 18, batch 9950, loss[loss=0.1192, simple_loss=0.1832, pruned_loss=0.02756, over 4862.00 frames.], tot_loss[loss=0.1312, simple_loss=0.206, pruned_loss=0.02818, over 973390.95 frames.], batch size: 20, lr: 1.24e-04 +2022-05-09 07:40:06,409 INFO [train.py:715] (6/8) Epoch 18, batch 10000, loss[loss=0.1345, simple_loss=0.2107, pruned_loss=0.02917, over 4870.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2068, pruned_loss=0.02857, over 972499.21 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 07:40:45,259 INFO [train.py:715] (6/8) Epoch 18, batch 10050, loss[loss=0.1124, simple_loss=0.1864, pruned_loss=0.01921, over 4849.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02874, over 973452.56 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 07:41:23,502 INFO [train.py:715] (6/8) Epoch 18, batch 10100, loss[loss=0.1184, simple_loss=0.1917, pruned_loss=0.02259, over 4972.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02843, over 972869.06 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 07:42:02,488 INFO [train.py:715] (6/8) Epoch 18, batch 10150, loss[loss=0.1249, simple_loss=0.1981, pruned_loss=0.02584, over 4775.00 frames.], tot_loss[loss=0.131, simple_loss=0.2058, pruned_loss=0.02813, over 972395.64 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 07:42:41,664 INFO [train.py:715] (6/8) Epoch 18, batch 10200, loss[loss=0.146, simple_loss=0.2164, pruned_loss=0.03787, over 4978.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2054, pruned_loss=0.02778, over 972319.30 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 07:43:20,200 INFO [train.py:715] (6/8) Epoch 18, batch 10250, loss[loss=0.1398, simple_loss=0.2116, pruned_loss=0.03395, over 4964.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2054, pruned_loss=0.02779, over 972730.58 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:43:59,332 INFO [train.py:715] (6/8) Epoch 18, batch 10300, loss[loss=0.1144, simple_loss=0.1833, pruned_loss=0.02271, over 4850.00 frames.], tot_loss[loss=0.131, simple_loss=0.2061, pruned_loss=0.02794, over 972888.17 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:44:39,644 INFO [train.py:715] 
(6/8) Epoch 18, batch 10350, loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02996, over 4989.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2067, pruned_loss=0.02839, over 972371.36 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:45:18,123 INFO [train.py:715] (6/8) Epoch 18, batch 10400, loss[loss=0.1289, simple_loss=0.2107, pruned_loss=0.02353, over 4776.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.02855, over 972324.38 frames.], batch size: 12, lr: 1.24e-04 +2022-05-09 07:45:56,572 INFO [train.py:715] (6/8) Epoch 18, batch 10450, loss[loss=0.1359, simple_loss=0.2073, pruned_loss=0.0323, over 4747.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2065, pruned_loss=0.0284, over 972923.84 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 07:46:36,304 INFO [train.py:715] (6/8) Epoch 18, batch 10500, loss[loss=0.1386, simple_loss=0.2073, pruned_loss=0.03497, over 4931.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02849, over 972356.28 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:47:15,166 INFO [train.py:715] (6/8) Epoch 18, batch 10550, loss[loss=0.1271, simple_loss=0.1994, pruned_loss=0.02742, over 4852.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02837, over 972722.98 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 07:47:53,901 INFO [train.py:715] (6/8) Epoch 18, batch 10600, loss[loss=0.1628, simple_loss=0.2355, pruned_loss=0.04499, over 4781.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02865, over 972408.93 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:48:33,505 INFO [train.py:715] (6/8) Epoch 18, batch 10650, loss[loss=0.1229, simple_loss=0.2107, pruned_loss=0.0175, over 4939.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2072, pruned_loss=0.0288, over 972394.35 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 07:49:13,196 INFO [train.py:715] (6/8) Epoch 18, batch 10700, loss[loss=0.1255, simple_loss=0.2033, pruned_loss=0.02385, over 4866.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.0284, over 972242.71 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 07:49:52,112 INFO [train.py:715] (6/8) Epoch 18, batch 10750, loss[loss=0.1033, simple_loss=0.1766, pruned_loss=0.01495, over 4790.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.02867, over 971918.56 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:50:31,127 INFO [train.py:715] (6/8) Epoch 18, batch 10800, loss[loss=0.1394, simple_loss=0.2196, pruned_loss=0.02959, over 4781.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02836, over 972084.25 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:51:10,558 INFO [train.py:715] (6/8) Epoch 18, batch 10850, loss[loss=0.1292, simple_loss=0.2069, pruned_loss=0.02579, over 4841.00 frames.], tot_loss[loss=0.132, simple_loss=0.2068, pruned_loss=0.0286, over 972344.16 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 07:51:49,060 INFO [train.py:715] (6/8) Epoch 18, batch 10900, loss[loss=0.1098, simple_loss=0.1871, pruned_loss=0.01623, over 4888.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2065, pruned_loss=0.02834, over 972482.16 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 07:52:27,640 INFO [train.py:715] (6/8) Epoch 18, batch 10950, loss[loss=0.1495, simple_loss=0.2318, pruned_loss=0.03365, over 4784.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2065, pruned_loss=0.02831, over 972235.51 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 07:53:07,691 INFO [train.py:715] (6/8) Epoch 
18, batch 11000, loss[loss=0.107, simple_loss=0.1829, pruned_loss=0.01553, over 4974.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02863, over 972411.07 frames.], batch size: 40, lr: 1.24e-04 +2022-05-09 07:53:46,749 INFO [train.py:715] (6/8) Epoch 18, batch 11050, loss[loss=0.1139, simple_loss=0.1881, pruned_loss=0.01984, over 4793.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02836, over 972305.51 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 07:54:26,305 INFO [train.py:715] (6/8) Epoch 18, batch 11100, loss[loss=0.1292, simple_loss=0.2024, pruned_loss=0.02803, over 4918.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02868, over 971627.64 frames.], batch size: 29, lr: 1.24e-04 +2022-05-09 07:55:05,203 INFO [train.py:715] (6/8) Epoch 18, batch 11150, loss[loss=0.1664, simple_loss=0.241, pruned_loss=0.04588, over 4825.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2056, pruned_loss=0.02868, over 971170.96 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:55:44,751 INFO [train.py:715] (6/8) Epoch 18, batch 11200, loss[loss=0.1241, simple_loss=0.2068, pruned_loss=0.02075, over 4842.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.0288, over 971624.47 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 07:56:23,195 INFO [train.py:715] (6/8) Epoch 18, batch 11250, loss[loss=0.1253, simple_loss=0.2065, pruned_loss=0.02203, over 4799.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.0279, over 972135.91 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:57:01,932 INFO [train.py:715] (6/8) Epoch 18, batch 11300, loss[loss=0.1176, simple_loss=0.1921, pruned_loss=0.02157, over 4802.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2054, pruned_loss=0.02804, over 972283.13 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 07:57:41,020 INFO [train.py:715] (6/8) Epoch 18, batch 11350, loss[loss=0.1222, simple_loss=0.1862, pruned_loss=0.02904, over 4843.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2055, pruned_loss=0.02799, over 973141.51 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 07:58:20,191 INFO [train.py:715] (6/8) Epoch 18, batch 11400, loss[loss=0.1206, simple_loss=0.2006, pruned_loss=0.02028, over 4886.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02796, over 972757.94 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 07:58:59,558 INFO [train.py:715] (6/8) Epoch 18, batch 11450, loss[loss=0.1839, simple_loss=0.2355, pruned_loss=0.06616, over 4824.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02842, over 972116.70 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:59:38,062 INFO [train.py:715] (6/8) Epoch 18, batch 11500, loss[loss=0.1414, simple_loss=0.2225, pruned_loss=0.03008, over 4735.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2058, pruned_loss=0.02876, over 971670.69 frames.], batch size: 12, lr: 1.24e-04 +2022-05-09 08:00:17,726 INFO [train.py:715] (6/8) Epoch 18, batch 11550, loss[loss=0.1185, simple_loss=0.1962, pruned_loss=0.0204, over 4980.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.0289, over 972217.66 frames.], batch size: 31, lr: 1.24e-04 +2022-05-09 08:00:57,128 INFO [train.py:715] (6/8) Epoch 18, batch 11600, loss[loss=0.1339, simple_loss=0.2103, pruned_loss=0.02874, over 4950.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02895, over 971808.27 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 08:01:35,952 INFO [train.py:715] (6/8) Epoch 18, batch 
11650, loss[loss=0.1154, simple_loss=0.1912, pruned_loss=0.01978, over 4974.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.0289, over 973136.53 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 08:02:15,658 INFO [train.py:715] (6/8) Epoch 18, batch 11700, loss[loss=0.1419, simple_loss=0.2223, pruned_loss=0.03072, over 4895.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2061, pruned_loss=0.02829, over 972205.56 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 08:02:54,933 INFO [train.py:715] (6/8) Epoch 18, batch 11750, loss[loss=0.1531, simple_loss=0.2112, pruned_loss=0.04747, over 4961.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2057, pruned_loss=0.02796, over 971950.45 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 08:03:34,977 INFO [train.py:715] (6/8) Epoch 18, batch 11800, loss[loss=0.1214, simple_loss=0.2018, pruned_loss=0.02053, over 4733.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2061, pruned_loss=0.02815, over 971660.41 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 08:04:13,542 INFO [train.py:715] (6/8) Epoch 18, batch 11850, loss[loss=0.1259, simple_loss=0.2003, pruned_loss=0.02581, over 4803.00 frames.], tot_loss[loss=0.1312, simple_loss=0.206, pruned_loss=0.02819, over 971771.92 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 08:04:53,381 INFO [train.py:715] (6/8) Epoch 18, batch 11900, loss[loss=0.1252, simple_loss=0.2008, pruned_loss=0.02482, over 4865.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.0286, over 971840.27 frames.], batch size: 38, lr: 1.24e-04 +2022-05-09 08:05:32,232 INFO [train.py:715] (6/8) Epoch 18, batch 11950, loss[loss=0.1274, simple_loss=0.1956, pruned_loss=0.02955, over 4967.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02892, over 971748.27 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 08:06:10,827 INFO [train.py:715] (6/8) Epoch 18, batch 12000, loss[loss=0.09607, simple_loss=0.1623, pruned_loss=0.01489, over 4835.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02906, over 972339.08 frames.], batch size: 12, lr: 1.24e-04 +2022-05-09 08:06:10,828 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 08:06:20,737 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1046, simple_loss=0.188, pruned_loss=0.01063, over 914524.00 frames. 
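To turn entries like the ones above into a loss curve, the tot_loss[...] fields can be scraped with a short script. A rough sketch, assuming the line format shown in this log; the file path and the returned fields are illustrative choices, not anything prescribed by the training code:

```python
import re

# Matches e.g. "Epoch 18, batch 6100, loss[...], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02884, ...]"
PATTERN = re.compile(
    r"Epoch (\d+), batch (\d+), .*?"
    r"tot_loss\[loss=([\d.]+), simple_loss=([\d.]+), pruned_loss=([\d.]+)"
)

def scrape_tot_loss(path="log-train.txt"):  # placeholder path
    """Return (epoch, batch, tot_loss) tuples for every training entry in the log."""
    points = []
    with open(path) as f:
        for line in f:
            m = PATTERN.search(line)
            if m:
                points.append((int(m.group(1)), int(m.group(2)), float(m.group(3))))
    return points

# Example usage: inspect the last few points, or feed them to a plotting library.
# for epoch, batch, loss in scrape_tot_loss()[-5:]:
#     print(epoch, batch, loss)
```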
+2022-05-09 08:07:00,014 INFO [train.py:715] (6/8) Epoch 18, batch 12050, loss[loss=0.1498, simple_loss=0.2044, pruned_loss=0.04753, over 4958.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02901, over 972238.20 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 08:07:39,524 INFO [train.py:715] (6/8) Epoch 18, batch 12100, loss[loss=0.1658, simple_loss=0.2289, pruned_loss=0.05134, over 4910.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02888, over 972813.40 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 08:08:19,053 INFO [train.py:715] (6/8) Epoch 18, batch 12150, loss[loss=0.159, simple_loss=0.2287, pruned_loss=0.04467, over 4957.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02928, over 971776.45 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 08:08:59,341 INFO [train.py:715] (6/8) Epoch 18, batch 12200, loss[loss=0.1228, simple_loss=0.1998, pruned_loss=0.02291, over 4809.00 frames.], tot_loss[loss=0.1334, simple_loss=0.208, pruned_loss=0.02944, over 972037.38 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 08:09:38,279 INFO [train.py:715] (6/8) Epoch 18, batch 12250, loss[loss=0.1333, simple_loss=0.2009, pruned_loss=0.03284, over 4977.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2087, pruned_loss=0.02982, over 971107.54 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 08:10:18,806 INFO [train.py:715] (6/8) Epoch 18, batch 12300, loss[loss=0.1225, simple_loss=0.1873, pruned_loss=0.02883, over 4767.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02984, over 970681.20 frames.], batch size: 12, lr: 1.24e-04 +2022-05-09 08:10:58,224 INFO [train.py:715] (6/8) Epoch 18, batch 12350, loss[loss=0.1359, simple_loss=0.2052, pruned_loss=0.03337, over 4817.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02975, over 971262.26 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 08:11:37,141 INFO [train.py:715] (6/8) Epoch 18, batch 12400, loss[loss=0.1405, simple_loss=0.2076, pruned_loss=0.03674, over 4739.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02932, over 970482.38 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 08:12:16,685 INFO [train.py:715] (6/8) Epoch 18, batch 12450, loss[loss=0.1186, simple_loss=0.1947, pruned_loss=0.02121, over 4798.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02886, over 970843.40 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 08:12:55,937 INFO [train.py:715] (6/8) Epoch 18, batch 12500, loss[loss=0.1287, simple_loss=0.1927, pruned_loss=0.0324, over 4767.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02889, over 971332.41 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 08:13:36,316 INFO [train.py:715] (6/8) Epoch 18, batch 12550, loss[loss=0.1105, simple_loss=0.1798, pruned_loss=0.02059, over 4846.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02883, over 971990.93 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 08:14:14,822 INFO [train.py:715] (6/8) Epoch 18, batch 12600, loss[loss=0.1627, simple_loss=0.2302, pruned_loss=0.04759, over 4823.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02892, over 972038.07 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 08:14:54,515 INFO [train.py:715] (6/8) Epoch 18, batch 12650, loss[loss=0.1625, simple_loss=0.2349, pruned_loss=0.04508, over 4757.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02877, over 971073.20 frames.], batch size: 19, lr: 1.24e-04 
+2022-05-09 08:15:33,315 INFO [train.py:715] (6/8) Epoch 18, batch 12700, loss[loss=0.1244, simple_loss=0.1971, pruned_loss=0.02583, over 4981.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02883, over 970770.31 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:16:12,930 INFO [train.py:715] (6/8) Epoch 18, batch 12750, loss[loss=0.1426, simple_loss=0.2122, pruned_loss=0.03647, over 4692.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2056, pruned_loss=0.02863, over 971376.72 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:16:52,485 INFO [train.py:715] (6/8) Epoch 18, batch 12800, loss[loss=0.1337, simple_loss=0.2108, pruned_loss=0.02835, over 4953.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2054, pruned_loss=0.02875, over 971659.94 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 08:17:31,839 INFO [train.py:715] (6/8) Epoch 18, batch 12850, loss[loss=0.1284, simple_loss=0.209, pruned_loss=0.0239, over 4879.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2057, pruned_loss=0.02879, over 972210.59 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 08:18:11,710 INFO [train.py:715] (6/8) Epoch 18, batch 12900, loss[loss=0.1261, simple_loss=0.2062, pruned_loss=0.02304, over 4977.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02871, over 972373.46 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:18:50,201 INFO [train.py:715] (6/8) Epoch 18, batch 12950, loss[loss=0.1756, simple_loss=0.2555, pruned_loss=0.04791, over 4793.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02874, over 972074.27 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 08:19:30,197 INFO [train.py:715] (6/8) Epoch 18, batch 13000, loss[loss=0.1583, simple_loss=0.2263, pruned_loss=0.04518, over 4781.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02846, over 971793.99 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 08:20:09,532 INFO [train.py:715] (6/8) Epoch 18, batch 13050, loss[loss=0.1358, simple_loss=0.2182, pruned_loss=0.02674, over 4936.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02843, over 972759.65 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 08:20:48,613 INFO [train.py:715] (6/8) Epoch 18, batch 13100, loss[loss=0.1389, simple_loss=0.2262, pruned_loss=0.02575, over 4953.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02854, over 972799.16 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 08:21:28,142 INFO [train.py:715] (6/8) Epoch 18, batch 13150, loss[loss=0.1115, simple_loss=0.1865, pruned_loss=0.01822, over 4911.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.02848, over 972877.60 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 08:22:07,413 INFO [train.py:715] (6/8) Epoch 18, batch 13200, loss[loss=0.1164, simple_loss=0.1916, pruned_loss=0.02063, over 4878.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.02846, over 973157.27 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 08:22:47,223 INFO [train.py:715] (6/8) Epoch 18, batch 13250, loss[loss=0.119, simple_loss=0.1956, pruned_loss=0.02122, over 4793.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2069, pruned_loss=0.02866, over 972630.03 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 08:23:25,812 INFO [train.py:715] (6/8) Epoch 18, batch 13300, loss[loss=0.1208, simple_loss=0.1913, pruned_loss=0.02514, over 4752.00 frames.], tot_loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.02867, over 972704.03 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 
08:24:05,553 INFO [train.py:715] (6/8) Epoch 18, batch 13350, loss[loss=0.1495, simple_loss=0.2178, pruned_loss=0.04054, over 4907.00 frames.], tot_loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.02855, over 973084.18 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 08:24:44,556 INFO [train.py:715] (6/8) Epoch 18, batch 13400, loss[loss=0.1528, simple_loss=0.2225, pruned_loss=0.04155, over 4784.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02914, over 972636.40 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 08:25:25,449 INFO [train.py:715] (6/8) Epoch 18, batch 13450, loss[loss=0.1459, simple_loss=0.218, pruned_loss=0.03688, over 4969.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2071, pruned_loss=0.02888, over 972695.84 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 08:26:05,143 INFO [train.py:715] (6/8) Epoch 18, batch 13500, loss[loss=0.1321, simple_loss=0.2121, pruned_loss=0.02604, over 4982.00 frames.], tot_loss[loss=0.1331, simple_loss=0.208, pruned_loss=0.02906, over 973371.76 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 08:26:44,083 INFO [train.py:715] (6/8) Epoch 18, batch 13550, loss[loss=0.1134, simple_loss=0.1943, pruned_loss=0.01624, over 4949.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2073, pruned_loss=0.0289, over 973743.77 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 08:27:23,349 INFO [train.py:715] (6/8) Epoch 18, batch 13600, loss[loss=0.1436, simple_loss=0.216, pruned_loss=0.03558, over 4984.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02854, over 973125.25 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 08:28:02,157 INFO [train.py:715] (6/8) Epoch 18, batch 13650, loss[loss=0.1508, simple_loss=0.2259, pruned_loss=0.03791, over 4907.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02876, over 972807.06 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 08:28:41,564 INFO [train.py:715] (6/8) Epoch 18, batch 13700, loss[loss=0.1419, simple_loss=0.2185, pruned_loss=0.0327, over 4827.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02891, over 971833.78 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 08:29:20,650 INFO [train.py:715] (6/8) Epoch 18, batch 13750, loss[loss=0.1197, simple_loss=0.1948, pruned_loss=0.02235, over 4945.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02882, over 971622.31 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 08:29:59,715 INFO [train.py:715] (6/8) Epoch 18, batch 13800, loss[loss=0.118, simple_loss=0.1875, pruned_loss=0.02423, over 4901.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02855, over 970442.16 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 08:30:39,487 INFO [train.py:715] (6/8) Epoch 18, batch 13850, loss[loss=0.1202, simple_loss=0.1924, pruned_loss=0.02404, over 4739.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02859, over 971787.88 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 08:31:18,295 INFO [train.py:715] (6/8) Epoch 18, batch 13900, loss[loss=0.1331, simple_loss=0.2162, pruned_loss=0.025, over 4812.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2051, pruned_loss=0.02799, over 971105.32 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 08:31:57,756 INFO [train.py:715] (6/8) Epoch 18, batch 13950, loss[loss=0.1266, simple_loss=0.2018, pruned_loss=0.02571, over 4647.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2046, pruned_loss=0.02796, over 970603.72 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 08:32:37,351 
INFO [train.py:715] (6/8) Epoch 18, batch 14000, loss[loss=0.1237, simple_loss=0.1981, pruned_loss=0.02465, over 4815.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2051, pruned_loss=0.02822, over 970676.23 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 08:33:17,113 INFO [train.py:715] (6/8) Epoch 18, batch 14050, loss[loss=0.1505, simple_loss=0.23, pruned_loss=0.03553, over 4794.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2054, pruned_loss=0.02848, over 971726.52 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 08:33:56,296 INFO [train.py:715] (6/8) Epoch 18, batch 14100, loss[loss=0.1326, simple_loss=0.206, pruned_loss=0.02961, over 4852.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02866, over 972135.26 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 08:34:35,398 INFO [train.py:715] (6/8) Epoch 18, batch 14150, loss[loss=0.1389, simple_loss=0.2181, pruned_loss=0.0299, over 4818.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02911, over 972276.85 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 08:35:14,784 INFO [train.py:715] (6/8) Epoch 18, batch 14200, loss[loss=0.1616, simple_loss=0.2325, pruned_loss=0.04535, over 4796.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2064, pruned_loss=0.02946, over 971877.45 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 08:35:54,061 INFO [train.py:715] (6/8) Epoch 18, batch 14250, loss[loss=0.1032, simple_loss=0.1841, pruned_loss=0.01115, over 4807.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2057, pruned_loss=0.0289, over 971016.07 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 08:36:33,990 INFO [train.py:715] (6/8) Epoch 18, batch 14300, loss[loss=0.1441, simple_loss=0.2242, pruned_loss=0.03197, over 4914.00 frames.], tot_loss[loss=0.132, simple_loss=0.2059, pruned_loss=0.029, over 971255.86 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 08:37:13,315 INFO [train.py:715] (6/8) Epoch 18, batch 14350, loss[loss=0.1248, simple_loss=0.2077, pruned_loss=0.02099, over 4811.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02889, over 971364.72 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 08:37:52,858 INFO [train.py:715] (6/8) Epoch 18, batch 14400, loss[loss=0.1179, simple_loss=0.1851, pruned_loss=0.02532, over 4825.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02928, over 971798.62 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 08:38:32,503 INFO [train.py:715] (6/8) Epoch 18, batch 14450, loss[loss=0.1509, simple_loss=0.2257, pruned_loss=0.03802, over 4928.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02922, over 972258.02 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 08:39:11,253 INFO [train.py:715] (6/8) Epoch 18, batch 14500, loss[loss=0.1367, simple_loss=0.2201, pruned_loss=0.02661, over 4847.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02909, over 971932.12 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 08:39:50,389 INFO [train.py:715] (6/8) Epoch 18, batch 14550, loss[loss=0.125, simple_loss=0.1903, pruned_loss=0.02985, over 4781.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02887, over 972213.32 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 08:40:29,546 INFO [train.py:715] (6/8) Epoch 18, batch 14600, loss[loss=0.1627, simple_loss=0.2223, pruned_loss=0.05158, over 4906.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.0289, over 972447.59 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 08:41:09,227 INFO 
[train.py:715] (6/8) Epoch 18, batch 14650, loss[loss=0.1215, simple_loss=0.194, pruned_loss=0.02447, over 4976.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.0284, over 971787.32 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 08:41:48,681 INFO [train.py:715] (6/8) Epoch 18, batch 14700, loss[loss=0.1345, simple_loss=0.209, pruned_loss=0.03, over 4834.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2054, pruned_loss=0.02821, over 972568.04 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 08:42:28,039 INFO [train.py:715] (6/8) Epoch 18, batch 14750, loss[loss=0.1297, simple_loss=0.2054, pruned_loss=0.02697, over 4931.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02836, over 972763.86 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 08:43:07,468 INFO [train.py:715] (6/8) Epoch 18, batch 14800, loss[loss=0.1305, simple_loss=0.2014, pruned_loss=0.02978, over 4977.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02852, over 972491.36 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 08:43:46,222 INFO [train.py:715] (6/8) Epoch 18, batch 14850, loss[loss=0.1317, simple_loss=0.201, pruned_loss=0.03117, over 4774.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2057, pruned_loss=0.02851, over 973048.87 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 08:44:25,884 INFO [train.py:715] (6/8) Epoch 18, batch 14900, loss[loss=0.1425, simple_loss=0.2146, pruned_loss=0.03524, over 4862.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02888, over 972755.61 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 08:45:05,551 INFO [train.py:715] (6/8) Epoch 18, batch 14950, loss[loss=0.1591, simple_loss=0.2434, pruned_loss=0.03737, over 4794.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02908, over 973200.87 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 08:45:44,815 INFO [train.py:715] (6/8) Epoch 18, batch 15000, loss[loss=0.121, simple_loss=0.195, pruned_loss=0.02346, over 4698.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02901, over 973508.16 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 08:45:44,816 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 08:45:54,765 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1048, simple_loss=0.1881, pruned_loss=0.01071, over 914524.00 frames. 
+2022-05-09 08:46:34,350 INFO [train.py:715] (6/8) Epoch 18, batch 15050, loss[loss=0.1469, simple_loss=0.2165, pruned_loss=0.03867, over 4792.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02936, over 972850.50 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 08:47:13,527 INFO [train.py:715] (6/8) Epoch 18, batch 15100, loss[loss=0.1452, simple_loss=0.2139, pruned_loss=0.03828, over 4962.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02917, over 971798.35 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 08:47:53,258 INFO [train.py:715] (6/8) Epoch 18, batch 15150, loss[loss=0.1233, simple_loss=0.2057, pruned_loss=0.02045, over 4934.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02937, over 971996.96 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 08:48:32,390 INFO [train.py:715] (6/8) Epoch 18, batch 15200, loss[loss=0.1356, simple_loss=0.2163, pruned_loss=0.02744, over 4898.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.0289, over 973008.44 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 08:49:11,931 INFO [train.py:715] (6/8) Epoch 18, batch 15250, loss[loss=0.1377, simple_loss=0.2174, pruned_loss=0.02899, over 4786.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02815, over 972896.37 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 08:49:51,795 INFO [train.py:715] (6/8) Epoch 18, batch 15300, loss[loss=0.1235, simple_loss=0.2012, pruned_loss=0.02292, over 4967.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2059, pruned_loss=0.02817, over 973162.73 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 08:50:31,166 INFO [train.py:715] (6/8) Epoch 18, batch 15350, loss[loss=0.1364, simple_loss=0.216, pruned_loss=0.02843, over 4980.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02854, over 972888.64 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 08:51:10,084 INFO [train.py:715] (6/8) Epoch 18, batch 15400, loss[loss=0.1765, simple_loss=0.2475, pruned_loss=0.05274, over 4758.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2072, pruned_loss=0.02873, over 973178.08 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:51:49,375 INFO [train.py:715] (6/8) Epoch 18, batch 15450, loss[loss=0.1332, simple_loss=0.2138, pruned_loss=0.02626, over 4768.00 frames.], tot_loss[loss=0.132, simple_loss=0.2068, pruned_loss=0.02858, over 973028.79 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 08:52:29,003 INFO [train.py:715] (6/8) Epoch 18, batch 15500, loss[loss=0.1078, simple_loss=0.1794, pruned_loss=0.01813, over 4924.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2069, pruned_loss=0.0287, over 972431.96 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 08:53:08,162 INFO [train.py:715] (6/8) Epoch 18, batch 15550, loss[loss=0.1265, simple_loss=0.1994, pruned_loss=0.02677, over 4869.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02886, over 971900.23 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 08:53:47,891 INFO [train.py:715] (6/8) Epoch 18, batch 15600, loss[loss=0.1642, simple_loss=0.2211, pruned_loss=0.05364, over 4856.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02892, over 971502.05 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 08:54:28,014 INFO [train.py:715] (6/8) Epoch 18, batch 15650, loss[loss=0.1338, simple_loss=0.2104, pruned_loss=0.02856, over 4922.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02921, over 971148.02 frames.], batch size: 18, lr: 1.23e-04 
+2022-05-09 08:55:07,616 INFO [train.py:715] (6/8) Epoch 18, batch 15700, loss[loss=0.1921, simple_loss=0.271, pruned_loss=0.05663, over 4969.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02909, over 970916.98 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 08:55:46,520 INFO [train.py:715] (6/8) Epoch 18, batch 15750, loss[loss=0.1225, simple_loss=0.208, pruned_loss=0.01853, over 4877.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.0287, over 972020.23 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 08:56:25,961 INFO [train.py:715] (6/8) Epoch 18, batch 15800, loss[loss=0.1214, simple_loss=0.1965, pruned_loss=0.0231, over 4772.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.0289, over 972323.60 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 08:57:05,872 INFO [train.py:715] (6/8) Epoch 18, batch 15850, loss[loss=0.1369, simple_loss=0.2075, pruned_loss=0.03319, over 4762.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02897, over 972859.42 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 08:57:45,117 INFO [train.py:715] (6/8) Epoch 18, batch 15900, loss[loss=0.1977, simple_loss=0.2684, pruned_loss=0.06351, over 4980.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02947, over 972511.73 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 08:58:24,430 INFO [train.py:715] (6/8) Epoch 18, batch 15950, loss[loss=0.1548, simple_loss=0.2301, pruned_loss=0.0398, over 4956.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02915, over 973175.98 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 08:59:04,913 INFO [train.py:715] (6/8) Epoch 18, batch 16000, loss[loss=0.1264, simple_loss=0.1957, pruned_loss=0.0286, over 4946.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02931, over 972713.12 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 08:59:45,385 INFO [train.py:715] (6/8) Epoch 18, batch 16050, loss[loss=0.1285, simple_loss=0.2066, pruned_loss=0.0252, over 4941.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2073, pruned_loss=0.02906, over 971381.08 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 09:00:24,423 INFO [train.py:715] (6/8) Epoch 18, batch 16100, loss[loss=0.1001, simple_loss=0.1714, pruned_loss=0.01444, over 4824.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02877, over 971807.31 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 09:01:03,602 INFO [train.py:715] (6/8) Epoch 18, batch 16150, loss[loss=0.1244, simple_loss=0.2026, pruned_loss=0.02306, over 4739.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02906, over 971261.35 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:01:43,698 INFO [train.py:715] (6/8) Epoch 18, batch 16200, loss[loss=0.1537, simple_loss=0.2317, pruned_loss=0.03786, over 4769.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2077, pruned_loss=0.02939, over 971569.18 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:02:22,645 INFO [train.py:715] (6/8) Epoch 18, batch 16250, loss[loss=0.1314, simple_loss=0.201, pruned_loss=0.03086, over 4993.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02935, over 971481.25 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 09:03:01,670 INFO [train.py:715] (6/8) Epoch 18, batch 16300, loss[loss=0.156, simple_loss=0.2426, pruned_loss=0.03468, over 4924.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02879, over 970983.83 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 
09:03:41,212 INFO [train.py:715] (6/8) Epoch 18, batch 16350, loss[loss=0.1489, simple_loss=0.2298, pruned_loss=0.03397, over 4946.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02913, over 970876.32 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:04:20,331 INFO [train.py:715] (6/8) Epoch 18, batch 16400, loss[loss=0.1415, simple_loss=0.2178, pruned_loss=0.0326, over 4851.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02953, over 971597.72 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 09:04:59,288 INFO [train.py:715] (6/8) Epoch 18, batch 16450, loss[loss=0.119, simple_loss=0.1908, pruned_loss=0.02361, over 4787.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02941, over 971296.34 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:05:38,809 INFO [train.py:715] (6/8) Epoch 18, batch 16500, loss[loss=0.1336, simple_loss=0.21, pruned_loss=0.02856, over 4921.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02922, over 971920.17 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:06:18,651 INFO [train.py:715] (6/8) Epoch 18, batch 16550, loss[loss=0.1351, simple_loss=0.2041, pruned_loss=0.03304, over 4977.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02891, over 971903.15 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:06:57,077 INFO [train.py:715] (6/8) Epoch 18, batch 16600, loss[loss=0.1165, simple_loss=0.1915, pruned_loss=0.02069, over 4943.00 frames.], tot_loss[loss=0.132, simple_loss=0.2059, pruned_loss=0.02912, over 971817.70 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:07:36,516 INFO [train.py:715] (6/8) Epoch 18, batch 16650, loss[loss=0.1351, simple_loss=0.208, pruned_loss=0.03114, over 4783.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2058, pruned_loss=0.02878, over 972572.89 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 09:08:15,861 INFO [train.py:715] (6/8) Epoch 18, batch 16700, loss[loss=0.1101, simple_loss=0.1852, pruned_loss=0.01751, over 4821.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02853, over 973243.42 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 09:08:55,201 INFO [train.py:715] (6/8) Epoch 18, batch 16750, loss[loss=0.1474, simple_loss=0.2174, pruned_loss=0.03867, over 4950.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02918, over 973310.90 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 09:09:34,647 INFO [train.py:715] (6/8) Epoch 18, batch 16800, loss[loss=0.1282, simple_loss=0.205, pruned_loss=0.02572, over 4688.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02955, over 972841.40 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:10:13,856 INFO [train.py:715] (6/8) Epoch 18, batch 16850, loss[loss=0.1214, simple_loss=0.1964, pruned_loss=0.02323, over 4766.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02972, over 971806.36 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:10:53,312 INFO [train.py:715] (6/8) Epoch 18, batch 16900, loss[loss=0.1165, simple_loss=0.1876, pruned_loss=0.02266, over 4994.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2065, pruned_loss=0.02944, over 972576.28 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 09:11:32,156 INFO [train.py:715] (6/8) Epoch 18, batch 16950, loss[loss=0.1177, simple_loss=0.2024, pruned_loss=0.01653, over 4934.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2062, pruned_loss=0.02932, over 973187.32 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 09:12:11,613 
INFO [train.py:715] (6/8) Epoch 18, batch 17000, loss[loss=0.1376, simple_loss=0.2139, pruned_loss=0.03071, over 4930.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2056, pruned_loss=0.02928, over 972537.60 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:12:51,062 INFO [train.py:715] (6/8) Epoch 18, batch 17050, loss[loss=0.1365, simple_loss=0.2074, pruned_loss=0.03285, over 4818.00 frames.], tot_loss[loss=0.1323, simple_loss=0.206, pruned_loss=0.02928, over 971224.55 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 09:13:30,540 INFO [train.py:715] (6/8) Epoch 18, batch 17100, loss[loss=0.1133, simple_loss=0.1959, pruned_loss=0.01536, over 4698.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2065, pruned_loss=0.02939, over 971735.45 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:14:10,118 INFO [train.py:715] (6/8) Epoch 18, batch 17150, loss[loss=0.1287, simple_loss=0.1968, pruned_loss=0.03027, over 4783.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2065, pruned_loss=0.02939, over 971758.46 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 09:14:49,243 INFO [train.py:715] (6/8) Epoch 18, batch 17200, loss[loss=0.123, simple_loss=0.1981, pruned_loss=0.024, over 4859.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02921, over 972383.92 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 09:15:28,965 INFO [train.py:715] (6/8) Epoch 18, batch 17250, loss[loss=0.1502, simple_loss=0.219, pruned_loss=0.04074, over 4770.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02927, over 972296.41 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 09:16:08,224 INFO [train.py:715] (6/8) Epoch 18, batch 17300, loss[loss=0.1212, simple_loss=0.2015, pruned_loss=0.02044, over 4654.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02934, over 972017.61 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 09:16:48,155 INFO [train.py:715] (6/8) Epoch 18, batch 17350, loss[loss=0.1554, simple_loss=0.2323, pruned_loss=0.0393, over 4828.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02931, over 972620.86 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 09:17:27,216 INFO [train.py:715] (6/8) Epoch 18, batch 17400, loss[loss=0.1226, simple_loss=0.2007, pruned_loss=0.02228, over 4914.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2074, pruned_loss=0.02899, over 972434.91 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:18:07,007 INFO [train.py:715] (6/8) Epoch 18, batch 17450, loss[loss=0.1519, simple_loss=0.2231, pruned_loss=0.04039, over 4703.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02896, over 973398.86 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:18:46,088 INFO [train.py:715] (6/8) Epoch 18, batch 17500, loss[loss=0.1227, simple_loss=0.1955, pruned_loss=0.02493, over 4945.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02894, over 972953.19 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:19:24,716 INFO [train.py:715] (6/8) Epoch 18, batch 17550, loss[loss=0.12, simple_loss=0.1864, pruned_loss=0.02684, over 4852.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02883, over 973473.36 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 09:20:04,282 INFO [train.py:715] (6/8) Epoch 18, batch 17600, loss[loss=0.1226, simple_loss=0.1894, pruned_loss=0.02787, over 4772.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02871, over 973081.60 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:20:43,550 INFO 
[train.py:715] (6/8) Epoch 18, batch 17650, loss[loss=0.1292, simple_loss=0.2077, pruned_loss=0.02535, over 4795.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02881, over 972813.01 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:21:22,851 INFO [train.py:715] (6/8) Epoch 18, batch 17700, loss[loss=0.1173, simple_loss=0.1882, pruned_loss=0.0232, over 4899.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02911, over 972978.03 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:22:01,954 INFO [train.py:715] (6/8) Epoch 18, batch 17750, loss[loss=0.1389, simple_loss=0.2075, pruned_loss=0.03509, over 4914.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02874, over 972285.48 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:22:41,554 INFO [train.py:715] (6/8) Epoch 18, batch 17800, loss[loss=0.1099, simple_loss=0.1819, pruned_loss=0.01895, over 4844.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02884, over 972135.25 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 09:23:20,836 INFO [train.py:715] (6/8) Epoch 18, batch 17850, loss[loss=0.1243, simple_loss=0.208, pruned_loss=0.02033, over 4770.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02897, over 971595.56 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:23:59,350 INFO [train.py:715] (6/8) Epoch 18, batch 17900, loss[loss=0.1355, simple_loss=0.2121, pruned_loss=0.02945, over 4933.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2052, pruned_loss=0.02896, over 971848.66 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:24:39,462 INFO [train.py:715] (6/8) Epoch 18, batch 17950, loss[loss=0.1339, simple_loss=0.2108, pruned_loss=0.02853, over 4794.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2053, pruned_loss=0.0286, over 972628.03 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 09:25:18,523 INFO [train.py:715] (6/8) Epoch 18, batch 18000, loss[loss=0.1104, simple_loss=0.1801, pruned_loss=0.02036, over 4796.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02849, over 972116.95 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 09:25:18,524 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 09:25:28,383 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1046, simple_loss=0.1878, pruned_loss=0.01063, over 914524.00 frames. 
+2022-05-09 09:26:07,772 INFO [train.py:715] (6/8) Epoch 18, batch 18050, loss[loss=0.1466, simple_loss=0.2225, pruned_loss=0.03538, over 4934.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2052, pruned_loss=0.02781, over 972233.39 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:26:47,169 INFO [train.py:715] (6/8) Epoch 18, batch 18100, loss[loss=0.1494, simple_loss=0.2149, pruned_loss=0.04197, over 4816.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2059, pruned_loss=0.02814, over 972441.39 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 09:27:26,274 INFO [train.py:715] (6/8) Epoch 18, batch 18150, loss[loss=0.1221, simple_loss=0.2015, pruned_loss=0.02136, over 4863.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02856, over 973386.05 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 09:28:06,063 INFO [train.py:715] (6/8) Epoch 18, batch 18200, loss[loss=0.1654, simple_loss=0.235, pruned_loss=0.04787, over 4754.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02894, over 973290.12 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 09:28:45,757 INFO [train.py:715] (6/8) Epoch 18, batch 18250, loss[loss=0.1321, simple_loss=0.1997, pruned_loss=0.03231, over 4984.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02923, over 972860.86 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 09:29:24,156 INFO [train.py:715] (6/8) Epoch 18, batch 18300, loss[loss=0.1529, simple_loss=0.212, pruned_loss=0.04693, over 4743.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02934, over 972232.83 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:30:03,828 INFO [train.py:715] (6/8) Epoch 18, batch 18350, loss[loss=0.1371, simple_loss=0.2203, pruned_loss=0.02691, over 4980.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02967, over 971902.63 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 09:30:43,384 INFO [train.py:715] (6/8) Epoch 18, batch 18400, loss[loss=0.1289, simple_loss=0.2113, pruned_loss=0.02329, over 4914.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02905, over 971984.73 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:31:22,383 INFO [train.py:715] (6/8) Epoch 18, batch 18450, loss[loss=0.1295, simple_loss=0.1938, pruned_loss=0.03257, over 4856.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2056, pruned_loss=0.02872, over 970659.95 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 09:32:01,513 INFO [train.py:715] (6/8) Epoch 18, batch 18500, loss[loss=0.1277, simple_loss=0.2025, pruned_loss=0.02639, over 4866.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02865, over 971662.97 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:32:40,868 INFO [train.py:715] (6/8) Epoch 18, batch 18550, loss[loss=0.1285, simple_loss=0.2098, pruned_loss=0.0236, over 4854.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.0287, over 971625.81 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 09:33:20,073 INFO [train.py:715] (6/8) Epoch 18, batch 18600, loss[loss=0.1394, simple_loss=0.2136, pruned_loss=0.03255, over 4983.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2051, pruned_loss=0.02834, over 972391.31 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 09:33:58,717 INFO [train.py:715] (6/8) Epoch 18, batch 18650, loss[loss=0.1409, simple_loss=0.214, pruned_loss=0.03392, over 4924.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2052, pruned_loss=0.02819, over 971533.95 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 
09:34:38,209 INFO [train.py:715] (6/8) Epoch 18, batch 18700, loss[loss=0.1211, simple_loss=0.1952, pruned_loss=0.02349, over 4811.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02852, over 971751.64 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:35:17,435 INFO [train.py:715] (6/8) Epoch 18, batch 18750, loss[loss=0.1153, simple_loss=0.1942, pruned_loss=0.01826, over 4810.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02865, over 971924.51 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 09:35:56,638 INFO [train.py:715] (6/8) Epoch 18, batch 18800, loss[loss=0.1557, simple_loss=0.2347, pruned_loss=0.03833, over 4814.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02873, over 971676.16 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:36:36,018 INFO [train.py:715] (6/8) Epoch 18, batch 18850, loss[loss=0.1154, simple_loss=0.1845, pruned_loss=0.02316, over 4761.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.0286, over 971713.49 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 09:37:15,848 INFO [train.py:715] (6/8) Epoch 18, batch 18900, loss[loss=0.1252, simple_loss=0.2017, pruned_loss=0.02439, over 4803.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02899, over 971566.44 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 09:37:54,911 INFO [train.py:715] (6/8) Epoch 18, batch 18950, loss[loss=0.1128, simple_loss=0.1807, pruned_loss=0.02245, over 4919.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.0289, over 971501.17 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:38:33,359 INFO [train.py:715] (6/8) Epoch 18, batch 19000, loss[loss=0.1223, simple_loss=0.1945, pruned_loss=0.02502, over 4939.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02864, over 972374.15 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:39:12,870 INFO [train.py:715] (6/8) Epoch 18, batch 19050, loss[loss=0.1404, simple_loss=0.2188, pruned_loss=0.03097, over 4758.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02912, over 972617.82 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:39:51,876 INFO [train.py:715] (6/8) Epoch 18, batch 19100, loss[loss=0.125, simple_loss=0.1923, pruned_loss=0.02882, over 4974.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02874, over 972007.03 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:40:31,194 INFO [train.py:715] (6/8) Epoch 18, batch 19150, loss[loss=0.1628, simple_loss=0.2334, pruned_loss=0.04607, over 4822.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02836, over 972125.12 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:41:11,058 INFO [train.py:715] (6/8) Epoch 18, batch 19200, loss[loss=0.1339, simple_loss=0.21, pruned_loss=0.02895, over 4884.00 frames.], tot_loss[loss=0.132, simple_loss=0.207, pruned_loss=0.02852, over 972322.26 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:41:50,579 INFO [train.py:715] (6/8) Epoch 18, batch 19250, loss[loss=0.1528, simple_loss=0.2297, pruned_loss=0.03789, over 4967.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2062, pruned_loss=0.02805, over 971900.76 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 09:42:29,656 INFO [train.py:715] (6/8) Epoch 18, batch 19300, loss[loss=0.1787, simple_loss=0.2617, pruned_loss=0.04789, over 4773.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02882, over 971758.18 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:43:08,122 
INFO [train.py:715] (6/8) Epoch 18, batch 19350, loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03063, over 4898.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2063, pruned_loss=0.02829, over 971589.62 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:43:47,527 INFO [train.py:715] (6/8) Epoch 18, batch 19400, loss[loss=0.16, simple_loss=0.2359, pruned_loss=0.0421, over 4796.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2064, pruned_loss=0.02827, over 970365.12 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:44:26,739 INFO [train.py:715] (6/8) Epoch 18, batch 19450, loss[loss=0.1475, simple_loss=0.2225, pruned_loss=0.03627, over 4951.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2062, pruned_loss=0.02819, over 970413.17 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 09:45:05,484 INFO [train.py:715] (6/8) Epoch 18, batch 19500, loss[loss=0.1371, simple_loss=0.2171, pruned_loss=0.0286, over 4752.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2059, pruned_loss=0.02823, over 971035.77 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:45:44,656 INFO [train.py:715] (6/8) Epoch 18, batch 19550, loss[loss=0.1126, simple_loss=0.1981, pruned_loss=0.01351, over 4925.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2052, pruned_loss=0.02784, over 971178.03 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:46:24,064 INFO [train.py:715] (6/8) Epoch 18, batch 19600, loss[loss=0.122, simple_loss=0.1965, pruned_loss=0.02373, over 4942.00 frames.], tot_loss[loss=0.1322, simple_loss=0.207, pruned_loss=0.02869, over 970921.83 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 09:47:02,886 INFO [train.py:715] (6/8) Epoch 18, batch 19650, loss[loss=0.1914, simple_loss=0.2757, pruned_loss=0.05352, over 4770.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02861, over 971521.74 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:47:41,716 INFO [train.py:715] (6/8) Epoch 18, batch 19700, loss[loss=0.1982, simple_loss=0.2714, pruned_loss=0.06248, over 4985.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02866, over 971293.07 frames.], batch size: 28, lr: 1.23e-04 +2022-05-09 09:48:21,733 INFO [train.py:715] (6/8) Epoch 18, batch 19750, loss[loss=0.1453, simple_loss=0.2166, pruned_loss=0.03697, over 4893.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2073, pruned_loss=0.02897, over 971076.61 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:49:01,616 INFO [train.py:715] (6/8) Epoch 18, batch 19800, loss[loss=0.1493, simple_loss=0.2179, pruned_loss=0.04035, over 4840.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02859, over 970701.97 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 09:49:40,680 INFO [train.py:715] (6/8) Epoch 18, batch 19850, loss[loss=0.1204, simple_loss=0.1954, pruned_loss=0.02265, over 4743.00 frames.], tot_loss[loss=0.132, simple_loss=0.2068, pruned_loss=0.02858, over 970426.84 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:50:20,124 INFO [train.py:715] (6/8) Epoch 18, batch 19900, loss[loss=0.1406, simple_loss=0.2136, pruned_loss=0.03383, over 4960.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2069, pruned_loss=0.02838, over 970625.84 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:50:59,805 INFO [train.py:715] (6/8) Epoch 18, batch 19950, loss[loss=0.1395, simple_loss=0.2046, pruned_loss=0.03722, over 4977.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02835, over 971045.94 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 09:51:39,050 INFO 
[train.py:715] (6/8) Epoch 18, batch 20000, loss[loss=0.1284, simple_loss=0.1901, pruned_loss=0.03336, over 4833.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2056, pruned_loss=0.02782, over 971331.36 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:52:18,803 INFO [train.py:715] (6/8) Epoch 18, batch 20050, loss[loss=0.1135, simple_loss=0.1824, pruned_loss=0.02228, over 4824.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2055, pruned_loss=0.0277, over 971347.25 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 09:52:59,025 INFO [train.py:715] (6/8) Epoch 18, batch 20100, loss[loss=0.1248, simple_loss=0.2045, pruned_loss=0.02252, over 4926.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2056, pruned_loss=0.02774, over 971957.26 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 09:53:39,147 INFO [train.py:715] (6/8) Epoch 18, batch 20150, loss[loss=0.1215, simple_loss=0.1988, pruned_loss=0.02205, over 4954.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2055, pruned_loss=0.0278, over 972820.44 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 09:54:18,230 INFO [train.py:715] (6/8) Epoch 18, batch 20200, loss[loss=0.1252, simple_loss=0.2055, pruned_loss=0.02252, over 4790.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02835, over 972430.08 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:54:57,198 INFO [train.py:715] (6/8) Epoch 18, batch 20250, loss[loss=0.09925, simple_loss=0.1735, pruned_loss=0.01249, over 4834.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02837, over 972576.57 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 09:55:36,877 INFO [train.py:715] (6/8) Epoch 18, batch 20300, loss[loss=0.1138, simple_loss=0.1897, pruned_loss=0.01899, over 4924.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02826, over 972524.22 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:56:16,008 INFO [train.py:715] (6/8) Epoch 18, batch 20350, loss[loss=0.1202, simple_loss=0.1991, pruned_loss=0.02065, over 4746.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.02846, over 971457.73 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:56:55,260 INFO [train.py:715] (6/8) Epoch 18, batch 20400, loss[loss=0.1046, simple_loss=0.1792, pruned_loss=0.01504, over 4927.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02856, over 972008.38 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:57:34,101 INFO [train.py:715] (6/8) Epoch 18, batch 20450, loss[loss=0.1321, simple_loss=0.2037, pruned_loss=0.0303, over 4906.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02839, over 971647.24 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:58:14,214 INFO [train.py:715] (6/8) Epoch 18, batch 20500, loss[loss=0.1153, simple_loss=0.1861, pruned_loss=0.02231, over 4857.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02867, over 971750.36 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 09:58:52,925 INFO [train.py:715] (6/8) Epoch 18, batch 20550, loss[loss=0.1627, simple_loss=0.2373, pruned_loss=0.04406, over 4751.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02877, over 972301.29 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:59:31,872 INFO [train.py:715] (6/8) Epoch 18, batch 20600, loss[loss=0.1219, simple_loss=0.1932, pruned_loss=0.02523, over 4844.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02872, over 973312.61 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 10:00:10,872 INFO 
[train.py:715] (6/8) Epoch 18, batch 20650, loss[loss=0.1512, simple_loss=0.2173, pruned_loss=0.04256, over 4645.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02913, over 973047.93 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 10:00:50,426 INFO [train.py:715] (6/8) Epoch 18, batch 20700, loss[loss=0.1537, simple_loss=0.224, pruned_loss=0.04168, over 4855.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02939, over 972977.24 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 10:01:28,858 INFO [train.py:715] (6/8) Epoch 18, batch 20750, loss[loss=0.1387, simple_loss=0.2074, pruned_loss=0.03501, over 4699.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02911, over 972554.49 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:02:08,325 INFO [train.py:715] (6/8) Epoch 18, batch 20800, loss[loss=0.121, simple_loss=0.2003, pruned_loss=0.02086, over 4813.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.029, over 971584.06 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 10:02:47,768 INFO [train.py:715] (6/8) Epoch 18, batch 20850, loss[loss=0.1248, simple_loss=0.2, pruned_loss=0.02478, over 4792.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02863, over 971805.45 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:03:26,624 INFO [train.py:715] (6/8) Epoch 18, batch 20900, loss[loss=0.136, simple_loss=0.2221, pruned_loss=0.0249, over 4855.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02858, over 971982.13 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 10:04:05,320 INFO [train.py:715] (6/8) Epoch 18, batch 20950, loss[loss=0.1365, simple_loss=0.2048, pruned_loss=0.03412, over 4970.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02874, over 972044.87 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:04:44,845 INFO [train.py:715] (6/8) Epoch 18, batch 21000, loss[loss=0.1417, simple_loss=0.2046, pruned_loss=0.03938, over 4949.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2056, pruned_loss=0.02877, over 972913.33 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 10:04:44,846 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 10:04:54,817 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1046, simple_loss=0.1879, pruned_loss=0.01059, over 914524.00 frames. 
+2022-05-09 10:05:34,591 INFO [train.py:715] (6/8) Epoch 18, batch 21050, loss[loss=0.1154, simple_loss=0.1965, pruned_loss=0.01714, over 4822.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2062, pruned_loss=0.02916, over 972958.44 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 10:06:14,354 INFO [train.py:715] (6/8) Epoch 18, batch 21100, loss[loss=0.1458, simple_loss=0.2226, pruned_loss=0.03446, over 4879.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2061, pruned_loss=0.02904, over 973313.34 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 10:06:53,517 INFO [train.py:715] (6/8) Epoch 18, batch 21150, loss[loss=0.1147, simple_loss=0.1851, pruned_loss=0.02213, over 4730.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2055, pruned_loss=0.02876, over 973048.67 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 10:07:33,005 INFO [train.py:715] (6/8) Epoch 18, batch 21200, loss[loss=0.1408, simple_loss=0.2238, pruned_loss=0.02889, over 4889.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2051, pruned_loss=0.02864, over 973181.17 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 10:08:12,713 INFO [train.py:715] (6/8) Epoch 18, batch 21250, loss[loss=0.114, simple_loss=0.1928, pruned_loss=0.01762, over 4906.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2053, pruned_loss=0.02856, over 973598.12 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 10:08:51,645 INFO [train.py:715] (6/8) Epoch 18, batch 21300, loss[loss=0.1269, simple_loss=0.2118, pruned_loss=0.02105, over 4813.00 frames.], tot_loss[loss=0.131, simple_loss=0.205, pruned_loss=0.02845, over 972940.50 frames.], batch size: 27, lr: 1.23e-04 +2022-05-09 10:09:30,213 INFO [train.py:715] (6/8) Epoch 18, batch 21350, loss[loss=0.1121, simple_loss=0.1813, pruned_loss=0.02145, over 4863.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2049, pruned_loss=0.02835, over 972471.24 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 10:10:09,587 INFO [train.py:715] (6/8) Epoch 18, batch 21400, loss[loss=0.1131, simple_loss=0.1882, pruned_loss=0.01897, over 4820.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2049, pruned_loss=0.0284, over 972772.50 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 10:10:51,766 INFO [train.py:715] (6/8) Epoch 18, batch 21450, loss[loss=0.1663, simple_loss=0.2286, pruned_loss=0.05203, over 4820.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02841, over 972815.63 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:11:30,945 INFO [train.py:715] (6/8) Epoch 18, batch 21500, loss[loss=0.1226, simple_loss=0.1969, pruned_loss=0.02412, over 4885.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02883, over 973210.30 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 10:12:09,696 INFO [train.py:715] (6/8) Epoch 18, batch 21550, loss[loss=0.1244, simple_loss=0.2002, pruned_loss=0.02424, over 4935.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02849, over 972460.17 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 10:12:49,094 INFO [train.py:715] (6/8) Epoch 18, batch 21600, loss[loss=0.13, simple_loss=0.2029, pruned_loss=0.0286, over 4799.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.0284, over 971774.70 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:13:28,304 INFO [train.py:715] (6/8) Epoch 18, batch 21650, loss[loss=0.1191, simple_loss=0.195, pruned_loss=0.02161, over 4953.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02839, over 971762.71 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 
10:14:06,695 INFO [train.py:715] (6/8) Epoch 18, batch 21700, loss[loss=0.117, simple_loss=0.1945, pruned_loss=0.01976, over 4854.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2065, pruned_loss=0.0284, over 972229.22 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 10:14:45,682 INFO [train.py:715] (6/8) Epoch 18, batch 21750, loss[loss=0.1318, simple_loss=0.2075, pruned_loss=0.02803, over 4823.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2064, pruned_loss=0.02821, over 971878.71 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 10:15:24,828 INFO [train.py:715] (6/8) Epoch 18, batch 21800, loss[loss=0.1202, simple_loss=0.2014, pruned_loss=0.01947, over 4896.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2068, pruned_loss=0.02807, over 971991.75 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 10:16:04,151 INFO [train.py:715] (6/8) Epoch 18, batch 21850, loss[loss=0.1364, simple_loss=0.1998, pruned_loss=0.0365, over 4888.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.02847, over 972468.48 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 10:16:43,577 INFO [train.py:715] (6/8) Epoch 18, batch 21900, loss[loss=0.1323, simple_loss=0.2121, pruned_loss=0.02621, over 4937.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02869, over 972717.84 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:17:23,087 INFO [train.py:715] (6/8) Epoch 18, batch 21950, loss[loss=0.1188, simple_loss=0.1907, pruned_loss=0.02346, over 4752.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02893, over 973275.80 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 10:18:02,137 INFO [train.py:715] (6/8) Epoch 18, batch 22000, loss[loss=0.1716, simple_loss=0.2562, pruned_loss=0.04354, over 4976.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02912, over 973107.46 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:18:41,242 INFO [train.py:715] (6/8) Epoch 18, batch 22050, loss[loss=0.1404, simple_loss=0.2147, pruned_loss=0.03303, over 4786.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02873, over 972180.74 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 10:19:20,737 INFO [train.py:715] (6/8) Epoch 18, batch 22100, loss[loss=0.1276, simple_loss=0.206, pruned_loss=0.02467, over 4933.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02883, over 971733.67 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 10:19:59,628 INFO [train.py:715] (6/8) Epoch 18, batch 22150, loss[loss=0.1103, simple_loss=0.1855, pruned_loss=0.01757, over 4872.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02846, over 971747.30 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 10:20:39,098 INFO [train.py:715] (6/8) Epoch 18, batch 22200, loss[loss=0.1233, simple_loss=0.1988, pruned_loss=0.02385, over 4932.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02837, over 971253.56 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 10:21:17,774 INFO [train.py:715] (6/8) Epoch 18, batch 22250, loss[loss=0.1295, simple_loss=0.2054, pruned_loss=0.02679, over 4764.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02881, over 970792.46 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 10:21:57,019 INFO [train.py:715] (6/8) Epoch 18, batch 22300, loss[loss=0.1284, simple_loss=0.1993, pruned_loss=0.02876, over 4759.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02867, over 970776.77 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 
10:22:35,721 INFO [train.py:715] (6/8) Epoch 18, batch 22350, loss[loss=0.1242, simple_loss=0.1935, pruned_loss=0.02748, over 4786.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02843, over 971415.89 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 10:23:14,498 INFO [train.py:715] (6/8) Epoch 18, batch 22400, loss[loss=0.1349, simple_loss=0.205, pruned_loss=0.03242, over 4840.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02837, over 972001.60 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 10:23:53,399 INFO [train.py:715] (6/8) Epoch 18, batch 22450, loss[loss=0.1238, simple_loss=0.2013, pruned_loss=0.02311, over 4931.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02826, over 971417.81 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 10:24:32,485 INFO [train.py:715] (6/8) Epoch 18, batch 22500, loss[loss=0.1435, simple_loss=0.2172, pruned_loss=0.03487, over 4699.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02865, over 971941.73 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:25:11,264 INFO [train.py:715] (6/8) Epoch 18, batch 22550, loss[loss=0.1346, simple_loss=0.2044, pruned_loss=0.03245, over 4746.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02833, over 971608.69 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 10:25:50,059 INFO [train.py:715] (6/8) Epoch 18, batch 22600, loss[loss=0.1177, simple_loss=0.2006, pruned_loss=0.01739, over 4842.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2057, pruned_loss=0.028, over 971707.58 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 10:26:29,083 INFO [train.py:715] (6/8) Epoch 18, batch 22650, loss[loss=0.116, simple_loss=0.1886, pruned_loss=0.02167, over 4950.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.0286, over 971940.62 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 10:27:07,866 INFO [train.py:715] (6/8) Epoch 18, batch 22700, loss[loss=0.1576, simple_loss=0.2317, pruned_loss=0.04169, over 4789.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02864, over 972397.32 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:27:46,839 INFO [train.py:715] (6/8) Epoch 18, batch 22750, loss[loss=0.1164, simple_loss=0.2016, pruned_loss=0.01558, over 4987.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02859, over 971525.86 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 10:28:26,219 INFO [train.py:715] (6/8) Epoch 18, batch 22800, loss[loss=0.1184, simple_loss=0.1957, pruned_loss=0.02053, over 4837.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2073, pruned_loss=0.02884, over 972440.49 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:29:04,923 INFO [train.py:715] (6/8) Epoch 18, batch 22850, loss[loss=0.1361, simple_loss=0.2053, pruned_loss=0.03344, over 4827.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02869, over 972637.71 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:29:43,878 INFO [train.py:715] (6/8) Epoch 18, batch 22900, loss[loss=0.1447, simple_loss=0.2222, pruned_loss=0.03364, over 4927.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02902, over 973077.36 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 10:30:22,781 INFO [train.py:715] (6/8) Epoch 18, batch 22950, loss[loss=0.1336, simple_loss=0.2067, pruned_loss=0.03026, over 4792.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.0286, over 971765.77 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 10:31:02,203 
INFO [train.py:715] (6/8) Epoch 18, batch 23000, loss[loss=0.1455, simple_loss=0.2332, pruned_loss=0.02886, over 4801.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02879, over 972015.10 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 10:31:40,969 INFO [train.py:715] (6/8) Epoch 18, batch 23050, loss[loss=0.1515, simple_loss=0.2296, pruned_loss=0.03672, over 4814.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.029, over 972866.68 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 10:32:20,114 INFO [train.py:715] (6/8) Epoch 18, batch 23100, loss[loss=0.1152, simple_loss=0.1824, pruned_loss=0.02393, over 4874.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02883, over 972338.66 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 10:32:59,659 INFO [train.py:715] (6/8) Epoch 18, batch 23150, loss[loss=0.1241, simple_loss=0.1986, pruned_loss=0.0248, over 4885.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.0287, over 971952.60 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 10:33:38,765 INFO [train.py:715] (6/8) Epoch 18, batch 23200, loss[loss=0.1447, simple_loss=0.2123, pruned_loss=0.03859, over 4982.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02904, over 972496.09 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 10:34:17,630 INFO [train.py:715] (6/8) Epoch 18, batch 23250, loss[loss=0.1182, simple_loss=0.1993, pruned_loss=0.01849, over 4799.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02917, over 972316.81 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:34:56,942 INFO [train.py:715] (6/8) Epoch 18, batch 23300, loss[loss=0.1428, simple_loss=0.2168, pruned_loss=0.03439, over 4848.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02937, over 972333.17 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 10:35:36,585 INFO [train.py:715] (6/8) Epoch 18, batch 23350, loss[loss=0.1581, simple_loss=0.2335, pruned_loss=0.04132, over 4855.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02931, over 972342.66 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 10:36:15,529 INFO [train.py:715] (6/8) Epoch 18, batch 23400, loss[loss=0.1472, simple_loss=0.2214, pruned_loss=0.03651, over 4984.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2066, pruned_loss=0.02947, over 972383.34 frames.], batch size: 28, lr: 1.23e-04 +2022-05-09 10:36:54,048 INFO [train.py:715] (6/8) Epoch 18, batch 23450, loss[loss=0.1392, simple_loss=0.2159, pruned_loss=0.03123, over 4888.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02868, over 972290.31 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 10:37:33,554 INFO [train.py:715] (6/8) Epoch 18, batch 23500, loss[loss=0.123, simple_loss=0.1939, pruned_loss=0.02604, over 4833.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02867, over 971912.82 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 10:38:12,433 INFO [train.py:715] (6/8) Epoch 18, batch 23550, loss[loss=0.1548, simple_loss=0.2217, pruned_loss=0.04399, over 4815.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02872, over 972827.20 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:38:51,088 INFO [train.py:715] (6/8) Epoch 18, batch 23600, loss[loss=0.1593, simple_loss=0.2273, pruned_loss=0.04571, over 4941.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2056, pruned_loss=0.02883, over 972641.22 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 10:39:30,022 INFO 
[train.py:715] (6/8) Epoch 18, batch 23650, loss[loss=0.1168, simple_loss=0.1891, pruned_loss=0.02224, over 4838.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2054, pruned_loss=0.02884, over 972177.44 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 10:40:08,662 INFO [train.py:715] (6/8) Epoch 18, batch 23700, loss[loss=0.1156, simple_loss=0.2021, pruned_loss=0.01456, over 4750.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2058, pruned_loss=0.02902, over 972229.82 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 10:40:47,471 INFO [train.py:715] (6/8) Epoch 18, batch 23750, loss[loss=0.11, simple_loss=0.1887, pruned_loss=0.01567, over 4787.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02906, over 973082.32 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 10:41:26,880 INFO [train.py:715] (6/8) Epoch 18, batch 23800, loss[loss=0.1185, simple_loss=0.1867, pruned_loss=0.02518, over 4875.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02894, over 972403.93 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 10:42:06,535 INFO [train.py:715] (6/8) Epoch 18, batch 23850, loss[loss=0.1474, simple_loss=0.2154, pruned_loss=0.03972, over 4904.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.0286, over 972735.10 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 10:42:45,350 INFO [train.py:715] (6/8) Epoch 18, batch 23900, loss[loss=0.147, simple_loss=0.2174, pruned_loss=0.03832, over 4929.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02846, over 972286.79 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 10:43:24,102 INFO [train.py:715] (6/8) Epoch 18, batch 23950, loss[loss=0.1233, simple_loss=0.2034, pruned_loss=0.02164, over 4968.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.0285, over 972097.80 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 10:44:03,431 INFO [train.py:715] (6/8) Epoch 18, batch 24000, loss[loss=0.1291, simple_loss=0.1954, pruned_loss=0.03136, over 4703.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2049, pruned_loss=0.02819, over 971140.20 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 10:44:03,431 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 10:44:13,350 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1045, simple_loss=0.1878, pruned_loss=0.01057, over 914524.00 frames. 
+2022-05-09 10:44:52,997 INFO [train.py:715] (6/8) Epoch 18, batch 24050, loss[loss=0.1231, simple_loss=0.2038, pruned_loss=0.02122, over 4837.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2052, pruned_loss=0.02829, over 971585.13 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 10:45:31,815 INFO [train.py:715] (6/8) Epoch 18, batch 24100, loss[loss=0.1173, simple_loss=0.1946, pruned_loss=0.01993, over 4867.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.0287, over 971470.19 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 10:46:10,748 INFO [train.py:715] (6/8) Epoch 18, batch 24150, loss[loss=0.1268, simple_loss=0.1993, pruned_loss=0.02713, over 4909.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02875, over 971370.73 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 10:46:50,173 INFO [train.py:715] (6/8) Epoch 18, batch 24200, loss[loss=0.1313, simple_loss=0.2012, pruned_loss=0.03068, over 4977.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02845, over 971375.87 frames.], batch size: 31, lr: 1.22e-04 +2022-05-09 10:47:29,224 INFO [train.py:715] (6/8) Epoch 18, batch 24250, loss[loss=0.132, simple_loss=0.2039, pruned_loss=0.0301, over 4990.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.0281, over 972743.61 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 10:48:08,105 INFO [train.py:715] (6/8) Epoch 18, batch 24300, loss[loss=0.1432, simple_loss=0.213, pruned_loss=0.03668, over 4762.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2049, pruned_loss=0.0279, over 972239.41 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 10:48:46,585 INFO [train.py:715] (6/8) Epoch 18, batch 24350, loss[loss=0.1292, simple_loss=0.2019, pruned_loss=0.02821, over 4874.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02832, over 971355.84 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 10:49:25,642 INFO [train.py:715] (6/8) Epoch 18, batch 24400, loss[loss=0.1342, simple_loss=0.2038, pruned_loss=0.03233, over 4758.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.02863, over 972194.79 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 10:50:04,249 INFO [train.py:715] (6/8) Epoch 18, batch 24450, loss[loss=0.123, simple_loss=0.1972, pruned_loss=0.02441, over 4851.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02878, over 971422.15 frames.], batch size: 30, lr: 1.22e-04 +2022-05-09 10:50:42,849 INFO [train.py:715] (6/8) Epoch 18, batch 24500, loss[loss=0.1154, simple_loss=0.1873, pruned_loss=0.02172, over 4871.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02902, over 971103.38 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 10:51:22,305 INFO [train.py:715] (6/8) Epoch 18, batch 24550, loss[loss=0.1448, simple_loss=0.2259, pruned_loss=0.0319, over 4755.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2071, pruned_loss=0.02888, over 971258.13 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 10:52:01,511 INFO [train.py:715] (6/8) Epoch 18, batch 24600, loss[loss=0.1542, simple_loss=0.2266, pruned_loss=0.04088, over 4954.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02928, over 971981.62 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 10:52:40,238 INFO [train.py:715] (6/8) Epoch 18, batch 24650, loss[loss=0.1235, simple_loss=0.1942, pruned_loss=0.02643, over 4960.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02912, over 971365.84 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 
10:53:18,843 INFO [train.py:715] (6/8) Epoch 18, batch 24700, loss[loss=0.138, simple_loss=0.2126, pruned_loss=0.03169, over 4945.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02855, over 971725.59 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 10:53:58,068 INFO [train.py:715] (6/8) Epoch 18, batch 24750, loss[loss=0.1388, simple_loss=0.2255, pruned_loss=0.02603, over 4983.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2061, pruned_loss=0.0291, over 971871.81 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 10:54:37,045 INFO [train.py:715] (6/8) Epoch 18, batch 24800, loss[loss=0.1129, simple_loss=0.1843, pruned_loss=0.0207, over 4782.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2057, pruned_loss=0.02911, over 971894.13 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 10:55:16,445 INFO [train.py:715] (6/8) Epoch 18, batch 24850, loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.02963, over 4962.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2057, pruned_loss=0.02883, over 972145.76 frames.], batch size: 35, lr: 1.22e-04 +2022-05-09 10:55:55,500 INFO [train.py:715] (6/8) Epoch 18, batch 24900, loss[loss=0.1424, simple_loss=0.2047, pruned_loss=0.04008, over 4990.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2052, pruned_loss=0.02874, over 972966.79 frames.], batch size: 31, lr: 1.22e-04 +2022-05-09 10:56:35,066 INFO [train.py:715] (6/8) Epoch 18, batch 24950, loss[loss=0.1679, simple_loss=0.2314, pruned_loss=0.05216, over 4845.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02945, over 972231.90 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 10:57:14,188 INFO [train.py:715] (6/8) Epoch 18, batch 25000, loss[loss=0.1103, simple_loss=0.1918, pruned_loss=0.01436, over 4832.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02894, over 972062.88 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 10:57:52,845 INFO [train.py:715] (6/8) Epoch 18, batch 25050, loss[loss=0.1513, simple_loss=0.2233, pruned_loss=0.03964, over 4874.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02898, over 972421.69 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 10:58:32,137 INFO [train.py:715] (6/8) Epoch 18, batch 25100, loss[loss=0.1484, simple_loss=0.2148, pruned_loss=0.04101, over 4985.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02843, over 972903.07 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 10:59:11,697 INFO [train.py:715] (6/8) Epoch 18, batch 25150, loss[loss=0.1773, simple_loss=0.2442, pruned_loss=0.05522, over 4861.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02837, over 973355.79 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 10:59:50,262 INFO [train.py:715] (6/8) Epoch 18, batch 25200, loss[loss=0.1328, simple_loss=0.2, pruned_loss=0.03277, over 4850.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02825, over 972733.42 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 11:00:29,821 INFO [train.py:715] (6/8) Epoch 18, batch 25250, loss[loss=0.1386, simple_loss=0.2133, pruned_loss=0.03191, over 4876.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02829, over 972204.23 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 11:01:09,551 INFO [train.py:715] (6/8) Epoch 18, batch 25300, loss[loss=0.1593, simple_loss=0.234, pruned_loss=0.04231, over 4788.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02844, over 971968.24 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:01:48,682 
INFO [train.py:715] (6/8) Epoch 18, batch 25350, loss[loss=0.1499, simple_loss=0.2247, pruned_loss=0.0376, over 4684.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2051, pruned_loss=0.02806, over 972182.24 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:02:27,385 INFO [train.py:715] (6/8) Epoch 18, batch 25400, loss[loss=0.1251, simple_loss=0.1975, pruned_loss=0.02632, over 4772.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.02844, over 972567.17 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:03:06,965 INFO [train.py:715] (6/8) Epoch 18, batch 25450, loss[loss=0.1365, simple_loss=0.2071, pruned_loss=0.03292, over 4857.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.0287, over 972575.14 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 11:03:45,939 INFO [train.py:715] (6/8) Epoch 18, batch 25500, loss[loss=0.1548, simple_loss=0.2263, pruned_loss=0.04169, over 4830.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02946, over 972568.88 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:04:24,927 INFO [train.py:715] (6/8) Epoch 18, batch 25550, loss[loss=0.1138, simple_loss=0.1823, pruned_loss=0.02261, over 4756.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2077, pruned_loss=0.02926, over 972929.86 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 11:05:04,564 INFO [train.py:715] (6/8) Epoch 18, batch 25600, loss[loss=0.1284, simple_loss=0.1934, pruned_loss=0.03166, over 4771.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02893, over 972556.20 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:05:44,109 INFO [train.py:715] (6/8) Epoch 18, batch 25650, loss[loss=0.1391, simple_loss=0.2128, pruned_loss=0.03273, over 4845.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02874, over 972514.31 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:06:23,311 INFO [train.py:715] (6/8) Epoch 18, batch 25700, loss[loss=0.1102, simple_loss=0.1908, pruned_loss=0.01486, over 4953.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02831, over 972644.14 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:07:02,570 INFO [train.py:715] (6/8) Epoch 18, batch 25750, loss[loss=0.1243, simple_loss=0.204, pruned_loss=0.0223, over 4960.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02821, over 972490.82 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 11:07:41,973 INFO [train.py:715] (6/8) Epoch 18, batch 25800, loss[loss=0.1792, simple_loss=0.2468, pruned_loss=0.0558, over 4908.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2067, pruned_loss=0.02835, over 972554.00 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:08:20,797 INFO [train.py:715] (6/8) Epoch 18, batch 25850, loss[loss=0.1421, simple_loss=0.2167, pruned_loss=0.0337, over 4765.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2055, pruned_loss=0.02778, over 972230.59 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:08:59,119 INFO [train.py:715] (6/8) Epoch 18, batch 25900, loss[loss=0.1335, simple_loss=0.2181, pruned_loss=0.02444, over 4850.00 frames.], tot_loss[loss=0.1312, simple_loss=0.206, pruned_loss=0.02817, over 972250.74 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 11:09:38,440 INFO [train.py:715] (6/8) Epoch 18, batch 25950, loss[loss=0.1376, simple_loss=0.2125, pruned_loss=0.0314, over 4837.00 frames.], tot_loss[loss=0.1332, simple_loss=0.208, pruned_loss=0.02925, over 972427.20 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:10:17,517 INFO 
[train.py:715] (6/8) Epoch 18, batch 26000, loss[loss=0.1198, simple_loss=0.1833, pruned_loss=0.02814, over 4876.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02879, over 972607.77 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:10:56,986 INFO [train.py:715] (6/8) Epoch 18, batch 26050, loss[loss=0.1461, simple_loss=0.2091, pruned_loss=0.04153, over 4801.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02877, over 972149.78 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:11:36,115 INFO [train.py:715] (6/8) Epoch 18, batch 26100, loss[loss=0.1203, simple_loss=0.2011, pruned_loss=0.01976, over 4945.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02864, over 971944.95 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:12:15,695 INFO [train.py:715] (6/8) Epoch 18, batch 26150, loss[loss=0.152, simple_loss=0.2239, pruned_loss=0.04002, over 4884.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02849, over 972250.95 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:12:54,906 INFO [train.py:715] (6/8) Epoch 18, batch 26200, loss[loss=0.1126, simple_loss=0.1862, pruned_loss=0.01955, over 4916.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02893, over 972247.97 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:13:33,239 INFO [train.py:715] (6/8) Epoch 18, batch 26250, loss[loss=0.1525, simple_loss=0.2295, pruned_loss=0.03771, over 4926.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02892, over 972287.89 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 11:14:12,860 INFO [train.py:715] (6/8) Epoch 18, batch 26300, loss[loss=0.1231, simple_loss=0.1949, pruned_loss=0.02561, over 4863.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02871, over 973107.88 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 11:14:51,548 INFO [train.py:715] (6/8) Epoch 18, batch 26350, loss[loss=0.1607, simple_loss=0.2308, pruned_loss=0.04533, over 4917.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02895, over 973228.60 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 11:15:30,596 INFO [train.py:715] (6/8) Epoch 18, batch 26400, loss[loss=0.1368, simple_loss=0.2078, pruned_loss=0.03292, over 4903.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02896, over 973589.22 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 11:16:09,487 INFO [train.py:715] (6/8) Epoch 18, batch 26450, loss[loss=0.1461, simple_loss=0.2253, pruned_loss=0.03343, over 4980.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02889, over 972799.75 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:16:49,041 INFO [train.py:715] (6/8) Epoch 18, batch 26500, loss[loss=0.1123, simple_loss=0.186, pruned_loss=0.01927, over 4930.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2051, pruned_loss=0.02801, over 972686.05 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 11:17:28,075 INFO [train.py:715] (6/8) Epoch 18, batch 26550, loss[loss=0.1229, simple_loss=0.2106, pruned_loss=0.01758, over 4915.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2048, pruned_loss=0.02766, over 972043.25 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 11:18:06,862 INFO [train.py:715] (6/8) Epoch 18, batch 26600, loss[loss=0.1294, simple_loss=0.2007, pruned_loss=0.02908, over 4969.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2051, pruned_loss=0.02794, over 971942.95 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 11:18:46,132 INFO 
[train.py:715] (6/8) Epoch 18, batch 26650, loss[loss=0.1134, simple_loss=0.1917, pruned_loss=0.01756, over 4898.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2048, pruned_loss=0.02773, over 972034.72 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:19:25,272 INFO [train.py:715] (6/8) Epoch 18, batch 26700, loss[loss=0.1269, simple_loss=0.2054, pruned_loss=0.02425, over 4838.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2055, pruned_loss=0.02812, over 972403.63 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 11:20:05,280 INFO [train.py:715] (6/8) Epoch 18, batch 26750, loss[loss=0.1097, simple_loss=0.1806, pruned_loss=0.01936, over 4926.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2051, pruned_loss=0.02788, over 972862.38 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:20:43,663 INFO [train.py:715] (6/8) Epoch 18, batch 26800, loss[loss=0.1164, simple_loss=0.1943, pruned_loss=0.01921, over 4788.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2052, pruned_loss=0.02784, over 972682.04 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:21:23,703 INFO [train.py:715] (6/8) Epoch 18, batch 26850, loss[loss=0.1233, simple_loss=0.2031, pruned_loss=0.02173, over 4950.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2049, pruned_loss=0.02795, over 971959.42 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:22:03,383 INFO [train.py:715] (6/8) Epoch 18, batch 26900, loss[loss=0.1133, simple_loss=0.1886, pruned_loss=0.01898, over 4989.00 frames.], tot_loss[loss=0.1308, simple_loss=0.205, pruned_loss=0.02832, over 972094.48 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 11:22:41,424 INFO [train.py:715] (6/8) Epoch 18, batch 26950, loss[loss=0.1322, simple_loss=0.2102, pruned_loss=0.02708, over 4896.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2051, pruned_loss=0.02839, over 971192.54 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:23:20,805 INFO [train.py:715] (6/8) Epoch 18, batch 27000, loss[loss=0.1373, simple_loss=0.2155, pruned_loss=0.02957, over 4787.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2047, pruned_loss=0.02816, over 971642.99 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:23:20,806 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 11:23:30,797 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1044, simple_loss=0.1877, pruned_loss=0.01055, over 914524.00 frames. 
+2022-05-09 11:24:11,110 INFO [train.py:715] (6/8) Epoch 18, batch 27050, loss[loss=0.1227, simple_loss=0.202, pruned_loss=0.02171, over 4814.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02808, over 971888.82 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 11:24:50,018 INFO [train.py:715] (6/8) Epoch 18, batch 27100, loss[loss=0.1129, simple_loss=0.1841, pruned_loss=0.02087, over 4811.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.02809, over 972042.10 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 11:25:29,325 INFO [train.py:715] (6/8) Epoch 18, batch 27150, loss[loss=0.1224, simple_loss=0.194, pruned_loss=0.02538, over 4787.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2053, pruned_loss=0.02804, over 972408.74 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:26:08,688 INFO [train.py:715] (6/8) Epoch 18, batch 27200, loss[loss=0.1357, simple_loss=0.2027, pruned_loss=0.03436, over 4781.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.0284, over 972643.91 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:26:47,919 INFO [train.py:715] (6/8) Epoch 18, batch 27250, loss[loss=0.1224, simple_loss=0.1949, pruned_loss=0.02501, over 4972.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02802, over 972483.84 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 11:27:26,985 INFO [train.py:715] (6/8) Epoch 18, batch 27300, loss[loss=0.1324, simple_loss=0.2111, pruned_loss=0.02678, over 4953.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02837, over 972838.21 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 11:28:05,829 INFO [train.py:715] (6/8) Epoch 18, batch 27350, loss[loss=0.1512, simple_loss=0.2257, pruned_loss=0.0383, over 4797.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02812, over 972574.42 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:28:46,002 INFO [train.py:715] (6/8) Epoch 18, batch 27400, loss[loss=0.1321, simple_loss=0.2093, pruned_loss=0.02745, over 4814.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02842, over 972080.53 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 11:29:25,405 INFO [train.py:715] (6/8) Epoch 18, batch 27450, loss[loss=0.098, simple_loss=0.1681, pruned_loss=0.01395, over 4985.00 frames.], tot_loss[loss=0.1304, simple_loss=0.205, pruned_loss=0.02788, over 971929.43 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 11:30:04,447 INFO [train.py:715] (6/8) Epoch 18, batch 27500, loss[loss=0.1212, simple_loss=0.2026, pruned_loss=0.01988, over 4750.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02827, over 971349.80 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 11:30:44,168 INFO [train.py:715] (6/8) Epoch 18, batch 27550, loss[loss=0.1335, simple_loss=0.2097, pruned_loss=0.02862, over 4946.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2056, pruned_loss=0.02789, over 971391.62 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 11:31:23,285 INFO [train.py:715] (6/8) Epoch 18, batch 27600, loss[loss=0.1196, simple_loss=0.1904, pruned_loss=0.02438, over 4799.00 frames.], tot_loss[loss=0.1312, simple_loss=0.206, pruned_loss=0.02817, over 971655.84 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 11:32:01,949 INFO [train.py:715] (6/8) Epoch 18, batch 27650, loss[loss=0.1069, simple_loss=0.1831, pruned_loss=0.01535, over 4815.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02808, over 971835.29 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 
11:32:40,860 INFO [train.py:715] (6/8) Epoch 18, batch 27700, loss[loss=0.1392, simple_loss=0.2086, pruned_loss=0.03491, over 4832.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2049, pruned_loss=0.02767, over 972177.69 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 11:33:20,163 INFO [train.py:715] (6/8) Epoch 18, batch 27750, loss[loss=0.1086, simple_loss=0.1833, pruned_loss=0.01694, over 4870.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02806, over 972973.89 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 11:33:59,622 INFO [train.py:715] (6/8) Epoch 18, batch 27800, loss[loss=0.1243, simple_loss=0.1917, pruned_loss=0.02839, over 4754.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.0285, over 972234.47 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:34:38,871 INFO [train.py:715] (6/8) Epoch 18, batch 27850, loss[loss=0.1268, simple_loss=0.201, pruned_loss=0.02626, over 4859.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02831, over 972219.83 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:35:18,485 INFO [train.py:715] (6/8) Epoch 18, batch 27900, loss[loss=0.1442, simple_loss=0.2187, pruned_loss=0.03488, over 4977.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02907, over 971833.29 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:35:57,742 INFO [train.py:715] (6/8) Epoch 18, batch 27950, loss[loss=0.103, simple_loss=0.1764, pruned_loss=0.01482, over 4862.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02909, over 971592.34 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 11:36:36,986 INFO [train.py:715] (6/8) Epoch 18, batch 28000, loss[loss=0.1193, simple_loss=0.1927, pruned_loss=0.02295, over 4926.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02895, over 971943.01 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:37:16,536 INFO [train.py:715] (6/8) Epoch 18, batch 28050, loss[loss=0.1383, simple_loss=0.2155, pruned_loss=0.03059, over 4823.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02922, over 971878.64 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:37:56,323 INFO [train.py:715] (6/8) Epoch 18, batch 28100, loss[loss=0.1372, simple_loss=0.2205, pruned_loss=0.02695, over 4769.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02942, over 972394.15 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:38:35,512 INFO [train.py:715] (6/8) Epoch 18, batch 28150, loss[loss=0.1616, simple_loss=0.2348, pruned_loss=0.0442, over 4862.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2083, pruned_loss=0.02956, over 972304.55 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 11:39:13,846 INFO [train.py:715] (6/8) Epoch 18, batch 28200, loss[loss=0.112, simple_loss=0.187, pruned_loss=0.0185, over 4778.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02936, over 971184.52 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:39:53,467 INFO [train.py:715] (6/8) Epoch 18, batch 28250, loss[loss=0.1548, simple_loss=0.2204, pruned_loss=0.0446, over 4970.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02968, over 970581.29 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:40:32,295 INFO [train.py:715] (6/8) Epoch 18, batch 28300, loss[loss=0.1478, simple_loss=0.2169, pruned_loss=0.03936, over 4918.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03, over 971324.46 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:41:11,199 INFO 
[train.py:715] (6/8) Epoch 18, batch 28350, loss[loss=0.1296, simple_loss=0.2068, pruned_loss=0.02616, over 4975.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03019, over 971883.44 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 11:41:50,497 INFO [train.py:715] (6/8) Epoch 18, batch 28400, loss[loss=0.1322, simple_loss=0.209, pruned_loss=0.02772, over 4807.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03007, over 972261.74 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 11:42:29,774 INFO [train.py:715] (6/8) Epoch 18, batch 28450, loss[loss=0.1382, simple_loss=0.2064, pruned_loss=0.03497, over 4911.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03023, over 971764.76 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 11:43:08,848 INFO [train.py:715] (6/8) Epoch 18, batch 28500, loss[loss=0.1382, simple_loss=0.2174, pruned_loss=0.02947, over 4933.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02979, over 972117.30 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 11:43:47,956 INFO [train.py:715] (6/8) Epoch 18, batch 28550, loss[loss=0.1444, simple_loss=0.2144, pruned_loss=0.03721, over 4858.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02899, over 971790.69 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 11:44:27,962 INFO [train.py:715] (6/8) Epoch 18, batch 28600, loss[loss=0.1215, simple_loss=0.19, pruned_loss=0.02646, over 4968.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2078, pruned_loss=0.02898, over 971262.29 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:45:06,665 INFO [train.py:715] (6/8) Epoch 18, batch 28650, loss[loss=0.1136, simple_loss=0.1862, pruned_loss=0.02051, over 4875.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2078, pruned_loss=0.02875, over 971461.77 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 11:45:45,613 INFO [train.py:715] (6/8) Epoch 18, batch 28700, loss[loss=0.1114, simple_loss=0.1861, pruned_loss=0.0183, over 4891.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2073, pruned_loss=0.02845, over 971274.58 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 11:46:25,182 INFO [train.py:715] (6/8) Epoch 18, batch 28750, loss[loss=0.1458, simple_loss=0.2055, pruned_loss=0.04308, over 4775.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02869, over 971718.49 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:47:04,224 INFO [train.py:715] (6/8) Epoch 18, batch 28800, loss[loss=0.1354, simple_loss=0.2166, pruned_loss=0.0271, over 4759.00 frames.], tot_loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.02863, over 971968.75 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 11:47:43,077 INFO [train.py:715] (6/8) Epoch 18, batch 28850, loss[loss=0.1365, simple_loss=0.2106, pruned_loss=0.03122, over 4974.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2071, pruned_loss=0.02871, over 971711.86 frames.], batch size: 28, lr: 1.22e-04 +2022-05-09 11:48:21,645 INFO [train.py:715] (6/8) Epoch 18, batch 28900, loss[loss=0.1178, simple_loss=0.1954, pruned_loss=0.02013, over 4861.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2068, pruned_loss=0.02872, over 971207.98 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:49:01,750 INFO [train.py:715] (6/8) Epoch 18, batch 28950, loss[loss=0.1051, simple_loss=0.184, pruned_loss=0.01309, over 4774.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02861, over 971427.28 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:49:40,555 INFO 
[train.py:715] (6/8) Epoch 18, batch 29000, loss[loss=0.127, simple_loss=0.2022, pruned_loss=0.0259, over 4810.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02838, over 970435.05 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:50:19,736 INFO [train.py:715] (6/8) Epoch 18, batch 29050, loss[loss=0.127, simple_loss=0.2067, pruned_loss=0.02367, over 4921.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02863, over 971811.21 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:50:59,126 INFO [train.py:715] (6/8) Epoch 18, batch 29100, loss[loss=0.1676, simple_loss=0.2297, pruned_loss=0.05275, over 4984.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02867, over 972886.03 frames.], batch size: 35, lr: 1.22e-04 +2022-05-09 11:51:38,418 INFO [train.py:715] (6/8) Epoch 18, batch 29150, loss[loss=0.1351, simple_loss=0.2095, pruned_loss=0.03029, over 4948.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02861, over 972364.31 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:52:17,128 INFO [train.py:715] (6/8) Epoch 18, batch 29200, loss[loss=0.1257, simple_loss=0.208, pruned_loss=0.02164, over 4875.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02907, over 973667.18 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:52:55,646 INFO [train.py:715] (6/8) Epoch 18, batch 29250, loss[loss=0.1255, simple_loss=0.1992, pruned_loss=0.02588, over 4953.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02866, over 973619.16 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:53:35,213 INFO [train.py:715] (6/8) Epoch 18, batch 29300, loss[loss=0.1423, simple_loss=0.2133, pruned_loss=0.03567, over 4810.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02896, over 973716.06 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:54:13,919 INFO [train.py:715] (6/8) Epoch 18, batch 29350, loss[loss=0.1179, simple_loss=0.184, pruned_loss=0.02589, over 4766.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02913, over 973473.21 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:54:52,614 INFO [train.py:715] (6/8) Epoch 18, batch 29400, loss[loss=0.135, simple_loss=0.2112, pruned_loss=0.02939, over 4988.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02918, over 972302.37 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 11:55:33,954 INFO [train.py:715] (6/8) Epoch 18, batch 29450, loss[loss=0.1211, simple_loss=0.2052, pruned_loss=0.01855, over 4932.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02972, over 972097.47 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:56:12,983 INFO [train.py:715] (6/8) Epoch 18, batch 29500, loss[loss=0.119, simple_loss=0.1956, pruned_loss=0.02122, over 4974.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02961, over 972166.30 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 11:56:52,077 INFO [train.py:715] (6/8) Epoch 18, batch 29550, loss[loss=0.1427, simple_loss=0.2211, pruned_loss=0.03214, over 4923.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02929, over 972446.47 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 11:57:30,049 INFO [train.py:715] (6/8) Epoch 18, batch 29600, loss[loss=0.1266, simple_loss=0.1941, pruned_loss=0.02957, over 4967.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02923, over 971184.58 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:58:09,264 INFO [train.py:715] 
(6/8) Epoch 18, batch 29650, loss[loss=0.1178, simple_loss=0.1963, pruned_loss=0.01962, over 4870.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02896, over 971352.15 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:58:48,237 INFO [train.py:715] (6/8) Epoch 18, batch 29700, loss[loss=0.1314, simple_loss=0.212, pruned_loss=0.02544, over 4763.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02901, over 971421.80 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 11:59:26,592 INFO [train.py:715] (6/8) Epoch 18, batch 29750, loss[loss=0.1564, simple_loss=0.2357, pruned_loss=0.03852, over 4839.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02951, over 971423.57 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:00:05,918 INFO [train.py:715] (6/8) Epoch 18, batch 29800, loss[loss=0.1307, simple_loss=0.2046, pruned_loss=0.02836, over 4776.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02896, over 971546.42 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:00:45,628 INFO [train.py:715] (6/8) Epoch 18, batch 29850, loss[loss=0.1155, simple_loss=0.1909, pruned_loss=0.02004, over 4766.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02863, over 971883.47 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:01:24,704 INFO [train.py:715] (6/8) Epoch 18, batch 29900, loss[loss=0.1318, simple_loss=0.2137, pruned_loss=0.02492, over 4857.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02854, over 972591.55 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 12:02:03,298 INFO [train.py:715] (6/8) Epoch 18, batch 29950, loss[loss=0.1581, simple_loss=0.2214, pruned_loss=0.04737, over 4972.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02852, over 973195.78 frames.], batch size: 35, lr: 1.22e-04 +2022-05-09 12:02:43,065 INFO [train.py:715] (6/8) Epoch 18, batch 30000, loss[loss=0.1113, simple_loss=0.1881, pruned_loss=0.01725, over 4927.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02836, over 973697.03 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:02:43,066 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 12:02:52,968 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1047, simple_loss=0.188, pruned_loss=0.01071, over 914524.00 frames. 
+2022-05-09 12:03:33,193 INFO [train.py:715] (6/8) Epoch 18, batch 30050, loss[loss=0.1415, simple_loss=0.2141, pruned_loss=0.03446, over 4927.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02827, over 973559.33 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:04:12,321 INFO [train.py:715] (6/8) Epoch 18, batch 30100, loss[loss=0.1083, simple_loss=0.1803, pruned_loss=0.01809, over 4802.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02828, over 972866.26 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:04:50,506 INFO [train.py:715] (6/8) Epoch 18, batch 30150, loss[loss=0.1491, simple_loss=0.2196, pruned_loss=0.03924, over 4799.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2048, pruned_loss=0.02809, over 972368.48 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:05:29,940 INFO [train.py:715] (6/8) Epoch 18, batch 30200, loss[loss=0.1685, simple_loss=0.2391, pruned_loss=0.04895, over 4976.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02859, over 972660.04 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:06:09,184 INFO [train.py:715] (6/8) Epoch 18, batch 30250, loss[loss=0.1347, simple_loss=0.2114, pruned_loss=0.02897, over 4782.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02829, over 973584.38 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:06:48,874 INFO [train.py:715] (6/8) Epoch 18, batch 30300, loss[loss=0.1332, simple_loss=0.2083, pruned_loss=0.02898, over 4880.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.0283, over 973265.61 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 12:07:27,510 INFO [train.py:715] (6/8) Epoch 18, batch 30350, loss[loss=0.1271, simple_loss=0.1972, pruned_loss=0.02852, over 4983.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2054, pruned_loss=0.02819, over 973064.15 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:08:07,407 INFO [train.py:715] (6/8) Epoch 18, batch 30400, loss[loss=0.1175, simple_loss=0.1948, pruned_loss=0.02013, over 4899.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2057, pruned_loss=0.02807, over 973064.79 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 12:08:46,437 INFO [train.py:715] (6/8) Epoch 18, batch 30450, loss[loss=0.1388, simple_loss=0.2205, pruned_loss=0.02855, over 4843.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2049, pruned_loss=0.02783, over 972829.40 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 12:09:24,924 INFO [train.py:715] (6/8) Epoch 18, batch 30500, loss[loss=0.115, simple_loss=0.1919, pruned_loss=0.0191, over 4826.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02832, over 972230.04 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 12:10:04,136 INFO [train.py:715] (6/8) Epoch 18, batch 30550, loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03221, over 4940.00 frames.], tot_loss[loss=0.131, simple_loss=0.2051, pruned_loss=0.02845, over 971853.18 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:10:42,817 INFO [train.py:715] (6/8) Epoch 18, batch 30600, loss[loss=0.1455, simple_loss=0.2259, pruned_loss=0.03257, over 4916.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2052, pruned_loss=0.02832, over 971755.66 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 12:11:21,484 INFO [train.py:715] (6/8) Epoch 18, batch 30650, loss[loss=0.1206, simple_loss=0.1972, pruned_loss=0.02203, over 4796.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.0285, over 972170.20 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 
12:12:00,156 INFO [train.py:715] (6/8) Epoch 18, batch 30700, loss[loss=0.1204, simple_loss=0.1898, pruned_loss=0.02547, over 4699.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2053, pruned_loss=0.02827, over 972447.45 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:12:39,285 INFO [train.py:715] (6/8) Epoch 18, batch 30750, loss[loss=0.1402, simple_loss=0.2066, pruned_loss=0.03685, over 4854.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2052, pruned_loss=0.02831, over 972300.19 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 12:13:18,040 INFO [train.py:715] (6/8) Epoch 18, batch 30800, loss[loss=0.1671, simple_loss=0.2437, pruned_loss=0.04521, over 4783.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02827, over 972544.07 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:13:56,475 INFO [train.py:715] (6/8) Epoch 18, batch 30850, loss[loss=0.1398, simple_loss=0.2211, pruned_loss=0.02929, over 4875.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2059, pruned_loss=0.0282, over 973060.51 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 12:14:35,511 INFO [train.py:715] (6/8) Epoch 18, batch 30900, loss[loss=0.1173, simple_loss=0.1972, pruned_loss=0.0187, over 4855.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2049, pruned_loss=0.02812, over 972678.17 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 12:15:14,125 INFO [train.py:715] (6/8) Epoch 18, batch 30950, loss[loss=0.1366, simple_loss=0.2015, pruned_loss=0.03583, over 4801.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02834, over 972587.67 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:15:52,435 INFO [train.py:715] (6/8) Epoch 18, batch 31000, loss[loss=0.1439, simple_loss=0.2185, pruned_loss=0.03464, over 4950.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02861, over 971512.82 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:16:31,407 INFO [train.py:715] (6/8) Epoch 18, batch 31050, loss[loss=0.1191, simple_loss=0.1963, pruned_loss=0.02095, over 4962.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02838, over 971816.16 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 12:17:10,962 INFO [train.py:715] (6/8) Epoch 18, batch 31100, loss[loss=0.1371, simple_loss=0.2251, pruned_loss=0.02456, over 4809.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02883, over 972195.44 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:17:49,898 INFO [train.py:715] (6/8) Epoch 18, batch 31150, loss[loss=0.1281, simple_loss=0.2067, pruned_loss=0.02478, over 4698.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02858, over 972172.17 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:18:28,840 INFO [train.py:715] (6/8) Epoch 18, batch 31200, loss[loss=0.1289, simple_loss=0.1947, pruned_loss=0.03158, over 4992.00 frames.], tot_loss[loss=0.1307, simple_loss=0.205, pruned_loss=0.02815, over 972295.95 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:19:08,216 INFO [train.py:715] (6/8) Epoch 18, batch 31250, loss[loss=0.1106, simple_loss=0.1884, pruned_loss=0.01646, over 4815.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2053, pruned_loss=0.02807, over 972192.04 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 12:19:47,257 INFO [train.py:715] (6/8) Epoch 18, batch 31300, loss[loss=0.1002, simple_loss=0.1835, pruned_loss=0.008383, over 4938.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2051, pruned_loss=0.02772, over 971994.36 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 
12:20:25,880 INFO [train.py:715] (6/8) Epoch 18, batch 31350, loss[loss=0.1111, simple_loss=0.1832, pruned_loss=0.01948, over 4852.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02837, over 971141.69 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 12:21:05,055 INFO [train.py:715] (6/8) Epoch 18, batch 31400, loss[loss=0.1433, simple_loss=0.2127, pruned_loss=0.03694, over 4983.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2065, pruned_loss=0.02829, over 971995.71 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:21:44,557 INFO [train.py:715] (6/8) Epoch 18, batch 31450, loss[loss=0.1385, simple_loss=0.2029, pruned_loss=0.03708, over 4926.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02841, over 971311.75 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 12:22:23,390 INFO [train.py:715] (6/8) Epoch 18, batch 31500, loss[loss=0.1258, simple_loss=0.1904, pruned_loss=0.03059, over 4976.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.02868, over 971668.11 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:23:01,626 INFO [train.py:715] (6/8) Epoch 18, batch 31550, loss[loss=0.1731, simple_loss=0.2454, pruned_loss=0.05038, over 4864.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02873, over 972358.99 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:23:41,443 INFO [train.py:715] (6/8) Epoch 18, batch 31600, loss[loss=0.1325, simple_loss=0.2087, pruned_loss=0.02817, over 4900.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02831, over 973374.80 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 12:24:20,713 INFO [train.py:715] (6/8) Epoch 18, batch 31650, loss[loss=0.1311, simple_loss=0.2111, pruned_loss=0.02551, over 4738.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02831, over 972944.44 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:24:59,691 INFO [train.py:715] (6/8) Epoch 18, batch 31700, loss[loss=0.1147, simple_loss=0.1955, pruned_loss=0.01698, over 4976.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02829, over 974073.77 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:25:38,814 INFO [train.py:715] (6/8) Epoch 18, batch 31750, loss[loss=0.1218, simple_loss=0.1987, pruned_loss=0.02243, over 4957.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2059, pruned_loss=0.02827, over 973410.27 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:26:18,653 INFO [train.py:715] (6/8) Epoch 18, batch 31800, loss[loss=0.1518, simple_loss=0.222, pruned_loss=0.04084, over 4785.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2051, pruned_loss=0.02782, over 973619.27 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:26:58,020 INFO [train.py:715] (6/8) Epoch 18, batch 31850, loss[loss=0.1074, simple_loss=0.1941, pruned_loss=0.01032, over 4766.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2041, pruned_loss=0.02767, over 973669.74 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:27:36,980 INFO [train.py:715] (6/8) Epoch 18, batch 31900, loss[loss=0.1051, simple_loss=0.1765, pruned_loss=0.01686, over 4841.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2045, pruned_loss=0.02759, over 972698.35 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 12:28:16,149 INFO [train.py:715] (6/8) Epoch 18, batch 31950, loss[loss=0.1409, simple_loss=0.2105, pruned_loss=0.03561, over 4782.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02859, over 972356.91 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 
12:28:54,462 INFO [train.py:715] (6/8) Epoch 18, batch 32000, loss[loss=0.1371, simple_loss=0.2154, pruned_loss=0.02937, over 4925.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2061, pruned_loss=0.02911, over 972216.83 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 12:29:32,618 INFO [train.py:715] (6/8) Epoch 18, batch 32050, loss[loss=0.1126, simple_loss=0.1786, pruned_loss=0.02332, over 4754.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2057, pruned_loss=0.02894, over 972323.15 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 12:30:11,894 INFO [train.py:715] (6/8) Epoch 18, batch 32100, loss[loss=0.1342, simple_loss=0.2042, pruned_loss=0.03208, over 4919.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2042, pruned_loss=0.02841, over 971660.37 frames.], batch size: 38, lr: 1.22e-04 +2022-05-09 12:30:51,386 INFO [train.py:715] (6/8) Epoch 18, batch 32150, loss[loss=0.1305, simple_loss=0.1983, pruned_loss=0.0313, over 4957.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2045, pruned_loss=0.02861, over 971773.59 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:31:30,535 INFO [train.py:715] (6/8) Epoch 18, batch 32200, loss[loss=0.1113, simple_loss=0.1926, pruned_loss=0.01503, over 4913.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2042, pruned_loss=0.02815, over 971949.29 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 12:32:08,910 INFO [train.py:715] (6/8) Epoch 18, batch 32250, loss[loss=0.125, simple_loss=0.1917, pruned_loss=0.02917, over 4782.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2044, pruned_loss=0.02813, over 971996.97 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 12:32:48,157 INFO [train.py:715] (6/8) Epoch 18, batch 32300, loss[loss=0.1441, simple_loss=0.2066, pruned_loss=0.04078, over 4788.00 frames.], tot_loss[loss=0.1309, simple_loss=0.205, pruned_loss=0.02842, over 972142.03 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:33:26,713 INFO [train.py:715] (6/8) Epoch 18, batch 32350, loss[loss=0.1278, simple_loss=0.2015, pruned_loss=0.02707, over 4874.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2044, pruned_loss=0.02797, over 972141.88 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 12:34:05,362 INFO [train.py:715] (6/8) Epoch 18, batch 32400, loss[loss=0.1082, simple_loss=0.1897, pruned_loss=0.01331, over 4850.00 frames.], tot_loss[loss=0.1311, simple_loss=0.205, pruned_loss=0.0286, over 971489.24 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 12:34:44,784 INFO [train.py:715] (6/8) Epoch 18, batch 32450, loss[loss=0.1212, simple_loss=0.2002, pruned_loss=0.02107, over 4697.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2053, pruned_loss=0.02882, over 971508.12 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:35:23,654 INFO [train.py:715] (6/8) Epoch 18, batch 32500, loss[loss=0.1102, simple_loss=0.1863, pruned_loss=0.0171, over 4889.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2044, pruned_loss=0.02823, over 971959.06 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 12:36:02,853 INFO [train.py:715] (6/8) Epoch 18, batch 32550, loss[loss=0.1298, simple_loss=0.2136, pruned_loss=0.02296, over 4804.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02854, over 972339.99 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:36:42,027 INFO [train.py:715] (6/8) Epoch 18, batch 32600, loss[loss=0.1514, simple_loss=0.2258, pruned_loss=0.03856, over 4814.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.0288, over 972242.92 frames.], batch size: 27, lr: 1.22e-04 +2022-05-09 12:37:21,457 
INFO [train.py:715] (6/8) Epoch 18, batch 32650, loss[loss=0.1215, simple_loss=0.2034, pruned_loss=0.01977, over 4824.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02869, over 973232.28 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 12:37:59,895 INFO [train.py:715] (6/8) Epoch 18, batch 32700, loss[loss=0.1269, simple_loss=0.2069, pruned_loss=0.02342, over 4938.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.02854, over 972903.46 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 12:38:38,642 INFO [train.py:715] (6/8) Epoch 18, batch 32750, loss[loss=0.1837, simple_loss=0.2484, pruned_loss=0.05953, over 4690.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02864, over 972808.30 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:39:17,961 INFO [train.py:715] (6/8) Epoch 18, batch 32800, loss[loss=0.1215, simple_loss=0.2051, pruned_loss=0.01894, over 4847.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02876, over 972389.92 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 12:39:57,154 INFO [train.py:715] (6/8) Epoch 18, batch 32850, loss[loss=0.1185, simple_loss=0.196, pruned_loss=0.02049, over 4861.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2051, pruned_loss=0.02868, over 971861.43 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 12:40:35,667 INFO [train.py:715] (6/8) Epoch 18, batch 32900, loss[loss=0.1411, simple_loss=0.2093, pruned_loss=0.03648, over 4972.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2057, pruned_loss=0.02872, over 972471.81 frames.], batch size: 35, lr: 1.22e-04 +2022-05-09 12:41:14,765 INFO [train.py:715] (6/8) Epoch 18, batch 32950, loss[loss=0.1458, simple_loss=0.2144, pruned_loss=0.03864, over 4875.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02883, over 972521.16 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:41:53,960 INFO [train.py:715] (6/8) Epoch 18, batch 33000, loss[loss=0.1201, simple_loss=0.1908, pruned_loss=0.02474, over 4908.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02867, over 973145.30 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 12:41:53,960 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 12:42:03,827 INFO [train.py:742] (6/8) Epoch 18, validation: loss=0.1046, simple_loss=0.1878, pruned_loss=0.01068, over 914524.00 frames. 
+2022-05-09 12:42:43,670 INFO [train.py:715] (6/8) Epoch 18, batch 33050, loss[loss=0.1331, simple_loss=0.2059, pruned_loss=0.03016, over 4774.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02837, over 973615.16 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:43:22,621 INFO [train.py:715] (6/8) Epoch 18, batch 33100, loss[loss=0.1301, simple_loss=0.1982, pruned_loss=0.03095, over 4939.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02856, over 973726.39 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 12:44:02,107 INFO [train.py:715] (6/8) Epoch 18, batch 33150, loss[loss=0.1559, simple_loss=0.2266, pruned_loss=0.04263, over 4912.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02893, over 973095.11 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 12:44:41,946 INFO [train.py:715] (6/8) Epoch 18, batch 33200, loss[loss=0.1179, simple_loss=0.1855, pruned_loss=0.02515, over 4884.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02887, over 973074.45 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 12:45:20,902 INFO [train.py:715] (6/8) Epoch 18, batch 33250, loss[loss=0.153, simple_loss=0.2159, pruned_loss=0.04501, over 4839.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02889, over 973717.50 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:45:59,528 INFO [train.py:715] (6/8) Epoch 18, batch 33300, loss[loss=0.1473, simple_loss=0.2222, pruned_loss=0.03622, over 4746.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02866, over 973320.03 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:46:38,969 INFO [train.py:715] (6/8) Epoch 18, batch 33350, loss[loss=0.1529, simple_loss=0.2151, pruned_loss=0.04533, over 4992.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02886, over 973008.23 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:47:18,354 INFO [train.py:715] (6/8) Epoch 18, batch 33400, loss[loss=0.1329, simple_loss=0.2082, pruned_loss=0.02878, over 4988.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02886, over 972887.83 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:47:57,078 INFO [train.py:715] (6/8) Epoch 18, batch 33450, loss[loss=0.1297, simple_loss=0.2069, pruned_loss=0.02623, over 4905.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02911, over 973015.59 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 12:48:36,023 INFO [train.py:715] (6/8) Epoch 18, batch 33500, loss[loss=0.1177, simple_loss=0.1932, pruned_loss=0.02107, over 4824.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02907, over 972400.94 frames.], batch size: 27, lr: 1.22e-04 +2022-05-09 12:49:15,398 INFO [train.py:715] (6/8) Epoch 18, batch 33550, loss[loss=0.116, simple_loss=0.1923, pruned_loss=0.01988, over 4806.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02881, over 972837.74 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 12:49:54,442 INFO [train.py:715] (6/8) Epoch 18, batch 33600, loss[loss=0.1373, simple_loss=0.2008, pruned_loss=0.03688, over 4800.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02865, over 973015.64 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:50:32,507 INFO [train.py:715] (6/8) Epoch 18, batch 33650, loss[loss=0.1498, simple_loss=0.2219, pruned_loss=0.03884, over 4818.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02862, over 972719.19 frames.], batch size: 21, lr: 1.22e-04 
+2022-05-09 12:51:11,946 INFO [train.py:715] (6/8) Epoch 18, batch 33700, loss[loss=0.1592, simple_loss=0.2293, pruned_loss=0.04459, over 4812.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02866, over 972248.66 frames.], batch size: 27, lr: 1.22e-04 +2022-05-09 12:51:51,116 INFO [train.py:715] (6/8) Epoch 18, batch 33750, loss[loss=0.1144, simple_loss=0.1971, pruned_loss=0.01582, over 4747.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02856, over 973078.90 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:52:30,432 INFO [train.py:715] (6/8) Epoch 18, batch 33800, loss[loss=0.1413, simple_loss=0.2138, pruned_loss=0.03442, over 4981.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2054, pruned_loss=0.02803, over 973433.54 frames.], batch size: 28, lr: 1.22e-04 +2022-05-09 12:53:09,707 INFO [train.py:715] (6/8) Epoch 18, batch 33850, loss[loss=0.1225, simple_loss=0.2008, pruned_loss=0.02217, over 4939.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2056, pruned_loss=0.02791, over 973596.14 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 12:53:49,539 INFO [train.py:715] (6/8) Epoch 18, batch 33900, loss[loss=0.1152, simple_loss=0.1909, pruned_loss=0.01976, over 4828.00 frames.], tot_loss[loss=0.1303, simple_loss=0.205, pruned_loss=0.02783, over 973655.87 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 12:54:28,739 INFO [train.py:715] (6/8) Epoch 18, batch 33950, loss[loss=0.1725, simple_loss=0.2398, pruned_loss=0.05258, over 4903.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2052, pruned_loss=0.02786, over 973756.54 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 12:55:07,057 INFO [train.py:715] (6/8) Epoch 18, batch 34000, loss[loss=0.1353, simple_loss=0.2112, pruned_loss=0.02972, over 4940.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.02857, over 973217.16 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 12:55:46,479 INFO [train.py:715] (6/8) Epoch 18, batch 34050, loss[loss=0.1284, simple_loss=0.2116, pruned_loss=0.02265, over 4883.00 frames.], tot_loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.02857, over 972637.83 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 12:56:25,894 INFO [train.py:715] (6/8) Epoch 18, batch 34100, loss[loss=0.1347, simple_loss=0.2115, pruned_loss=0.02897, over 4776.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02855, over 972731.37 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:57:05,036 INFO [train.py:715] (6/8) Epoch 18, batch 34150, loss[loss=0.1316, simple_loss=0.2051, pruned_loss=0.02903, over 4770.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02879, over 972634.67 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:57:44,081 INFO [train.py:715] (6/8) Epoch 18, batch 34200, loss[loss=0.1187, simple_loss=0.2007, pruned_loss=0.01835, over 4869.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02844, over 972144.19 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 12:58:23,226 INFO [train.py:715] (6/8) Epoch 18, batch 34250, loss[loss=0.1422, simple_loss=0.2112, pruned_loss=0.03655, over 4811.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2054, pruned_loss=0.02858, over 972011.89 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:59:02,029 INFO [train.py:715] (6/8) Epoch 18, batch 34300, loss[loss=0.1611, simple_loss=0.2252, pruned_loss=0.04853, over 4694.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02853, over 971849.65 frames.], batch size: 15, lr: 1.22e-04 
+2022-05-09 12:59:40,338 INFO [train.py:715] (6/8) Epoch 18, batch 34350, loss[loss=0.1142, simple_loss=0.1806, pruned_loss=0.02386, over 4773.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.0286, over 972803.38 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 13:00:19,867 INFO [train.py:715] (6/8) Epoch 18, batch 34400, loss[loss=0.1565, simple_loss=0.2301, pruned_loss=0.04148, over 4823.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2073, pruned_loss=0.02903, over 973072.68 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 13:00:59,442 INFO [train.py:715] (6/8) Epoch 18, batch 34450, loss[loss=0.135, simple_loss=0.2052, pruned_loss=0.03239, over 4897.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02903, over 973959.45 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 13:01:39,372 INFO [train.py:715] (6/8) Epoch 18, batch 34500, loss[loss=0.1516, simple_loss=0.2253, pruned_loss=0.03891, over 4900.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2077, pruned_loss=0.02899, over 972731.96 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 13:02:18,897 INFO [train.py:715] (6/8) Epoch 18, batch 34550, loss[loss=0.1437, simple_loss=0.2172, pruned_loss=0.03511, over 4766.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2077, pruned_loss=0.02885, over 972683.45 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 13:02:58,574 INFO [train.py:715] (6/8) Epoch 18, batch 34600, loss[loss=0.1383, simple_loss=0.2195, pruned_loss=0.02858, over 4909.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2073, pruned_loss=0.02895, over 973393.89 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 13:03:37,773 INFO [train.py:715] (6/8) Epoch 18, batch 34650, loss[loss=0.142, simple_loss=0.2126, pruned_loss=0.03571, over 4859.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02865, over 972943.16 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 13:04:17,388 INFO [train.py:715] (6/8) Epoch 18, batch 34700, loss[loss=0.1289, simple_loss=0.2058, pruned_loss=0.026, over 4798.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.02814, over 973128.98 frames.], batch size: 24, lr: 1.21e-04 +2022-05-09 13:04:56,521 INFO [train.py:715] (6/8) Epoch 18, batch 34750, loss[loss=0.1071, simple_loss=0.1893, pruned_loss=0.01239, over 4694.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02836, over 972575.77 frames.], batch size: 15, lr: 1.21e-04 +2022-05-09 13:05:34,145 INFO [train.py:715] (6/8) Epoch 18, batch 34800, loss[loss=0.1277, simple_loss=0.1988, pruned_loss=0.02833, over 4734.00 frames.], tot_loss[loss=0.1308, simple_loss=0.205, pruned_loss=0.02831, over 971294.35 frames.], batch size: 12, lr: 1.21e-04 +2022-05-09 13:06:24,924 INFO [train.py:715] (6/8) Epoch 19, batch 0, loss[loss=0.1224, simple_loss=0.2042, pruned_loss=0.0203, over 4900.00 frames.], tot_loss[loss=0.1224, simple_loss=0.2042, pruned_loss=0.0203, over 4900.00 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:07:03,502 INFO [train.py:715] (6/8) Epoch 19, batch 50, loss[loss=0.1241, simple_loss=0.202, pruned_loss=0.02308, over 4849.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2065, pruned_loss=0.02815, over 219701.72 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 13:07:43,523 INFO [train.py:715] (6/8) Epoch 19, batch 100, loss[loss=0.1439, simple_loss=0.2143, pruned_loss=0.03677, over 4959.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2065, pruned_loss=0.02842, over 387061.24 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:08:23,952 
INFO [train.py:715] (6/8) Epoch 19, batch 150, loss[loss=0.1426, simple_loss=0.2086, pruned_loss=0.03829, over 4782.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2055, pruned_loss=0.02794, over 516143.01 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 13:09:04,185 INFO [train.py:715] (6/8) Epoch 19, batch 200, loss[loss=0.1064, simple_loss=0.1847, pruned_loss=0.01409, over 4875.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2051, pruned_loss=0.02766, over 618033.27 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:09:44,079 INFO [train.py:715] (6/8) Epoch 19, batch 250, loss[loss=0.1284, simple_loss=0.2126, pruned_loss=0.02213, over 4934.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2054, pruned_loss=0.02765, over 696554.08 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 13:10:24,217 INFO [train.py:715] (6/8) Epoch 19, batch 300, loss[loss=0.1229, simple_loss=0.1941, pruned_loss=0.02579, over 4980.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2063, pruned_loss=0.02807, over 756893.92 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:11:04,673 INFO [train.py:715] (6/8) Epoch 19, batch 350, loss[loss=0.1302, simple_loss=0.201, pruned_loss=0.0297, over 4904.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2063, pruned_loss=0.0281, over 804754.52 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:11:43,723 INFO [train.py:715] (6/8) Epoch 19, batch 400, loss[loss=0.1288, simple_loss=0.201, pruned_loss=0.02829, over 4757.00 frames.], tot_loss[loss=0.1309, simple_loss=0.206, pruned_loss=0.02795, over 841458.08 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:12:24,050 INFO [train.py:715] (6/8) Epoch 19, batch 450, loss[loss=0.1114, simple_loss=0.1836, pruned_loss=0.01963, over 4872.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2069, pruned_loss=0.02836, over 868881.66 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:13:04,621 INFO [train.py:715] (6/8) Epoch 19, batch 500, loss[loss=0.1476, simple_loss=0.2265, pruned_loss=0.03438, over 4875.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02856, over 891668.72 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 13:13:44,282 INFO [train.py:715] (6/8) Epoch 19, batch 550, loss[loss=0.1277, simple_loss=0.2061, pruned_loss=0.02467, over 4790.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02857, over 909207.45 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:14:24,238 INFO [train.py:715] (6/8) Epoch 19, batch 600, loss[loss=0.1709, simple_loss=0.228, pruned_loss=0.05694, over 4776.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02918, over 923570.92 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:15:04,546 INFO [train.py:715] (6/8) Epoch 19, batch 650, loss[loss=0.1311, simple_loss=0.2075, pruned_loss=0.02734, over 4964.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02926, over 935186.14 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:15:44,877 INFO [train.py:715] (6/8) Epoch 19, batch 700, loss[loss=0.1457, simple_loss=0.2165, pruned_loss=0.03743, over 4884.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02929, over 944298.10 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:16:24,134 INFO [train.py:715] (6/8) Epoch 19, batch 750, loss[loss=0.1557, simple_loss=0.2339, pruned_loss=0.03874, over 4977.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02876, over 950636.36 frames.], batch size: 40, lr: 1.18e-04 +2022-05-09 13:17:03,939 INFO [train.py:715] (6/8) Epoch 19, 
batch 800, loss[loss=0.1677, simple_loss=0.2274, pruned_loss=0.054, over 4853.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02889, over 955038.18 frames.], batch size: 30, lr: 1.18e-04 +2022-05-09 13:17:44,200 INFO [train.py:715] (6/8) Epoch 19, batch 850, loss[loss=0.1259, simple_loss=0.1972, pruned_loss=0.02736, over 4746.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02867, over 957993.47 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 13:18:24,386 INFO [train.py:715] (6/8) Epoch 19, batch 900, loss[loss=0.1176, simple_loss=0.1916, pruned_loss=0.02183, over 4853.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.02796, over 960751.67 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 13:19:03,907 INFO [train.py:715] (6/8) Epoch 19, batch 950, loss[loss=0.1286, simple_loss=0.2003, pruned_loss=0.02842, over 4849.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2047, pruned_loss=0.02741, over 962081.85 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 13:19:43,258 INFO [train.py:715] (6/8) Epoch 19, batch 1000, loss[loss=0.1199, simple_loss=0.2017, pruned_loss=0.01905, over 4939.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2051, pruned_loss=0.02792, over 963970.90 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 13:20:23,195 INFO [train.py:715] (6/8) Epoch 19, batch 1050, loss[loss=0.1693, simple_loss=0.2431, pruned_loss=0.04775, over 4952.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02835, over 965921.52 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:21:02,193 INFO [train.py:715] (6/8) Epoch 19, batch 1100, loss[loss=0.1198, simple_loss=0.1998, pruned_loss=0.0199, over 4798.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.0285, over 966977.70 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 13:21:42,016 INFO [train.py:715] (6/8) Epoch 19, batch 1150, loss[loss=0.1102, simple_loss=0.1858, pruned_loss=0.01727, over 4986.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02829, over 968195.93 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:22:21,975 INFO [train.py:715] (6/8) Epoch 19, batch 1200, loss[loss=0.1115, simple_loss=0.1965, pruned_loss=0.01322, over 4973.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02842, over 969560.99 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 13:23:01,715 INFO [train.py:715] (6/8) Epoch 19, batch 1250, loss[loss=0.1453, simple_loss=0.2122, pruned_loss=0.03915, over 4880.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02855, over 970087.64 frames.], batch size: 30, lr: 1.18e-04 +2022-05-09 13:23:41,059 INFO [train.py:715] (6/8) Epoch 19, batch 1300, loss[loss=0.1273, simple_loss=0.21, pruned_loss=0.02227, over 4890.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02859, over 971503.50 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 13:24:20,597 INFO [train.py:715] (6/8) Epoch 19, batch 1350, loss[loss=0.1235, simple_loss=0.191, pruned_loss=0.02797, over 4986.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.0287, over 972034.05 frames.], batch size: 28, lr: 1.18e-04 +2022-05-09 13:25:00,622 INFO [train.py:715] (6/8) Epoch 19, batch 1400, loss[loss=0.1329, simple_loss=0.2037, pruned_loss=0.03109, over 4649.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02844, over 971935.55 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 13:25:39,918 INFO [train.py:715] (6/8) Epoch 19, batch 1450, loss[loss=0.1328, 
simple_loss=0.2119, pruned_loss=0.02683, over 4905.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02865, over 972332.42 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:26:20,229 INFO [train.py:715] (6/8) Epoch 19, batch 1500, loss[loss=0.1149, simple_loss=0.1946, pruned_loss=0.01758, over 4808.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02837, over 971666.06 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 13:27:00,282 INFO [train.py:715] (6/8) Epoch 19, batch 1550, loss[loss=0.1313, simple_loss=0.2032, pruned_loss=0.02975, over 4987.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.02839, over 972325.67 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 13:27:40,368 INFO [train.py:715] (6/8) Epoch 19, batch 1600, loss[loss=0.1185, simple_loss=0.1898, pruned_loss=0.02362, over 4864.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02826, over 972836.04 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 13:28:19,707 INFO [train.py:715] (6/8) Epoch 19, batch 1650, loss[loss=0.1461, simple_loss=0.2128, pruned_loss=0.03967, over 4976.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.0281, over 971542.48 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:28:59,074 INFO [train.py:715] (6/8) Epoch 19, batch 1700, loss[loss=0.09748, simple_loss=0.161, pruned_loss=0.01699, over 4769.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02804, over 971951.69 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 13:29:39,056 INFO [train.py:715] (6/8) Epoch 19, batch 1750, loss[loss=0.1338, simple_loss=0.2165, pruned_loss=0.02557, over 4763.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02813, over 971930.42 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 13:30:18,172 INFO [train.py:715] (6/8) Epoch 19, batch 1800, loss[loss=0.13, simple_loss=0.1992, pruned_loss=0.03039, over 4987.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2054, pruned_loss=0.02794, over 971016.69 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 13:30:57,614 INFO [train.py:715] (6/8) Epoch 19, batch 1850, loss[loss=0.1448, simple_loss=0.2132, pruned_loss=0.03824, over 4908.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02861, over 971012.62 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 13:31:36,859 INFO [train.py:715] (6/8) Epoch 19, batch 1900, loss[loss=0.1226, simple_loss=0.1885, pruned_loss=0.02835, over 4848.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02874, over 971218.09 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 13:32:16,779 INFO [train.py:715] (6/8) Epoch 19, batch 1950, loss[loss=0.1124, simple_loss=0.186, pruned_loss=0.0194, over 4831.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02856, over 970944.79 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 13:32:55,077 INFO [train.py:715] (6/8) Epoch 19, batch 2000, loss[loss=0.1353, simple_loss=0.2078, pruned_loss=0.03138, over 4703.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02856, over 971021.50 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:33:34,215 INFO [train.py:715] (6/8) Epoch 19, batch 2050, loss[loss=0.115, simple_loss=0.1924, pruned_loss=0.0188, over 4971.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.0283, over 971164.22 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:34:13,316 INFO [train.py:715] (6/8) Epoch 19, batch 2100, loss[loss=0.1302, simple_loss=0.2059, 
pruned_loss=0.02723, over 4923.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02837, over 971148.54 frames.], batch size: 39, lr: 1.18e-04 +2022-05-09 13:34:52,134 INFO [train.py:715] (6/8) Epoch 19, batch 2150, loss[loss=0.1308, simple_loss=0.2131, pruned_loss=0.02426, over 4955.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02872, over 971984.20 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 13:35:31,127 INFO [train.py:715] (6/8) Epoch 19, batch 2200, loss[loss=0.1301, simple_loss=0.1918, pruned_loss=0.03417, over 4809.00 frames.], tot_loss[loss=0.132, simple_loss=0.2069, pruned_loss=0.02861, over 971733.93 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 13:36:09,824 INFO [train.py:715] (6/8) Epoch 19, batch 2250, loss[loss=0.162, simple_loss=0.2419, pruned_loss=0.04107, over 4917.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02867, over 971515.62 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:36:49,417 INFO [train.py:715] (6/8) Epoch 19, batch 2300, loss[loss=0.1409, simple_loss=0.2154, pruned_loss=0.03319, over 4983.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02841, over 972187.49 frames.], batch size: 35, lr: 1.18e-04 +2022-05-09 13:37:28,011 INFO [train.py:715] (6/8) Epoch 19, batch 2350, loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.03151, over 4868.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02832, over 972903.60 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 13:38:07,174 INFO [train.py:715] (6/8) Epoch 19, batch 2400, loss[loss=0.1248, simple_loss=0.2012, pruned_loss=0.02423, over 4974.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02845, over 972201.68 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 13:38:46,614 INFO [train.py:715] (6/8) Epoch 19, batch 2450, loss[loss=0.1307, simple_loss=0.212, pruned_loss=0.02475, over 4775.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02873, over 972193.87 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:39:25,451 INFO [train.py:715] (6/8) Epoch 19, batch 2500, loss[loss=0.1297, simple_loss=0.1955, pruned_loss=0.03196, over 4743.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02887, over 972104.18 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:40:04,475 INFO [train.py:715] (6/8) Epoch 19, batch 2550, loss[loss=0.1146, simple_loss=0.2009, pruned_loss=0.01419, over 4832.00 frames.], tot_loss[loss=0.1312, simple_loss=0.205, pruned_loss=0.02873, over 972162.10 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 13:40:44,012 INFO [train.py:715] (6/8) Epoch 19, batch 2600, loss[loss=0.1176, simple_loss=0.1888, pruned_loss=0.02316, over 4968.00 frames.], tot_loss[loss=0.131, simple_loss=0.2049, pruned_loss=0.0286, over 973005.40 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 13:41:26,477 INFO [train.py:715] (6/8) Epoch 19, batch 2650, loss[loss=0.1588, simple_loss=0.2499, pruned_loss=0.03388, over 4961.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2058, pruned_loss=0.02886, over 973649.97 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:42:05,379 INFO [train.py:715] (6/8) Epoch 19, batch 2700, loss[loss=0.1199, simple_loss=0.1897, pruned_loss=0.02511, over 4741.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02895, over 973292.96 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:42:44,053 INFO [train.py:715] (6/8) Epoch 19, batch 2750, loss[loss=0.1361, simple_loss=0.2157, pruned_loss=0.02825, over 
4879.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02886, over 973566.62 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:43:23,806 INFO [train.py:715] (6/8) Epoch 19, batch 2800, loss[loss=0.126, simple_loss=0.2047, pruned_loss=0.0236, over 4874.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02884, over 973726.21 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 13:44:03,075 INFO [train.py:715] (6/8) Epoch 19, batch 2850, loss[loss=0.1612, simple_loss=0.2213, pruned_loss=0.05053, over 4980.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2068, pruned_loss=0.02844, over 972939.39 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 13:44:42,006 INFO [train.py:715] (6/8) Epoch 19, batch 2900, loss[loss=0.1151, simple_loss=0.1921, pruned_loss=0.01905, over 4746.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2069, pruned_loss=0.02834, over 972476.17 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:45:20,762 INFO [train.py:715] (6/8) Epoch 19, batch 2950, loss[loss=0.129, simple_loss=0.2114, pruned_loss=0.02329, over 4844.00 frames.], tot_loss[loss=0.131, simple_loss=0.206, pruned_loss=0.02804, over 971649.89 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:46:00,074 INFO [train.py:715] (6/8) Epoch 19, batch 3000, loss[loss=0.141, simple_loss=0.2247, pruned_loss=0.02864, over 4942.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2059, pruned_loss=0.0282, over 971052.12 frames.], batch size: 39, lr: 1.18e-04 +2022-05-09 13:46:00,075 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 13:46:10,051 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1045, simple_loss=0.1877, pruned_loss=0.01062, over 914524.00 frames. +2022-05-09 13:46:50,340 INFO [train.py:715] (6/8) Epoch 19, batch 3050, loss[loss=0.1338, simple_loss=0.2072, pruned_loss=0.0302, over 4742.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2054, pruned_loss=0.02808, over 971381.87 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:47:29,686 INFO [train.py:715] (6/8) Epoch 19, batch 3100, loss[loss=0.1192, simple_loss=0.1968, pruned_loss=0.02081, over 4823.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2053, pruned_loss=0.02841, over 972079.20 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 13:48:08,833 INFO [train.py:715] (6/8) Epoch 19, batch 3150, loss[loss=0.1235, simple_loss=0.1977, pruned_loss=0.02469, over 4899.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2071, pruned_loss=0.02878, over 971703.99 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 13:48:48,672 INFO [train.py:715] (6/8) Epoch 19, batch 3200, loss[loss=0.1509, simple_loss=0.2235, pruned_loss=0.03914, over 4929.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2072, pruned_loss=0.02867, over 970773.79 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 13:49:27,692 INFO [train.py:715] (6/8) Epoch 19, batch 3250, loss[loss=0.153, simple_loss=0.2261, pruned_loss=0.03999, over 4904.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2068, pruned_loss=0.02824, over 971818.20 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:50:07,134 INFO [train.py:715] (6/8) Epoch 19, batch 3300, loss[loss=0.1427, simple_loss=0.202, pruned_loss=0.0417, over 4700.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.0287, over 971815.55 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:50:46,364 INFO [train.py:715] (6/8) Epoch 19, batch 3350, loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.031, over 4887.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, 
pruned_loss=0.02873, over 971354.95 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 13:51:26,507 INFO [train.py:715] (6/8) Epoch 19, batch 3400, loss[loss=0.1406, simple_loss=0.2174, pruned_loss=0.03189, over 4935.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2065, pruned_loss=0.0283, over 971855.31 frames.], batch size: 35, lr: 1.18e-04 +2022-05-09 13:52:05,362 INFO [train.py:715] (6/8) Epoch 19, batch 3450, loss[loss=0.1187, simple_loss=0.1922, pruned_loss=0.0226, over 4840.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02836, over 972178.33 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 13:52:44,617 INFO [train.py:715] (6/8) Epoch 19, batch 3500, loss[loss=0.1163, simple_loss=0.1966, pruned_loss=0.01802, over 4926.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02871, over 972736.06 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 13:53:23,736 INFO [train.py:715] (6/8) Epoch 19, batch 3550, loss[loss=0.1384, simple_loss=0.2156, pruned_loss=0.0306, over 4891.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02882, over 972584.17 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:54:02,623 INFO [train.py:715] (6/8) Epoch 19, batch 3600, loss[loss=0.1443, simple_loss=0.2131, pruned_loss=0.03772, over 4922.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2058, pruned_loss=0.02901, over 972735.22 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 13:54:42,253 INFO [train.py:715] (6/8) Epoch 19, batch 3650, loss[loss=0.1168, simple_loss=0.1872, pruned_loss=0.02321, over 4909.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2052, pruned_loss=0.02879, over 972756.83 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 13:55:21,398 INFO [train.py:715] (6/8) Epoch 19, batch 3700, loss[loss=0.1399, simple_loss=0.2164, pruned_loss=0.0317, over 4819.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2051, pruned_loss=0.02874, over 971544.14 frames.], batch size: 27, lr: 1.18e-04 +2022-05-09 13:56:01,871 INFO [train.py:715] (6/8) Epoch 19, batch 3750, loss[loss=0.1303, simple_loss=0.2116, pruned_loss=0.02457, over 4860.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2047, pruned_loss=0.02873, over 971870.49 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 13:56:40,837 INFO [train.py:715] (6/8) Epoch 19, batch 3800, loss[loss=0.1291, simple_loss=0.2028, pruned_loss=0.02764, over 4947.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2054, pruned_loss=0.02863, over 972353.70 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 13:57:19,818 INFO [train.py:715] (6/8) Epoch 19, batch 3850, loss[loss=0.1185, simple_loss=0.2016, pruned_loss=0.01771, over 4931.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2054, pruned_loss=0.02852, over 972242.14 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 13:57:59,512 INFO [train.py:715] (6/8) Epoch 19, batch 3900, loss[loss=0.1171, simple_loss=0.1908, pruned_loss=0.02173, over 4951.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.029, over 971857.10 frames.], batch size: 35, lr: 1.18e-04 +2022-05-09 13:58:38,562 INFO [train.py:715] (6/8) Epoch 19, batch 3950, loss[loss=0.1336, simple_loss=0.2134, pruned_loss=0.02689, over 4848.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02863, over 972389.72 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 13:59:17,197 INFO [train.py:715] (6/8) Epoch 19, batch 4000, loss[loss=0.1247, simple_loss=0.1993, pruned_loss=0.02502, over 4930.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02899, over 
973081.55 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 13:59:56,647 INFO [train.py:715] (6/8) Epoch 19, batch 4050, loss[loss=0.1565, simple_loss=0.2362, pruned_loss=0.03844, over 4968.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.0288, over 972489.70 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:00:36,779 INFO [train.py:715] (6/8) Epoch 19, batch 4100, loss[loss=0.1339, simple_loss=0.1933, pruned_loss=0.03727, over 4867.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02866, over 972549.80 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 14:01:15,969 INFO [train.py:715] (6/8) Epoch 19, batch 4150, loss[loss=0.1378, simple_loss=0.2161, pruned_loss=0.02971, over 4916.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2055, pruned_loss=0.02812, over 972392.20 frames.], batch size: 39, lr: 1.18e-04 +2022-05-09 14:01:54,741 INFO [train.py:715] (6/8) Epoch 19, batch 4200, loss[loss=0.1121, simple_loss=0.1899, pruned_loss=0.01718, over 4805.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02842, over 971949.26 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 14:02:33,999 INFO [train.py:715] (6/8) Epoch 19, batch 4250, loss[loss=0.138, simple_loss=0.2248, pruned_loss=0.02555, over 4900.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02857, over 972151.79 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:03:13,066 INFO [train.py:715] (6/8) Epoch 19, batch 4300, loss[loss=0.1166, simple_loss=0.1977, pruned_loss=0.01779, over 4935.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2064, pruned_loss=0.02819, over 971289.25 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 14:03:52,549 INFO [train.py:715] (6/8) Epoch 19, batch 4350, loss[loss=0.1271, simple_loss=0.1988, pruned_loss=0.02769, over 4836.00 frames.], tot_loss[loss=0.131, simple_loss=0.206, pruned_loss=0.02795, over 970699.48 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 14:04:31,618 INFO [train.py:715] (6/8) Epoch 19, batch 4400, loss[loss=0.1424, simple_loss=0.2167, pruned_loss=0.03402, over 4765.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2063, pruned_loss=0.02826, over 970388.50 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:05:11,664 INFO [train.py:715] (6/8) Epoch 19, batch 4450, loss[loss=0.1353, simple_loss=0.2125, pruned_loss=0.02902, over 4929.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.02847, over 970759.52 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 14:05:50,511 INFO [train.py:715] (6/8) Epoch 19, batch 4500, loss[loss=0.1512, simple_loss=0.2181, pruned_loss=0.04215, over 4971.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02822, over 971227.40 frames.], batch size: 35, lr: 1.18e-04 +2022-05-09 14:06:29,205 INFO [train.py:715] (6/8) Epoch 19, batch 4550, loss[loss=0.1042, simple_loss=0.184, pruned_loss=0.01215, over 4981.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02831, over 971912.74 frames.], batch size: 28, lr: 1.18e-04 +2022-05-09 14:07:08,893 INFO [train.py:715] (6/8) Epoch 19, batch 4600, loss[loss=0.1085, simple_loss=0.1871, pruned_loss=0.01499, over 4780.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02813, over 972291.12 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 14:07:48,141 INFO [train.py:715] (6/8) Epoch 19, batch 4650, loss[loss=0.1294, simple_loss=0.2153, pruned_loss=0.02178, over 4788.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.02853, over 972061.79 frames.], 
batch size: 21, lr: 1.18e-04 +2022-05-09 14:08:27,121 INFO [train.py:715] (6/8) Epoch 19, batch 4700, loss[loss=0.1278, simple_loss=0.206, pruned_loss=0.02482, over 4948.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2058, pruned_loss=0.02793, over 972761.68 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 14:09:06,338 INFO [train.py:715] (6/8) Epoch 19, batch 4750, loss[loss=0.1477, simple_loss=0.2126, pruned_loss=0.04136, over 4837.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2061, pruned_loss=0.0282, over 972711.12 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:09:46,298 INFO [train.py:715] (6/8) Epoch 19, batch 4800, loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02894, over 4971.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02843, over 972248.15 frames.], batch size: 28, lr: 1.18e-04 +2022-05-09 14:10:25,679 INFO [train.py:715] (6/8) Epoch 19, batch 4850, loss[loss=0.1372, simple_loss=0.1983, pruned_loss=0.03803, over 4900.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.0281, over 972392.92 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 14:11:04,339 INFO [train.py:715] (6/8) Epoch 19, batch 4900, loss[loss=0.1517, simple_loss=0.2235, pruned_loss=0.03996, over 4803.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2056, pruned_loss=0.02803, over 971940.69 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 14:11:44,098 INFO [train.py:715] (6/8) Epoch 19, batch 4950, loss[loss=0.1655, simple_loss=0.2338, pruned_loss=0.04858, over 4774.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2054, pruned_loss=0.02775, over 972361.14 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 14:12:23,741 INFO [train.py:715] (6/8) Epoch 19, batch 5000, loss[loss=0.1483, simple_loss=0.2136, pruned_loss=0.04147, over 4946.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2052, pruned_loss=0.02798, over 972155.33 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:13:02,753 INFO [train.py:715] (6/8) Epoch 19, batch 5050, loss[loss=0.1263, simple_loss=0.2126, pruned_loss=0.02004, over 4936.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02856, over 971881.23 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 14:13:41,117 INFO [train.py:715] (6/8) Epoch 19, batch 5100, loss[loss=0.1238, simple_loss=0.1984, pruned_loss=0.02466, over 4918.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.0286, over 971963.68 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:14:21,162 INFO [train.py:715] (6/8) Epoch 19, batch 5150, loss[loss=0.1185, simple_loss=0.2046, pruned_loss=0.01617, over 4901.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2064, pruned_loss=0.02835, over 972345.60 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:15:00,192 INFO [train.py:715] (6/8) Epoch 19, batch 5200, loss[loss=0.1353, simple_loss=0.2098, pruned_loss=0.03041, over 4963.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2065, pruned_loss=0.02846, over 973514.67 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:15:38,858 INFO [train.py:715] (6/8) Epoch 19, batch 5250, loss[loss=0.1782, simple_loss=0.2477, pruned_loss=0.05438, over 4821.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02891, over 973263.67 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:16:18,537 INFO [train.py:715] (6/8) Epoch 19, batch 5300, loss[loss=0.1055, simple_loss=0.1809, pruned_loss=0.01506, over 4793.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02884, over 971936.92 frames.], batch size: 17, lr: 
1.18e-04 +2022-05-09 14:16:58,488 INFO [train.py:715] (6/8) Epoch 19, batch 5350, loss[loss=0.1337, simple_loss=0.2147, pruned_loss=0.02634, over 4706.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02899, over 971033.67 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:17:38,581 INFO [train.py:715] (6/8) Epoch 19, batch 5400, loss[loss=0.1441, simple_loss=0.2125, pruned_loss=0.03789, over 4914.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2071, pruned_loss=0.02868, over 971515.86 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 14:18:17,828 INFO [train.py:715] (6/8) Epoch 19, batch 5450, loss[loss=0.1232, simple_loss=0.1972, pruned_loss=0.02462, over 4840.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02876, over 971809.53 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:18:58,019 INFO [train.py:715] (6/8) Epoch 19, batch 5500, loss[loss=0.1271, simple_loss=0.197, pruned_loss=0.02856, over 4780.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02848, over 971287.40 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:19:37,205 INFO [train.py:715] (6/8) Epoch 19, batch 5550, loss[loss=0.1297, simple_loss=0.2042, pruned_loss=0.02763, over 4925.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2062, pruned_loss=0.02811, over 972587.37 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 14:20:16,795 INFO [train.py:715] (6/8) Epoch 19, batch 5600, loss[loss=0.1304, simple_loss=0.1933, pruned_loss=0.03373, over 4851.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02852, over 972582.87 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 14:20:56,103 INFO [train.py:715] (6/8) Epoch 19, batch 5650, loss[loss=0.1176, simple_loss=0.1924, pruned_loss=0.02141, over 4829.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.02849, over 972542.94 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 14:21:35,832 INFO [train.py:715] (6/8) Epoch 19, batch 5700, loss[loss=0.1244, simple_loss=0.197, pruned_loss=0.02584, over 4740.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02901, over 973576.28 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:22:15,337 INFO [train.py:715] (6/8) Epoch 19, batch 5750, loss[loss=0.1232, simple_loss=0.1954, pruned_loss=0.02548, over 4891.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2057, pruned_loss=0.02893, over 973551.86 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:22:53,921 INFO [train.py:715] (6/8) Epoch 19, batch 5800, loss[loss=0.1209, simple_loss=0.1867, pruned_loss=0.02753, over 4855.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.029, over 972550.47 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 14:23:33,194 INFO [train.py:715] (6/8) Epoch 19, batch 5850, loss[loss=0.1391, simple_loss=0.2051, pruned_loss=0.03656, over 4782.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02884, over 972654.17 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:24:11,661 INFO [train.py:715] (6/8) Epoch 19, batch 5900, loss[loss=0.1331, simple_loss=0.2127, pruned_loss=0.02676, over 4957.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2061, pruned_loss=0.02902, over 972844.60 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 14:24:51,081 INFO [train.py:715] (6/8) Epoch 19, batch 5950, loss[loss=0.1351, simple_loss=0.1964, pruned_loss=0.03684, over 4888.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2055, pruned_loss=0.02867, over 972303.16 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 
14:25:30,302 INFO [train.py:715] (6/8) Epoch 19, batch 6000, loss[loss=0.1317, simple_loss=0.2121, pruned_loss=0.02568, over 4910.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.0285, over 972744.80 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:25:30,302 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 14:25:40,197 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1046, simple_loss=0.1878, pruned_loss=0.01067, over 914524.00 frames. +2022-05-09 14:26:19,492 INFO [train.py:715] (6/8) Epoch 19, batch 6050, loss[loss=0.1603, simple_loss=0.2319, pruned_loss=0.04428, over 4898.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2056, pruned_loss=0.02877, over 972608.12 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 14:26:58,350 INFO [train.py:715] (6/8) Epoch 19, batch 6100, loss[loss=0.1299, simple_loss=0.2017, pruned_loss=0.02908, over 4766.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2054, pruned_loss=0.02863, over 972479.07 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 14:27:37,411 INFO [train.py:715] (6/8) Epoch 19, batch 6150, loss[loss=0.1242, simple_loss=0.2064, pruned_loss=0.02102, over 4770.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02836, over 972974.47 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:28:15,612 INFO [train.py:715] (6/8) Epoch 19, batch 6200, loss[loss=0.1387, simple_loss=0.2128, pruned_loss=0.03225, over 4965.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02919, over 973661.31 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:28:55,855 INFO [train.py:715] (6/8) Epoch 19, batch 6250, loss[loss=0.1085, simple_loss=0.1844, pruned_loss=0.01626, over 4813.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02905, over 973964.41 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 14:29:35,043 INFO [train.py:715] (6/8) Epoch 19, batch 6300, loss[loss=0.1248, simple_loss=0.2012, pruned_loss=0.02417, over 4892.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02911, over 973245.89 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 14:30:14,725 INFO [train.py:715] (6/8) Epoch 19, batch 6350, loss[loss=0.1247, simple_loss=0.2034, pruned_loss=0.02304, over 4945.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02903, over 973006.62 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 14:30:54,203 INFO [train.py:715] (6/8) Epoch 19, batch 6400, loss[loss=0.1216, simple_loss=0.1997, pruned_loss=0.0217, over 4930.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02922, over 972436.54 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 14:31:33,494 INFO [train.py:715] (6/8) Epoch 19, batch 6450, loss[loss=0.1278, simple_loss=0.2009, pruned_loss=0.02735, over 4928.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02943, over 972823.17 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 14:32:12,985 INFO [train.py:715] (6/8) Epoch 19, batch 6500, loss[loss=0.1168, simple_loss=0.1988, pruned_loss=0.01739, over 4805.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02885, over 972639.69 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 14:32:51,557 INFO [train.py:715] (6/8) Epoch 19, batch 6550, loss[loss=0.1051, simple_loss=0.1802, pruned_loss=0.01499, over 4889.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.02865, over 973053.63 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 14:33:31,045 INFO [train.py:715] (6/8) Epoch 19, batch 6600, 
loss[loss=0.1349, simple_loss=0.2135, pruned_loss=0.02815, over 4858.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02911, over 972424.50 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 14:34:10,196 INFO [train.py:715] (6/8) Epoch 19, batch 6650, loss[loss=0.1317, simple_loss=0.2078, pruned_loss=0.02781, over 4912.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2054, pruned_loss=0.02867, over 972319.39 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:34:48,939 INFO [train.py:715] (6/8) Epoch 19, batch 6700, loss[loss=0.1274, simple_loss=0.1981, pruned_loss=0.02838, over 4700.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2052, pruned_loss=0.0288, over 971669.71 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:35:28,068 INFO [train.py:715] (6/8) Epoch 19, batch 6750, loss[loss=0.1255, simple_loss=0.203, pruned_loss=0.02397, over 4694.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2052, pruned_loss=0.02869, over 972084.08 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:36:07,538 INFO [train.py:715] (6/8) Epoch 19, batch 6800, loss[loss=0.1341, simple_loss=0.2161, pruned_loss=0.02605, over 4751.00 frames.], tot_loss[loss=0.131, simple_loss=0.2048, pruned_loss=0.02859, over 972533.64 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:36:46,932 INFO [train.py:715] (6/8) Epoch 19, batch 6850, loss[loss=0.1394, simple_loss=0.221, pruned_loss=0.02892, over 4865.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02907, over 973025.69 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 14:37:25,092 INFO [train.py:715] (6/8) Epoch 19, batch 6900, loss[loss=0.1321, simple_loss=0.2143, pruned_loss=0.02491, over 4760.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02872, over 973143.99 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:38:04,137 INFO [train.py:715] (6/8) Epoch 19, batch 6950, loss[loss=0.1331, simple_loss=0.212, pruned_loss=0.02705, over 4776.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02878, over 973094.62 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:38:43,603 INFO [train.py:715] (6/8) Epoch 19, batch 7000, loss[loss=0.1231, simple_loss=0.1997, pruned_loss=0.02332, over 4845.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02908, over 973299.51 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 14:39:22,854 INFO [train.py:715] (6/8) Epoch 19, batch 7050, loss[loss=0.1223, simple_loss=0.1997, pruned_loss=0.02248, over 4919.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2057, pruned_loss=0.0286, over 973103.97 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:40:02,435 INFO [train.py:715] (6/8) Epoch 19, batch 7100, loss[loss=0.1219, simple_loss=0.1952, pruned_loss=0.02429, over 4689.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02877, over 973047.20 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:40:42,074 INFO [train.py:715] (6/8) Epoch 19, batch 7150, loss[loss=0.1623, simple_loss=0.2377, pruned_loss=0.04349, over 4915.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02872, over 972831.90 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:41:20,982 INFO [train.py:715] (6/8) Epoch 19, batch 7200, loss[loss=0.1482, simple_loss=0.2253, pruned_loss=0.03555, over 4970.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02866, over 972069.12 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:41:59,736 INFO [train.py:715] (6/8) Epoch 19, batch 7250, loss[loss=0.1198, 
simple_loss=0.1879, pruned_loss=0.02588, over 4833.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02841, over 972599.85 frames.], batch size: 27, lr: 1.18e-04 +2022-05-09 14:42:39,100 INFO [train.py:715] (6/8) Epoch 19, batch 7300, loss[loss=0.1174, simple_loss=0.1928, pruned_loss=0.02101, over 4980.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02868, over 973072.81 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:43:18,262 INFO [train.py:715] (6/8) Epoch 19, batch 7350, loss[loss=0.09533, simple_loss=0.1639, pruned_loss=0.01338, over 4755.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02824, over 972018.96 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 14:43:57,168 INFO [train.py:715] (6/8) Epoch 19, batch 7400, loss[loss=0.12, simple_loss=0.191, pruned_loss=0.02453, over 4764.00 frames.], tot_loss[loss=0.131, simple_loss=0.2059, pruned_loss=0.02801, over 972362.20 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:44:37,628 INFO [train.py:715] (6/8) Epoch 19, batch 7450, loss[loss=0.1612, simple_loss=0.2343, pruned_loss=0.04402, over 4893.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2065, pruned_loss=0.02844, over 972438.92 frames.], batch size: 39, lr: 1.18e-04 +2022-05-09 14:45:17,484 INFO [train.py:715] (6/8) Epoch 19, batch 7500, loss[loss=0.1109, simple_loss=0.181, pruned_loss=0.02043, over 4639.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02857, over 971595.12 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 14:45:56,704 INFO [train.py:715] (6/8) Epoch 19, batch 7550, loss[loss=0.134, simple_loss=0.2059, pruned_loss=0.03102, over 4758.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02919, over 971537.23 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:46:36,054 INFO [train.py:715] (6/8) Epoch 19, batch 7600, loss[loss=0.1279, simple_loss=0.2006, pruned_loss=0.02755, over 4856.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2076, pruned_loss=0.02909, over 971327.97 frames.], batch size: 34, lr: 1.18e-04 +2022-05-09 14:47:16,836 INFO [train.py:715] (6/8) Epoch 19, batch 7650, loss[loss=0.1512, simple_loss=0.2245, pruned_loss=0.03895, over 4943.00 frames.], tot_loss[loss=0.133, simple_loss=0.2076, pruned_loss=0.02921, over 972055.46 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 14:47:56,155 INFO [train.py:715] (6/8) Epoch 19, batch 7700, loss[loss=0.1855, simple_loss=0.2412, pruned_loss=0.06489, over 4881.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02971, over 972446.13 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:48:34,962 INFO [train.py:715] (6/8) Epoch 19, batch 7750, loss[loss=0.1283, simple_loss=0.2033, pruned_loss=0.02667, over 4879.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02942, over 972314.15 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 14:49:14,659 INFO [train.py:715] (6/8) Epoch 19, batch 7800, loss[loss=0.1272, simple_loss=0.2054, pruned_loss=0.02455, over 4742.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02915, over 971193.01 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:49:54,096 INFO [train.py:715] (6/8) Epoch 19, batch 7850, loss[loss=0.1107, simple_loss=0.186, pruned_loss=0.01769, over 4956.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02886, over 971956.26 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 14:50:33,362 INFO [train.py:715] (6/8) Epoch 19, batch 7900, loss[loss=0.1288, simple_loss=0.2007, 
pruned_loss=0.02848, over 4817.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02954, over 971245.24 frames.], batch size: 27, lr: 1.18e-04 +2022-05-09 14:51:11,764 INFO [train.py:715] (6/8) Epoch 19, batch 7950, loss[loss=0.1677, simple_loss=0.2343, pruned_loss=0.0506, over 4816.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2068, pruned_loss=0.02963, over 970885.09 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 14:51:51,093 INFO [train.py:715] (6/8) Epoch 19, batch 8000, loss[loss=0.1152, simple_loss=0.1869, pruned_loss=0.02171, over 4858.00 frames.], tot_loss[loss=0.1325, simple_loss=0.206, pruned_loss=0.02947, over 971078.79 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:52:30,293 INFO [train.py:715] (6/8) Epoch 19, batch 8050, loss[loss=0.1261, simple_loss=0.2022, pruned_loss=0.02503, over 4972.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2061, pruned_loss=0.02916, over 971791.90 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 14:53:08,825 INFO [train.py:715] (6/8) Epoch 19, batch 8100, loss[loss=0.1239, simple_loss=0.1878, pruned_loss=0.03001, over 4862.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02894, over 971198.79 frames.], batch size: 30, lr: 1.18e-04 +2022-05-09 14:53:48,279 INFO [train.py:715] (6/8) Epoch 19, batch 8150, loss[loss=0.1331, simple_loss=0.2083, pruned_loss=0.02897, over 4924.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02867, over 972512.76 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 14:54:27,924 INFO [train.py:715] (6/8) Epoch 19, batch 8200, loss[loss=0.1524, simple_loss=0.2298, pruned_loss=0.03749, over 4800.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.029, over 972435.24 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:55:06,909 INFO [train.py:715] (6/8) Epoch 19, batch 8250, loss[loss=0.1181, simple_loss=0.1887, pruned_loss=0.02372, over 4908.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02852, over 973133.82 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 14:55:45,535 INFO [train.py:715] (6/8) Epoch 19, batch 8300, loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03186, over 4813.00 frames.], tot_loss[loss=0.132, simple_loss=0.2068, pruned_loss=0.02858, over 973008.90 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:56:25,222 INFO [train.py:715] (6/8) Epoch 19, batch 8350, loss[loss=0.1297, simple_loss=0.1965, pruned_loss=0.03141, over 4960.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02883, over 972032.86 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:57:04,449 INFO [train.py:715] (6/8) Epoch 19, batch 8400, loss[loss=0.1637, simple_loss=0.2326, pruned_loss=0.04738, over 4952.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.02849, over 972782.45 frames.], batch size: 35, lr: 1.18e-04 +2022-05-09 14:57:43,452 INFO [train.py:715] (6/8) Epoch 19, batch 8450, loss[loss=0.1177, simple_loss=0.1918, pruned_loss=0.02176, over 4802.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02852, over 972743.42 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 14:58:23,235 INFO [train.py:715] (6/8) Epoch 19, batch 8500, loss[loss=0.1281, simple_loss=0.2008, pruned_loss=0.02767, over 4823.00 frames.], tot_loss[loss=0.1308, simple_loss=0.205, pruned_loss=0.02834, over 972006.78 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 14:59:01,926 INFO [train.py:715] (6/8) Epoch 19, batch 8550, loss[loss=0.1397, simple_loss=0.218, pruned_loss=0.03067, over 
4838.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2048, pruned_loss=0.0283, over 973183.48 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:59:41,020 INFO [train.py:715] (6/8) Epoch 19, batch 8600, loss[loss=0.1253, simple_loss=0.199, pruned_loss=0.02582, over 4930.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02836, over 972784.46 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 15:00:20,527 INFO [train.py:715] (6/8) Epoch 19, batch 8650, loss[loss=0.1188, simple_loss=0.1937, pruned_loss=0.02195, over 4783.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02836, over 972974.05 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 15:01:00,041 INFO [train.py:715] (6/8) Epoch 19, batch 8700, loss[loss=0.1254, simple_loss=0.2091, pruned_loss=0.02078, over 4786.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02841, over 972356.43 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 15:01:39,202 INFO [train.py:715] (6/8) Epoch 19, batch 8750, loss[loss=0.1354, simple_loss=0.2104, pruned_loss=0.0302, over 4905.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2052, pruned_loss=0.02858, over 971939.91 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 15:02:17,959 INFO [train.py:715] (6/8) Epoch 19, batch 8800, loss[loss=0.1252, simple_loss=0.1928, pruned_loss=0.0288, over 4983.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2051, pruned_loss=0.02821, over 972530.13 frames.], batch size: 33, lr: 1.18e-04 +2022-05-09 15:02:57,609 INFO [train.py:715] (6/8) Epoch 19, batch 8850, loss[loss=0.1242, simple_loss=0.2097, pruned_loss=0.01934, over 4814.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02828, over 972629.46 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 15:03:36,663 INFO [train.py:715] (6/8) Epoch 19, batch 8900, loss[loss=0.1357, simple_loss=0.2057, pruned_loss=0.03284, over 4783.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02868, over 972315.12 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 15:04:16,007 INFO [train.py:715] (6/8) Epoch 19, batch 8950, loss[loss=0.1408, simple_loss=0.2118, pruned_loss=0.0349, over 4825.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02872, over 971947.36 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 15:04:54,903 INFO [train.py:715] (6/8) Epoch 19, batch 9000, loss[loss=0.1137, simple_loss=0.1945, pruned_loss=0.01648, over 4802.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2055, pruned_loss=0.02871, over 971472.66 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 15:04:54,904 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 15:05:04,819 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1047, simple_loss=0.1879, pruned_loss=0.01072, over 914524.00 frames. 
+2022-05-09 15:05:44,272 INFO [train.py:715] (6/8) Epoch 19, batch 9050, loss[loss=0.09461, simple_loss=0.1632, pruned_loss=0.01303, over 4946.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.0292, over 972086.42 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 15:06:23,989 INFO [train.py:715] (6/8) Epoch 19, batch 9100, loss[loss=0.1215, simple_loss=0.1909, pruned_loss=0.02608, over 4819.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2065, pruned_loss=0.02951, over 971650.19 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 15:07:03,255 INFO [train.py:715] (6/8) Epoch 19, batch 9150, loss[loss=0.126, simple_loss=0.2075, pruned_loss=0.02228, over 4811.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2054, pruned_loss=0.02876, over 971521.65 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 15:07:42,034 INFO [train.py:715] (6/8) Epoch 19, batch 9200, loss[loss=0.1555, simple_loss=0.2298, pruned_loss=0.04061, over 4757.00 frames.], tot_loss[loss=0.131, simple_loss=0.2048, pruned_loss=0.02863, over 970523.61 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 15:08:21,758 INFO [train.py:715] (6/8) Epoch 19, batch 9250, loss[loss=0.125, simple_loss=0.1998, pruned_loss=0.02506, over 4914.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02904, over 971422.66 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 15:09:00,954 INFO [train.py:715] (6/8) Epoch 19, batch 9300, loss[loss=0.1291, simple_loss=0.1973, pruned_loss=0.03049, over 4909.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02897, over 970953.12 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 15:09:39,866 INFO [train.py:715] (6/8) Epoch 19, batch 9350, loss[loss=0.1348, simple_loss=0.2041, pruned_loss=0.03281, over 4875.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02895, over 970979.33 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 15:10:19,958 INFO [train.py:715] (6/8) Epoch 19, batch 9400, loss[loss=0.1428, simple_loss=0.2277, pruned_loss=0.02891, over 4805.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.02867, over 971768.38 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 15:11:00,062 INFO [train.py:715] (6/8) Epoch 19, batch 9450, loss[loss=0.1527, simple_loss=0.2226, pruned_loss=0.04143, over 4808.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2051, pruned_loss=0.02808, over 972113.70 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 15:11:38,884 INFO [train.py:715] (6/8) Epoch 19, batch 9500, loss[loss=0.117, simple_loss=0.1802, pruned_loss=0.02688, over 4799.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2041, pruned_loss=0.02781, over 972676.72 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 15:12:18,097 INFO [train.py:715] (6/8) Epoch 19, batch 9550, loss[loss=0.1229, simple_loss=0.2037, pruned_loss=0.02105, over 4802.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2045, pruned_loss=0.0279, over 972820.87 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 15:12:57,474 INFO [train.py:715] (6/8) Epoch 19, batch 9600, loss[loss=0.1319, simple_loss=0.2089, pruned_loss=0.02747, over 4909.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2049, pruned_loss=0.0279, over 972234.10 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 15:13:36,650 INFO [train.py:715] (6/8) Epoch 19, batch 9650, loss[loss=0.1262, simple_loss=0.2063, pruned_loss=0.02307, over 4925.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2049, pruned_loss=0.0281, over 972595.25 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 15:14:14,976 INFO 
[train.py:715] (6/8) Epoch 19, batch 9700, loss[loss=0.1237, simple_loss=0.2023, pruned_loss=0.02258, over 4899.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2051, pruned_loss=0.02793, over 972935.97 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 15:14:54,706 INFO [train.py:715] (6/8) Epoch 19, batch 9750, loss[loss=0.1155, simple_loss=0.1876, pruned_loss=0.02175, over 4976.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2054, pruned_loss=0.02803, over 972767.73 frames.], batch size: 35, lr: 1.18e-04 +2022-05-09 15:15:34,784 INFO [train.py:715] (6/8) Epoch 19, batch 9800, loss[loss=0.1197, simple_loss=0.1908, pruned_loss=0.0243, over 4660.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02856, over 973068.31 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 15:16:14,509 INFO [train.py:715] (6/8) Epoch 19, batch 9850, loss[loss=0.1359, simple_loss=0.219, pruned_loss=0.02643, over 4986.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02841, over 973853.25 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 15:16:53,384 INFO [train.py:715] (6/8) Epoch 19, batch 9900, loss[loss=0.1395, simple_loss=0.2107, pruned_loss=0.03418, over 4740.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.02799, over 973278.10 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 15:17:33,336 INFO [train.py:715] (6/8) Epoch 19, batch 9950, loss[loss=0.1149, simple_loss=0.1854, pruned_loss=0.02221, over 4813.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2049, pruned_loss=0.02744, over 973944.17 frames.], batch size: 27, lr: 1.18e-04 +2022-05-09 15:18:12,863 INFO [train.py:715] (6/8) Epoch 19, batch 10000, loss[loss=0.1231, simple_loss=0.1999, pruned_loss=0.02315, over 4785.00 frames.], tot_loss[loss=0.1299, simple_loss=0.205, pruned_loss=0.02737, over 974576.21 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 15:18:51,544 INFO [train.py:715] (6/8) Epoch 19, batch 10050, loss[loss=0.1351, simple_loss=0.2152, pruned_loss=0.02752, over 4853.00 frames.], tot_loss[loss=0.1294, simple_loss=0.2041, pruned_loss=0.02731, over 974051.85 frames.], batch size: 34, lr: 1.18e-04 +2022-05-09 15:19:31,280 INFO [train.py:715] (6/8) Epoch 19, batch 10100, loss[loss=0.1347, simple_loss=0.1973, pruned_loss=0.03608, over 4959.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2043, pruned_loss=0.02762, over 974238.26 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 15:20:10,776 INFO [train.py:715] (6/8) Epoch 19, batch 10150, loss[loss=0.1282, simple_loss=0.2174, pruned_loss=0.01951, over 4777.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2049, pruned_loss=0.02817, over 974319.90 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 15:20:49,760 INFO [train.py:715] (6/8) Epoch 19, batch 10200, loss[loss=0.1654, simple_loss=0.2428, pruned_loss=0.04396, over 4975.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2041, pruned_loss=0.02765, over 973841.19 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:21:29,139 INFO [train.py:715] (6/8) Epoch 19, batch 10250, loss[loss=0.1376, simple_loss=0.2056, pruned_loss=0.03474, over 4872.00 frames.], tot_loss[loss=0.13, simple_loss=0.2046, pruned_loss=0.02775, over 973374.71 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 15:22:09,267 INFO [train.py:715] (6/8) Epoch 19, batch 10300, loss[loss=0.1541, simple_loss=0.2206, pruned_loss=0.04382, over 4752.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2055, pruned_loss=0.02817, over 972917.23 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 15:22:48,850 INFO [train.py:715] 
(6/8) Epoch 19, batch 10350, loss[loss=0.1096, simple_loss=0.1885, pruned_loss=0.01539, over 4871.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.0281, over 972559.21 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 15:23:27,534 INFO [train.py:715] (6/8) Epoch 19, batch 10400, loss[loss=0.1258, simple_loss=0.1997, pruned_loss=0.02599, over 4747.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2051, pruned_loss=0.02797, over 971299.60 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:24:07,294 INFO [train.py:715] (6/8) Epoch 19, batch 10450, loss[loss=0.1251, simple_loss=0.1967, pruned_loss=0.02673, over 4824.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2042, pruned_loss=0.02756, over 971102.10 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:24:47,016 INFO [train.py:715] (6/8) Epoch 19, batch 10500, loss[loss=0.1449, simple_loss=0.2296, pruned_loss=0.03008, over 4924.00 frames.], tot_loss[loss=0.1296, simple_loss=0.2044, pruned_loss=0.0274, over 970936.96 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 15:25:25,931 INFO [train.py:715] (6/8) Epoch 19, batch 10550, loss[loss=0.1248, simple_loss=0.1981, pruned_loss=0.02569, over 4967.00 frames.], tot_loss[loss=0.1295, simple_loss=0.2042, pruned_loss=0.02739, over 970723.42 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 15:26:04,898 INFO [train.py:715] (6/8) Epoch 19, batch 10600, loss[loss=0.1247, simple_loss=0.2045, pruned_loss=0.02247, over 4889.00 frames.], tot_loss[loss=0.1304, simple_loss=0.205, pruned_loss=0.02793, over 970688.33 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:26:47,170 INFO [train.py:715] (6/8) Epoch 19, batch 10650, loss[loss=0.1342, simple_loss=0.2113, pruned_loss=0.0286, over 4776.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2053, pruned_loss=0.02802, over 971043.27 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 15:27:26,345 INFO [train.py:715] (6/8) Epoch 19, batch 10700, loss[loss=0.1129, simple_loss=0.1844, pruned_loss=0.02066, over 4797.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02813, over 971496.34 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 15:28:05,706 INFO [train.py:715] (6/8) Epoch 19, batch 10750, loss[loss=0.1306, simple_loss=0.2106, pruned_loss=0.02527, over 4895.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02843, over 972409.39 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:28:45,282 INFO [train.py:715] (6/8) Epoch 19, batch 10800, loss[loss=0.1554, simple_loss=0.2381, pruned_loss=0.03636, over 4830.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2056, pruned_loss=0.02877, over 972061.51 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:29:25,026 INFO [train.py:715] (6/8) Epoch 19, batch 10850, loss[loss=0.117, simple_loss=0.1946, pruned_loss=0.01965, over 4778.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2052, pruned_loss=0.02882, over 971793.68 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 15:30:03,633 INFO [train.py:715] (6/8) Epoch 19, batch 10900, loss[loss=0.1027, simple_loss=0.1737, pruned_loss=0.01585, over 4983.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2052, pruned_loss=0.0286, over 971272.13 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 15:30:42,651 INFO [train.py:715] (6/8) Epoch 19, batch 10950, loss[loss=0.1477, simple_loss=0.2232, pruned_loss=0.03613, over 4979.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2049, pruned_loss=0.02819, over 971622.03 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 15:31:22,358 INFO [train.py:715] (6/8) Epoch 
19, batch 11000, loss[loss=0.1172, simple_loss=0.1863, pruned_loss=0.02401, over 4750.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02848, over 973039.12 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 15:32:02,212 INFO [train.py:715] (6/8) Epoch 19, batch 11050, loss[loss=0.1282, simple_loss=0.2051, pruned_loss=0.02567, over 4795.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2054, pruned_loss=0.02809, over 972775.86 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 15:32:40,681 INFO [train.py:715] (6/8) Epoch 19, batch 11100, loss[loss=0.1141, simple_loss=0.1851, pruned_loss=0.02152, over 4889.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2057, pruned_loss=0.02798, over 973174.31 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 15:33:20,044 INFO [train.py:715] (6/8) Epoch 19, batch 11150, loss[loss=0.1423, simple_loss=0.218, pruned_loss=0.03331, over 4939.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2054, pruned_loss=0.02774, over 972589.40 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 15:33:59,456 INFO [train.py:715] (6/8) Epoch 19, batch 11200, loss[loss=0.1213, simple_loss=0.2069, pruned_loss=0.01788, over 4845.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2056, pruned_loss=0.02793, over 972016.78 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 15:34:38,835 INFO [train.py:715] (6/8) Epoch 19, batch 11250, loss[loss=0.1241, simple_loss=0.2008, pruned_loss=0.02368, over 4794.00 frames.], tot_loss[loss=0.1302, simple_loss=0.205, pruned_loss=0.02769, over 971792.91 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 15:35:18,205 INFO [train.py:715] (6/8) Epoch 19, batch 11300, loss[loss=0.1429, simple_loss=0.2136, pruned_loss=0.0361, over 4903.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2051, pruned_loss=0.02778, over 971893.88 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:35:56,992 INFO [train.py:715] (6/8) Epoch 19, batch 11350, loss[loss=0.1435, simple_loss=0.2197, pruned_loss=0.03361, over 4936.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02837, over 972446.26 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 15:36:36,606 INFO [train.py:715] (6/8) Epoch 19, batch 11400, loss[loss=0.1413, simple_loss=0.2106, pruned_loss=0.03598, over 4686.00 frames.], tot_loss[loss=0.1312, simple_loss=0.206, pruned_loss=0.0282, over 971771.12 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:37:16,210 INFO [train.py:715] (6/8) Epoch 19, batch 11450, loss[loss=0.1364, simple_loss=0.2083, pruned_loss=0.03226, over 4797.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02842, over 971543.94 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 15:37:56,153 INFO [train.py:715] (6/8) Epoch 19, batch 11500, loss[loss=0.129, simple_loss=0.2097, pruned_loss=0.02416, over 4792.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02843, over 972893.58 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 15:38:35,582 INFO [train.py:715] (6/8) Epoch 19, batch 11550, loss[loss=0.09835, simple_loss=0.1684, pruned_loss=0.01413, over 4782.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02859, over 972639.69 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 15:39:14,558 INFO [train.py:715] (6/8) Epoch 19, batch 11600, loss[loss=0.116, simple_loss=0.1875, pruned_loss=0.02227, over 4889.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02863, over 973158.40 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 15:39:54,377 INFO [train.py:715] (6/8) Epoch 19, batch 
11650, loss[loss=0.1243, simple_loss=0.2082, pruned_loss=0.02014, over 4880.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02844, over 971935.31 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 15:40:33,463 INFO [train.py:715] (6/8) Epoch 19, batch 11700, loss[loss=0.1292, simple_loss=0.215, pruned_loss=0.02171, over 4926.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02863, over 971822.34 frames.], batch size: 29, lr: 1.17e-04 +2022-05-09 15:41:13,006 INFO [train.py:715] (6/8) Epoch 19, batch 11750, loss[loss=0.1239, simple_loss=0.1925, pruned_loss=0.02767, over 4796.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02852, over 972104.20 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 15:41:52,548 INFO [train.py:715] (6/8) Epoch 19, batch 11800, loss[loss=0.1521, simple_loss=0.2214, pruned_loss=0.04143, over 4835.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2049, pruned_loss=0.02831, over 972892.21 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:42:32,208 INFO [train.py:715] (6/8) Epoch 19, batch 11850, loss[loss=0.1325, simple_loss=0.2119, pruned_loss=0.02657, over 4868.00 frames.], tot_loss[loss=0.131, simple_loss=0.205, pruned_loss=0.02845, over 971546.03 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 15:43:11,797 INFO [train.py:715] (6/8) Epoch 19, batch 11900, loss[loss=0.1448, simple_loss=0.211, pruned_loss=0.03928, over 4820.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2055, pruned_loss=0.02866, over 971344.22 frames.], batch size: 27, lr: 1.17e-04 +2022-05-09 15:43:51,294 INFO [train.py:715] (6/8) Epoch 19, batch 11950, loss[loss=0.1442, simple_loss=0.2251, pruned_loss=0.03172, over 4919.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2054, pruned_loss=0.02865, over 971648.34 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 15:44:30,458 INFO [train.py:715] (6/8) Epoch 19, batch 12000, loss[loss=0.1329, simple_loss=0.2133, pruned_loss=0.02623, over 4782.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2051, pruned_loss=0.0283, over 972259.90 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 15:44:30,459 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 15:44:40,311 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1044, simple_loss=0.1877, pruned_loss=0.01054, over 914524.00 frames. 
+2022-05-09 15:45:20,292 INFO [train.py:715] (6/8) Epoch 19, batch 12050, loss[loss=0.1153, simple_loss=0.1872, pruned_loss=0.02168, over 4754.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2051, pruned_loss=0.02832, over 971781.55 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 15:46:00,185 INFO [train.py:715] (6/8) Epoch 19, batch 12100, loss[loss=0.1227, simple_loss=0.1882, pruned_loss=0.02864, over 4844.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2047, pruned_loss=0.02795, over 971662.23 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:46:39,306 INFO [train.py:715] (6/8) Epoch 19, batch 12150, loss[loss=0.1228, simple_loss=0.2052, pruned_loss=0.02014, over 4747.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02804, over 971806.67 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:47:18,771 INFO [train.py:715] (6/8) Epoch 19, batch 12200, loss[loss=0.1505, simple_loss=0.2285, pruned_loss=0.03622, over 4940.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2049, pruned_loss=0.0279, over 971997.21 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 15:47:58,235 INFO [train.py:715] (6/8) Epoch 19, batch 12250, loss[loss=0.1027, simple_loss=0.1833, pruned_loss=0.01105, over 4793.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02787, over 972154.54 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 15:48:37,872 INFO [train.py:715] (6/8) Epoch 19, batch 12300, loss[loss=0.1374, simple_loss=0.2085, pruned_loss=0.03313, over 4769.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02843, over 972403.42 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:49:17,551 INFO [train.py:715] (6/8) Epoch 19, batch 12350, loss[loss=0.1241, simple_loss=0.2059, pruned_loss=0.0212, over 4931.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02837, over 972703.56 frames.], batch size: 29, lr: 1.17e-04 +2022-05-09 15:49:56,110 INFO [train.py:715] (6/8) Epoch 19, batch 12400, loss[loss=0.1328, simple_loss=0.2084, pruned_loss=0.02857, over 4869.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2052, pruned_loss=0.02847, over 971918.97 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 15:50:35,579 INFO [train.py:715] (6/8) Epoch 19, batch 12450, loss[loss=0.1299, simple_loss=0.1979, pruned_loss=0.03094, over 4803.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2044, pruned_loss=0.02824, over 971809.99 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 15:51:14,301 INFO [train.py:715] (6/8) Epoch 19, batch 12500, loss[loss=0.1485, simple_loss=0.2265, pruned_loss=0.03522, over 4981.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2047, pruned_loss=0.02808, over 972592.83 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 15:51:53,639 INFO [train.py:715] (6/8) Epoch 19, batch 12550, loss[loss=0.1177, simple_loss=0.2053, pruned_loss=0.01508, over 4806.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2046, pruned_loss=0.02784, over 972897.32 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 15:52:33,124 INFO [train.py:715] (6/8) Epoch 19, batch 12600, loss[loss=0.1536, simple_loss=0.2199, pruned_loss=0.04361, over 4956.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2055, pruned_loss=0.02813, over 971307.46 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 15:53:12,610 INFO [train.py:715] (6/8) Epoch 19, batch 12650, loss[loss=0.1196, simple_loss=0.1866, pruned_loss=0.02633, over 4788.00 frames.], tot_loss[loss=0.131, simple_loss=0.2051, pruned_loss=0.02842, over 971329.44 frames.], batch size: 12, lr: 1.17e-04 
+2022-05-09 15:53:51,560 INFO [train.py:715] (6/8) Epoch 19, batch 12700, loss[loss=0.1821, simple_loss=0.2501, pruned_loss=0.05706, over 4881.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02887, over 972143.29 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 15:54:30,745 INFO [train.py:715] (6/8) Epoch 19, batch 12750, loss[loss=0.1403, simple_loss=0.2127, pruned_loss=0.03402, over 4710.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02903, over 972193.21 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:55:10,391 INFO [train.py:715] (6/8) Epoch 19, batch 12800, loss[loss=0.1096, simple_loss=0.1885, pruned_loss=0.01539, over 4941.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02941, over 972130.89 frames.], batch size: 29, lr: 1.17e-04 +2022-05-09 15:55:49,792 INFO [train.py:715] (6/8) Epoch 19, batch 12850, loss[loss=0.1247, simple_loss=0.1995, pruned_loss=0.02491, over 4956.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2063, pruned_loss=0.02954, over 971492.04 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 15:56:28,732 INFO [train.py:715] (6/8) Epoch 19, batch 12900, loss[loss=0.1144, simple_loss=0.1905, pruned_loss=0.01921, over 4768.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02888, over 972724.54 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 15:57:08,290 INFO [train.py:715] (6/8) Epoch 19, batch 12950, loss[loss=0.1222, simple_loss=0.1969, pruned_loss=0.02372, over 4902.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02891, over 973680.37 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 15:57:47,532 INFO [train.py:715] (6/8) Epoch 19, batch 13000, loss[loss=0.1419, simple_loss=0.2138, pruned_loss=0.03498, over 4860.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02893, over 973561.04 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 15:58:26,678 INFO [train.py:715] (6/8) Epoch 19, batch 13050, loss[loss=0.1255, simple_loss=0.1916, pruned_loss=0.02967, over 4902.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02891, over 973233.17 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:59:05,575 INFO [train.py:715] (6/8) Epoch 19, batch 13100, loss[loss=0.1005, simple_loss=0.1721, pruned_loss=0.01449, over 4756.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02829, over 972927.95 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:59:44,834 INFO [train.py:715] (6/8) Epoch 19, batch 13150, loss[loss=0.1437, simple_loss=0.2214, pruned_loss=0.03302, over 4832.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2057, pruned_loss=0.02811, over 973215.31 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 16:00:24,449 INFO [train.py:715] (6/8) Epoch 19, batch 13200, loss[loss=0.1188, simple_loss=0.1976, pruned_loss=0.02005, over 4811.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2053, pruned_loss=0.02777, over 973876.75 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 16:01:03,679 INFO [train.py:715] (6/8) Epoch 19, batch 13250, loss[loss=0.1246, simple_loss=0.1937, pruned_loss=0.02772, over 4909.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2055, pruned_loss=0.02769, over 972855.94 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:01:42,982 INFO [train.py:715] (6/8) Epoch 19, batch 13300, loss[loss=0.1314, simple_loss=0.1995, pruned_loss=0.03164, over 4839.00 frames.], tot_loss[loss=0.1312, simple_loss=0.206, pruned_loss=0.02821, over 973317.85 frames.], batch size: 13, lr: 1.17e-04 
+2022-05-09 16:02:22,615 INFO [train.py:715] (6/8) Epoch 19, batch 13350, loss[loss=0.1305, simple_loss=0.2019, pruned_loss=0.02953, over 4777.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02819, over 973063.57 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 16:03:01,281 INFO [train.py:715] (6/8) Epoch 19, batch 13400, loss[loss=0.1247, simple_loss=0.204, pruned_loss=0.02273, over 4923.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02814, over 973473.70 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 16:03:40,774 INFO [train.py:715] (6/8) Epoch 19, batch 13450, loss[loss=0.1318, simple_loss=0.209, pruned_loss=0.02732, over 4954.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2045, pruned_loss=0.0278, over 973152.43 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 16:04:20,048 INFO [train.py:715] (6/8) Epoch 19, batch 13500, loss[loss=0.1232, simple_loss=0.2007, pruned_loss=0.02286, over 4875.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2048, pruned_loss=0.02799, over 973892.04 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 16:04:59,472 INFO [train.py:715] (6/8) Epoch 19, batch 13550, loss[loss=0.1152, simple_loss=0.1877, pruned_loss=0.02135, over 4905.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2048, pruned_loss=0.02817, over 974072.44 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:05:38,407 INFO [train.py:715] (6/8) Epoch 19, batch 13600, loss[loss=0.1197, simple_loss=0.1939, pruned_loss=0.02274, over 4812.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2052, pruned_loss=0.02854, over 973073.60 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:06:17,572 INFO [train.py:715] (6/8) Epoch 19, batch 13650, loss[loss=0.1105, simple_loss=0.1845, pruned_loss=0.01827, over 4967.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2053, pruned_loss=0.02854, over 973124.17 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 16:06:57,013 INFO [train.py:715] (6/8) Epoch 19, batch 13700, loss[loss=0.1311, simple_loss=0.2063, pruned_loss=0.02795, over 4830.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2049, pruned_loss=0.02811, over 973454.19 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 16:07:35,737 INFO [train.py:715] (6/8) Epoch 19, batch 13750, loss[loss=0.1444, simple_loss=0.214, pruned_loss=0.03735, over 4955.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2049, pruned_loss=0.02809, over 973244.38 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 16:08:15,000 INFO [train.py:715] (6/8) Epoch 19, batch 13800, loss[loss=0.1284, simple_loss=0.1988, pruned_loss=0.029, over 4811.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2041, pruned_loss=0.02787, over 972381.43 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 16:08:55,064 INFO [train.py:715] (6/8) Epoch 19, batch 13850, loss[loss=0.1221, simple_loss=0.1939, pruned_loss=0.02516, over 4747.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2042, pruned_loss=0.02766, over 972950.83 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 16:09:34,684 INFO [train.py:715] (6/8) Epoch 19, batch 13900, loss[loss=0.1307, simple_loss=0.2068, pruned_loss=0.02728, over 4946.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2052, pruned_loss=0.02856, over 972739.62 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 16:10:14,446 INFO [train.py:715] (6/8) Epoch 19, batch 13950, loss[loss=0.1092, simple_loss=0.1756, pruned_loss=0.02139, over 4792.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2045, pruned_loss=0.0284, over 972007.77 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 
16:10:53,217 INFO [train.py:715] (6/8) Epoch 19, batch 14000, loss[loss=0.1199, simple_loss=0.1908, pruned_loss=0.02451, over 4815.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2045, pruned_loss=0.02833, over 971629.91 frames.], batch size: 27, lr: 1.17e-04 +2022-05-09 16:11:32,648 INFO [train.py:715] (6/8) Epoch 19, batch 14050, loss[loss=0.1591, simple_loss=0.2457, pruned_loss=0.03623, over 4938.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2046, pruned_loss=0.02829, over 970930.01 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 16:12:11,847 INFO [train.py:715] (6/8) Epoch 19, batch 14100, loss[loss=0.123, simple_loss=0.2043, pruned_loss=0.02088, over 4874.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2048, pruned_loss=0.0283, over 970691.68 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 16:12:51,004 INFO [train.py:715] (6/8) Epoch 19, batch 14150, loss[loss=0.1298, simple_loss=0.2155, pruned_loss=0.02211, over 4887.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2053, pruned_loss=0.028, over 971489.12 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:13:30,122 INFO [train.py:715] (6/8) Epoch 19, batch 14200, loss[loss=0.1221, simple_loss=0.2081, pruned_loss=0.01805, over 4940.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02831, over 971432.16 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:14:08,942 INFO [train.py:715] (6/8) Epoch 19, batch 14250, loss[loss=0.1287, simple_loss=0.202, pruned_loss=0.02775, over 4789.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02837, over 971354.77 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 16:14:48,117 INFO [train.py:715] (6/8) Epoch 19, batch 14300, loss[loss=0.1287, simple_loss=0.2097, pruned_loss=0.02387, over 4899.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02857, over 971863.01 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:15:27,234 INFO [train.py:715] (6/8) Epoch 19, batch 14350, loss[loss=0.1073, simple_loss=0.1791, pruned_loss=0.01773, over 4684.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02889, over 971869.07 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:16:06,787 INFO [train.py:715] (6/8) Epoch 19, batch 14400, loss[loss=0.1587, simple_loss=0.2446, pruned_loss=0.03638, over 4809.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02884, over 971646.96 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 16:16:45,671 INFO [train.py:715] (6/8) Epoch 19, batch 14450, loss[loss=0.1342, simple_loss=0.2101, pruned_loss=0.02913, over 4768.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02879, over 972373.70 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 16:17:24,667 INFO [train.py:715] (6/8) Epoch 19, batch 14500, loss[loss=0.1325, simple_loss=0.1985, pruned_loss=0.03325, over 4829.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02874, over 971985.44 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 16:18:03,480 INFO [train.py:715] (6/8) Epoch 19, batch 14550, loss[loss=0.158, simple_loss=0.2214, pruned_loss=0.04729, over 4907.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02919, over 971822.34 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:18:43,156 INFO [train.py:715] (6/8) Epoch 19, batch 14600, loss[loss=0.1065, simple_loss=0.1845, pruned_loss=0.01427, over 4791.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2073, pruned_loss=0.02897, over 971584.92 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 16:19:22,224 
INFO [train.py:715] (6/8) Epoch 19, batch 14650, loss[loss=0.1219, simple_loss=0.194, pruned_loss=0.02495, over 4847.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2078, pruned_loss=0.029, over 971697.20 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:20:01,161 INFO [train.py:715] (6/8) Epoch 19, batch 14700, loss[loss=0.1496, simple_loss=0.2248, pruned_loss=0.03725, over 4934.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02878, over 972260.77 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 16:20:40,524 INFO [train.py:715] (6/8) Epoch 19, batch 14750, loss[loss=0.1206, simple_loss=0.1896, pruned_loss=0.02582, over 4951.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02874, over 971342.59 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 16:21:19,783 INFO [train.py:715] (6/8) Epoch 19, batch 14800, loss[loss=0.1174, simple_loss=0.1763, pruned_loss=0.02926, over 4763.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02876, over 971275.82 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 16:21:58,088 INFO [train.py:715] (6/8) Epoch 19, batch 14850, loss[loss=0.1358, simple_loss=0.2075, pruned_loss=0.0321, over 4794.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02862, over 970985.58 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 16:22:37,375 INFO [train.py:715] (6/8) Epoch 19, batch 14900, loss[loss=0.1427, simple_loss=0.2228, pruned_loss=0.03128, over 4879.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02867, over 971232.67 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 16:23:16,325 INFO [train.py:715] (6/8) Epoch 19, batch 14950, loss[loss=0.1328, simple_loss=0.1973, pruned_loss=0.03418, over 4836.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.0288, over 971148.17 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 16:23:55,097 INFO [train.py:715] (6/8) Epoch 19, batch 15000, loss[loss=0.1302, simple_loss=0.1957, pruned_loss=0.03235, over 4836.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02884, over 971819.17 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:23:55,098 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 16:24:07,488 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1045, simple_loss=0.1877, pruned_loss=0.01064, over 914524.00 frames. 
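
Each "Computing validation loss" entry above is followed by a single summary line reported over one fixed dev-set frame count. The log does not show how that figure is aggregated; the sketch below is only one natural choice, a frame-weighted mean over per-batch dev losses, with the function name and inputs invented for illustration.

def frame_weighted_mean(batch_stats):
    """batch_stats: iterable of (loss_value, num_frames) pairs, e.g. one per dev batch.

    Weights each batch by its frame count, which is one way to arrive at a single
    loss figure "over N frames" for the whole dev set.
    """
    total_loss = 0.0
    total_frames = 0.0
    for loss_value, num_frames in batch_stats:
        total_loss += loss_value * num_frames
        total_frames += num_frames
    # Empty input: no frames seen, so no meaningful average.
    return total_loss / total_frames if total_frames else float("inf")

# Hypothetical usage: frame_weighted_mean([(0.11, 4800.0), (0.10, 5200.0)])
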
+2022-05-09 16:24:46,705 INFO [train.py:715] (6/8) Epoch 19, batch 15050, loss[loss=0.1539, simple_loss=0.2239, pruned_loss=0.04198, over 4849.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2077, pruned_loss=0.02929, over 971824.43 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 16:25:26,174 INFO [train.py:715] (6/8) Epoch 19, batch 15100, loss[loss=0.149, simple_loss=0.2272, pruned_loss=0.03539, over 4805.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02928, over 971851.86 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 16:26:05,811 INFO [train.py:715] (6/8) Epoch 19, batch 15150, loss[loss=0.1371, simple_loss=0.2118, pruned_loss=0.03119, over 4916.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2081, pruned_loss=0.02956, over 971918.13 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:26:45,270 INFO [train.py:715] (6/8) Epoch 19, batch 15200, loss[loss=0.1403, simple_loss=0.2303, pruned_loss=0.02515, over 4971.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2076, pruned_loss=0.0291, over 971636.77 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 16:27:24,251 INFO [train.py:715] (6/8) Epoch 19, batch 15250, loss[loss=0.1462, simple_loss=0.2128, pruned_loss=0.03986, over 4688.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2078, pruned_loss=0.02899, over 972221.41 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:28:04,176 INFO [train.py:715] (6/8) Epoch 19, batch 15300, loss[loss=0.1312, simple_loss=0.2101, pruned_loss=0.02617, over 4790.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2078, pruned_loss=0.02849, over 971604.56 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 16:28:43,724 INFO [train.py:715] (6/8) Epoch 19, batch 15350, loss[loss=0.1171, simple_loss=0.1893, pruned_loss=0.02242, over 4709.00 frames.], tot_loss[loss=0.1319, simple_loss=0.207, pruned_loss=0.02841, over 971525.03 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:29:23,533 INFO [train.py:715] (6/8) Epoch 19, batch 15400, loss[loss=0.1656, simple_loss=0.2171, pruned_loss=0.05703, over 4855.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2064, pruned_loss=0.02817, over 971154.80 frames.], batch size: 34, lr: 1.17e-04 +2022-05-09 16:30:03,015 INFO [train.py:715] (6/8) Epoch 19, batch 15450, loss[loss=0.1422, simple_loss=0.2267, pruned_loss=0.02879, over 4635.00 frames.], tot_loss[loss=0.1308, simple_loss=0.206, pruned_loss=0.02783, over 970359.43 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 16:30:42,453 INFO [train.py:715] (6/8) Epoch 19, batch 15500, loss[loss=0.1393, simple_loss=0.2015, pruned_loss=0.03854, over 4869.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2072, pruned_loss=0.02854, over 971494.10 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 16:31:21,339 INFO [train.py:715] (6/8) Epoch 19, batch 15550, loss[loss=0.1108, simple_loss=0.1821, pruned_loss=0.01976, over 4949.00 frames.], tot_loss[loss=0.132, simple_loss=0.2068, pruned_loss=0.02853, over 971989.13 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 16:32:00,422 INFO [train.py:715] (6/8) Epoch 19, batch 15600, loss[loss=0.1303, simple_loss=0.2076, pruned_loss=0.02652, over 4769.00 frames.], tot_loss[loss=0.1322, simple_loss=0.207, pruned_loss=0.02873, over 972265.80 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:32:40,096 INFO [train.py:715] (6/8) Epoch 19, batch 15650, loss[loss=0.1285, simple_loss=0.1987, pruned_loss=0.02913, over 4838.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02842, over 972841.98 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 
16:33:19,045 INFO [train.py:715] (6/8) Epoch 19, batch 15700, loss[loss=0.1416, simple_loss=0.2121, pruned_loss=0.03555, over 4983.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02824, over 972760.84 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 16:33:59,157 INFO [train.py:715] (6/8) Epoch 19, batch 15750, loss[loss=0.1246, simple_loss=0.1938, pruned_loss=0.0277, over 4969.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02844, over 973334.94 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 16:34:38,396 INFO [train.py:715] (6/8) Epoch 19, batch 15800, loss[loss=0.1392, simple_loss=0.2149, pruned_loss=0.03176, over 4901.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2074, pruned_loss=0.02897, over 973021.50 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 16:35:17,531 INFO [train.py:715] (6/8) Epoch 19, batch 15850, loss[loss=0.1244, simple_loss=0.1957, pruned_loss=0.02659, over 4829.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.02921, over 972454.34 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 16:35:56,471 INFO [train.py:715] (6/8) Epoch 19, batch 15900, loss[loss=0.1678, simple_loss=0.2375, pruned_loss=0.049, over 4905.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2079, pruned_loss=0.02914, over 972196.90 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:36:35,619 INFO [train.py:715] (6/8) Epoch 19, batch 15950, loss[loss=0.1417, simple_loss=0.2201, pruned_loss=0.03163, over 4904.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2079, pruned_loss=0.02926, over 973393.83 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:37:15,277 INFO [train.py:715] (6/8) Epoch 19, batch 16000, loss[loss=0.1405, simple_loss=0.2114, pruned_loss=0.03486, over 4976.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2073, pruned_loss=0.02876, over 972414.95 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 16:37:53,934 INFO [train.py:715] (6/8) Epoch 19, batch 16050, loss[loss=0.109, simple_loss=0.1832, pruned_loss=0.01736, over 4833.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2079, pruned_loss=0.02911, over 972264.30 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 16:38:33,244 INFO [train.py:715] (6/8) Epoch 19, batch 16100, loss[loss=0.1305, simple_loss=0.2055, pruned_loss=0.02772, over 4780.00 frames.], tot_loss[loss=0.1332, simple_loss=0.208, pruned_loss=0.02921, over 972531.74 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 16:39:12,544 INFO [train.py:715] (6/8) Epoch 19, batch 16150, loss[loss=0.1119, simple_loss=0.189, pruned_loss=0.0174, over 4792.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2073, pruned_loss=0.02881, over 972201.94 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 16:39:51,598 INFO [train.py:715] (6/8) Epoch 19, batch 16200, loss[loss=0.1361, simple_loss=0.2141, pruned_loss=0.02902, over 4899.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2079, pruned_loss=0.02914, over 971643.75 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:40:29,892 INFO [train.py:715] (6/8) Epoch 19, batch 16250, loss[loss=0.1629, simple_loss=0.2299, pruned_loss=0.048, over 4954.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2081, pruned_loss=0.02902, over 971606.44 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 16:41:08,937 INFO [train.py:715] (6/8) Epoch 19, batch 16300, loss[loss=0.13, simple_loss=0.2091, pruned_loss=0.02545, over 4884.00 frames.], tot_loss[loss=0.133, simple_loss=0.2078, pruned_loss=0.02905, over 972121.71 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 16:41:48,392 INFO 
[train.py:715] (6/8) Epoch 19, batch 16350, loss[loss=0.1334, simple_loss=0.2127, pruned_loss=0.02701, over 4944.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2085, pruned_loss=0.02903, over 972289.90 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 16:42:26,936 INFO [train.py:715] (6/8) Epoch 19, batch 16400, loss[loss=0.1213, simple_loss=0.1882, pruned_loss=0.02718, over 4761.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2085, pruned_loss=0.02916, over 972858.90 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 16:43:05,776 INFO [train.py:715] (6/8) Epoch 19, batch 16450, loss[loss=0.1466, simple_loss=0.217, pruned_loss=0.0381, over 4904.00 frames.], tot_loss[loss=0.133, simple_loss=0.2081, pruned_loss=0.02894, over 973010.61 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:43:44,327 INFO [train.py:715] (6/8) Epoch 19, batch 16500, loss[loss=0.1511, simple_loss=0.2354, pruned_loss=0.03335, over 4864.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2075, pruned_loss=0.02865, over 972745.24 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 16:44:23,747 INFO [train.py:715] (6/8) Epoch 19, batch 16550, loss[loss=0.1028, simple_loss=0.171, pruned_loss=0.0173, over 4832.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2071, pruned_loss=0.02864, over 972442.97 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 16:45:02,748 INFO [train.py:715] (6/8) Epoch 19, batch 16600, loss[loss=0.1166, simple_loss=0.1824, pruned_loss=0.02542, over 4770.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2063, pruned_loss=0.02823, over 972049.36 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 16:45:41,762 INFO [train.py:715] (6/8) Epoch 19, batch 16650, loss[loss=0.1241, simple_loss=0.2055, pruned_loss=0.02136, over 4935.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2056, pruned_loss=0.02783, over 973047.82 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 16:46:21,733 INFO [train.py:715] (6/8) Epoch 19, batch 16700, loss[loss=0.1131, simple_loss=0.1933, pruned_loss=0.01647, over 4975.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2056, pruned_loss=0.02794, over 972469.67 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 16:47:00,849 INFO [train.py:715] (6/8) Epoch 19, batch 16750, loss[loss=0.143, simple_loss=0.2138, pruned_loss=0.03606, over 4884.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.0285, over 972843.09 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 16:47:40,570 INFO [train.py:715] (6/8) Epoch 19, batch 16800, loss[loss=0.1576, simple_loss=0.2332, pruned_loss=0.04101, over 4903.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02829, over 973054.05 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:48:19,975 INFO [train.py:715] (6/8) Epoch 19, batch 16850, loss[loss=0.1154, simple_loss=0.1781, pruned_loss=0.02633, over 4959.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02826, over 973197.17 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 16:48:59,494 INFO [train.py:715] (6/8) Epoch 19, batch 16900, loss[loss=0.1212, simple_loss=0.2019, pruned_loss=0.02024, over 4867.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02851, over 972810.42 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 16:49:38,112 INFO [train.py:715] (6/8) Epoch 19, batch 16950, loss[loss=0.137, simple_loss=0.2066, pruned_loss=0.03367, over 4955.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02883, over 972447.84 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:50:17,672 INFO [train.py:715] 
(6/8) Epoch 19, batch 17000, loss[loss=0.2318, simple_loss=0.2677, pruned_loss=0.09795, over 4957.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02921, over 972279.72 frames.], batch size: 29, lr: 1.17e-04 +2022-05-09 16:50:57,098 INFO [train.py:715] (6/8) Epoch 19, batch 17050, loss[loss=0.1317, simple_loss=0.2167, pruned_loss=0.02341, over 4850.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02901, over 972600.67 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 16:51:36,158 INFO [train.py:715] (6/8) Epoch 19, batch 17100, loss[loss=0.1225, simple_loss=0.1899, pruned_loss=0.02761, over 4834.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02875, over 972673.75 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 16:52:15,340 INFO [train.py:715] (6/8) Epoch 19, batch 17150, loss[loss=0.1308, simple_loss=0.2086, pruned_loss=0.02645, over 4805.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02885, over 972041.51 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 16:52:54,354 INFO [train.py:715] (6/8) Epoch 19, batch 17200, loss[loss=0.1026, simple_loss=0.1678, pruned_loss=0.01875, over 4849.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02843, over 970962.91 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 16:53:33,094 INFO [train.py:715] (6/8) Epoch 19, batch 17250, loss[loss=0.1227, simple_loss=0.1935, pruned_loss=0.02596, over 4848.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02866, over 970687.82 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 16:54:12,082 INFO [train.py:715] (6/8) Epoch 19, batch 17300, loss[loss=0.1202, simple_loss=0.1974, pruned_loss=0.02154, over 4791.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02885, over 971345.43 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:54:51,747 INFO [train.py:715] (6/8) Epoch 19, batch 17350, loss[loss=0.1958, simple_loss=0.2641, pruned_loss=0.06373, over 4749.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02926, over 972531.67 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 16:55:31,275 INFO [train.py:715] (6/8) Epoch 19, batch 17400, loss[loss=0.1851, simple_loss=0.2474, pruned_loss=0.06137, over 4984.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02947, over 971933.56 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 16:56:10,522 INFO [train.py:715] (6/8) Epoch 19, batch 17450, loss[loss=0.1052, simple_loss=0.1851, pruned_loss=0.0126, over 4894.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02899, over 972813.21 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:56:49,869 INFO [train.py:715] (6/8) Epoch 19, batch 17500, loss[loss=0.1333, simple_loss=0.2115, pruned_loss=0.02756, over 4924.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02873, over 972845.96 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:57:29,144 INFO [train.py:715] (6/8) Epoch 19, batch 17550, loss[loss=0.1378, simple_loss=0.2084, pruned_loss=0.03363, over 4747.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.02838, over 972853.54 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:58:08,756 INFO [train.py:715] (6/8) Epoch 19, batch 17600, loss[loss=0.1401, simple_loss=0.2168, pruned_loss=0.03166, over 4780.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02822, over 973258.04 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 16:58:47,931 INFO [train.py:715] (6/8) 
Epoch 19, batch 17650, loss[loss=0.1185, simple_loss=0.1958, pruned_loss=0.02061, over 4759.00 frames.], tot_loss[loss=0.131, simple_loss=0.2058, pruned_loss=0.02807, over 973383.74 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:59:27,080 INFO [train.py:715] (6/8) Epoch 19, batch 17700, loss[loss=0.1275, simple_loss=0.2043, pruned_loss=0.02533, over 4922.00 frames.], tot_loss[loss=0.131, simple_loss=0.206, pruned_loss=0.02799, over 973476.89 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 17:00:06,649 INFO [train.py:715] (6/8) Epoch 19, batch 17750, loss[loss=0.1692, simple_loss=0.2413, pruned_loss=0.04857, over 4895.00 frames.], tot_loss[loss=0.1323, simple_loss=0.207, pruned_loss=0.02882, over 973207.07 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:00:45,239 INFO [train.py:715] (6/8) Epoch 19, batch 17800, loss[loss=0.1402, simple_loss=0.2086, pruned_loss=0.03596, over 4860.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02902, over 972916.82 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:01:24,012 INFO [train.py:715] (6/8) Epoch 19, batch 17850, loss[loss=0.1303, simple_loss=0.2087, pruned_loss=0.02595, over 4830.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02856, over 972791.21 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:02:03,488 INFO [train.py:715] (6/8) Epoch 19, batch 17900, loss[loss=0.1284, simple_loss=0.1945, pruned_loss=0.03119, over 4800.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02845, over 972050.24 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 17:02:41,972 INFO [train.py:715] (6/8) Epoch 19, batch 17950, loss[loss=0.1264, simple_loss=0.1986, pruned_loss=0.02705, over 4981.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02865, over 972403.55 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 17:03:21,257 INFO [train.py:715] (6/8) Epoch 19, batch 18000, loss[loss=0.119, simple_loss=0.1991, pruned_loss=0.01944, over 4957.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02856, over 972385.24 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 17:03:21,257 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 17:03:31,129 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1046, simple_loss=0.1877, pruned_loss=0.01074, over 914524.00 frames. 
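
The tot_loss[...] figures above move far more smoothly than the per-batch loss[...] figures and are always reported over a roughly constant ~972k frames, which suggests some frame-weighted smoothing over recent batches. The exact bookkeeping is not visible in this log; the class below is a minimal sketch of one plausible scheme, with the frame budget and class name invented for illustration.

from collections import deque

class FrameWeightedWindow:
    """Frame-weighted average of recent batch losses, capped at a frame budget."""

    def __init__(self, max_frames=1_000_000):
        self.max_frames = max_frames
        self.batches = deque()   # (loss, frames) per batch
        self.loss_sum = 0.0
        self.frame_sum = 0.0

    def update(self, loss, frames):
        self.batches.append((loss, frames))
        self.loss_sum += loss * frames
        self.frame_sum += frames
        # Evict the oldest batches once the window exceeds its frame budget.
        while self.frame_sum > self.max_frames and len(self.batches) > 1:
            old_loss, old_frames = self.batches.popleft()
            self.loss_sum -= old_loss * old_frames
            self.frame_sum -= old_frames
        # Return the smoothed loss and the number of frames it covers.
        return self.loss_sum / self.frame_sum, self.frame_sum
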
+2022-05-09 17:04:10,642 INFO [train.py:715] (6/8) Epoch 19, batch 18050, loss[loss=0.1207, simple_loss=0.1982, pruned_loss=0.02156, over 4985.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02839, over 972784.43 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 17:04:50,208 INFO [train.py:715] (6/8) Epoch 19, batch 18100, loss[loss=0.1594, simple_loss=0.2419, pruned_loss=0.03848, over 4903.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02859, over 973272.85 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 17:05:30,068 INFO [train.py:715] (6/8) Epoch 19, batch 18150, loss[loss=0.1725, simple_loss=0.2565, pruned_loss=0.04422, over 4782.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2074, pruned_loss=0.02885, over 972230.52 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 17:06:09,192 INFO [train.py:715] (6/8) Epoch 19, batch 18200, loss[loss=0.1175, simple_loss=0.1885, pruned_loss=0.02326, over 4921.00 frames.], tot_loss[loss=0.1323, simple_loss=0.207, pruned_loss=0.02881, over 972564.20 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:06:48,119 INFO [train.py:715] (6/8) Epoch 19, batch 18250, loss[loss=0.1486, simple_loss=0.2203, pruned_loss=0.03845, over 4962.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02906, over 971841.74 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 17:07:28,077 INFO [train.py:715] (6/8) Epoch 19, batch 18300, loss[loss=0.1118, simple_loss=0.1768, pruned_loss=0.02341, over 4781.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02888, over 971685.44 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 17:08:07,536 INFO [train.py:715] (6/8) Epoch 19, batch 18350, loss[loss=0.1293, simple_loss=0.2051, pruned_loss=0.02672, over 4844.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.0291, over 971478.49 frames.], batch size: 27, lr: 1.17e-04 +2022-05-09 17:08:47,436 INFO [train.py:715] (6/8) Epoch 19, batch 18400, loss[loss=0.1277, simple_loss=0.2001, pruned_loss=0.02763, over 4845.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02905, over 970942.00 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 17:09:26,675 INFO [train.py:715] (6/8) Epoch 19, batch 18450, loss[loss=0.1049, simple_loss=0.1758, pruned_loss=0.01701, over 4830.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02908, over 971006.01 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 17:10:06,114 INFO [train.py:715] (6/8) Epoch 19, batch 18500, loss[loss=0.1166, simple_loss=0.1906, pruned_loss=0.02132, over 4918.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.0289, over 972240.53 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 17:10:45,351 INFO [train.py:715] (6/8) Epoch 19, batch 18550, loss[loss=0.129, simple_loss=0.2045, pruned_loss=0.02672, over 4731.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02894, over 971982.70 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:11:24,400 INFO [train.py:715] (6/8) Epoch 19, batch 18600, loss[loss=0.1194, simple_loss=0.2003, pruned_loss=0.01919, over 4777.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2054, pruned_loss=0.02873, over 972132.59 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 17:12:06,308 INFO [train.py:715] (6/8) Epoch 19, batch 18650, loss[loss=0.1347, simple_loss=0.2057, pruned_loss=0.0318, over 4910.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2047, pruned_loss=0.0285, over 972291.09 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 
17:12:45,154 INFO [train.py:715] (6/8) Epoch 19, batch 18700, loss[loss=0.1122, simple_loss=0.1736, pruned_loss=0.0254, over 4813.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2042, pruned_loss=0.02806, over 972618.72 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 17:13:24,459 INFO [train.py:715] (6/8) Epoch 19, batch 18750, loss[loss=0.1153, simple_loss=0.1937, pruned_loss=0.01848, over 4698.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2055, pruned_loss=0.0286, over 972633.20 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:14:04,399 INFO [train.py:715] (6/8) Epoch 19, batch 18800, loss[loss=0.1341, simple_loss=0.2124, pruned_loss=0.02796, over 4905.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2048, pruned_loss=0.02856, over 973818.43 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:14:44,267 INFO [train.py:715] (6/8) Epoch 19, batch 18850, loss[loss=0.127, simple_loss=0.2049, pruned_loss=0.02451, over 4888.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2056, pruned_loss=0.02876, over 973656.60 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:15:23,451 INFO [train.py:715] (6/8) Epoch 19, batch 18900, loss[loss=0.1295, simple_loss=0.2047, pruned_loss=0.02716, over 4832.00 frames.], tot_loss[loss=0.1321, simple_loss=0.206, pruned_loss=0.02911, over 973608.13 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:16:02,850 INFO [train.py:715] (6/8) Epoch 19, batch 18950, loss[loss=0.1183, simple_loss=0.1937, pruned_loss=0.02145, over 4863.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02865, over 972940.83 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:16:42,876 INFO [train.py:715] (6/8) Epoch 19, batch 19000, loss[loss=0.1112, simple_loss=0.1929, pruned_loss=0.01469, over 4981.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02837, over 972612.64 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 17:17:22,390 INFO [train.py:715] (6/8) Epoch 19, batch 19050, loss[loss=0.1163, simple_loss=0.1955, pruned_loss=0.01854, over 4986.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02833, over 972573.03 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 17:18:01,437 INFO [train.py:715] (6/8) Epoch 19, batch 19100, loss[loss=0.1573, simple_loss=0.2231, pruned_loss=0.04578, over 4818.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02839, over 973216.02 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:18:41,051 INFO [train.py:715] (6/8) Epoch 19, batch 19150, loss[loss=0.1339, simple_loss=0.2171, pruned_loss=0.02531, over 4889.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02858, over 972701.46 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 17:19:20,397 INFO [train.py:715] (6/8) Epoch 19, batch 19200, loss[loss=0.1494, simple_loss=0.2314, pruned_loss=0.03372, over 4881.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2054, pruned_loss=0.02813, over 972521.82 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:19:59,882 INFO [train.py:715] (6/8) Epoch 19, batch 19250, loss[loss=0.1512, simple_loss=0.2343, pruned_loss=0.03407, over 4908.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02856, over 973219.87 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 17:20:39,161 INFO [train.py:715] (6/8) Epoch 19, batch 19300, loss[loss=0.1169, simple_loss=0.197, pruned_loss=0.0184, over 4818.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02836, over 973475.48 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 17:21:19,528 
INFO [train.py:715] (6/8) Epoch 19, batch 19350, loss[loss=0.1345, simple_loss=0.2029, pruned_loss=0.03305, over 4933.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02858, over 972837.72 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 17:21:58,945 INFO [train.py:715] (6/8) Epoch 19, batch 19400, loss[loss=0.121, simple_loss=0.2061, pruned_loss=0.018, over 4810.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.0283, over 972348.58 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 17:22:38,652 INFO [train.py:715] (6/8) Epoch 19, batch 19450, loss[loss=0.127, simple_loss=0.2019, pruned_loss=0.02607, over 4802.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02785, over 972887.32 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 17:23:18,396 INFO [train.py:715] (6/8) Epoch 19, batch 19500, loss[loss=0.1158, simple_loss=0.1943, pruned_loss=0.01866, over 4929.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2052, pruned_loss=0.02797, over 973045.16 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 17:23:57,785 INFO [train.py:715] (6/8) Epoch 19, batch 19550, loss[loss=0.1246, simple_loss=0.1956, pruned_loss=0.02679, over 4869.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02786, over 973857.70 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:24:36,951 INFO [train.py:715] (6/8) Epoch 19, batch 19600, loss[loss=0.1355, simple_loss=0.2194, pruned_loss=0.02579, over 4910.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2054, pruned_loss=0.02846, over 972977.84 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 17:25:17,657 INFO [train.py:715] (6/8) Epoch 19, batch 19650, loss[loss=0.1151, simple_loss=0.1922, pruned_loss=0.01899, over 4813.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2056, pruned_loss=0.0286, over 973596.48 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 17:25:56,969 INFO [train.py:715] (6/8) Epoch 19, batch 19700, loss[loss=0.1342, simple_loss=0.2002, pruned_loss=0.03407, over 4789.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.0284, over 973208.97 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 17:26:35,810 INFO [train.py:715] (6/8) Epoch 19, batch 19750, loss[loss=0.1359, simple_loss=0.2142, pruned_loss=0.02878, over 4904.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2055, pruned_loss=0.02858, over 973073.34 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 17:27:16,080 INFO [train.py:715] (6/8) Epoch 19, batch 19800, loss[loss=0.1189, simple_loss=0.2018, pruned_loss=0.01803, over 4838.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02817, over 973319.12 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:27:55,922 INFO [train.py:715] (6/8) Epoch 19, batch 19850, loss[loss=0.1481, simple_loss=0.2229, pruned_loss=0.03669, over 4762.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02843, over 973056.81 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:28:35,201 INFO [train.py:715] (6/8) Epoch 19, batch 19900, loss[loss=0.1357, simple_loss=0.2136, pruned_loss=0.02889, over 4973.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.02806, over 972433.79 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 17:29:13,882 INFO [train.py:715] (6/8) Epoch 19, batch 19950, loss[loss=0.1333, simple_loss=0.2066, pruned_loss=0.03006, over 4779.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2048, pruned_loss=0.02803, over 971767.22 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 17:29:53,617 INFO 
[train.py:715] (6/8) Epoch 19, batch 20000, loss[loss=0.146, simple_loss=0.2246, pruned_loss=0.03367, over 4958.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2048, pruned_loss=0.02774, over 972538.35 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 17:30:33,010 INFO [train.py:715] (6/8) Epoch 19, batch 20050, loss[loss=0.1321, simple_loss=0.2024, pruned_loss=0.0309, over 4800.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02813, over 972274.57 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 17:31:12,643 INFO [train.py:715] (6/8) Epoch 19, batch 20100, loss[loss=0.1185, simple_loss=0.1888, pruned_loss=0.02416, over 4663.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2052, pruned_loss=0.02832, over 972440.23 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 17:31:52,174 INFO [train.py:715] (6/8) Epoch 19, batch 20150, loss[loss=0.1378, simple_loss=0.2127, pruned_loss=0.0314, over 4833.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.0284, over 972469.92 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:32:31,822 INFO [train.py:715] (6/8) Epoch 19, batch 20200, loss[loss=0.1311, simple_loss=0.1988, pruned_loss=0.03173, over 4912.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2046, pruned_loss=0.02821, over 971921.46 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 17:33:11,350 INFO [train.py:715] (6/8) Epoch 19, batch 20250, loss[loss=0.1669, simple_loss=0.2397, pruned_loss=0.04703, over 4969.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2051, pruned_loss=0.0284, over 971725.96 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:33:50,685 INFO [train.py:715] (6/8) Epoch 19, batch 20300, loss[loss=0.1275, simple_loss=0.2026, pruned_loss=0.02619, over 4976.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.028, over 971075.67 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 17:34:30,223 INFO [train.py:715] (6/8) Epoch 19, batch 20350, loss[loss=0.1241, simple_loss=0.1996, pruned_loss=0.02429, over 4761.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02803, over 970843.08 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:35:09,437 INFO [train.py:715] (6/8) Epoch 19, batch 20400, loss[loss=0.1219, simple_loss=0.1984, pruned_loss=0.02263, over 4915.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02794, over 970194.85 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 17:35:48,302 INFO [train.py:715] (6/8) Epoch 19, batch 20450, loss[loss=0.1379, simple_loss=0.2187, pruned_loss=0.02851, over 4861.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2055, pruned_loss=0.02817, over 970587.55 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 17:36:28,043 INFO [train.py:715] (6/8) Epoch 19, batch 20500, loss[loss=0.1251, simple_loss=0.1993, pruned_loss=0.02548, over 4915.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2051, pruned_loss=0.02802, over 971069.73 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 17:37:07,746 INFO [train.py:715] (6/8) Epoch 19, batch 20550, loss[loss=0.1227, simple_loss=0.1941, pruned_loss=0.02559, over 4853.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2045, pruned_loss=0.0278, over 971349.36 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:37:46,567 INFO [train.py:715] (6/8) Epoch 19, batch 20600, loss[loss=0.1226, simple_loss=0.2014, pruned_loss=0.02188, over 4766.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02843, over 971474.58 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 17:38:26,014 INFO [train.py:715] 
(6/8) Epoch 19, batch 20650, loss[loss=0.1174, simple_loss=0.1935, pruned_loss=0.02069, over 4796.00 frames.], tot_loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.02865, over 971255.34 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 17:39:05,348 INFO [train.py:715] (6/8) Epoch 19, batch 20700, loss[loss=0.1238, simple_loss=0.2076, pruned_loss=0.01997, over 4851.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02835, over 971667.00 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 17:39:44,826 INFO [train.py:715] (6/8) Epoch 19, batch 20750, loss[loss=0.1578, simple_loss=0.2292, pruned_loss=0.04315, over 4887.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2059, pruned_loss=0.02818, over 972345.17 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 17:40:23,539 INFO [train.py:715] (6/8) Epoch 19, batch 20800, loss[loss=0.146, simple_loss=0.2157, pruned_loss=0.0382, over 4968.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02834, over 971871.38 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:41:02,815 INFO [train.py:715] (6/8) Epoch 19, batch 20850, loss[loss=0.1308, simple_loss=0.2005, pruned_loss=0.03054, over 4861.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02843, over 972660.69 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 17:41:42,484 INFO [train.py:715] (6/8) Epoch 19, batch 20900, loss[loss=0.1511, simple_loss=0.2235, pruned_loss=0.03938, over 4965.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02883, over 973274.56 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 17:42:21,291 INFO [train.py:715] (6/8) Epoch 19, batch 20950, loss[loss=0.1261, simple_loss=0.1973, pruned_loss=0.02743, over 4849.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02871, over 972354.83 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:43:01,039 INFO [train.py:715] (6/8) Epoch 19, batch 21000, loss[loss=0.1237, simple_loss=0.1938, pruned_loss=0.02684, over 4848.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02885, over 972999.54 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:43:01,040 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 17:43:11,505 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1045, simple_loss=0.1878, pruned_loss=0.01062, over 914524.00 frames. 
+2022-05-09 17:43:51,340 INFO [train.py:715] (6/8) Epoch 19, batch 21050, loss[loss=0.1095, simple_loss=0.1719, pruned_loss=0.02353, over 4788.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02856, over 971842.24 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 17:44:31,305 INFO [train.py:715] (6/8) Epoch 19, batch 21100, loss[loss=0.1647, simple_loss=0.2382, pruned_loss=0.04564, over 4837.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02835, over 972069.89 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 17:45:10,119 INFO [train.py:715] (6/8) Epoch 19, batch 21150, loss[loss=0.1368, simple_loss=0.217, pruned_loss=0.02829, over 4817.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2055, pruned_loss=0.02818, over 971501.86 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 17:45:49,704 INFO [train.py:715] (6/8) Epoch 19, batch 21200, loss[loss=0.1306, simple_loss=0.2106, pruned_loss=0.02532, over 4764.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.02797, over 971921.45 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:46:28,953 INFO [train.py:715] (6/8) Epoch 19, batch 21250, loss[loss=0.1353, simple_loss=0.2141, pruned_loss=0.02827, over 4862.00 frames.], tot_loss[loss=0.1312, simple_loss=0.206, pruned_loss=0.0282, over 973293.93 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 17:47:07,995 INFO [train.py:715] (6/8) Epoch 19, batch 21300, loss[loss=0.1474, simple_loss=0.2217, pruned_loss=0.03652, over 4942.00 frames.], tot_loss[loss=0.132, simple_loss=0.207, pruned_loss=0.02855, over 973089.12 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 17:47:46,812 INFO [train.py:715] (6/8) Epoch 19, batch 21350, loss[loss=0.1233, simple_loss=0.2047, pruned_loss=0.02098, over 4951.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2068, pruned_loss=0.02829, over 973442.85 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 17:48:26,341 INFO [train.py:715] (6/8) Epoch 19, batch 21400, loss[loss=0.1112, simple_loss=0.1977, pruned_loss=0.01229, over 4980.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2063, pruned_loss=0.02797, over 972789.94 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 17:49:05,856 INFO [train.py:715] (6/8) Epoch 19, batch 21450, loss[loss=0.1416, simple_loss=0.2195, pruned_loss=0.0318, over 4919.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2058, pruned_loss=0.02789, over 972782.05 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 17:49:44,651 INFO [train.py:715] (6/8) Epoch 19, batch 21500, loss[loss=0.1059, simple_loss=0.1843, pruned_loss=0.01374, over 4873.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2065, pruned_loss=0.02813, over 972407.77 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 17:50:24,360 INFO [train.py:715] (6/8) Epoch 19, batch 21550, loss[loss=0.1177, simple_loss=0.1952, pruned_loss=0.0201, over 4815.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2061, pruned_loss=0.02786, over 972790.16 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 17:51:04,083 INFO [train.py:715] (6/8) Epoch 19, batch 21600, loss[loss=0.1407, simple_loss=0.2194, pruned_loss=0.03099, over 4986.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2063, pruned_loss=0.02815, over 972352.51 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 17:51:43,842 INFO [train.py:715] (6/8) Epoch 19, batch 21650, loss[loss=0.1263, simple_loss=0.2024, pruned_loss=0.02507, over 4882.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2058, pruned_loss=0.02779, over 971680.56 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 
17:52:22,734 INFO [train.py:715] (6/8) Epoch 19, batch 21700, loss[loss=0.1197, simple_loss=0.1967, pruned_loss=0.02129, over 4938.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.02825, over 972334.06 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 17:53:02,148 INFO [train.py:715] (6/8) Epoch 19, batch 21750, loss[loss=0.1099, simple_loss=0.1845, pruned_loss=0.01766, over 4956.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.02807, over 972166.96 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 17:53:43,135 INFO [train.py:715] (6/8) Epoch 19, batch 21800, loss[loss=0.1369, simple_loss=0.2178, pruned_loss=0.02801, over 4686.00 frames.], tot_loss[loss=0.1295, simple_loss=0.2039, pruned_loss=0.02756, over 971803.25 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 17:54:22,956 INFO [train.py:715] (6/8) Epoch 19, batch 21850, loss[loss=0.1445, simple_loss=0.2225, pruned_loss=0.03328, over 4961.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2043, pruned_loss=0.0279, over 971288.55 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 17:55:03,333 INFO [train.py:715] (6/8) Epoch 19, batch 21900, loss[loss=0.1011, simple_loss=0.1756, pruned_loss=0.01333, over 4754.00 frames.], tot_loss[loss=0.1308, simple_loss=0.205, pruned_loss=0.02827, over 971697.44 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 17:55:43,308 INFO [train.py:715] (6/8) Epoch 19, batch 21950, loss[loss=0.1143, simple_loss=0.1923, pruned_loss=0.0182, over 4834.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2048, pruned_loss=0.02778, over 972399.46 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 17:56:22,529 INFO [train.py:715] (6/8) Epoch 19, batch 22000, loss[loss=0.1522, simple_loss=0.2064, pruned_loss=0.04901, over 4737.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2046, pruned_loss=0.02796, over 972242.37 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 17:57:01,819 INFO [train.py:715] (6/8) Epoch 19, batch 22050, loss[loss=0.1146, simple_loss=0.1913, pruned_loss=0.01896, over 4765.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2049, pruned_loss=0.02809, over 972941.78 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 17:57:41,390 INFO [train.py:715] (6/8) Epoch 19, batch 22100, loss[loss=0.1399, simple_loss=0.2039, pruned_loss=0.03792, over 4849.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2048, pruned_loss=0.02841, over 973330.14 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 17:58:21,391 INFO [train.py:715] (6/8) Epoch 19, batch 22150, loss[loss=0.1176, simple_loss=0.2046, pruned_loss=0.01524, over 4788.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2043, pruned_loss=0.02808, over 973625.32 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 17:59:00,691 INFO [train.py:715] (6/8) Epoch 19, batch 22200, loss[loss=0.1274, simple_loss=0.2062, pruned_loss=0.02429, over 4881.00 frames.], tot_loss[loss=0.13, simple_loss=0.2041, pruned_loss=0.02789, over 974378.48 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 17:59:40,601 INFO [train.py:715] (6/8) Epoch 19, batch 22250, loss[loss=0.1549, simple_loss=0.2291, pruned_loss=0.04037, over 4988.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2052, pruned_loss=0.02817, over 973907.60 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:00:20,346 INFO [train.py:715] (6/8) Epoch 19, batch 22300, loss[loss=0.1177, simple_loss=0.1916, pruned_loss=0.02188, over 4827.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02804, over 973746.70 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 
18:00:59,303 INFO [train.py:715] (6/8) Epoch 19, batch 22350, loss[loss=0.1242, simple_loss=0.1982, pruned_loss=0.02513, over 4867.00 frames.], tot_loss[loss=0.1296, simple_loss=0.2041, pruned_loss=0.02758, over 973108.54 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 18:01:38,322 INFO [train.py:715] (6/8) Epoch 19, batch 22400, loss[loss=0.1544, simple_loss=0.2314, pruned_loss=0.03873, over 4820.00 frames.], tot_loss[loss=0.13, simple_loss=0.2049, pruned_loss=0.02759, over 972867.66 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 18:02:17,608 INFO [train.py:715] (6/8) Epoch 19, batch 22450, loss[loss=0.1306, simple_loss=0.2051, pruned_loss=0.02809, over 4870.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2051, pruned_loss=0.02776, over 973191.35 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 18:02:57,546 INFO [train.py:715] (6/8) Epoch 19, batch 22500, loss[loss=0.1171, simple_loss=0.1866, pruned_loss=0.02381, over 4820.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2049, pruned_loss=0.02792, over 972522.85 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:03:36,401 INFO [train.py:715] (6/8) Epoch 19, batch 22550, loss[loss=0.1621, simple_loss=0.2362, pruned_loss=0.04406, over 4926.00 frames.], tot_loss[loss=0.1295, simple_loss=0.2043, pruned_loss=0.02734, over 973319.54 frames.], batch size: 39, lr: 1.16e-04 +2022-05-09 18:04:16,077 INFO [train.py:715] (6/8) Epoch 19, batch 22600, loss[loss=0.1248, simple_loss=0.2001, pruned_loss=0.02477, over 4817.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2054, pruned_loss=0.02777, over 973148.69 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 18:04:55,710 INFO [train.py:715] (6/8) Epoch 19, batch 22650, loss[loss=0.1291, simple_loss=0.2126, pruned_loss=0.02279, over 4829.00 frames.], tot_loss[loss=0.131, simple_loss=0.2061, pruned_loss=0.02797, over 973178.14 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:05:34,683 INFO [train.py:715] (6/8) Epoch 19, batch 22700, loss[loss=0.1286, simple_loss=0.2043, pruned_loss=0.02639, over 4919.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02864, over 973827.35 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:06:13,672 INFO [train.py:715] (6/8) Epoch 19, batch 22750, loss[loss=0.1163, simple_loss=0.1921, pruned_loss=0.02023, over 4783.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02874, over 973084.24 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:06:53,399 INFO [train.py:715] (6/8) Epoch 19, batch 22800, loss[loss=0.1193, simple_loss=0.1892, pruned_loss=0.02466, over 4837.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.02865, over 973991.78 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:07:33,733 INFO [train.py:715] (6/8) Epoch 19, batch 22850, loss[loss=0.151, simple_loss=0.2252, pruned_loss=0.03839, over 4976.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02888, over 973120.49 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:08:11,748 INFO [train.py:715] (6/8) Epoch 19, batch 22900, loss[loss=0.1125, simple_loss=0.1883, pruned_loss=0.01829, over 4935.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02888, over 973585.20 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:08:51,144 INFO [train.py:715] (6/8) Epoch 19, batch 22950, loss[loss=0.1229, simple_loss=0.2066, pruned_loss=0.01961, over 4954.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02867, over 973987.13 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 
18:09:31,741 INFO [train.py:715] (6/8) Epoch 19, batch 23000, loss[loss=0.126, simple_loss=0.2017, pruned_loss=0.02518, over 4900.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2054, pruned_loss=0.02864, over 973481.63 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 18:10:12,243 INFO [train.py:715] (6/8) Epoch 19, batch 23050, loss[loss=0.1575, simple_loss=0.224, pruned_loss=0.04549, over 4752.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02841, over 973314.62 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 18:10:52,468 INFO [train.py:715] (6/8) Epoch 19, batch 23100, loss[loss=0.1218, simple_loss=0.1983, pruned_loss=0.02262, over 4778.00 frames.], tot_loss[loss=0.1306, simple_loss=0.205, pruned_loss=0.02815, over 973084.49 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 18:11:33,199 INFO [train.py:715] (6/8) Epoch 19, batch 23150, loss[loss=0.1302, simple_loss=0.2003, pruned_loss=0.03004, over 4982.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2049, pruned_loss=0.02829, over 972698.04 frames.], batch size: 35, lr: 1.16e-04 +2022-05-09 18:12:14,193 INFO [train.py:715] (6/8) Epoch 19, batch 23200, loss[loss=0.106, simple_loss=0.1778, pruned_loss=0.01717, over 4841.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2046, pruned_loss=0.02806, over 972768.05 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:12:53,609 INFO [train.py:715] (6/8) Epoch 19, batch 23250, loss[loss=0.1427, simple_loss=0.2175, pruned_loss=0.03391, over 4971.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2051, pruned_loss=0.02835, over 973011.72 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:13:34,368 INFO [train.py:715] (6/8) Epoch 19, batch 23300, loss[loss=0.1333, simple_loss=0.2092, pruned_loss=0.02868, over 4824.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2049, pruned_loss=0.02792, over 973367.25 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:14:16,105 INFO [train.py:715] (6/8) Epoch 19, batch 23350, loss[loss=0.163, simple_loss=0.2169, pruned_loss=0.05451, over 4817.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02839, over 972947.39 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:14:56,718 INFO [train.py:715] (6/8) Epoch 19, batch 23400, loss[loss=0.1197, simple_loss=0.1968, pruned_loss=0.02132, over 4858.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02835, over 973067.79 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 18:15:37,875 INFO [train.py:715] (6/8) Epoch 19, batch 23450, loss[loss=0.1062, simple_loss=0.1737, pruned_loss=0.01933, over 4914.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.0283, over 971863.51 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 18:16:19,143 INFO [train.py:715] (6/8) Epoch 19, batch 23500, loss[loss=0.1317, simple_loss=0.2003, pruned_loss=0.03159, over 4764.00 frames.], tot_loss[loss=0.1301, simple_loss=0.205, pruned_loss=0.02767, over 971406.41 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 18:17:00,526 INFO [train.py:715] (6/8) Epoch 19, batch 23550, loss[loss=0.1223, simple_loss=0.2043, pruned_loss=0.02011, over 4936.00 frames.], tot_loss[loss=0.1301, simple_loss=0.205, pruned_loss=0.02763, over 971125.29 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:17:41,327 INFO [train.py:715] (6/8) Epoch 19, batch 23600, loss[loss=0.1241, simple_loss=0.2026, pruned_loss=0.02275, over 4912.00 frames.], tot_loss[loss=0.13, simple_loss=0.2045, pruned_loss=0.02773, over 972026.68 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 18:18:22,142 
INFO [train.py:715] (6/8) Epoch 19, batch 23650, loss[loss=0.1299, simple_loss=0.2017, pruned_loss=0.029, over 4812.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2048, pruned_loss=0.02788, over 971910.84 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:19:04,135 INFO [train.py:715] (6/8) Epoch 19, batch 23700, loss[loss=0.119, simple_loss=0.1985, pruned_loss=0.01975, over 4782.00 frames.], tot_loss[loss=0.13, simple_loss=0.2046, pruned_loss=0.02767, over 972255.09 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:19:44,520 INFO [train.py:715] (6/8) Epoch 19, batch 23750, loss[loss=0.1197, simple_loss=0.1978, pruned_loss=0.02079, over 4811.00 frames.], tot_loss[loss=0.1304, simple_loss=0.205, pruned_loss=0.02792, over 972280.75 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 18:20:24,727 INFO [train.py:715] (6/8) Epoch 19, batch 23800, loss[loss=0.1104, simple_loss=0.1913, pruned_loss=0.01471, over 4963.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02803, over 971675.93 frames.], batch size: 28, lr: 1.16e-04 +2022-05-09 18:21:05,151 INFO [train.py:715] (6/8) Epoch 19, batch 23850, loss[loss=0.1316, simple_loss=0.1997, pruned_loss=0.03173, over 4987.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.02847, over 973071.51 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:21:45,590 INFO [train.py:715] (6/8) Epoch 19, batch 23900, loss[loss=0.1244, simple_loss=0.1997, pruned_loss=0.0245, over 4830.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02855, over 972143.85 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:22:24,904 INFO [train.py:715] (6/8) Epoch 19, batch 23950, loss[loss=0.1385, simple_loss=0.2149, pruned_loss=0.03112, over 4696.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02866, over 971852.33 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:23:05,248 INFO [train.py:715] (6/8) Epoch 19, batch 24000, loss[loss=0.1506, simple_loss=0.2289, pruned_loss=0.03612, over 4812.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02886, over 972223.02 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:23:05,249 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 18:23:15,158 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1046, simple_loss=0.1878, pruned_loss=0.01073, over 914524.00 frames. 
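
The learning-rate column also steps down inside this stretch of the log, from 1.17e-04 (through about batch 21600) to 1.16e-04 (from about batch 21650 on). A small companion to the parser sketched earlier can locate such steps; like that parser, it is an illustrative helper, not icefall code, and assumes one log entry per line.

import re

# Captures the batch index and the trailing "lr: ..." value of a training entry.
LR_RE = re.compile(r"batch (\d+), .*lr: ([0-9.e+-]+)\s*$")

def lr_steps(path):
    """Yield (batch, old_lr, new_lr) whenever the logged learning rate changes."""
    prev_lr = None
    with open(path) as f:
        for line in f:
            m = LR_RE.search(line.rstrip())
            if m is None:
                continue
            batch, lr = int(m.group(1)), float(m.group(2))
            if prev_lr is not None and lr != prev_lr:
                yield batch, prev_lr, lr
            prev_lr = lr

# Hypothetical usage:
# for batch, old, new in lr_steps("path/to/train.log"):
#     print(f"lr changed at batch {batch}: {old:g} -> {new:g}")
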
+2022-05-09 18:23:55,488 INFO [train.py:715] (6/8) Epoch 19, batch 24050, loss[loss=0.1174, simple_loss=0.204, pruned_loss=0.01543, over 4989.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2065, pruned_loss=0.02825, over 973032.91 frames.], batch size: 28, lr: 1.16e-04 +2022-05-09 18:24:36,272 INFO [train.py:715] (6/8) Epoch 19, batch 24100, loss[loss=0.1334, simple_loss=0.2041, pruned_loss=0.03141, over 4977.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2064, pruned_loss=0.02824, over 973152.66 frames.], batch size: 35, lr: 1.16e-04 +2022-05-09 18:25:16,114 INFO [train.py:715] (6/8) Epoch 19, batch 24150, loss[loss=0.1663, simple_loss=0.2263, pruned_loss=0.05314, over 4747.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2058, pruned_loss=0.02789, over 972452.81 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 18:25:56,280 INFO [train.py:715] (6/8) Epoch 19, batch 24200, loss[loss=0.1536, simple_loss=0.2325, pruned_loss=0.03739, over 4641.00 frames.], tot_loss[loss=0.131, simple_loss=0.2062, pruned_loss=0.02796, over 972355.72 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:26:36,612 INFO [train.py:715] (6/8) Epoch 19, batch 24250, loss[loss=0.1456, simple_loss=0.2294, pruned_loss=0.03088, over 4743.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2058, pruned_loss=0.02771, over 971775.95 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 18:27:17,350 INFO [train.py:715] (6/8) Epoch 19, batch 24300, loss[loss=0.1393, simple_loss=0.2143, pruned_loss=0.03216, over 4931.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2054, pruned_loss=0.02779, over 971303.65 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 18:27:56,396 INFO [train.py:715] (6/8) Epoch 19, batch 24350, loss[loss=0.1268, simple_loss=0.2101, pruned_loss=0.02174, over 4760.00 frames.], tot_loss[loss=0.1299, simple_loss=0.205, pruned_loss=0.02738, over 971742.66 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 18:28:36,037 INFO [train.py:715] (6/8) Epoch 19, batch 24400, loss[loss=0.1132, simple_loss=0.2015, pruned_loss=0.01246, over 4965.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2044, pruned_loss=0.02754, over 972243.63 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:29:16,473 INFO [train.py:715] (6/8) Epoch 19, batch 24450, loss[loss=0.1238, simple_loss=0.204, pruned_loss=0.02178, over 4887.00 frames.], tot_loss[loss=0.1296, simple_loss=0.2041, pruned_loss=0.0275, over 972520.02 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 18:29:55,852 INFO [train.py:715] (6/8) Epoch 19, batch 24500, loss[loss=0.1043, simple_loss=0.1847, pruned_loss=0.01198, over 4812.00 frames.], tot_loss[loss=0.1286, simple_loss=0.2032, pruned_loss=0.027, over 972710.39 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:30:34,374 INFO [train.py:715] (6/8) Epoch 19, batch 24550, loss[loss=0.1182, simple_loss=0.1969, pruned_loss=0.01974, over 4778.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2046, pruned_loss=0.02788, over 972457.92 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:31:13,267 INFO [train.py:715] (6/8) Epoch 19, batch 24600, loss[loss=0.1039, simple_loss=0.1768, pruned_loss=0.0155, over 4789.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02807, over 972517.76 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 18:31:52,760 INFO [train.py:715] (6/8) Epoch 19, batch 24650, loss[loss=0.1369, simple_loss=0.2167, pruned_loss=0.02849, over 4907.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2064, pruned_loss=0.0282, over 972996.92 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 
18:32:31,488 INFO [train.py:715] (6/8) Epoch 19, batch 24700, loss[loss=0.119, simple_loss=0.2059, pruned_loss=0.01604, over 4789.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2066, pruned_loss=0.02825, over 973344.98 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:33:10,030 INFO [train.py:715] (6/8) Epoch 19, batch 24750, loss[loss=0.1367, simple_loss=0.214, pruned_loss=0.02966, over 4942.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2068, pruned_loss=0.02811, over 973615.57 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:33:50,384 INFO [train.py:715] (6/8) Epoch 19, batch 24800, loss[loss=0.1387, simple_loss=0.2079, pruned_loss=0.03473, over 4767.00 frames.], tot_loss[loss=0.132, simple_loss=0.2072, pruned_loss=0.02842, over 973172.18 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:34:30,024 INFO [train.py:715] (6/8) Epoch 19, batch 24850, loss[loss=0.1348, simple_loss=0.2118, pruned_loss=0.02889, over 4797.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02857, over 973100.34 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 18:35:09,086 INFO [train.py:715] (6/8) Epoch 19, batch 24900, loss[loss=0.1582, simple_loss=0.2302, pruned_loss=0.04312, over 4782.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2068, pruned_loss=0.02841, over 973522.42 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:35:48,553 INFO [train.py:715] (6/8) Epoch 19, batch 24950, loss[loss=0.1201, simple_loss=0.1994, pruned_loss=0.02046, over 4801.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2072, pruned_loss=0.02877, over 972818.10 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:36:28,358 INFO [train.py:715] (6/8) Epoch 19, batch 25000, loss[loss=0.1291, simple_loss=0.205, pruned_loss=0.02662, over 4829.00 frames.], tot_loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.0286, over 973390.24 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 18:37:07,184 INFO [train.py:715] (6/8) Epoch 19, batch 25050, loss[loss=0.1315, simple_loss=0.2037, pruned_loss=0.02968, over 4969.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02844, over 973756.61 frames.], batch size: 35, lr: 1.16e-04 +2022-05-09 18:37:46,486 INFO [train.py:715] (6/8) Epoch 19, batch 25100, loss[loss=0.1368, simple_loss=0.205, pruned_loss=0.03427, over 4830.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02888, over 973635.64 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:38:26,084 INFO [train.py:715] (6/8) Epoch 19, batch 25150, loss[loss=0.1279, simple_loss=0.2021, pruned_loss=0.02688, over 4788.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02859, over 973589.01 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:39:05,721 INFO [train.py:715] (6/8) Epoch 19, batch 25200, loss[loss=0.126, simple_loss=0.203, pruned_loss=0.02449, over 4862.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02802, over 973482.51 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 18:39:44,330 INFO [train.py:715] (6/8) Epoch 19, batch 25250, loss[loss=0.1202, simple_loss=0.1933, pruned_loss=0.02353, over 4812.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.0282, over 973800.55 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:40:23,578 INFO [train.py:715] (6/8) Epoch 19, batch 25300, loss[loss=0.1469, simple_loss=0.2224, pruned_loss=0.03568, over 4827.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02845, over 973411.41 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:41:03,222 
INFO [train.py:715] (6/8) Epoch 19, batch 25350, loss[loss=0.1472, simple_loss=0.228, pruned_loss=0.0332, over 4982.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02848, over 972507.22 frames.], batch size: 33, lr: 1.16e-04 +2022-05-09 18:41:42,448 INFO [train.py:715] (6/8) Epoch 19, batch 25400, loss[loss=0.1138, simple_loss=0.1833, pruned_loss=0.02212, over 4865.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2054, pruned_loss=0.02819, over 971955.18 frames.], batch size: 38, lr: 1.16e-04 +2022-05-09 18:42:21,496 INFO [train.py:715] (6/8) Epoch 19, batch 25450, loss[loss=0.1185, simple_loss=0.1833, pruned_loss=0.02682, over 4827.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02862, over 971935.94 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:43:00,716 INFO [train.py:715] (6/8) Epoch 19, batch 25500, loss[loss=0.1279, simple_loss=0.2041, pruned_loss=0.02585, over 4952.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02823, over 971523.50 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 18:43:39,826 INFO [train.py:715] (6/8) Epoch 19, batch 25550, loss[loss=0.1755, simple_loss=0.2308, pruned_loss=0.06008, over 4827.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2054, pruned_loss=0.02803, over 971136.10 frames.], batch size: 27, lr: 1.16e-04 +2022-05-09 18:44:18,034 INFO [train.py:715] (6/8) Epoch 19, batch 25600, loss[loss=0.1451, simple_loss=0.2251, pruned_loss=0.03253, over 4799.00 frames.], tot_loss[loss=0.13, simple_loss=0.2049, pruned_loss=0.0276, over 971420.98 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:44:56,961 INFO [train.py:715] (6/8) Epoch 19, batch 25650, loss[loss=0.1203, simple_loss=0.1984, pruned_loss=0.02112, over 4757.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2056, pruned_loss=0.02799, over 972430.13 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 18:45:36,005 INFO [train.py:715] (6/8) Epoch 19, batch 25700, loss[loss=0.1192, simple_loss=0.1957, pruned_loss=0.02135, over 4857.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02836, over 972268.52 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 18:46:14,566 INFO [train.py:715] (6/8) Epoch 19, batch 25750, loss[loss=0.1373, simple_loss=0.2057, pruned_loss=0.03445, over 4780.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.02908, over 971938.47 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:46:53,575 INFO [train.py:715] (6/8) Epoch 19, batch 25800, loss[loss=0.1324, simple_loss=0.2119, pruned_loss=0.02639, over 4939.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02831, over 971748.03 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 18:47:32,972 INFO [train.py:715] (6/8) Epoch 19, batch 25850, loss[loss=0.1285, simple_loss=0.2091, pruned_loss=0.02399, over 4912.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02827, over 971490.63 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 18:48:12,302 INFO [train.py:715] (6/8) Epoch 19, batch 25900, loss[loss=0.1162, simple_loss=0.1993, pruned_loss=0.01652, over 4816.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02835, over 972556.75 frames.], batch size: 27, lr: 1.16e-04 +2022-05-09 18:48:50,880 INFO [train.py:715] (6/8) Epoch 19, batch 25950, loss[loss=0.1619, simple_loss=0.246, pruned_loss=0.03885, over 4866.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02845, over 973183.20 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 18:49:30,506 INFO 
[train.py:715] (6/8) Epoch 19, batch 26000, loss[loss=0.1361, simple_loss=0.2284, pruned_loss=0.02194, over 4770.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2062, pruned_loss=0.02796, over 972871.91 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:50:10,477 INFO [train.py:715] (6/8) Epoch 19, batch 26050, loss[loss=0.1384, simple_loss=0.2077, pruned_loss=0.03454, over 4987.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2064, pruned_loss=0.02827, over 972115.85 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 18:50:49,159 INFO [train.py:715] (6/8) Epoch 19, batch 26100, loss[loss=0.1304, simple_loss=0.2075, pruned_loss=0.02666, over 4979.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2068, pruned_loss=0.02869, over 971798.06 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:51:28,557 INFO [train.py:715] (6/8) Epoch 19, batch 26150, loss[loss=0.1219, simple_loss=0.1981, pruned_loss=0.02284, over 4860.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02841, over 971715.16 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 18:52:07,560 INFO [train.py:715] (6/8) Epoch 19, batch 26200, loss[loss=0.1417, simple_loss=0.2153, pruned_loss=0.03401, over 4875.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2051, pruned_loss=0.02805, over 972425.85 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 18:52:47,077 INFO [train.py:715] (6/8) Epoch 19, batch 26250, loss[loss=0.1223, simple_loss=0.198, pruned_loss=0.02331, over 4819.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2054, pruned_loss=0.02867, over 972746.04 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:53:25,474 INFO [train.py:715] (6/8) Epoch 19, batch 26300, loss[loss=0.1265, simple_loss=0.2042, pruned_loss=0.02436, over 4925.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2062, pruned_loss=0.02921, over 972437.61 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 18:54:04,844 INFO [train.py:715] (6/8) Epoch 19, batch 26350, loss[loss=0.1298, simple_loss=0.2091, pruned_loss=0.02528, over 4654.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2065, pruned_loss=0.02945, over 971937.19 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:54:44,064 INFO [train.py:715] (6/8) Epoch 19, batch 26400, loss[loss=0.1557, simple_loss=0.2385, pruned_loss=0.03644, over 4988.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.0293, over 972063.15 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:55:23,160 INFO [train.py:715] (6/8) Epoch 19, batch 26450, loss[loss=0.1252, simple_loss=0.1935, pruned_loss=0.02848, over 4979.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2057, pruned_loss=0.02901, over 970935.50 frames.], batch size: 28, lr: 1.16e-04 +2022-05-09 18:56:02,218 INFO [train.py:715] (6/8) Epoch 19, batch 26500, loss[loss=0.1282, simple_loss=0.2085, pruned_loss=0.02396, over 4804.00 frames.], tot_loss[loss=0.131, simple_loss=0.205, pruned_loss=0.02853, over 970384.84 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:56:40,884 INFO [train.py:715] (6/8) Epoch 19, batch 26550, loss[loss=0.1103, simple_loss=0.1869, pruned_loss=0.01687, over 4892.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2046, pruned_loss=0.02822, over 970868.92 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 18:57:21,644 INFO [train.py:715] (6/8) Epoch 19, batch 26600, loss[loss=0.1447, simple_loss=0.227, pruned_loss=0.03118, over 4938.00 frames.], tot_loss[loss=0.1308, simple_loss=0.205, pruned_loss=0.02831, over 971923.31 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:58:02,787 INFO 
[train.py:715] (6/8) Epoch 19, batch 26650, loss[loss=0.1257, simple_loss=0.1931, pruned_loss=0.02917, over 4893.00 frames.], tot_loss[loss=0.1308, simple_loss=0.205, pruned_loss=0.02834, over 972108.44 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 18:58:41,698 INFO [train.py:715] (6/8) Epoch 19, batch 26700, loss[loss=0.1294, simple_loss=0.2041, pruned_loss=0.02741, over 4926.00 frames.], tot_loss[loss=0.13, simple_loss=0.2041, pruned_loss=0.02798, over 971860.08 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:59:21,013 INFO [train.py:715] (6/8) Epoch 19, batch 26750, loss[loss=0.1137, simple_loss=0.1904, pruned_loss=0.01853, over 4650.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2046, pruned_loss=0.0286, over 972379.28 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:00:00,961 INFO [train.py:715] (6/8) Epoch 19, batch 26800, loss[loss=0.1072, simple_loss=0.1751, pruned_loss=0.01968, over 4782.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2042, pruned_loss=0.02828, over 971945.00 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:00:41,177 INFO [train.py:715] (6/8) Epoch 19, batch 26850, loss[loss=0.1351, simple_loss=0.2135, pruned_loss=0.02829, over 4898.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2049, pruned_loss=0.02789, over 971966.28 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 19:01:20,368 INFO [train.py:715] (6/8) Epoch 19, batch 26900, loss[loss=0.1229, simple_loss=0.1974, pruned_loss=0.02416, over 4752.00 frames.], tot_loss[loss=0.13, simple_loss=0.2047, pruned_loss=0.02766, over 972926.33 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 19:02:00,260 INFO [train.py:715] (6/8) Epoch 19, batch 26950, loss[loss=0.1446, simple_loss=0.2284, pruned_loss=0.03037, over 4883.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2046, pruned_loss=0.02748, over 972688.65 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 19:02:39,719 INFO [train.py:715] (6/8) Epoch 19, batch 27000, loss[loss=0.137, simple_loss=0.2129, pruned_loss=0.03058, over 4915.00 frames.], tot_loss[loss=0.1296, simple_loss=0.2045, pruned_loss=0.02735, over 972905.18 frames.], batch size: 39, lr: 1.16e-04 +2022-05-09 19:02:39,720 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 19:02:49,599 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1047, simple_loss=0.1878, pruned_loss=0.0108, over 914524.00 frames. 
+2022-05-09 19:03:29,472 INFO [train.py:715] (6/8) Epoch 19, batch 27050, loss[loss=0.1241, simple_loss=0.2176, pruned_loss=0.01532, over 4803.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2048, pruned_loss=0.02737, over 972587.32 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 19:04:09,795 INFO [train.py:715] (6/8) Epoch 19, batch 27100, loss[loss=0.1503, simple_loss=0.2206, pruned_loss=0.03997, over 4836.00 frames.], tot_loss[loss=0.13, simple_loss=0.2047, pruned_loss=0.02764, over 972751.34 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 19:04:50,662 INFO [train.py:715] (6/8) Epoch 19, batch 27150, loss[loss=0.1234, simple_loss=0.2033, pruned_loss=0.02174, over 4756.00 frames.], tot_loss[loss=0.1301, simple_loss=0.205, pruned_loss=0.0276, over 972509.16 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:05:30,595 INFO [train.py:715] (6/8) Epoch 19, batch 27200, loss[loss=0.08644, simple_loss=0.1545, pruned_loss=0.009211, over 4803.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2049, pruned_loss=0.02791, over 973081.47 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 19:06:11,131 INFO [train.py:715] (6/8) Epoch 19, batch 27250, loss[loss=0.1348, simple_loss=0.2107, pruned_loss=0.02949, over 4923.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02857, over 972519.10 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 19:06:52,926 INFO [train.py:715] (6/8) Epoch 19, batch 27300, loss[loss=0.1496, simple_loss=0.2261, pruned_loss=0.03649, over 4875.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.0283, over 971910.32 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:07:33,651 INFO [train.py:715] (6/8) Epoch 19, batch 27350, loss[loss=0.1236, simple_loss=0.1936, pruned_loss=0.02679, over 4819.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2057, pruned_loss=0.02871, over 971430.21 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 19:08:14,912 INFO [train.py:715] (6/8) Epoch 19, batch 27400, loss[loss=0.1331, simple_loss=0.2004, pruned_loss=0.03294, over 4695.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2047, pruned_loss=0.02823, over 971424.68 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:08:54,857 INFO [train.py:715] (6/8) Epoch 19, batch 27450, loss[loss=0.1336, simple_loss=0.2165, pruned_loss=0.02533, over 4920.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.02836, over 971621.20 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 19:09:36,500 INFO [train.py:715] (6/8) Epoch 19, batch 27500, loss[loss=0.1228, simple_loss=0.2039, pruned_loss=0.02085, over 4815.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2045, pruned_loss=0.028, over 971179.93 frames.], batch size: 27, lr: 1.16e-04 +2022-05-09 19:10:17,086 INFO [train.py:715] (6/8) Epoch 19, batch 27550, loss[loss=0.1237, simple_loss=0.1916, pruned_loss=0.02792, over 4942.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2046, pruned_loss=0.02786, over 972287.39 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:10:57,706 INFO [train.py:715] (6/8) Epoch 19, batch 27600, loss[loss=0.119, simple_loss=0.2019, pruned_loss=0.01802, over 4799.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2045, pruned_loss=0.02747, over 972454.40 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:11:38,781 INFO [train.py:715] (6/8) Epoch 19, batch 27650, loss[loss=0.1187, simple_loss=0.1948, pruned_loss=0.02133, over 4832.00 frames.], tot_loss[loss=0.1296, simple_loss=0.2044, pruned_loss=0.02742, over 971786.49 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 
19:12:19,405 INFO [train.py:715] (6/8) Epoch 19, batch 27700, loss[loss=0.1123, simple_loss=0.1823, pruned_loss=0.02115, over 4929.00 frames.], tot_loss[loss=0.1294, simple_loss=0.2038, pruned_loss=0.02748, over 971838.47 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:13:00,036 INFO [train.py:715] (6/8) Epoch 19, batch 27750, loss[loss=0.1514, simple_loss=0.2302, pruned_loss=0.03629, over 4932.00 frames.], tot_loss[loss=0.1299, simple_loss=0.204, pruned_loss=0.02786, over 971539.30 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:13:40,115 INFO [train.py:715] (6/8) Epoch 19, batch 27800, loss[loss=0.1204, simple_loss=0.1864, pruned_loss=0.02714, over 4847.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2037, pruned_loss=0.02808, over 971230.00 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 19:14:21,137 INFO [train.py:715] (6/8) Epoch 19, batch 27850, loss[loss=0.1182, simple_loss=0.2003, pruned_loss=0.01801, over 4850.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2037, pruned_loss=0.0279, over 971447.52 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:15:01,164 INFO [train.py:715] (6/8) Epoch 19, batch 27900, loss[loss=0.1353, simple_loss=0.2038, pruned_loss=0.03334, over 4797.00 frames.], tot_loss[loss=0.131, simple_loss=0.2049, pruned_loss=0.02853, over 972550.93 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:15:41,282 INFO [train.py:715] (6/8) Epoch 19, batch 27950, loss[loss=0.1329, simple_loss=0.2138, pruned_loss=0.02596, over 4747.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02834, over 972275.15 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:16:21,261 INFO [train.py:715] (6/8) Epoch 19, batch 28000, loss[loss=0.155, simple_loss=0.2247, pruned_loss=0.04268, over 4800.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02825, over 972427.15 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:17:02,096 INFO [train.py:715] (6/8) Epoch 19, batch 28050, loss[loss=0.1405, simple_loss=0.2229, pruned_loss=0.02904, over 4988.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02844, over 972334.79 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:17:42,533 INFO [train.py:715] (6/8) Epoch 19, batch 28100, loss[loss=0.1049, simple_loss=0.1791, pruned_loss=0.01541, over 4801.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02855, over 972083.17 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:18:22,478 INFO [train.py:715] (6/8) Epoch 19, batch 28150, loss[loss=0.1358, simple_loss=0.209, pruned_loss=0.03133, over 4930.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2052, pruned_loss=0.02827, over 972268.45 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:19:02,907 INFO [train.py:715] (6/8) Epoch 19, batch 28200, loss[loss=0.1116, simple_loss=0.1861, pruned_loss=0.01854, over 4899.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02871, over 972690.36 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:19:42,617 INFO [train.py:715] (6/8) Epoch 19, batch 28250, loss[loss=0.1338, simple_loss=0.1996, pruned_loss=0.03406, over 4840.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2047, pruned_loss=0.02806, over 972972.56 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 19:20:22,523 INFO [train.py:715] (6/8) Epoch 19, batch 28300, loss[loss=0.1314, simple_loss=0.2071, pruned_loss=0.02788, over 4746.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2044, pruned_loss=0.02821, over 971456.26 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 
19:21:02,199 INFO [train.py:715] (6/8) Epoch 19, batch 28350, loss[loss=0.1502, simple_loss=0.229, pruned_loss=0.03571, over 4908.00 frames.], tot_loss[loss=0.1307, simple_loss=0.205, pruned_loss=0.02822, over 972298.36 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:21:42,234 INFO [train.py:715] (6/8) Epoch 19, batch 28400, loss[loss=0.1475, simple_loss=0.2276, pruned_loss=0.03367, over 4817.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2045, pruned_loss=0.02809, over 972627.95 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 19:22:22,336 INFO [train.py:715] (6/8) Epoch 19, batch 28450, loss[loss=0.1116, simple_loss=0.183, pruned_loss=0.02012, over 4982.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.02838, over 972078.49 frames.], batch size: 28, lr: 1.16e-04 +2022-05-09 19:23:02,158 INFO [train.py:715] (6/8) Epoch 19, batch 28500, loss[loss=0.1174, simple_loss=0.1951, pruned_loss=0.01983, over 4874.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02833, over 972627.48 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:23:42,835 INFO [train.py:715] (6/8) Epoch 19, batch 28550, loss[loss=0.1168, simple_loss=0.1988, pruned_loss=0.01745, over 4791.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02841, over 972430.49 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:24:22,312 INFO [train.py:715] (6/8) Epoch 19, batch 28600, loss[loss=0.1655, simple_loss=0.2459, pruned_loss=0.04256, over 4884.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.0286, over 972767.19 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:25:02,352 INFO [train.py:715] (6/8) Epoch 19, batch 28650, loss[loss=0.1261, simple_loss=0.2089, pruned_loss=0.02165, over 4728.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02871, over 973102.22 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:25:43,130 INFO [train.py:715] (6/8) Epoch 19, batch 28700, loss[loss=0.1153, simple_loss=0.187, pruned_loss=0.02182, over 4942.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02864, over 972961.55 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:26:22,657 INFO [train.py:715] (6/8) Epoch 19, batch 28750, loss[loss=0.1346, simple_loss=0.2139, pruned_loss=0.02767, over 4975.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02877, over 972018.73 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:27:02,570 INFO [train.py:715] (6/8) Epoch 19, batch 28800, loss[loss=0.1334, simple_loss=0.2217, pruned_loss=0.02257, over 4768.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02854, over 971898.12 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:27:41,941 INFO [train.py:715] (6/8) Epoch 19, batch 28850, loss[loss=0.1325, simple_loss=0.2106, pruned_loss=0.02721, over 4771.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02896, over 971778.27 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 19:28:21,328 INFO [train.py:715] (6/8) Epoch 19, batch 28900, loss[loss=0.1187, simple_loss=0.2052, pruned_loss=0.0161, over 4779.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02853, over 972215.32 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 19:28:59,440 INFO [train.py:715] (6/8) Epoch 19, batch 28950, loss[loss=0.1208, simple_loss=0.1897, pruned_loss=0.02593, over 4633.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02838, over 972123.00 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:29:38,329 
INFO [train.py:715] (6/8) Epoch 19, batch 29000, loss[loss=0.1276, simple_loss=0.1938, pruned_loss=0.03067, over 4983.00 frames.], tot_loss[loss=0.132, simple_loss=0.2069, pruned_loss=0.02858, over 972260.21 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 19:30:17,560 INFO [train.py:715] (6/8) Epoch 19, batch 29050, loss[loss=0.1432, simple_loss=0.2225, pruned_loss=0.03196, over 4762.00 frames.], tot_loss[loss=0.132, simple_loss=0.2071, pruned_loss=0.02845, over 970940.37 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:30:56,440 INFO [train.py:715] (6/8) Epoch 19, batch 29100, loss[loss=0.1299, simple_loss=0.2074, pruned_loss=0.02621, over 4976.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2068, pruned_loss=0.02837, over 970869.09 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:31:35,385 INFO [train.py:715] (6/8) Epoch 19, batch 29150, loss[loss=0.1008, simple_loss=0.1713, pruned_loss=0.01518, over 4781.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2066, pruned_loss=0.02836, over 971325.13 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 19:32:14,168 INFO [train.py:715] (6/8) Epoch 19, batch 29200, loss[loss=0.1287, simple_loss=0.2018, pruned_loss=0.02781, over 4810.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02833, over 971340.27 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:32:53,533 INFO [train.py:715] (6/8) Epoch 19, batch 29250, loss[loss=0.1294, simple_loss=0.2026, pruned_loss=0.02807, over 4779.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02824, over 971725.73 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:33:32,162 INFO [train.py:715] (6/8) Epoch 19, batch 29300, loss[loss=0.14, simple_loss=0.2126, pruned_loss=0.03373, over 4852.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02853, over 972142.33 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 19:34:11,675 INFO [train.py:715] (6/8) Epoch 19, batch 29350, loss[loss=0.1243, simple_loss=0.2012, pruned_loss=0.02377, over 4755.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.0288, over 972720.25 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:34:50,603 INFO [train.py:715] (6/8) Epoch 19, batch 29400, loss[loss=0.1308, simple_loss=0.2018, pruned_loss=0.02989, over 4944.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02894, over 972240.02 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:35:29,744 INFO [train.py:715] (6/8) Epoch 19, batch 29450, loss[loss=0.1381, simple_loss=0.2039, pruned_loss=0.03618, over 4846.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02843, over 971658.44 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 19:36:09,172 INFO [train.py:715] (6/8) Epoch 19, batch 29500, loss[loss=0.1184, simple_loss=0.1902, pruned_loss=0.02333, over 4807.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02887, over 971413.65 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:36:48,560 INFO [train.py:715] (6/8) Epoch 19, batch 29550, loss[loss=0.1951, simple_loss=0.2631, pruned_loss=0.06358, over 4887.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02872, over 972418.48 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:37:28,172 INFO [train.py:715] (6/8) Epoch 19, batch 29600, loss[loss=0.1205, simple_loss=0.1946, pruned_loss=0.02318, over 4865.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02855, over 973022.98 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 19:38:07,301 INFO 
[train.py:715] (6/8) Epoch 19, batch 29650, loss[loss=0.1325, simple_loss=0.2009, pruned_loss=0.03203, over 4860.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2056, pruned_loss=0.02864, over 972648.37 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:38:47,455 INFO [train.py:715] (6/8) Epoch 19, batch 29700, loss[loss=0.1213, simple_loss=0.1982, pruned_loss=0.02216, over 4985.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2053, pruned_loss=0.0286, over 972149.97 frames.], batch size: 33, lr: 1.16e-04 +2022-05-09 19:39:26,744 INFO [train.py:715] (6/8) Epoch 19, batch 29750, loss[loss=0.1375, simple_loss=0.2089, pruned_loss=0.03305, over 4841.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02848, over 972388.63 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:40:06,092 INFO [train.py:715] (6/8) Epoch 19, batch 29800, loss[loss=0.1205, simple_loss=0.194, pruned_loss=0.02349, over 4888.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02832, over 972007.59 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:40:45,396 INFO [train.py:715] (6/8) Epoch 19, batch 29850, loss[loss=0.119, simple_loss=0.1986, pruned_loss=0.01968, over 4877.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2055, pruned_loss=0.02777, over 972007.93 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:41:24,812 INFO [train.py:715] (6/8) Epoch 19, batch 29900, loss[loss=0.1127, simple_loss=0.1898, pruned_loss=0.01776, over 4803.00 frames.], tot_loss[loss=0.1294, simple_loss=0.2045, pruned_loss=0.02714, over 971848.96 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:42:04,769 INFO [train.py:715] (6/8) Epoch 19, batch 29950, loss[loss=0.1158, simple_loss=0.1885, pruned_loss=0.0215, over 4925.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2051, pruned_loss=0.0272, over 972233.85 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 19:42:43,618 INFO [train.py:715] (6/8) Epoch 19, batch 30000, loss[loss=0.1277, simple_loss=0.2, pruned_loss=0.02774, over 4886.00 frames.], tot_loss[loss=0.131, simple_loss=0.2061, pruned_loss=0.02801, over 972471.78 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:42:43,619 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 19:42:53,508 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1045, simple_loss=0.1877, pruned_loss=0.01067, over 914524.00 frames. 
+2022-05-09 19:43:32,626 INFO [train.py:715] (6/8) Epoch 19, batch 30050, loss[loss=0.1367, simple_loss=0.2186, pruned_loss=0.02743, over 4942.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2065, pruned_loss=0.02811, over 972513.46 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:44:12,191 INFO [train.py:715] (6/8) Epoch 19, batch 30100, loss[loss=0.1213, simple_loss=0.2046, pruned_loss=0.01898, over 4938.00 frames.], tot_loss[loss=0.132, simple_loss=0.2072, pruned_loss=0.0284, over 971974.50 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:44:51,312 INFO [train.py:715] (6/8) Epoch 19, batch 30150, loss[loss=0.1526, simple_loss=0.2299, pruned_loss=0.03772, over 4877.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2071, pruned_loss=0.02888, over 972086.91 frames.], batch size: 39, lr: 1.16e-04 +2022-05-09 19:45:31,085 INFO [train.py:715] (6/8) Epoch 19, batch 30200, loss[loss=0.1342, simple_loss=0.2076, pruned_loss=0.03039, over 4992.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02901, over 972918.70 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 19:46:09,574 INFO [train.py:715] (6/8) Epoch 19, batch 30250, loss[loss=0.156, simple_loss=0.2319, pruned_loss=0.04008, over 4893.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2078, pruned_loss=0.029, over 972737.89 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:46:48,898 INFO [train.py:715] (6/8) Epoch 19, batch 30300, loss[loss=0.1496, simple_loss=0.2148, pruned_loss=0.04221, over 4705.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2075, pruned_loss=0.02906, over 972246.16 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:47:28,487 INFO [train.py:715] (6/8) Epoch 19, batch 30350, loss[loss=0.1447, simple_loss=0.2155, pruned_loss=0.037, over 4911.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02873, over 972734.75 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:48:08,089 INFO [train.py:715] (6/8) Epoch 19, batch 30400, loss[loss=0.1357, simple_loss=0.214, pruned_loss=0.02869, over 4963.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02858, over 972348.13 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:48:47,856 INFO [train.py:715] (6/8) Epoch 19, batch 30450, loss[loss=0.1081, simple_loss=0.1753, pruned_loss=0.02049, over 4874.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02847, over 972408.37 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:49:26,662 INFO [train.py:715] (6/8) Epoch 19, batch 30500, loss[loss=0.1473, simple_loss=0.2136, pruned_loss=0.04043, over 4911.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02836, over 973351.42 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 19:50:06,594 INFO [train.py:715] (6/8) Epoch 19, batch 30550, loss[loss=0.1115, simple_loss=0.188, pruned_loss=0.01748, over 4816.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02809, over 973627.80 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:50:45,746 INFO [train.py:715] (6/8) Epoch 19, batch 30600, loss[loss=0.137, simple_loss=0.2177, pruned_loss=0.02817, over 4862.00 frames.], tot_loss[loss=0.1302, simple_loss=0.205, pruned_loss=0.0277, over 973228.28 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 19:51:25,830 INFO [train.py:715] (6/8) Epoch 19, batch 30650, loss[loss=0.1149, simple_loss=0.1981, pruned_loss=0.01583, over 4816.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2057, pruned_loss=0.02791, over 972714.26 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 
19:52:05,611 INFO [train.py:715] (6/8) Epoch 19, batch 30700, loss[loss=0.1159, simple_loss=0.1975, pruned_loss=0.01717, over 4817.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2053, pruned_loss=0.02788, over 973028.24 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 19:52:45,187 INFO [train.py:715] (6/8) Epoch 19, batch 30750, loss[loss=0.1292, simple_loss=0.2037, pruned_loss=0.0273, over 4804.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2053, pruned_loss=0.02769, over 973104.78 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:53:25,711 INFO [train.py:715] (6/8) Epoch 19, batch 30800, loss[loss=0.1025, simple_loss=0.1733, pruned_loss=0.01591, over 4974.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2045, pruned_loss=0.02763, over 972802.22 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 19:54:05,673 INFO [train.py:715] (6/8) Epoch 19, batch 30850, loss[loss=0.1274, simple_loss=0.2003, pruned_loss=0.02732, over 4921.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2049, pruned_loss=0.0276, over 972933.98 frames.], batch size: 39, lr: 1.16e-04 +2022-05-09 19:54:46,464 INFO [train.py:715] (6/8) Epoch 19, batch 30900, loss[loss=0.1123, simple_loss=0.1832, pruned_loss=0.02064, over 4825.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2056, pruned_loss=0.02805, over 973187.71 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 19:55:26,517 INFO [train.py:715] (6/8) Epoch 19, batch 30950, loss[loss=0.1369, simple_loss=0.2072, pruned_loss=0.03327, over 4988.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2048, pruned_loss=0.02823, over 973593.55 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 19:56:07,124 INFO [train.py:715] (6/8) Epoch 19, batch 31000, loss[loss=0.1186, simple_loss=0.2008, pruned_loss=0.01816, over 4809.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2048, pruned_loss=0.02795, over 973635.29 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:56:47,794 INFO [train.py:715] (6/8) Epoch 19, batch 31050, loss[loss=0.1193, simple_loss=0.1942, pruned_loss=0.02224, over 4848.00 frames.], tot_loss[loss=0.13, simple_loss=0.2045, pruned_loss=0.02773, over 973610.08 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 19:57:28,119 INFO [train.py:715] (6/8) Epoch 19, batch 31100, loss[loss=0.1275, simple_loss=0.1999, pruned_loss=0.02756, over 4939.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2052, pruned_loss=0.02798, over 972548.97 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 19:58:08,821 INFO [train.py:715] (6/8) Epoch 19, batch 31150, loss[loss=0.1326, simple_loss=0.2114, pruned_loss=0.02687, over 4763.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2055, pruned_loss=0.02795, over 972999.17 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:58:49,196 INFO [train.py:715] (6/8) Epoch 19, batch 31200, loss[loss=0.1223, simple_loss=0.1859, pruned_loss=0.0293, over 4776.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2053, pruned_loss=0.02768, over 972552.32 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 19:59:30,193 INFO [train.py:715] (6/8) Epoch 19, batch 31250, loss[loss=0.1204, simple_loss=0.2012, pruned_loss=0.01984, over 4897.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02802, over 971987.77 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:00:09,928 INFO [train.py:715] (6/8) Epoch 19, batch 31300, loss[loss=0.127, simple_loss=0.2048, pruned_loss=0.02456, over 4791.00 frames.], tot_loss[loss=0.13, simple_loss=0.2045, pruned_loss=0.02773, over 972445.91 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 20:00:50,571 
INFO [train.py:715] (6/8) Epoch 19, batch 31350, loss[loss=0.1114, simple_loss=0.1887, pruned_loss=0.01702, over 4777.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2046, pruned_loss=0.02789, over 972476.59 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 20:01:31,165 INFO [train.py:715] (6/8) Epoch 19, batch 31400, loss[loss=0.1152, simple_loss=0.1881, pruned_loss=0.02113, over 4931.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02825, over 972504.41 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 20:02:11,470 INFO [train.py:715] (6/8) Epoch 19, batch 31450, loss[loss=0.1173, simple_loss=0.1845, pruned_loss=0.02505, over 4927.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2053, pruned_loss=0.02854, over 973073.37 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 20:02:52,734 INFO [train.py:715] (6/8) Epoch 19, batch 31500, loss[loss=0.1293, simple_loss=0.1951, pruned_loss=0.03174, over 4776.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2046, pruned_loss=0.02796, over 972600.73 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 20:03:32,848 INFO [train.py:715] (6/8) Epoch 19, batch 31550, loss[loss=0.1031, simple_loss=0.1816, pruned_loss=0.01236, over 4933.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2048, pruned_loss=0.02791, over 971575.42 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 20:04:13,431 INFO [train.py:715] (6/8) Epoch 19, batch 31600, loss[loss=0.1118, simple_loss=0.1828, pruned_loss=0.02041, over 4820.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02819, over 972033.40 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 20:04:53,556 INFO [train.py:715] (6/8) Epoch 19, batch 31650, loss[loss=0.1073, simple_loss=0.1825, pruned_loss=0.01602, over 4922.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02837, over 971581.33 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 20:05:33,942 INFO [train.py:715] (6/8) Epoch 19, batch 31700, loss[loss=0.1371, simple_loss=0.212, pruned_loss=0.03106, over 4776.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2054, pruned_loss=0.02824, over 971183.15 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 20:06:14,408 INFO [train.py:715] (6/8) Epoch 19, batch 31750, loss[loss=0.1125, simple_loss=0.1829, pruned_loss=0.0211, over 4811.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.02839, over 971728.96 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 20:06:54,587 INFO [train.py:715] (6/8) Epoch 19, batch 31800, loss[loss=0.1557, simple_loss=0.234, pruned_loss=0.03866, over 4878.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2054, pruned_loss=0.02788, over 971807.98 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 20:07:35,683 INFO [train.py:715] (6/8) Epoch 19, batch 31850, loss[loss=0.114, simple_loss=0.1808, pruned_loss=0.02353, over 4778.00 frames.], tot_loss[loss=0.131, simple_loss=0.206, pruned_loss=0.02802, over 971542.75 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:08:15,980 INFO [train.py:715] (6/8) Epoch 19, batch 31900, loss[loss=0.1583, simple_loss=0.2249, pruned_loss=0.04587, over 4814.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.02799, over 971311.69 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 20:08:56,575 INFO [train.py:715] (6/8) Epoch 19, batch 31950, loss[loss=0.1179, simple_loss=0.1913, pruned_loss=0.0223, over 4916.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2061, pruned_loss=0.02811, over 972090.89 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 20:09:36,647 INFO 
[train.py:715] (6/8) Epoch 19, batch 32000, loss[loss=0.1255, simple_loss=0.2085, pruned_loss=0.02121, over 4842.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2065, pruned_loss=0.02818, over 971857.14 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 20:10:16,978 INFO [train.py:715] (6/8) Epoch 19, batch 32050, loss[loss=0.1196, simple_loss=0.1946, pruned_loss=0.02228, over 4940.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2065, pruned_loss=0.02837, over 972191.17 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 20:10:57,305 INFO [train.py:715] (6/8) Epoch 19, batch 32100, loss[loss=0.1086, simple_loss=0.1902, pruned_loss=0.01349, over 4875.00 frames.], tot_loss[loss=0.1311, simple_loss=0.206, pruned_loss=0.02813, over 972881.00 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 20:11:37,105 INFO [train.py:715] (6/8) Epoch 19, batch 32150, loss[loss=0.1265, simple_loss=0.2082, pruned_loss=0.02242, over 4918.00 frames.], tot_loss[loss=0.1309, simple_loss=0.206, pruned_loss=0.02794, over 973136.46 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 20:12:18,365 INFO [train.py:715] (6/8) Epoch 19, batch 32200, loss[loss=0.1127, simple_loss=0.1895, pruned_loss=0.01792, over 4806.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2066, pruned_loss=0.02828, over 973398.54 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 20:12:58,110 INFO [train.py:715] (6/8) Epoch 19, batch 32250, loss[loss=0.1192, simple_loss=0.1968, pruned_loss=0.0208, over 4758.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2061, pruned_loss=0.02823, over 972638.20 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 20:13:38,497 INFO [train.py:715] (6/8) Epoch 19, batch 32300, loss[loss=0.1224, simple_loss=0.1956, pruned_loss=0.02466, over 4866.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2067, pruned_loss=0.02809, over 973838.67 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 20:14:19,667 INFO [train.py:715] (6/8) Epoch 19, batch 32350, loss[loss=0.1252, simple_loss=0.1951, pruned_loss=0.02765, over 4960.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2067, pruned_loss=0.02848, over 973457.08 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 20:15:00,208 INFO [train.py:715] (6/8) Epoch 19, batch 32400, loss[loss=0.1131, simple_loss=0.1869, pruned_loss=0.01961, over 4786.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02861, over 973982.56 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 20:15:40,808 INFO [train.py:715] (6/8) Epoch 19, batch 32450, loss[loss=0.1396, simple_loss=0.2205, pruned_loss=0.02941, over 4976.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.0288, over 974149.18 frames.], batch size: 28, lr: 1.16e-04 +2022-05-09 20:16:20,801 INFO [train.py:715] (6/8) Epoch 19, batch 32500, loss[loss=0.1147, simple_loss=0.187, pruned_loss=0.02118, over 4819.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02824, over 973359.99 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 20:17:01,573 INFO [train.py:715] (6/8) Epoch 19, batch 32550, loss[loss=0.1257, simple_loss=0.192, pruned_loss=0.02972, over 4801.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02886, over 972767.27 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 20:17:41,584 INFO [train.py:715] (6/8) Epoch 19, batch 32600, loss[loss=0.1382, simple_loss=0.2033, pruned_loss=0.03654, over 4837.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02857, over 972055.76 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 20:18:21,660 INFO 
[train.py:715] (6/8) Epoch 19, batch 32650, loss[loss=0.1588, simple_loss=0.2182, pruned_loss=0.04974, over 4783.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02917, over 972055.80 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 20:19:02,308 INFO [train.py:715] (6/8) Epoch 19, batch 32700, loss[loss=0.1519, simple_loss=0.2331, pruned_loss=0.03532, over 4751.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02903, over 972271.19 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:19:42,126 INFO [train.py:715] (6/8) Epoch 19, batch 32750, loss[loss=0.162, simple_loss=0.2335, pruned_loss=0.04522, over 4846.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02888, over 972329.51 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 20:20:21,843 INFO [train.py:715] (6/8) Epoch 19, batch 32800, loss[loss=0.1381, simple_loss=0.2087, pruned_loss=0.03382, over 4877.00 frames.], tot_loss[loss=0.1309, simple_loss=0.205, pruned_loss=0.02837, over 972198.36 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 20:21:00,659 INFO [train.py:715] (6/8) Epoch 19, batch 32850, loss[loss=0.1371, simple_loss=0.2117, pruned_loss=0.03123, over 4898.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2053, pruned_loss=0.02804, over 972581.70 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 20:21:39,685 INFO [train.py:715] (6/8) Epoch 19, batch 32900, loss[loss=0.1332, simple_loss=0.2004, pruned_loss=0.03304, over 4784.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2051, pruned_loss=0.02775, over 972578.54 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 20:22:18,349 INFO [train.py:715] (6/8) Epoch 19, batch 32950, loss[loss=0.1245, simple_loss=0.2043, pruned_loss=0.0223, over 4800.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02813, over 973021.04 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 20:22:57,661 INFO [train.py:715] (6/8) Epoch 19, batch 33000, loss[loss=0.132, simple_loss=0.21, pruned_loss=0.02699, over 4889.00 frames.], tot_loss[loss=0.131, simple_loss=0.2059, pruned_loss=0.028, over 972771.35 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 20:22:57,661 INFO [train.py:733] (6/8) Computing validation loss +2022-05-09 20:23:07,492 INFO [train.py:742] (6/8) Epoch 19, validation: loss=0.1048, simple_loss=0.1878, pruned_loss=0.01088, over 914524.00 frames. 
+2022-05-09 20:23:46,772 INFO [train.py:715] (6/8) Epoch 19, batch 33050, loss[loss=0.1139, simple_loss=0.1987, pruned_loss=0.01455, over 4754.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02848, over 972843.87 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:24:26,214 INFO [train.py:715] (6/8) Epoch 19, batch 33100, loss[loss=0.1501, simple_loss=0.2245, pruned_loss=0.0379, over 4777.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2067, pruned_loss=0.02846, over 972476.26 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 20:25:05,034 INFO [train.py:715] (6/8) Epoch 19, batch 33150, loss[loss=0.1144, simple_loss=0.1854, pruned_loss=0.02168, over 4689.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2067, pruned_loss=0.02827, over 972837.49 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 20:25:44,211 INFO [train.py:715] (6/8) Epoch 19, batch 33200, loss[loss=0.13, simple_loss=0.2106, pruned_loss=0.02472, over 4928.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2064, pruned_loss=0.02823, over 973108.35 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 20:26:23,771 INFO [train.py:715] (6/8) Epoch 19, batch 33250, loss[loss=0.1369, simple_loss=0.2092, pruned_loss=0.03236, over 4916.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2059, pruned_loss=0.02771, over 973198.79 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 20:27:03,192 INFO [train.py:715] (6/8) Epoch 19, batch 33300, loss[loss=0.1381, simple_loss=0.212, pruned_loss=0.03215, over 4768.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2062, pruned_loss=0.02811, over 973339.25 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 20:27:42,911 INFO [train.py:715] (6/8) Epoch 19, batch 33350, loss[loss=0.1176, simple_loss=0.1836, pruned_loss=0.02581, over 4877.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2056, pruned_loss=0.02758, over 972499.99 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 20:28:22,077 INFO [train.py:715] (6/8) Epoch 19, batch 33400, loss[loss=0.1241, simple_loss=0.2016, pruned_loss=0.02333, over 4839.00 frames.], tot_loss[loss=0.13, simple_loss=0.205, pruned_loss=0.02752, over 971577.79 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 20:29:01,058 INFO [train.py:715] (6/8) Epoch 19, batch 33450, loss[loss=0.1164, simple_loss=0.1986, pruned_loss=0.01704, over 4919.00 frames.], tot_loss[loss=0.1307, simple_loss=0.206, pruned_loss=0.02766, over 972004.00 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 20:29:40,023 INFO [train.py:715] (6/8) Epoch 19, batch 33500, loss[loss=0.1405, simple_loss=0.2234, pruned_loss=0.02881, over 4763.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2064, pruned_loss=0.02765, over 971805.57 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:30:18,904 INFO [train.py:715] (6/8) Epoch 19, batch 33550, loss[loss=0.1153, simple_loss=0.1964, pruned_loss=0.01709, over 4991.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2057, pruned_loss=0.02724, over 971499.45 frames.], batch size: 15, lr: 1.15e-04 +2022-05-09 20:30:58,237 INFO [train.py:715] (6/8) Epoch 19, batch 33600, loss[loss=0.136, simple_loss=0.2173, pruned_loss=0.0273, over 4924.00 frames.], tot_loss[loss=0.1304, simple_loss=0.206, pruned_loss=0.02741, over 971136.71 frames.], batch size: 23, lr: 1.15e-04 +2022-05-09 20:31:37,217 INFO [train.py:715] (6/8) Epoch 19, batch 33650, loss[loss=0.137, simple_loss=0.2088, pruned_loss=0.03266, over 4918.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2054, pruned_loss=0.02711, over 970882.41 frames.], batch size: 29, lr: 1.15e-04 +2022-05-09 
20:32:16,618 INFO [train.py:715] (6/8) Epoch 19, batch 33700, loss[loss=0.1278, simple_loss=0.2018, pruned_loss=0.02689, over 4956.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2054, pruned_loss=0.02737, over 970652.26 frames.], batch size: 24, lr: 1.15e-04 +2022-05-09 20:32:55,333 INFO [train.py:715] (6/8) Epoch 19, batch 33750, loss[loss=0.1434, simple_loss=0.213, pruned_loss=0.03691, over 4863.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2051, pruned_loss=0.02752, over 971443.81 frames.], batch size: 20, lr: 1.15e-04 +2022-05-09 20:33:34,127 INFO [train.py:715] (6/8) Epoch 19, batch 33800, loss[loss=0.1127, simple_loss=0.1844, pruned_loss=0.02046, over 4942.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2049, pruned_loss=0.02761, over 971660.21 frames.], batch size: 21, lr: 1.15e-04 +2022-05-09 20:34:12,735 INFO [train.py:715] (6/8) Epoch 19, batch 33850, loss[loss=0.1189, simple_loss=0.1987, pruned_loss=0.01957, over 4789.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2054, pruned_loss=0.0279, over 971441.04 frames.], batch size: 18, lr: 1.15e-04 +2022-05-09 20:34:51,528 INFO [train.py:715] (6/8) Epoch 19, batch 33900, loss[loss=0.1269, simple_loss=0.2041, pruned_loss=0.02483, over 4708.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.02798, over 971550.48 frames.], batch size: 15, lr: 1.15e-04 +2022-05-09 20:35:31,246 INFO [train.py:715] (6/8) Epoch 19, batch 33950, loss[loss=0.1126, simple_loss=0.1972, pruned_loss=0.014, over 4940.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2067, pruned_loss=0.02825, over 972838.93 frames.], batch size: 23, lr: 1.15e-04 +2022-05-09 20:36:10,894 INFO [train.py:715] (6/8) Epoch 19, batch 34000, loss[loss=0.1286, simple_loss=0.1977, pruned_loss=0.0297, over 4882.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2059, pruned_loss=0.02793, over 972522.72 frames.], batch size: 32, lr: 1.15e-04 +2022-05-09 20:36:50,180 INFO [train.py:715] (6/8) Epoch 19, batch 34050, loss[loss=0.1286, simple_loss=0.2122, pruned_loss=0.02246, over 4883.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2065, pruned_loss=0.02808, over 973418.69 frames.], batch size: 22, lr: 1.15e-04 +2022-05-09 20:37:28,970 INFO [train.py:715] (6/8) Epoch 19, batch 34100, loss[loss=0.1371, simple_loss=0.2215, pruned_loss=0.02634, over 4892.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2066, pruned_loss=0.02788, over 972144.61 frames.], batch size: 19, lr: 1.15e-04 +2022-05-09 20:38:08,482 INFO [train.py:715] (6/8) Epoch 19, batch 34150, loss[loss=0.1357, simple_loss=0.2167, pruned_loss=0.02733, over 4838.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2061, pruned_loss=0.02761, over 971490.11 frames.], batch size: 15, lr: 1.15e-04 +2022-05-09 20:38:48,058 INFO [train.py:715] (6/8) Epoch 19, batch 34200, loss[loss=0.1337, simple_loss=0.2178, pruned_loss=0.02485, over 4769.00 frames.], tot_loss[loss=0.13, simple_loss=0.2049, pruned_loss=0.02757, over 971841.80 frames.], batch size: 18, lr: 1.15e-04 +2022-05-09 20:39:27,602 INFO [train.py:715] (6/8) Epoch 19, batch 34250, loss[loss=0.1154, simple_loss=0.1873, pruned_loss=0.02171, over 4833.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02795, over 972392.00 frames.], batch size: 27, lr: 1.15e-04 +2022-05-09 20:40:06,930 INFO [train.py:715] (6/8) Epoch 19, batch 34300, loss[loss=0.1277, simple_loss=0.2075, pruned_loss=0.02394, over 4839.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2044, pruned_loss=0.02749, over 973100.12 frames.], batch size: 30, lr: 1.15e-04 +2022-05-09 20:40:46,128 
INFO [train.py:715] (6/8) Epoch 19, batch 34350, loss[loss=0.1318, simple_loss=0.213, pruned_loss=0.02532, over 4758.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2045, pruned_loss=0.02764, over 973245.45 frames.], batch size: 16, lr: 1.15e-04 +2022-05-09 20:41:25,884 INFO [train.py:715] (6/8) Epoch 19, batch 34400, loss[loss=0.1286, simple_loss=0.2059, pruned_loss=0.02564, over 4969.00 frames.], tot_loss[loss=0.13, simple_loss=0.2045, pruned_loss=0.02775, over 973958.39 frames.], batch size: 35, lr: 1.15e-04 +2022-05-09 20:42:05,059 INFO [train.py:715] (6/8) Epoch 19, batch 34450, loss[loss=0.1501, simple_loss=0.2197, pruned_loss=0.04031, over 4983.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02831, over 973404.43 frames.], batch size: 25, lr: 1.15e-04 +2022-05-09 20:42:44,561 INFO [train.py:715] (6/8) Epoch 19, batch 34500, loss[loss=0.1177, simple_loss=0.2006, pruned_loss=0.01745, over 4943.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2053, pruned_loss=0.02825, over 972710.77 frames.], batch size: 21, lr: 1.15e-04 +2022-05-09 20:43:24,269 INFO [train.py:715] (6/8) Epoch 19, batch 34550, loss[loss=0.1437, simple_loss=0.2185, pruned_loss=0.03447, over 4830.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02835, over 972674.85 frames.], batch size: 15, lr: 1.15e-04 +2022-05-09 20:44:03,131 INFO [train.py:715] (6/8) Epoch 19, batch 34600, loss[loss=0.1617, simple_loss=0.228, pruned_loss=0.04767, over 4694.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2075, pruned_loss=0.02905, over 972184.62 frames.], batch size: 15, lr: 1.15e-04 +2022-05-09 20:44:45,183 INFO [train.py:715] (6/8) Epoch 19, batch 34650, loss[loss=0.1083, simple_loss=0.1752, pruned_loss=0.0207, over 4818.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2075, pruned_loss=0.02905, over 972379.91 frames.], batch size: 12, lr: 1.15e-04 +2022-05-09 20:45:24,631 INFO [train.py:715] (6/8) Epoch 19, batch 34700, loss[loss=0.1377, simple_loss=0.2168, pruned_loss=0.02936, over 4856.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02903, over 971376.43 frames.], batch size: 38, lr: 1.15e-04 +2022-05-09 20:46:02,681 INFO [train.py:715] (6/8) Epoch 19, batch 34750, loss[loss=0.1093, simple_loss=0.1901, pruned_loss=0.01421, over 4931.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02918, over 971004.02 frames.], batch size: 23, lr: 1.15e-04 +2022-05-09 20:46:39,960 INFO [train.py:715] (6/8) Epoch 19, batch 34800, loss[loss=0.1351, simple_loss=0.2124, pruned_loss=0.02891, over 4912.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02897, over 971360.58 frames.], batch size: 19, lr: 1.15e-04 +2022-05-09 20:46:47,722 INFO [train.py:915] (6/8) Done!