diff --git "a/exp/log/log-train-2023-03-27-14-47-20-1" "b/exp/log/log-train-2023-03-27-14-47-20-1" new file mode 100644--- /dev/null +++ "b/exp/log/log-train-2023-03-27-14-47-20-1" @@ -0,0 +1,12291 @@ +2023-03-27 14:47:20,926 INFO [train.py:962] (1/4) Training started +2023-03-27 14:47:20,927 INFO [train.py:972] (1/4) Device: cuda:1 +2023-03-27 14:47:20,930 INFO [train.py:981] (1/4) {'frame_shift_ms': 10.0, 'allowed_excess_duration_ratio': 0.1, 'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '9426c9f730820d291f5dcb06be337662595fa7b4', 'k2-git-date': 'Sun Feb 5 17:35:01 2023', 'lhotse-version': '1.13.0.dev+git.4cbd1bde.clean', 'torch-version': '1.10.0+cu102', 'torch-cuda-available': True, 'torch-cuda-version': '10.2', 'python-version': '3.8', 'icefall-git-branch': 'bbpe', 'icefall-git-sha1': 'e03c10a-dirty', 'icefall-git-date': 'Mon Mar 27 00:05:03 2023', 'icefall-path': '/ceph-kw/kangwei/code/icefall_bbpe', 'k2-path': '/ceph-hw/kangwei/code/k2_release/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-hw/kangwei/dev_tools/anaconda3/envs/rnnt2/lib/python3.8/site-packages/lhotse-1.13.0.dev0+git.4cbd1bde.clean-py3.8.egg/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-9-0208143539-7dcb6bfd79-b6fdq', 'IP address': '10.177.13.150'}, 'world_size': 4, 'master_port': 12535, 'tensorboard': True, 'num_epochs': 50, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless7_bbpe/exp'), 'bbpe_model': 'data/lang_bbpe_500/bbpe.model', 'base_lr': 0.05, 'lr_batches': 5000, 'lr_epochs': 3.5, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 2000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,4,3,2,4', 'feedforward_dims': '1024,1024,2048,2048,1024', 'nhead': '8,8,8,8,8', 'encoder_dims': '384,384,384,384,384', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '256,256,256,256,256', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'manifest_dir': PosixPath('data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 300, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2023-03-27 14:47:20,930 INFO [train.py:983] (1/4) About to create model +2023-03-27 14:47:21,880 INFO [zipformer.py:178] (1/4) At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. +2023-03-27 14:47:21,904 INFO [train.py:987] (1/4) Number of model parameters: 70369391 +2023-03-27 14:47:28,412 INFO [train.py:1002] (1/4) Using DDP +2023-03-27 14:47:28,704 INFO [asr_datamodule.py:407] (1/4) About to get train cuts +2023-03-27 14:47:28,708 INFO [train.py:1083] (1/4) Filtering short and long utterances. +2023-03-27 14:47:28,708 INFO [train.py:1086] (1/4) Tokenizing and encoding texts in train cuts. 
+2023-03-27 14:47:28,708 INFO [asr_datamodule.py:224] (1/4) About to get Musan cuts
+2023-03-27 14:47:32,029 INFO [asr_datamodule.py:229] (1/4) Enable MUSAN
+2023-03-27 14:47:32,029 INFO [asr_datamodule.py:252] (1/4) Enable SpecAugment
+2023-03-27 14:47:32,029 INFO [asr_datamodule.py:253] (1/4) Time warp factor: 80
+2023-03-27 14:47:32,029 INFO [asr_datamodule.py:263] (1/4) Num frame mask: 10
+2023-03-27 14:47:32,029 INFO [asr_datamodule.py:276] (1/4) About to create train dataset
+2023-03-27 14:47:32,030 INFO [asr_datamodule.py:303] (1/4) Using DynamicBucketingSampler.
+2023-03-27 14:47:42,232 INFO [asr_datamodule.py:320] (1/4) About to create train dataloader
+2023-03-27 14:47:42,233 INFO [asr_datamodule.py:414] (1/4) About to get dev cuts
+2023-03-27 14:47:42,235 INFO [train.py:1102] (1/4) Tokenizing and encoding texts in valid cuts.
+2023-03-27 14:47:42,235 INFO [asr_datamodule.py:351] (1/4) About to create dev dataset
+2023-03-27 14:47:43,064 INFO [asr_datamodule.py:370] (1/4) About to create dev dataloader
+2023-03-27 14:48:25,410 INFO [train.py:892] (1/4) Epoch 1, batch 0, loss[loss=7.432, simple_loss=6.728, pruned_loss=7.019, over 19612.00 frames. ], tot_loss[loss=7.432, simple_loss=6.728, pruned_loss=7.019, over 19612.00 frames. ], batch size: 65, lr: 2.50e-02, grad_scale: 2.0
+2023-03-27 14:48:25,411 INFO [train.py:917] (1/4) Computing validation loss
+2023-03-27 14:48:53,173 INFO [train.py:926] (1/4) Epoch 1, validation: loss=6.85, simple_loss=6.179, pruned_loss=6.691, over 2883724.00 frames.
+2023-03-27 14:48:53,174 INFO [train.py:927] (1/4) Maximum memory allocated so far is 15324MB
+2023-03-27 14:49:01,286 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5.0, num_to_drop=2, layers_to_drop={0, 3}
+2023-03-27 14:49:05,777 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=3.62 vs. limit=2.0
+2023-03-27 14:49:31,451 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 14:50:04,615 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=15.38 vs. limit=2.0
+2023-03-27 14:50:15,181 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=196.56 vs. limit=5.0
+2023-03-27 14:50:17,293 INFO [train.py:892] (1/4) Epoch 1, batch 50, loss[loss=1.327, simple_loss=1.179, pruned_loss=1.331, over 19634.00 frames. ], tot_loss[loss=2.207, simple_loss=1.999, pruned_loss=1.998, over 891182.13 frames. ], batch size: 343, lr: 2.75e-02, grad_scale: 0.5
+2023-03-27 14:50:26,551 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4506, 5.4405, 5.4511, 5.4422, 5.4530, 5.4457, 4.7661, 5.4390],
+ device='cuda:1'), covar=tensor([0.0010, 0.0008, 0.0016, 0.0017, 0.0010, 0.0019, 0.0016, 0.0014],
+ device='cuda:1'), in_proj_covar=tensor([0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009],
+ device='cuda:1'), out_proj_covar=tensor([9.0030e-06, 9.1429e-06, 8.9563e-06, 9.2147e-06, 8.9738e-06, 9.0880e-06,
+ 9.1009e-06, 9.1455e-06], device='cuda:1')
+2023-03-27 14:50:51,215 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=78.78 vs. limit=5.0
+2023-03-27 14:51:13,720 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 14:51:21,742 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=8.19 vs. limit=2.0
+2023-03-27 14:51:39,745 INFO [train.py:892] (1/4) Epoch 1, batch 100, loss[loss=0.8182, simple_loss=0.6979, pruned_loss=0.9486, over 19767.00 frames. ], tot_loss[loss=1.556, simple_loss=1.381, pruned_loss=1.569, over 1569034.30 frames. ], batch size: 119, lr: 3.00e-02, grad_scale: 1.0
+2023-03-27 14:51:42,869 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.423e+01 2.042e+02 3.545e+02 1.360e+03 1.838e+04, threshold=7.089e+02, percent-clipped=0.0
+2023-03-27 14:52:20,743 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=5.33 vs. limit=2.0
+2023-03-27 14:52:28,116 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=4.86 vs. limit=2.0
+2023-03-27 14:52:37,084 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=3.73 vs. limit=2.0
+2023-03-27 14:52:40,340 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=31.24 vs. limit=5.0
+2023-03-27 14:52:51,602 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144.0, num_to_drop=2, layers_to_drop={1, 3}
+2023-03-27 14:53:04,038 INFO [train.py:892] (1/4) Epoch 1, batch 150, loss[loss=0.7818, simple_loss=0.6696, pruned_loss=0.8191, over 19872.00 frames. ], tot_loss[loss=1.26, simple_loss=1.105, pruned_loss=1.31, over 2098571.15 frames. ], batch size: 138, lr: 3.25e-02, grad_scale: 1.0
+2023-03-27 14:53:53,585 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=17.51 vs. limit=5.0
+2023-03-27 14:54:26,320 INFO [train.py:892] (1/4) Epoch 1, batch 200, loss[loss=0.7757, simple_loss=0.6523, pruned_loss=0.8201, over 19787.00 frames. ], tot_loss[loss=1.105, simple_loss=0.9622, pruned_loss=1.148, over 2508919.17 frames. ], batch size: 42, lr: 3.50e-02, grad_scale: 1.0
+2023-03-27 14:54:29,324 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.392e+01 1.785e+02 2.449e+02 3.359e+02 7.299e+02, threshold=4.898e+02, percent-clipped=1.0
+2023-03-27 14:55:46,820 INFO [train.py:892] (1/4) Epoch 1, batch 250, loss[loss=0.7696, simple_loss=0.6502, pruned_loss=0.7522, over 19734.00 frames. ], tot_loss[loss=1.012, simple_loss=0.8749, pruned_loss=1.042, over 2827352.67 frames. ], batch size: 92, lr: 3.75e-02, grad_scale: 1.0
+2023-03-27 14:57:03,533 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=296.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 14:57:10,646 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=300.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 14:57:11,391 INFO [train.py:892] (1/4) Epoch 1, batch 300, loss[loss=0.8171, simple_loss=0.679, pruned_loss=0.8034, over 19818.00 frames. ], tot_loss[loss=0.9521, simple_loss=0.818, pruned_loss=0.9646, over 3076477.17 frames. ], batch size: 40, lr: 4.00e-02, grad_scale: 1.0
+2023-03-27 14:57:14,287 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 9.056e+01 1.487e+02 2.142e+02 2.914e+02 5.641e+02, threshold=4.285e+02, percent-clipped=2.0
+2023-03-27 14:57:15,197 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2456, 4.2450, 4.2456, 4.2435, 4.2455, 4.2425, 4.2458, 4.2323],
+ device='cuda:1'), covar=tensor([0.0035, 0.0038, 0.0033, 0.0045, 0.0035, 0.0045, 0.0033, 0.0032],
+ device='cuda:1'), in_proj_covar=tensor([0.0009, 0.0009, 0.0009, 0.0010, 0.0010, 0.0010, 0.0009, 0.0010],
+ device='cuda:1'), out_proj_covar=tensor([9.2021e-06, 9.4709e-06, 9.3913e-06, 9.3952e-06, 9.6717e-06, 9.3787e-06,
+ 9.3906e-06, 9.4817e-06], device='cuda:1')
+2023-03-27 14:58:31,273 INFO [train.py:892] (1/4) Epoch 1, batch 350, loss[loss=0.7372, simple_loss=0.6154, pruned_loss=0.6815, over 19875.00 frames. ], tot_loss[loss=0.9114, simple_loss=0.7786, pruned_loss=0.906, over 3268978.35 frames. ], batch size: 92, lr: 4.25e-02, grad_scale: 1.0
+2023-03-27 14:58:36,608 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4639, 5.4651, 5.4799, 5.4278, 5.4689, 5.4614, 5.4740, 5.4708],
+ device='cuda:1'), covar=tensor([0.0063, 0.0074, 0.0059, 0.0086, 0.0070, 0.0070, 0.0069, 0.0065],
+ device='cuda:1'), in_proj_covar=tensor([0.0010, 0.0010, 0.0010, 0.0010, 0.0010, 0.0010, 0.0010, 0.0010],
+ device='cuda:1'), out_proj_covar=tensor([9.3472e-06, 9.6767e-06, 9.5448e-06, 9.6189e-06, 9.8174e-06, 9.5774e-06,
+ 9.5912e-06, 9.6527e-06], device='cuda:1')
+2023-03-27 14:58:40,954 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=357.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 14:59:30,157 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=387.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 14:59:51,345 INFO [train.py:892] (1/4) Epoch 1, batch 400, loss[loss=0.8147, simple_loss=0.6819, pruned_loss=0.7174, over 19748.00 frames. ], tot_loss[loss=0.8857, simple_loss=0.7524, pruned_loss=0.8623, over 3418038.92 frames. ], batch size: 259, lr: 4.50e-02, grad_scale: 2.0
+2023-03-27 14:59:54,321 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.540e+02 2.069e+02 2.975e+02 6.292e+02, threshold=4.137e+02, percent-clipped=2.0
+2023-03-27 15:00:51,864 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=439.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 15:01:07,717 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=448.0, num_to_drop=2, layers_to_drop={1, 3}
+2023-03-27 15:01:12,540 INFO [train.py:892] (1/4) Epoch 1, batch 450, loss[loss=0.7097, simple_loss=0.5935, pruned_loss=0.6044, over 19837.00 frames. ], tot_loss[loss=0.8612, simple_loss=0.7284, pruned_loss=0.8192, over 3536442.87 frames. ], batch size: 90, lr: 4.75e-02, grad_scale: 2.0
+2023-03-27 15:02:30,379 INFO [train.py:892] (1/4) Epoch 1, batch 500, loss[loss=0.7191, simple_loss=0.6068, pruned_loss=0.5788, over 19821.00 frames. ], tot_loss[loss=0.8344, simple_loss=0.7042, pruned_loss=0.7726, over 3629685.82 frames. ], batch size: 229, lr: 4.99e-02, grad_scale: 2.0
+2023-03-27 15:02:33,256 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 2.386e+02 3.363e+02 4.760e+02 1.006e+03, threshold=6.727e+02, percent-clipped=34.0
+2023-03-27 15:03:50,991 INFO [train.py:892] (1/4) Epoch 1, batch 550, loss[loss=0.7748, simple_loss=0.6496, pruned_loss=0.6171, over 19890.00 frames. ], tot_loss[loss=0.8105, simple_loss=0.6838, pruned_loss=0.728, over 3700897.14 frames. ], batch size: 84, lr: 4.98e-02, grad_scale: 2.0
+2023-03-27 15:04:10,850 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=562.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:04:20,942 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=568.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:04:55,485 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=590.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:04:57,527 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=3.81 vs. limit=2.0
+2023-03-27 15:05:10,266 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.66 vs. limit=5.0
+2023-03-27 15:05:13,762 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=600.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:05:14,478 INFO [train.py:892] (1/4) Epoch 1, batch 600, loss[loss=0.6562, simple_loss=0.5613, pruned_loss=0.4837, over 19593.00 frames. ], tot_loss[loss=0.7873, simple_loss=0.6649, pruned_loss=0.6848, over 3753566.24 frames. ], batch size: 45, lr: 4.98e-02, grad_scale: 2.0
+2023-03-27 15:05:17,528 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.786e+02 4.093e+02 5.434e+02 6.548e+02 1.823e+03, threshold=1.087e+03, percent-clipped=20.0
+2023-03-27 15:05:49,176 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=623.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 15:05:59,376 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=629.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 15:06:31,648 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=648.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:06:35,614 INFO [train.py:892] (1/4) Epoch 1, batch 650, loss[loss=0.6827, simple_loss=0.5883, pruned_loss=0.4838, over 19884.00 frames. ], tot_loss[loss=0.7638, simple_loss=0.6467, pruned_loss=0.6424, over 3796440.27 frames. ], batch size: 61, lr: 4.98e-02, grad_scale: 2.0
+2023-03-27 15:06:36,324 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=651.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 15:06:37,789 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=652.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:06:49,097 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.97 vs. limit=2.0
+2023-03-27 15:07:50,299 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=6.64 vs. limit=5.0
+2023-03-27 15:07:56,920 INFO [train.py:892] (1/4) Epoch 1, batch 700, loss[loss=0.6212, simple_loss=0.5347, pruned_loss=0.4336, over 19870.00 frames. ], tot_loss[loss=0.7432, simple_loss=0.6306, pruned_loss=0.6058, over 3829406.70 frames. ], batch size: 136, lr: 4.98e-02, grad_scale: 2.0
+2023-03-27 15:07:59,944 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.633e+02 4.666e+02 6.058e+02 9.217e+02 2.342e+03, threshold=1.212e+03, percent-clipped=17.0
+2023-03-27 15:08:58,984 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=739.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 15:09:06,932 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=743.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:09:20,450 INFO [train.py:892] (1/4) Epoch 1, batch 750, loss[loss=0.6408, simple_loss=0.5529, pruned_loss=0.4378, over 19653.00 frames. ], tot_loss[loss=0.7245, simple_loss=0.6161, pruned_loss=0.573, over 3854413.56 frames. ], batch size: 67, lr: 4.97e-02, grad_scale: 2.0
+2023-03-27 15:09:42,044 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3784, 2.2924, 2.0642, 2.2610, 2.1282, 2.0455, 1.9664, 1.8632],
+ device='cuda:1'), covar=tensor([0.2802, 0.2562, 0.3161, 0.2384, 0.2145, 0.3315, 0.3312, 0.3715],
+ device='cuda:1'), in_proj_covar=tensor([0.0031, 0.0028, 0.0028, 0.0028, 0.0028, 0.0035, 0.0034, 0.0035],
+ device='cuda:1'), out_proj_covar=tensor([2.7801e-05, 2.3971e-05, 2.4207e-05, 2.4449e-05, 2.4970e-05, 3.1807e-05,
+ 3.1006e-05, 3.2160e-05], device='cuda:1')
+2023-03-27 15:10:17,292 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=787.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:10:39,431 INFO [train.py:892] (1/4) Epoch 1, batch 800, loss[loss=0.6216, simple_loss=0.5407, pruned_loss=0.4107, over 19832.00 frames. ], tot_loss[loss=0.7016, simple_loss=0.5985, pruned_loss=0.5387, over 3877457.06 frames. ], batch size: 57, lr: 4.97e-02, grad_scale: 4.0
+2023-03-27 15:10:42,234 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.256e+02 4.881e+02 6.070e+02 8.660e+02 1.858e+03, threshold=1.214e+03, percent-clipped=11.0
+2023-03-27 15:11:53,909 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=847.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:12:00,806 INFO [train.py:892] (1/4) Epoch 1, batch 850, loss[loss=0.5763, simple_loss=0.5101, pruned_loss=0.3618, over 19639.00 frames. ], tot_loss[loss=0.6855, simple_loss=0.5867, pruned_loss=0.5114, over 3892064.72 frames. ], batch size: 47, lr: 4.96e-02, grad_scale: 4.0
+2023-03-27 15:14:32,540 INFO [train.py:892] (1/4) Epoch 1, batch 900, loss[loss=0.5811, simple_loss=0.5077, pruned_loss=0.3714, over 19843.00 frames. ], tot_loss[loss=0.6695, simple_loss=0.5737, pruned_loss=0.4879, over 3905977.51 frames. ], batch size: 109, lr: 4.96e-02, grad_scale: 4.0
+2023-03-27 15:14:39,006 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.144e+02 5.792e+02 7.433e+02 9.367e+02 4.103e+03, threshold=1.487e+03, percent-clipped=16.0
+2023-03-27 15:14:55,539 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=908.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:15:25,488 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=918.0, num_to_drop=2, layers_to_drop={0, 3}
+2023-03-27 15:15:44,726 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=924.0, num_to_drop=2, layers_to_drop={0, 3}
+2023-03-27 15:16:59,722 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=946.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:17:13,602 INFO [train.py:892] (1/4) Epoch 1, batch 950, loss[loss=0.6389, simple_loss=0.5514, pruned_loss=0.414, over 19633.00 frames. ], tot_loss[loss=0.6544, simple_loss=0.5626, pruned_loss=0.465, over 3915871.33 frames. ], batch size: 299, lr: 4.96e-02, grad_scale: 4.0
+2023-03-27 15:17:16,344 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=952.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:19:15,012 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1000.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:19:15,859 INFO [train.py:892] (1/4) Epoch 1, batch 1000, loss[loss=0.5748, simple_loss=0.4967, pruned_loss=0.3677, over 19828.00 frames. ], tot_loss[loss=0.6397, simple_loss=0.5513, pruned_loss=0.4446, over 3923935.00 frames. ], batch size: 127, lr: 4.95e-02, grad_scale: 4.0
+2023-03-27 15:19:21,142 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.942e+02 5.404e+02 6.507e+02 8.567e+02 3.462e+03, threshold=1.301e+03, percent-clipped=3.0
+2023-03-27 15:21:22,516 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1043.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:21:50,403 INFO [train.py:892] (1/4) Epoch 1, batch 1050, loss[loss=0.5204, simple_loss=0.4594, pruned_loss=0.3171, over 19834.00 frames. ], tot_loss[loss=0.6285, simple_loss=0.5429, pruned_loss=0.428, over 3930683.58 frames. ], batch size: 128, lr: 4.95e-02, grad_scale: 4.0
+2023-03-27 15:23:43,148 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1091.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:24:13,095 INFO [train.py:892] (1/4) Epoch 1, batch 1100, loss[loss=0.5464, simple_loss=0.4874, pruned_loss=0.3243, over 19693.00 frames. ], tot_loss[loss=0.6165, simple_loss=0.5349, pruned_loss=0.4103, over 3934282.98 frames. ], batch size: 46, lr: 4.94e-02, grad_scale: 4.0
+2023-03-27 15:24:19,035 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 6.074e+02 7.964e+02 1.002e+03 2.431e+03, threshold=1.593e+03, percent-clipped=21.0
+2023-03-27 15:26:44,188 INFO [train.py:892] (1/4) Epoch 1, batch 1150, loss[loss=0.5801, simple_loss=0.5114, pruned_loss=0.3494, over 19617.00 frames. ], tot_loss[loss=0.6031, simple_loss=0.5252, pruned_loss=0.3937, over 3938067.81 frames. ], batch size: 51, lr: 4.94e-02, grad_scale: 4.0
+2023-03-27 15:26:48,657 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1153.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:27:20,233 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.6174, 5.6729, 5.5439, 5.3024, 5.6146, 5.7166, 5.4893, 5.1224],
+ device='cuda:1'), covar=tensor([0.0316, 0.0453, 0.0496, 0.0520, 0.0304, 0.0363, 0.0506, 0.0466],
+ device='cuda:1'), in_proj_covar=tensor([0.0028, 0.0027, 0.0030, 0.0030, 0.0028, 0.0029, 0.0030, 0.0032],
+ device='cuda:1'), out_proj_covar=tensor([2.8387e-05, 2.5490e-05, 2.9336e-05, 3.0128e-05, 2.6802e-05, 2.7849e-05,
+ 2.9132e-05, 3.0865e-05], device='cuda:1')
+2023-03-27 15:28:37,488 INFO [train.py:892] (1/4) Epoch 1, batch 1200, loss[loss=0.5489, simple_loss=0.491, pruned_loss=0.3205, over 19690.00 frames. ], tot_loss[loss=0.5947, simple_loss=0.5193, pruned_loss=0.3814, over 3940689.08 frames. ], batch size: 46, lr: 4.93e-02, grad_scale: 8.0
+2023-03-27 15:28:40,921 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.874e+02 6.275e+02 7.572e+02 9.981e+02 2.448e+03, threshold=1.514e+03, percent-clipped=4.0
+2023-03-27 15:28:41,658 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1203.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:29:12,210 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1214.0, num_to_drop=2, layers_to_drop={0, 3}
+2023-03-27 15:29:24,410 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1218.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:29:45,948 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1224.0, num_to_drop=1, layers_to_drop={2}
+2023-03-27 15:30:50,669 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1246.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:31:03,298 INFO [train.py:892] (1/4) Epoch 1, batch 1250, loss[loss=0.5437, simple_loss=0.4914, pruned_loss=0.3106, over 19885.00 frames. ], tot_loss[loss=0.5819, simple_loss=0.5103, pruned_loss=0.3666, over 3943325.19 frames. ], batch size: 47, lr: 4.92e-02, grad_scale: 8.0
+2023-03-27 15:31:29,656 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.10 vs. limit=2.0
+2023-03-27 15:31:47,652 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1266.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:31:59,811 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1272.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:32:47,347 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1294.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:32:57,675 INFO [train.py:892] (1/4) Epoch 1, batch 1300, loss[loss=0.5205, simple_loss=0.4624, pruned_loss=0.3042, over 19834.00 frames. ], tot_loss[loss=0.5751, simple_loss=0.5056, pruned_loss=0.3571, over 3944909.76 frames. ], batch size: 143, lr: 4.92e-02, grad_scale: 8.0
+2023-03-27 15:33:00,840 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.916e+02 5.972e+02 8.796e+02 1.171e+03 2.494e+03, threshold=1.759e+03, percent-clipped=14.0
+2023-03-27 15:34:24,478 INFO [train.py:892] (1/4) Epoch 1, batch 1350, loss[loss=0.5164, simple_loss=0.4632, pruned_loss=0.2961, over 19816.00 frames. ], tot_loss[loss=0.5697, simple_loss=0.5024, pruned_loss=0.3487, over 3944389.38 frames. ], batch size: 103, lr: 4.91e-02, grad_scale: 8.0
+2023-03-27 15:35:39,475 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9569, 4.3384, 5.0612, 4.6511, 4.7452, 4.6400, 3.9346, 4.2535],
+ device='cuda:1'), covar=tensor([0.0270, 0.0726, 0.0345, 0.0425, 0.0538, 0.0272, 0.0495, 0.0653],
+ device='cuda:1'), in_proj_covar=tensor([0.0023, 0.0023, 0.0025, 0.0026, 0.0027, 0.0024, 0.0027, 0.0028],
+ device='cuda:1'), out_proj_covar=tensor([1.8760e-05, 1.7986e-05, 1.9489e-05, 2.0377e-05, 2.4036e-05, 1.9080e-05,
+ 2.3311e-05, 2.3250e-05], device='cuda:1')
+2023-03-27 15:35:47,394 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6779, 3.5453, 3.6415, 3.2051, 3.5679, 3.2539, 3.5026, 3.7130],
+ device='cuda:1'), covar=tensor([0.0742, 0.0462, 0.0491, 0.0571, 0.0420, 0.0613, 0.0572, 0.0461],
+ device='cuda:1'), in_proj_covar=tensor([0.0040, 0.0034, 0.0040, 0.0042, 0.0037, 0.0041, 0.0038, 0.0033],
+ device='cuda:1'), out_proj_covar=tensor([3.7484e-05, 2.8262e-05, 3.5754e-05, 3.7248e-05, 3.3569e-05, 3.6188e-05,
+ 3.3025e-05, 2.7787e-05], device='cuda:1')
+2023-03-27 15:35:50,286 INFO [train.py:892] (1/4) Epoch 1, batch 1400, loss[loss=0.4702, simple_loss=0.43, pruned_loss=0.2612, over 19858.00 frames. ], tot_loss[loss=0.5626, simple_loss=0.4982, pruned_loss=0.3393, over 3944242.37 frames. ], batch size: 165, lr: 4.91e-02, grad_scale: 8.0
+2023-03-27 15:35:53,772 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.724e+02 6.220e+02 8.188e+02 1.056e+03 1.766e+03, threshold=1.638e+03, percent-clipped=1.0
+2023-03-27 15:36:50,080 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1434.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:37:19,430 INFO [train.py:892] (1/4) Epoch 1, batch 1450, loss[loss=0.5309, simple_loss=0.4766, pruned_loss=0.3018, over 19725.00 frames. ], tot_loss[loss=0.5567, simple_loss=0.4939, pruned_loss=0.3323, over 3946298.90 frames. ], batch size: 269, lr: 4.90e-02, grad_scale: 8.0
+2023-03-27 15:38:38,442 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1495.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:38:47,166 INFO [train.py:892] (1/4) Epoch 1, batch 1500, loss[loss=0.4614, simple_loss=0.4213, pruned_loss=0.2557, over 19835.00 frames. ], tot_loss[loss=0.5477, simple_loss=0.488, pruned_loss=0.3228, over 3948448.42 frames. ], batch size: 128, lr: 4.89e-02, grad_scale: 8.0
+2023-03-27 15:38:51,708 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 6.710e+02 8.347e+02 1.087e+03 2.003e+03, threshold=1.669e+03, percent-clipped=5.0
+2023-03-27 15:38:52,657 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1503.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:39:02,507 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1509.0, num_to_drop=2, layers_to_drop={1, 3}
+2023-03-27 15:39:39,846 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.14 vs. limit=2.0
+2023-03-27 15:40:15,419 INFO [train.py:892] (1/4) Epoch 1, batch 1550, loss[loss=0.4572, simple_loss=0.4332, pruned_loss=0.2406, over 19774.00 frames. ], tot_loss[loss=0.5392, simple_loss=0.4826, pruned_loss=0.3139, over 3948304.80 frames. ], batch size: 42, lr: 4.89e-02, grad_scale: 8.0
+2023-03-27 15:40:16,059 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1551.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:40:45,155 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1566.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:41:45,849 INFO [train.py:892] (1/4) Epoch 1, batch 1600, loss[loss=0.5336, simple_loss=0.4944, pruned_loss=0.289, over 19727.00 frames. ], tot_loss[loss=0.5321, simple_loss=0.4779, pruned_loss=0.3067, over 3949539.92 frames. ], batch size: 50, lr: 4.88e-02, grad_scale: 8.0
+2023-03-27 15:41:50,275 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.877e+02 6.710e+02 8.151e+02 1.107e+03 2.056e+03, threshold=1.630e+03, percent-clipped=4.0
+2023-03-27 15:42:03,934 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1611.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:42:30,878 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5767, 3.9260, 3.6969, 3.9157, 3.3002, 3.0250, 3.5626, 3.0293],
+ device='cuda:1'), covar=tensor([0.0448, 0.0316, 0.0381, 0.0283, 0.0625, 0.0734, 0.0396, 0.0400],
+ device='cuda:1'), in_proj_covar=tensor([0.0054, 0.0048, 0.0051, 0.0045, 0.0054, 0.0053, 0.0048, 0.0047],
+ device='cuda:1'), out_proj_covar=tensor([4.4352e-05, 3.8336e-05, 4.2183e-05, 3.5016e-05, 4.4805e-05, 4.2431e-05,
+ 4.0724e-05, 3.7080e-05], device='cuda:1')
+2023-03-27 15:42:30,899 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1627.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:43:13,159 INFO [train.py:892] (1/4) Epoch 1, batch 1650, loss[loss=0.4822, simple_loss=0.4407, pruned_loss=0.265, over 19781.00 frames. ], tot_loss[loss=0.5252, simple_loss=0.4733, pruned_loss=0.2999, over 3949708.12 frames. ], batch size: 131, lr: 4.87e-02, grad_scale: 8.0
+2023-03-27 15:43:45,619 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1492, 4.6351, 4.9583, 4.4814, 4.9883, 4.3513, 4.7569, 5.0774],
+ device='cuda:1'), covar=tensor([0.0226, 0.0277, 0.0204, 0.0230, 0.0167, 0.0271, 0.0230, 0.0200],
+ device='cuda:1'), in_proj_covar=tensor([0.0033, 0.0032, 0.0035, 0.0037, 0.0035, 0.0037, 0.0035, 0.0031],
+ device='cuda:1'), out_proj_covar=tensor([3.0910e-05, 2.5264e-05, 3.0899e-05, 3.3538e-05, 3.1371e-05, 3.1659e-05,
+ 2.9393e-05, 2.6045e-05], device='cuda:1')
+2023-03-27 15:43:50,570 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1672.0, num_to_drop=2, layers_to_drop={1, 3}
+2023-03-27 15:44:39,830 INFO [train.py:892] (1/4) Epoch 1, batch 1700, loss[loss=0.5, simple_loss=0.4583, pruned_loss=0.2734, over 19822.00 frames. ], tot_loss[loss=0.5192, simple_loss=0.4706, pruned_loss=0.2931, over 3949239.21 frames. ], batch size: 187, lr: 4.86e-02, grad_scale: 8.0
+2023-03-27 15:44:43,066 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.108e+02 7.327e+02 1.005e+03 2.757e+03, threshold=1.465e+03, percent-clipped=5.0
+2023-03-27 15:46:05,815 INFO [train.py:892] (1/4) Epoch 1, batch 1750, loss[loss=0.5009, simple_loss=0.4678, pruned_loss=0.2679, over 19804.00 frames. ], tot_loss[loss=0.5112, simple_loss=0.4654, pruned_loss=0.2861, over 3950313.31 frames. ], batch size: 68, lr: 4.86e-02, grad_scale: 8.0
+2023-03-27 15:46:06,857 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.33 vs. limit=2.0
+2023-03-27 15:47:05,107 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1790.0, num_to_drop=1, layers_to_drop={3}
+2023-03-27 15:47:06,992 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0
+2023-03-27 15:47:20,401 INFO [train.py:892] (1/4) Epoch 1, batch 1800, loss[loss=0.5007, simple_loss=0.454, pruned_loss=0.2758, over 19771.00 frames. ], tot_loss[loss=0.5077, simple_loss=0.463, pruned_loss=0.2825, over 3951279.81 frames. ], batch size: 233, lr: 4.85e-02, grad_scale: 8.0
+2023-03-27 15:47:23,276 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.834e+02 6.979e+02 1.011e+03 1.306e+03 2.784e+03, threshold=2.021e+03, percent-clipped=17.0
+2023-03-27 15:47:32,184 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1809.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:47:53,743 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9616, 3.7609, 4.0188, 4.3441, 3.8550, 3.4705, 3.4953, 3.4251],
+ device='cuda:1'), covar=tensor([0.0364, 0.0331, 0.0400, 0.0223, 0.0346, 0.0448, 0.0408, 0.0508],
+ device='cuda:1'), in_proj_covar=tensor([0.0041, 0.0036, 0.0041, 0.0035, 0.0040, 0.0045, 0.0043, 0.0043],
+ device='cuda:1'), out_proj_covar=tensor([3.2224e-05, 3.1587e-05, 3.1718e-05, 2.6908e-05, 3.0051e-05, 3.6477e-05,
+ 3.4461e-05, 3.5114e-05], device='cuda:1')
+2023-03-27 15:48:13,786 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1838.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:48:31,000 INFO [train.py:892] (1/4) Epoch 1, batch 1850, loss[loss=0.4491, simple_loss=0.4394, pruned_loss=0.2283, over 19583.00 frames. ], tot_loss[loss=0.5046, simple_loss=0.4634, pruned_loss=0.2779, over 3949420.21 frames. ], batch size: 53, lr: 4.84e-02, grad_scale: 8.0
+2023-03-27 15:49:26,261 INFO [train.py:892] (1/4) Epoch 2, batch 0, loss[loss=0.5321, simple_loss=0.4783, pruned_loss=0.2948, over 19744.00 frames. ], tot_loss[loss=0.5321, simple_loss=0.4783, pruned_loss=0.2948, over 19744.00 frames. ], batch size: 259, lr: 4.75e-02, grad_scale: 8.0
+2023-03-27 15:49:26,262 INFO [train.py:917] (1/4) Computing validation loss
+2023-03-27 15:49:48,715 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.4848, 0.7376, 1.4382, 0.9674, 1.2972, 1.4838, 1.4330, 1.3098],
+ device='cuda:1'), covar=tensor([0.5096, 1.5934, 0.4387, 1.3410, 0.5425, 0.3416, 0.2767, 0.3499],
+ device='cuda:1'), in_proj_covar=tensor([0.0026, 0.0049, 0.0025, 0.0052, 0.0028, 0.0032, 0.0027, 0.0027],
+ device='cuda:1'), out_proj_covar=tensor([1.9811e-05, 4.4370e-05, 1.9254e-05, 4.6820e-05, 2.2890e-05, 2.4864e-05,
+ 2.0715e-05, 1.9702e-05], device='cuda:1')
+2023-03-27 15:49:52,686 INFO [train.py:926] (1/4) Epoch 2, validation: loss=0.3819, simple_loss=0.4085, pruned_loss=0.1743, over 2883724.00 frames.
+2023-03-27 15:49:52,688 INFO [train.py:927] (1/4) Maximum memory allocated so far is 18089MB
+2023-03-27 15:49:56,680 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1857.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:49:56,775 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1857.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:50:22,566 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1870.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:51:15,930 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1899.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 15:51:22,947 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.599e+02 7.082e+02 8.943e+02 1.129e+03 2.587e+03, threshold=1.789e+03, percent-clipped=3.0
+2023-03-27 15:51:29,939 INFO [train.py:892] (1/4) Epoch 2, batch 50, loss[loss=0.4775, simple_loss=0.4431, pruned_loss=0.2564, over 19837.00 frames. ], tot_loss[loss=0.4697, simple_loss=0.4403, pruned_loss=0.2498, over 890122.67 frames. ], batch size: 137, lr: 4.74e-02, grad_scale: 8.0
+2023-03-27 15:51:53,524 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1918.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:51:59,901 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1922.0, num_to_drop=1, layers_to_drop={3}
+2023-03-27 15:52:01,615 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.5773, 5.3109, 5.0383, 5.4235, 5.0299, 5.5901, 5.2770, 5.6514],
+ device='cuda:1'), covar=tensor([0.0225, 0.0196, 0.0249, 0.0231, 0.0354, 0.0103, 0.0191, 0.0237],
+ device='cuda:1'), in_proj_covar=tensor([0.0051, 0.0060, 0.0065, 0.0059, 0.0065, 0.0056, 0.0066, 0.0056],
+ device='cuda:1'), out_proj_covar=tensor([4.5953e-05, 5.9416e-05, 6.7648e-05, 5.8033e-05, 6.6432e-05, 6.0338e-05,
+ 6.5968e-05, 5.8747e-05], device='cuda:1')
+2023-03-27 15:52:16,581 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1931.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 15:52:51,776 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. limit=2.0
+2023-03-27 15:53:00,444 INFO [train.py:892] (1/4) Epoch 2, batch 100, loss[loss=0.4375, simple_loss=0.4083, pruned_loss=0.2335, over 19799.00 frames. ], tot_loss[loss=0.4731, simple_loss=0.443, pruned_loss=0.2518, over 1568260.68 frames. ], batch size: 151, lr: 4.73e-02, grad_scale: 8.0
+2023-03-27 15:53:21,306 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1967.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:54:30,523 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+02 7.981e+02 1.059e+03 1.451e+03 2.427e+03, threshold=2.118e+03, percent-clipped=7.0
+2023-03-27 15:54:36,162 INFO [train.py:892] (1/4) Epoch 2, batch 150, loss[loss=0.4234, simple_loss=0.4212, pruned_loss=0.2128, over 19732.00 frames. ], tot_loss[loss=0.4713, simple_loss=0.4414, pruned_loss=0.2508, over 2096361.48 frames. ], batch size: 52, lr: 4.72e-02, grad_scale: 8.0
+2023-03-27 15:56:17,764 INFO [train.py:892] (1/4) Epoch 2, batch 200, loss[loss=0.4301, simple_loss=0.4273, pruned_loss=0.2165, over 19842.00 frames. ], tot_loss[loss=0.4645, simple_loss=0.4394, pruned_loss=0.2449, over 2505177.65 frames. ], batch size: 90, lr: 4.72e-02, grad_scale: 16.0
+2023-03-27 15:57:25,408 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2090.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:57:51,092 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+02 6.247e+02 7.217e+02 9.235e+02 2.365e+03, threshold=1.443e+03, percent-clipped=1.0
+2023-03-27 15:57:55,814 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4269, 4.3379, 4.5196, 4.1308, 3.0611, 3.9878, 4.1066, 2.4716],
+ device='cuda:1'), covar=tensor([0.0115, 0.0176, 0.0158, 0.0129, 0.1267, 0.0154, 0.0181, 0.1527],
+ device='cuda:1'), in_proj_covar=tensor([0.0028, 0.0027, 0.0030, 0.0030, 0.0049, 0.0028, 0.0034, 0.0051],
+ device='cuda:1'), out_proj_covar=tensor([2.0869e-05, 2.1204e-05, 2.3294e-05, 2.1401e-05, 4.5325e-05, 2.0855e-05,
+ 2.8672e-05, 4.5356e-05], device='cuda:1')
+2023-03-27 15:57:56,572 INFO [train.py:892] (1/4) Epoch 2, batch 250, loss[loss=0.4314, simple_loss=0.413, pruned_loss=0.2249, over 19853.00 frames. ], tot_loss[loss=0.4558, simple_loss=0.4345, pruned_loss=0.2386, over 2825058.32 frames. ], batch size: 85, lr: 4.71e-02, grad_scale: 16.0
+2023-03-27 15:59:00,538 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2138.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:59:00,709 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5157, 4.0457, 4.3994, 3.8654, 4.2884, 3.7394, 4.2619, 4.4472],
+ device='cuda:1'), covar=tensor([0.0154, 0.0196, 0.0132, 0.0206, 0.0171, 0.0313, 0.0162, 0.0141],
+ device='cuda:1'), in_proj_covar=tensor([0.0031, 0.0032, 0.0034, 0.0038, 0.0035, 0.0039, 0.0034, 0.0031],
+ device='cuda:1'), out_proj_covar=tensor([3.0015e-05, 2.7110e-05, 3.1199e-05, 3.4368e-05, 3.3451e-05, 3.5073e-05,
+ 2.9598e-05, 2.7596e-05], device='cuda:1')
+2023-03-27 15:59:32,880 INFO [train.py:892] (1/4) Epoch 2, batch 300, loss[loss=0.3874, simple_loss=0.3925, pruned_loss=0.1911, over 19829.00 frames. ], tot_loss[loss=0.4493, simple_loss=0.4307, pruned_loss=0.234, over 3075300.57 frames. ], batch size: 75, lr: 4.70e-02, grad_scale: 16.0
+2023-03-27 16:00:11,800 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3739, 4.8891, 5.2725, 4.7819, 5.3456, 4.2869, 4.9271, 5.4080],
+ device='cuda:1'), covar=tensor([0.0157, 0.0166, 0.0143, 0.0167, 0.0094, 0.0285, 0.0203, 0.0099],
+ device='cuda:1'), in_proj_covar=tensor([0.0032, 0.0032, 0.0035, 0.0039, 0.0036, 0.0040, 0.0035, 0.0031],
+ device='cuda:1'), out_proj_covar=tensor([3.0686e-05, 2.8155e-05, 3.2712e-05, 3.4987e-05, 3.4008e-05, 3.6333e-05,
+ 3.0447e-05, 2.8534e-05], device='cuda:1')
+2023-03-27 16:00:53,367 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2194.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:01:11,048 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 6.231e+02 7.603e+02 9.238e+02 1.387e+03, threshold=1.521e+03, percent-clipped=0.0
+2023-03-27 16:01:18,847 INFO [train.py:892] (1/4) Epoch 2, batch 350, loss[loss=0.4117, simple_loss=0.4173, pruned_loss=0.203, over 19487.00 frames. ], tot_loss[loss=0.4425, simple_loss=0.4268, pruned_loss=0.2291, over 3269784.62 frames. ], batch size: 43, lr: 4.69e-02, grad_scale: 16.0
+2023-03-27 16:01:32,898 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2213.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:01:49,541 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2222.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 16:01:51,428 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0174, 2.7276, 2.9788, 3.0167, 2.7462, 2.7058, 2.8069, 2.3648],
+ device='cuda:1'), covar=tensor([0.0167, 0.0420, 0.0272, 0.0173, 0.0283, 0.0291, 0.0195, 0.0568],
+ device='cuda:1'), in_proj_covar=tensor([0.0027, 0.0028, 0.0029, 0.0023, 0.0027, 0.0031, 0.0030, 0.0031],
+ device='cuda:1'), out_proj_covar=tensor([2.0223e-05, 2.2885e-05, 2.1616e-05, 1.7260e-05, 2.0785e-05, 2.3662e-05,
+ 2.3181e-05, 2.4162e-05], device='cuda:1')
+2023-03-27 16:01:56,885 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2226.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:02:45,229 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0
+2023-03-27 16:02:54,469 INFO [train.py:892] (1/4) Epoch 2, batch 400, loss[loss=0.4318, simple_loss=0.4231, pruned_loss=0.2202, over 19717.00 frames. ], tot_loss[loss=0.4406, simple_loss=0.4258, pruned_loss=0.2277, over 3419924.88 frames. ], batch size: 80, lr: 4.68e-02, grad_scale: 16.0
+2023-03-27 16:03:15,836 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2267.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:03:20,993 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2270.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:03:48,661 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1954, 5.2932, 5.2805, 5.3363, 5.2510, 5.3165, 4.9400, 4.7812],
+ device='cuda:1'), covar=tensor([0.0296, 0.0271, 0.0408, 0.0300, 0.0236, 0.0388, 0.0327, 0.0466],
+ device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0059, 0.0066, 0.0063, 0.0064, 0.0058, 0.0069, 0.0083],
+ device='cuda:1'), out_proj_covar=tensor([6.4164e-05, 6.2498e-05, 6.8581e-05, 6.9058e-05, 6.4012e-05, 6.0648e-05,
+ 7.0997e-05, 8.9620e-05], device='cuda:1')
+2023-03-27 16:03:50,790 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9020, 3.5457, 2.4184, 3.6074, 3.8369, 1.8512, 3.8085, 3.5229],
+ device='cuda:1'), covar=tensor([0.0179, 0.0295, 0.1896, 0.0098, 0.0109, 0.2678, 0.0171, 0.0115],
+ device='cuda:1'), in_proj_covar=tensor([0.0039, 0.0036, 0.0075, 0.0026, 0.0029, 0.0086, 0.0042, 0.0035],
+ device='cuda:1'), out_proj_covar=tensor([3.0148e-05, 2.9797e-05, 6.9593e-05, 1.9862e-05, 2.0525e-05, 7.6749e-05,
+ 3.3718e-05, 2.4320e-05], device='cuda:1')
+2023-03-27 16:04:26,680 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.159e+02 7.202e+02 8.951e+02 1.212e+03 2.125e+03, threshold=1.790e+03, percent-clipped=11.0
+2023-03-27 16:04:31,840 INFO [train.py:892] (1/4) Epoch 2, batch 450, loss[loss=0.5454, simple_loss=0.5056, pruned_loss=0.2926, over 19566.00 frames. ], tot_loss[loss=0.4393, simple_loss=0.4257, pruned_loss=0.2265, over 3538126.50 frames. ], batch size: 376, lr: 4.67e-02, grad_scale: 16.0
+2023-03-27 16:04:51,233 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2315.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:05:40,813 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2340.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 16:06:13,201 INFO [train.py:892] (1/4) Epoch 2, batch 500, loss[loss=0.4286, simple_loss=0.4157, pruned_loss=0.2207, over 19766.00 frames. ], tot_loss[loss=0.4345, simple_loss=0.4232, pruned_loss=0.2229, over 3629859.80 frames. ], batch size: 241, lr: 4.66e-02, grad_scale: 16.0
+2023-03-27 16:07:44,028 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2401.0, num_to_drop=2, layers_to_drop={0, 3}
+2023-03-27 16:07:46,784 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.849e+02 6.111e+02 7.665e+02 9.809e+02 1.525e+03, threshold=1.533e+03, percent-clipped=0.0
+2023-03-27 16:07:51,925 INFO [train.py:892] (1/4) Epoch 2, batch 550, loss[loss=0.3771, simple_loss=0.3766, pruned_loss=0.1888, over 19705.00 frames. ], tot_loss[loss=0.4329, simple_loss=0.4226, pruned_loss=0.2216, over 3699732.58 frames. ], batch size: 46, lr: 4.65e-02, grad_scale: 16.0
+2023-03-27 16:09:31,323 INFO [train.py:892] (1/4) Epoch 2, batch 600, loss[loss=0.4055, simple_loss=0.399, pruned_loss=0.206, over 19813.00 frames. ], tot_loss[loss=0.4304, simple_loss=0.4213, pruned_loss=0.2198, over 3753424.48 frames. ], batch size: 167, lr: 4.64e-02, grad_scale: 16.0
+2023-03-27 16:10:33,657 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9235, 3.2263, 2.8059, 3.8196, 2.5165, 3.2130, 2.8442, 2.7028],
+ device='cuda:1'), covar=tensor([0.0434, 0.0190, 0.0650, 0.0102, 0.0399, 0.0273, 0.0294, 0.0373],
+ device='cuda:1'), in_proj_covar=tensor([0.0027, 0.0023, 0.0034, 0.0023, 0.0027, 0.0028, 0.0029, 0.0029],
+ device='cuda:1'), out_proj_covar=tensor([2.2105e-05, 1.8446e-05, 3.1288e-05, 1.7924e-05, 2.2525e-05, 2.4384e-05,
+ 2.4180e-05, 2.4121e-05], device='cuda:1')
+2023-03-27 16:10:49,198 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2494.0, num_to_drop=1, layers_to_drop={2}
+2023-03-27 16:11:07,767 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.512e+02 7.276e+02 8.474e+02 1.052e+03 1.837e+03, threshold=1.695e+03, percent-clipped=3.0
+2023-03-27 16:11:13,433 INFO [train.py:892] (1/4) Epoch 2, batch 650, loss[loss=0.4641, simple_loss=0.4366, pruned_loss=0.2458, over 19766.00 frames. ], tot_loss[loss=0.427, simple_loss=0.4188, pruned_loss=0.2177, over 3798104.64 frames. ], batch size: 256, lr: 4.64e-02, grad_scale: 16.0
+2023-03-27 16:11:27,313 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2513.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:11:51,046 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2526.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:11:59,332 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5257, 4.1505, 3.9945, 4.4232, 4.0395, 4.4393, 4.3987, 4.6590],
+ device='cuda:1'), covar=tensor([0.0200, 0.0284, 0.0485, 0.0192, 0.0395, 0.0196, 0.0194, 0.0266],
+ device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0074, 0.0082, 0.0069, 0.0078, 0.0061, 0.0081, 0.0077],
+ device='cuda:1'), out_proj_covar=tensor([6.2195e-05, 8.5582e-05, 9.6512e-05, 7.7400e-05, 9.2467e-05, 7.2752e-05,
+ 8.5671e-05, 9.1441e-05], device='cuda:1')
+2023-03-27 16:12:26,524 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2542.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:12:51,152 INFO [train.py:892] (1/4) Epoch 2, batch 700, loss[loss=0.4087, simple_loss=0.4059, pruned_loss=0.2057, over 19787.00 frames. ], tot_loss[loss=0.4224, simple_loss=0.416, pruned_loss=0.2144, over 3831812.46 frames. ], batch size: 91, lr: 4.63e-02, grad_scale: 16.0
+2023-03-27 16:13:01,042 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2561.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:13:27,676 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2574.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:13:55,003 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.25 vs. limit=2.0
+2023-03-27 16:14:23,667 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.137e+02 6.142e+02 8.213e+02 1.055e+03 2.684e+03, threshold=1.643e+03, percent-clipped=4.0
+2023-03-27 16:14:30,613 INFO [train.py:892] (1/4) Epoch 2, batch 750, loss[loss=0.3851, simple_loss=0.3829, pruned_loss=0.1937, over 19800.00 frames. ], tot_loss[loss=0.4173, simple_loss=0.4128, pruned_loss=0.211, over 3858797.13 frames. ], batch size: 150, lr: 4.62e-02, grad_scale: 16.0
+2023-03-27 16:16:13,080 INFO [train.py:892] (1/4) Epoch 2, batch 800, loss[loss=0.35, simple_loss=0.3622, pruned_loss=0.1689, over 19682.00 frames. ], tot_loss[loss=0.419, simple_loss=0.4139, pruned_loss=0.212, over 3878574.94 frames. ], batch size: 82, lr: 4.61e-02, grad_scale: 16.0
+2023-03-27 16:17:11,687 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2685.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 16:17:33,261 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2696.0, num_to_drop=1, layers_to_drop={3}
+2023-03-27 16:17:45,223 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+02 7.441e+02 9.827e+02 1.217e+03 2.716e+03, threshold=1.965e+03, percent-clipped=6.0
+2023-03-27 16:17:51,044 INFO [train.py:892] (1/4) Epoch 2, batch 850, loss[loss=0.5143, simple_loss=0.483, pruned_loss=0.2728, over 19630.00 frames. ], tot_loss[loss=0.4182, simple_loss=0.4137, pruned_loss=0.2113, over 3894996.00 frames. ], batch size: 359, lr: 4.60e-02, grad_scale: 16.0
+2023-03-27 16:18:57,821 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.02 vs. limit=5.0
+2023-03-27 16:19:12,069 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2746.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 16:19:32,075 INFO [train.py:892] (1/4) Epoch 2, batch 900, loss[loss=0.5177, simple_loss=0.4837, pruned_loss=0.2758, over 19706.00 frames. ], tot_loss[loss=0.414, simple_loss=0.4114, pruned_loss=0.2083, over 3908851.84 frames. ], batch size: 305, lr: 4.59e-02, grad_scale: 16.0
+2023-03-27 16:20:47,414 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2231, 4.4683, 4.7653, 4.2574, 4.5989, 4.7369, 4.1451, 5.0534],
+ device='cuda:1'), covar=tensor([0.1101, 0.0240, 0.0279, 0.0370, 0.0197, 0.0179, 0.0316, 0.0194],
+ device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0078, 0.0070, 0.0070, 0.0066, 0.0070, 0.0069, 0.0060],
+ device='cuda:1'), out_proj_covar=tensor([1.0250e-04, 9.2176e-05, 8.6797e-05, 8.0739e-05, 7.3966e-05, 8.4967e-05,
+ 8.0942e-05, 7.0896e-05], device='cuda:1')
+2023-03-27 16:21:04,237 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.961e+02 6.224e+02 7.989e+02 1.015e+03 2.345e+03, threshold=1.598e+03, percent-clipped=4.0
+2023-03-27 16:21:10,152 INFO [train.py:892] (1/4) Epoch 2, batch 950, loss[loss=0.3648, simple_loss=0.3677, pruned_loss=0.1809, over 19870.00 frames. ], tot_loss[loss=0.413, simple_loss=0.4113, pruned_loss=0.2073, over 3918361.21 frames. ], batch size: 165, lr: 4.58e-02, grad_scale: 16.0
+2023-03-27 16:21:45,496 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2824.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:22:03,531 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.08 vs. limit=2.0
+2023-03-27 16:22:12,136 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-03-27 16:22:47,336 INFO [train.py:892] (1/4) Epoch 2, batch 1000, loss[loss=0.4101, simple_loss=0.4014, pruned_loss=0.2094, over 19773.00 frames. ], tot_loss[loss=0.4128, simple_loss=0.4116, pruned_loss=0.207, over 3923969.09 frames. ], batch size: 169, lr: 4.57e-02, grad_scale: 16.0
+2023-03-27 16:23:47,301 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2885.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 16:24:11,107 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0
+2023-03-27 16:24:23,437 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.209e+02 6.834e+02 8.062e+02 1.027e+03 2.036e+03, threshold=1.612e+03, percent-clipped=4.0
+2023-03-27 16:24:28,994 INFO [train.py:892] (1/4) Epoch 2, batch 1050, loss[loss=0.4396, simple_loss=0.4542, pruned_loss=0.2125, over 19558.00 frames. ], tot_loss[loss=0.4101, simple_loss=0.4099, pruned_loss=0.2052, over 3929232.58 frames. ], batch size: 60, lr: 4.56e-02, grad_scale: 16.0
+2023-03-27 16:25:08,763 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.91 vs. limit=5.0
+2023-03-27 16:25:29,987 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2937.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:26:06,230 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7949, 2.3551, 3.6535, 3.5266, 2.7681, 2.9266, 3.3966, 3.0652],
+ device='cuda:1'), covar=tensor([0.0175, 0.1140, 0.0239, 0.0221, 0.0848, 0.0330, 0.0338, 0.1799],
+ device='cuda:1'), in_proj_covar=tensor([0.0038, 0.0058, 0.0039, 0.0034, 0.0037, 0.0041, 0.0041, 0.0043],
+ device='cuda:1'), out_proj_covar=tensor([3.2316e-05, 6.1060e-05, 3.3167e-05, 2.8615e-05, 3.6881e-05, 3.7464e-05,
+ 3.9185e-05, 4.3420e-05], device='cuda:1')
+2023-03-27 16:26:09,033 INFO [train.py:892] (1/4) Epoch 2, batch 1100, loss[loss=0.3725, simple_loss=0.3736, pruned_loss=0.1858, over 19875.00 frames. ], tot_loss[loss=0.407, simple_loss=0.408, pruned_loss=0.203, over 3933639.06 frames. ], batch size: 159, lr: 4.55e-02, grad_scale: 16.0
+2023-03-27 16:27:27,622 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. limit=2.0
+2023-03-27 16:27:28,865 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2996.0, num_to_drop=1, layers_to_drop={2}
+2023-03-27 16:27:29,528 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.86 vs. limit=2.0
+2023-03-27 16:27:32,702 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2998.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 16:27:41,286 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+02 7.115e+02 9.003e+02 1.112e+03 2.091e+03, threshold=1.801e+03, percent-clipped=6.0
+2023-03-27 16:27:46,923 INFO [train.py:892] (1/4) Epoch 2, batch 1150, loss[loss=0.4103, simple_loss=0.4014, pruned_loss=0.2096, over 19757.00 frames. ], tot_loss[loss=0.4058, simple_loss=0.4075, pruned_loss=0.2021, over 3936640.52 frames. ], batch size: 125, lr: 4.54e-02, grad_scale: 16.0
+2023-03-27 16:28:02,404 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3231, 2.2027, 1.9193, 2.1899, 1.9969, 1.8768, 2.1947, 2.0947],
+ device='cuda:1'), covar=tensor([0.0238, 0.0211, 0.0591, 0.0184, 0.0389, 0.0493, 0.0310, 0.0434],
+ device='cuda:1'), in_proj_covar=tensor([0.0026, 0.0029, 0.0032, 0.0028, 0.0031, 0.0028, 0.0028, 0.0029],
+ device='cuda:1'), out_proj_covar=tensor([2.7225e-05, 2.9018e-05, 3.4484e-05, 2.6769e-05, 3.3080e-05, 3.0061e-05,
+ 2.8613e-05, 3.2881e-05], device='cuda:1')
+2023-03-27 16:28:17,755 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3019.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:29:00,188 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3041.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:29:05,926 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3044.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 16:29:30,568 INFO [train.py:892] (1/4) Epoch 2, batch 1200, loss[loss=0.3631, simple_loss=0.3704, pruned_loss=0.1779, over 19820.00 frames. ], tot_loss[loss=0.4029, simple_loss=0.4058, pruned_loss=0.2, over 3939633.86 frames. ], batch size: 123, lr: 4.53e-02, grad_scale: 16.0
+2023-03-27 16:29:57,865 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2799, 2.3778, 1.9913, 2.3820, 2.1759, 2.3795, 2.4828, 2.1401],
+ device='cuda:1'), covar=tensor([0.0493, 0.0351, 0.1422, 0.0480, 0.0424, 0.0446, 0.0333, 0.0380],
+ device='cuda:1'), in_proj_covar=tensor([0.0031, 0.0027, 0.0049, 0.0028, 0.0030, 0.0030, 0.0029, 0.0030],
+ device='cuda:1'), out_proj_covar=tensor([2.8051e-05, 2.4202e-05, 5.6609e-05, 2.5189e-05, 2.6327e-05, 3.0624e-05,
+ 2.5376e-05, 2.7007e-05], device='cuda:1')
+2023-03-27 16:29:57,893 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.5234, 2.0236, 2.0892, 1.2387, 1.8597, 2.3800, 2.0284, 1.7228],
+ device='cuda:1'), covar=tensor([0.0195, 0.0189, 0.0222, 0.0402, 0.0227, 0.0118, 0.0166, 0.0332],
+ device='cuda:1'), in_proj_covar=tensor([0.0022, 0.0026, 0.0025, 0.0026, 0.0024, 0.0022, 0.0022, 0.0023],
+ device='cuda:1'), out_proj_covar=tensor([2.1527e-05, 2.5619e-05, 2.5471e-05, 2.6753e-05, 2.3761e-05, 2.2319e-05,
+ 2.2822e-05, 2.4580e-05], device='cuda:1')
+2023-03-27 16:30:17,977 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3080.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 16:31:03,699 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.35 vs. limit=5.0
+2023-03-27 16:31:04,251 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.272e+02 6.636e+02 8.301e+02 1.092e+03 3.055e+03, threshold=1.660e+03, percent-clipped=1.0
+2023-03-27 16:31:10,199 INFO [train.py:892] (1/4) Epoch 2, batch 1250, loss[loss=0.3754, simple_loss=0.3896, pruned_loss=0.1805, over 19812.00 frames. ], tot_loss[loss=0.3997, simple_loss=0.4038, pruned_loss=0.1978, over 3939964.18 frames. ], batch size: 117, lr: 4.52e-02, grad_scale: 16.0
+2023-03-27 16:32:26,844 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2605, 4.8757, 5.1253, 4.7804, 4.9208, 5.1371, 4.8616, 5.4692],
+ device='cuda:1'), covar=tensor([0.1644, 0.0304, 0.0268, 0.0259, 0.0213, 0.0225, 0.0205, 0.0226],
+ device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0094, 0.0082, 0.0085, 0.0082, 0.0084, 0.0079, 0.0073],
+ device='cuda:1'), out_proj_covar=tensor([1.5888e-04, 1.1767e-04, 1.0764e-04, 1.0597e-04, 9.9432e-05, 1.0737e-04,
+ 9.8578e-05, 9.3050e-05], device='cuda:1')
+2023-03-27 16:32:49,631 INFO [train.py:892] (1/4) Epoch 2, batch 1300, loss[loss=0.4132, simple_loss=0.4128, pruned_loss=0.2067, over 19778.00 frames. ], tot_loss[loss=0.3995, simple_loss=0.4033, pruned_loss=0.1978, over 3942630.33 frames. ], batch size: 236, lr: 4.51e-02, grad_scale: 16.0
+2023-03-27 16:33:37,169 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3180.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:34:23,392 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+02 7.276e+02 8.986e+02 1.118e+03 2.110e+03, threshold=1.797e+03, percent-clipped=4.0
+2023-03-27 16:34:28,597 INFO [train.py:892] (1/4) Epoch 2, batch 1350, loss[loss=0.4156, simple_loss=0.3989, pruned_loss=0.2162, over 19780.00 frames. ], tot_loss[loss=0.3972, simple_loss=0.4016, pruned_loss=0.1964, over 3945200.29 frames. ], batch size: 215, lr: 4.50e-02, grad_scale: 16.0
+2023-03-27 16:34:32,896 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8838, 4.9156, 5.0905, 5.0531, 4.9872, 5.0660, 4.5746, 4.4223],
+ device='cuda:1'), covar=tensor([0.0282, 0.0291, 0.0365, 0.0309, 0.0306, 0.0354, 0.0389, 0.0669],
+ device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0077, 0.0093, 0.0088, 0.0091, 0.0075, 0.0098, 0.0126],
+ device='cuda:1'), out_proj_covar=tensor([9.9069e-05, 9.2887e-05, 1.0996e-04, 1.0624e-04, 1.0623e-04, 8.8818e-05,
+ 1.1172e-04, 1.5355e-04], device='cuda:1')
+2023-03-27 16:36:05,215 INFO [train.py:892] (1/4) Epoch 2, batch 1400, loss[loss=0.4647, simple_loss=0.4655, pruned_loss=0.232, over 19537.00 frames. ], tot_loss[loss=0.3984, simple_loss=0.4029, pruned_loss=0.1969, over 3944681.79 frames. ], batch size: 54, lr: 4.49e-02, grad_scale: 16.0
+2023-03-27 16:37:09,932 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3288.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:37:17,193 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2887, 4.4122, 4.4816, 4.4932, 4.3280, 4.3821, 4.1204, 3.9884],
+ device='cuda:1'), covar=tensor([0.0397, 0.0315, 0.0454, 0.0338, 0.0422, 0.0472, 0.0370, 0.0675],
+ device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0080, 0.0097, 0.0091, 0.0092, 0.0076, 0.0100, 0.0130],
+ device='cuda:1'), out_proj_covar=tensor([1.0259e-04, 9.6225e-05, 1.1751e-04, 1.1068e-04, 1.0773e-04, 9.1307e-05,
+ 1.1459e-04, 1.5744e-04], device='cuda:1')
+2023-03-27 16:37:18,911 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3293.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:37:38,107 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+02 7.473e+02 1.007e+03 1.301e+03 1.953e+03, threshold=2.013e+03, percent-clipped=1.0
+2023-03-27 16:37:43,354 INFO [train.py:892] (1/4) Epoch 2, batch 1450, loss[loss=0.3672, simple_loss=0.3757, pruned_loss=0.1793, over 19857.00 frames. ], tot_loss[loss=0.397, simple_loss=0.4022, pruned_loss=0.1959, over 3944895.19 frames. ], batch size: 157, lr: 4.48e-02, grad_scale: 16.0
+2023-03-27 16:38:55,769 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3341.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:39:13,595 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3349.0, num_to_drop=2, layers_to_drop={1, 3}
+2023-03-27 16:39:26,360 INFO [train.py:892] (1/4) Epoch 2, batch 1500, loss[loss=0.3842, simple_loss=0.3843, pruned_loss=0.192, over 19825.00 frames. ], tot_loss[loss=0.3929, simple_loss=0.3992, pruned_loss=0.1933, over 3947457.94 frames.
], batch size: 146, lr: 4.47e-02, grad_scale: 16.0 +2023-03-27 16:40:03,803 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3375.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:40:08,130 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6306, 2.4201, 2.3208, 3.1078, 2.9389, 2.6568, 2.7601, 2.6463], + device='cuda:1'), covar=tensor([0.0435, 0.0362, 0.1840, 0.0323, 0.0290, 0.0536, 0.0367, 0.0333], + device='cuda:1'), in_proj_covar=tensor([0.0037, 0.0032, 0.0060, 0.0032, 0.0034, 0.0034, 0.0032, 0.0031], + device='cuda:1'), out_proj_covar=tensor([3.7812e-05, 3.1666e-05, 7.4884e-05, 3.2637e-05, 3.3591e-05, 3.7318e-05, + 3.1710e-05, 3.1323e-05], device='cuda:1') +2023-03-27 16:40:34,009 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3389.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:40:59,889 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.824e+02 8.232e+02 9.819e+02 1.797e+03, threshold=1.646e+03, percent-clipped=0.0 +2023-03-27 16:41:05,678 INFO [train.py:892] (1/4) Epoch 2, batch 1550, loss[loss=0.3347, simple_loss=0.359, pruned_loss=0.1552, over 19731.00 frames. ], tot_loss[loss=0.3891, simple_loss=0.3963, pruned_loss=0.1909, over 3949567.00 frames. ], batch size: 47, lr: 4.46e-02, grad_scale: 16.0 +2023-03-27 16:42:07,462 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.13 vs. limit=2.0 +2023-03-27 16:42:46,610 INFO [train.py:892] (1/4) Epoch 2, batch 1600, loss[loss=0.3506, simple_loss=0.3757, pruned_loss=0.1628, over 19680.00 frames. ], tot_loss[loss=0.3882, simple_loss=0.3955, pruned_loss=0.1904, over 3950317.38 frames. ], batch size: 52, lr: 4.45e-02, grad_scale: 16.0 +2023-03-27 16:43:19,078 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7945, 1.5926, 1.7212, 1.6921, 1.4943, 1.2816, 1.6863, 1.7404], + device='cuda:1'), covar=tensor([0.0392, 0.0409, 0.0748, 0.0367, 0.0446, 0.0940, 0.0404, 0.0722], + device='cuda:1'), in_proj_covar=tensor([0.0028, 0.0031, 0.0033, 0.0030, 0.0031, 0.0032, 0.0030, 0.0032], + device='cuda:1'), out_proj_covar=tensor([3.2591e-05, 3.5590e-05, 4.0017e-05, 3.4377e-05, 3.6737e-05, 3.8891e-05, + 3.6425e-05, 3.9951e-05], device='cuda:1') +2023-03-27 16:43:35,413 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3480.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:44:24,961 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 7.682e+02 9.382e+02 1.262e+03 3.177e+03, threshold=1.876e+03, percent-clipped=11.0 +2023-03-27 16:44:28,502 INFO [train.py:892] (1/4) Epoch 2, batch 1650, loss[loss=0.3542, simple_loss=0.374, pruned_loss=0.1672, over 19863.00 frames. ], tot_loss[loss=0.389, simple_loss=0.3968, pruned_loss=0.1906, over 3945886.69 frames. ], batch size: 106, lr: 4.44e-02, grad_scale: 8.0 +2023-03-27 16:44:54,662 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.87 vs. limit=2.0 +2023-03-27 16:45:11,861 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3528.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:45:50,211 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3546.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:46:07,493 INFO [train.py:892] (1/4) Epoch 2, batch 1700, loss[loss=0.3699, simple_loss=0.3881, pruned_loss=0.1758, over 19826.00 frames. 
], tot_loss[loss=0.3898, simple_loss=0.3975, pruned_loss=0.1911, over 3947020.68 frames. ], batch size: 93, lr: 4.43e-02, grad_scale: 8.0 +2023-03-27 16:47:22,560 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3593.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:47:24,425 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5687, 4.2452, 4.4079, 4.0345, 4.2691, 4.4598, 3.9135, 4.7351], + device='cuda:1'), covar=tensor([0.2069, 0.0282, 0.0308, 0.0328, 0.0265, 0.0234, 0.0355, 0.0205], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0102, 0.0093, 0.0088, 0.0088, 0.0088, 0.0084, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 16:47:42,006 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.530e+02 9.023e+02 1.103e+03 1.410e+03 2.321e+03, threshold=2.205e+03, percent-clipped=10.0 +2023-03-27 16:47:45,750 INFO [train.py:892] (1/4) Epoch 2, batch 1750, loss[loss=0.3619, simple_loss=0.3837, pruned_loss=0.1701, over 19845.00 frames. ], tot_loss[loss=0.386, simple_loss=0.3952, pruned_loss=0.1884, over 3947738.80 frames. ], batch size: 43, lr: 4.42e-02, grad_scale: 8.0 +2023-03-27 16:47:48,488 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3607.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 16:48:03,681 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3188, 4.0033, 5.1568, 4.2597, 4.4649, 5.2939, 5.1208, 4.9987], + device='cuda:1'), covar=tensor([0.0115, 0.0336, 0.0091, 0.0906, 0.0116, 0.0052, 0.0071, 0.0083], + device='cuda:1'), in_proj_covar=tensor([0.0037, 0.0042, 0.0034, 0.0069, 0.0035, 0.0030, 0.0035, 0.0035], + device='cuda:1'), out_proj_covar=tensor([5.7135e-05, 6.8530e-05, 5.1518e-05, 1.0979e-04, 5.2800e-05, 5.1667e-05, + 5.5358e-05, 5.5481e-05], device='cuda:1') +2023-03-27 16:48:11,055 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9379, 4.8181, 3.5316, 5.1901, 5.0558, 2.7353, 4.9055, 3.8766], + device='cuda:1'), covar=tensor([0.0083, 0.0172, 0.1007, 0.0035, 0.0034, 0.1218, 0.0103, 0.0161], + device='cuda:1'), in_proj_covar=tensor([0.0055, 0.0056, 0.0108, 0.0036, 0.0037, 0.0112, 0.0060, 0.0049], + device='cuda:1'), out_proj_covar=tensor([4.4870e-05, 5.2132e-05, 9.6293e-05, 3.1153e-05, 3.1003e-05, 9.7319e-05, + 5.2380e-05, 3.8041e-05], device='cuda:1') +2023-03-27 16:48:44,960 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2844, 5.5831, 5.8001, 5.6531, 5.9378, 5.3128, 5.5504, 5.3636], + device='cuda:1'), covar=tensor([0.0937, 0.0493, 0.0876, 0.0362, 0.0571, 0.0883, 0.1075, 0.1814], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0105, 0.0168, 0.0113, 0.0135, 0.0130, 0.0140, 0.0187], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 16:48:50,040 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3641.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:48:55,202 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3644.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 16:49:15,985 INFO [train.py:892] (1/4) Epoch 2, batch 1800, loss[loss=0.6158, simple_loss=0.5539, pruned_loss=0.3388, over 19420.00 frames. ], tot_loss[loss=0.3852, simple_loss=0.3947, pruned_loss=0.1879, over 3947635.08 frames. 
], batch size: 431, lr: 4.41e-02, grad_scale: 8.0 +2023-03-27 16:49:39,678 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2083, 3.5962, 4.0781, 4.5211, 3.0375, 4.0724, 4.2596, 2.2361], + device='cuda:1'), covar=tensor([0.0180, 0.1427, 0.0368, 0.0075, 0.2262, 0.0240, 0.0291, 0.2674], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0103, 0.0076, 0.0056, 0.0153, 0.0068, 0.0079, 0.0154], + device='cuda:1'), out_proj_covar=tensor([5.6609e-05, 9.4448e-05, 6.6220e-05, 4.2770e-05, 1.3107e-04, 5.3083e-05, + 6.8523e-05, 1.3309e-04], device='cuda:1') +2023-03-27 16:49:49,909 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3675.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:49:51,520 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3676.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 16:50:11,632 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-27 16:50:37,049 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+02 8.227e+02 1.028e+03 1.238e+03 2.427e+03, threshold=2.056e+03, percent-clipped=3.0 +2023-03-27 16:50:40,373 INFO [train.py:892] (1/4) Epoch 2, batch 1850, loss[loss=0.3286, simple_loss=0.3686, pruned_loss=0.1443, over 19826.00 frames. ], tot_loss[loss=0.3813, simple_loss=0.3932, pruned_loss=0.1847, over 3947804.89 frames. ], batch size: 57, lr: 4.39e-02, grad_scale: 8.0 +2023-03-27 16:51:36,780 INFO [train.py:892] (1/4) Epoch 3, batch 0, loss[loss=0.4103, simple_loss=0.4108, pruned_loss=0.2049, over 19716.00 frames. ], tot_loss[loss=0.4103, simple_loss=0.4108, pruned_loss=0.2049, over 19716.00 frames. ], batch size: 219, lr: 4.17e-02, grad_scale: 8.0 +2023-03-27 16:51:36,781 INFO [train.py:917] (1/4) Computing validation loss +2023-03-27 16:52:02,990 INFO [train.py:926] (1/4) Epoch 3, validation: loss=0.2594, simple_loss=0.3267, pruned_loss=0.09605, over 2883724.00 frames. +2023-03-27 16:52:02,991 INFO [train.py:927] (1/4) Maximum memory allocated so far is 20817MB +2023-03-27 16:52:30,224 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3723.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:52:57,952 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3737.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 16:53:22,918 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.6048, 5.8660, 5.9466, 5.8234, 5.8652, 5.5815, 5.7141, 5.6110], + device='cuda:1'), covar=tensor([0.0660, 0.0519, 0.0644, 0.0346, 0.0494, 0.0587, 0.0655, 0.1459], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0109, 0.0175, 0.0118, 0.0137, 0.0137, 0.0148, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 16:53:50,422 INFO [train.py:892] (1/4) Epoch 3, batch 50, loss[loss=0.2958, simple_loss=0.3331, pruned_loss=0.1293, over 19560.00 frames. ], tot_loss[loss=0.3544, simple_loss=0.3719, pruned_loss=0.1685, over 890062.59 frames. ], batch size: 47, lr: 4.16e-02, grad_scale: 8.0 +2023-03-27 16:54:17,989 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.10 vs. 
limit=2.0 +2023-03-27 16:55:16,583 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+02 7.628e+02 9.313e+02 1.232e+03 3.200e+03, threshold=1.863e+03, percent-clipped=4.0 +2023-03-27 16:55:31,487 INFO [train.py:892] (1/4) Epoch 3, batch 100, loss[loss=0.3323, simple_loss=0.356, pruned_loss=0.1542, over 19911.00 frames. ], tot_loss[loss=0.3647, simple_loss=0.3786, pruned_loss=0.1754, over 1569113.01 frames. ], batch size: 45, lr: 4.15e-02, grad_scale: 8.0 +2023-03-27 16:57:14,244 INFO [train.py:892] (1/4) Epoch 3, batch 150, loss[loss=0.3297, simple_loss=0.3488, pruned_loss=0.1553, over 19875.00 frames. ], tot_loss[loss=0.3647, simple_loss=0.3795, pruned_loss=0.1749, over 2098386.13 frames. ], batch size: 159, lr: 4.14e-02, grad_scale: 8.0 +2023-03-27 16:57:34,614 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.8447, 6.1157, 6.0630, 5.8925, 5.9437, 6.0495, 5.4400, 5.3942], + device='cuda:1'), covar=tensor([0.0275, 0.0234, 0.0411, 0.0314, 0.0322, 0.0368, 0.0296, 0.0753], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0088, 0.0120, 0.0102, 0.0103, 0.0084, 0.0115, 0.0150], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-27 16:58:09,539 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0 +2023-03-27 16:58:41,883 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3902.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:58:44,800 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.184e+02 7.831e+02 9.821e+02 1.147e+03 2.106e+03, threshold=1.964e+03, percent-clipped=1.0 +2023-03-27 16:58:59,048 INFO [train.py:892] (1/4) Epoch 3, batch 200, loss[loss=0.3578, simple_loss=0.375, pruned_loss=0.1702, over 19810.00 frames. ], tot_loss[loss=0.3668, simple_loss=0.3812, pruned_loss=0.1762, over 2508636.48 frames. ], batch size: 74, lr: 4.13e-02, grad_scale: 8.0 +2023-03-27 16:59:30,223 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3926.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:00:09,686 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3944.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 17:00:41,362 INFO [train.py:892] (1/4) Epoch 3, batch 250, loss[loss=0.3796, simple_loss=0.4079, pruned_loss=0.1757, over 19855.00 frames. ], tot_loss[loss=0.3655, simple_loss=0.3807, pruned_loss=0.1752, over 2826549.00 frames. 
], batch size: 60, lr: 4.12e-02, grad_scale: 8.0 +2023-03-27 17:01:27,974 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8042, 4.7179, 5.2649, 4.9968, 5.2756, 4.6509, 5.0259, 4.8155], + device='cuda:1'), covar=tensor([0.0934, 0.0759, 0.0809, 0.0450, 0.0572, 0.1027, 0.0993, 0.1799], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0116, 0.0179, 0.0125, 0.0148, 0.0142, 0.0154, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 17:01:37,875 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3987.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:01:47,632 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3992.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 17:02:15,833 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.914e+02 7.402e+02 9.885e+02 1.286e+03 2.496e+03, threshold=1.977e+03, percent-clipped=5.0 +2023-03-27 17:02:31,809 INFO [train.py:892] (1/4) Epoch 3, batch 300, loss[loss=0.3028, simple_loss=0.3395, pruned_loss=0.1331, over 19730.00 frames. ], tot_loss[loss=0.3638, simple_loss=0.3787, pruned_loss=0.1744, over 3075897.43 frames. ], batch size: 47, lr: 4.11e-02, grad_scale: 8.0 +2023-03-27 17:03:15,068 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4032.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 17:04:07,518 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5258, 2.7342, 3.8051, 3.3933, 3.5079, 3.6168, 2.8219, 2.6821], + device='cuda:1'), covar=tensor([0.0703, 0.5888, 0.0367, 0.0444, 0.1496, 0.0687, 0.1035, 0.1659], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0141, 0.0057, 0.0060, 0.0098, 0.0050, 0.0065, 0.0081], + device='cuda:1'), out_proj_covar=tensor([7.9220e-05, 1.4781e-04, 5.0062e-05, 5.1076e-05, 9.5720e-05, 4.8516e-05, + 5.9832e-05, 7.5358e-05], device='cuda:1') +2023-03-27 17:04:16,802 INFO [train.py:892] (1/4) Epoch 3, batch 350, loss[loss=0.3414, simple_loss=0.3705, pruned_loss=0.1561, over 19733.00 frames. ], tot_loss[loss=0.3605, simple_loss=0.3773, pruned_loss=0.1719, over 3267450.91 frames. ], batch size: 92, lr: 4.10e-02, grad_scale: 8.0 +2023-03-27 17:04:37,824 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-27 17:05:09,228 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9742, 3.5154, 3.7271, 3.3706, 3.6312, 3.7505, 3.3339, 3.9620], + device='cuda:1'), covar=tensor([0.1953, 0.0377, 0.0418, 0.0414, 0.0397, 0.0387, 0.0410, 0.0294], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0105, 0.0094, 0.0093, 0.0096, 0.0090, 0.0084, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 17:05:45,543 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.173e+02 7.083e+02 8.600e+02 1.034e+03 2.171e+03, threshold=1.720e+03, percent-clipped=1.0 +2023-03-27 17:05:59,477 INFO [train.py:892] (1/4) Epoch 3, batch 400, loss[loss=0.3471, simple_loss=0.3747, pruned_loss=0.1597, over 19647.00 frames. ], tot_loss[loss=0.3588, simple_loss=0.3754, pruned_loss=0.1711, over 3419315.36 frames. 
], batch size: 57, lr: 4.09e-02, grad_scale: 8.0 +2023-03-27 17:07:02,435 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4141.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:07:08,754 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.05 vs. limit=2.0 +2023-03-27 17:07:46,002 INFO [train.py:892] (1/4) Epoch 3, batch 450, loss[loss=0.3498, simple_loss=0.3656, pruned_loss=0.167, over 19836.00 frames. ], tot_loss[loss=0.3603, simple_loss=0.377, pruned_loss=0.1718, over 3537647.26 frames. ], batch size: 171, lr: 4.08e-02, grad_scale: 8.0 +2023-03-27 17:09:11,902 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4202.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 17:09:12,066 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4202.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:09:14,943 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.069e+02 7.856e+02 9.394e+02 1.124e+03 2.140e+03, threshold=1.879e+03, percent-clipped=3.0 +2023-03-27 17:09:28,189 INFO [train.py:892] (1/4) Epoch 3, batch 500, loss[loss=0.3692, simple_loss=0.3815, pruned_loss=0.1784, over 19780.00 frames. ], tot_loss[loss=0.3598, simple_loss=0.3769, pruned_loss=0.1713, over 3627039.96 frames. ], batch size: 215, lr: 4.07e-02, grad_scale: 8.0 +2023-03-27 17:09:59,018 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7795, 2.7823, 3.3572, 2.9006, 4.1290, 3.4250, 3.4117, 3.7596], + device='cuda:1'), covar=tensor([0.0369, 0.1177, 0.0480, 0.1220, 0.0245, 0.0326, 0.0322, 0.0232], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0093, 0.0061, 0.0104, 0.0044, 0.0062, 0.0047, 0.0047], + device='cuda:1'), out_proj_covar=tensor([6.0227e-05, 8.6536e-05, 5.9121e-05, 1.0133e-04, 4.6108e-05, 5.8385e-05, + 4.2070e-05, 4.2523e-05], device='cuda:1') +2023-03-27 17:10:50,175 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4250.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 17:11:07,863 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2693, 2.1349, 1.6826, 2.3726, 2.4955, 2.4707, 2.4313, 2.4608], + device='cuda:1'), covar=tensor([0.0514, 0.0450, 0.1797, 0.0568, 0.0399, 0.0333, 0.0629, 0.0238], + device='cuda:1'), in_proj_covar=tensor([0.0051, 0.0043, 0.0077, 0.0044, 0.0045, 0.0040, 0.0043, 0.0038], + device='cuda:1'), out_proj_covar=tensor([6.2874e-05, 5.6801e-05, 1.0743e-04, 5.7688e-05, 5.6263e-05, 5.5018e-05, + 5.6400e-05, 5.0389e-05], device='cuda:1') +2023-03-27 17:11:12,797 INFO [train.py:892] (1/4) Epoch 3, batch 550, loss[loss=0.3497, simple_loss=0.3782, pruned_loss=0.1606, over 19679.00 frames. ], tot_loss[loss=0.3572, simple_loss=0.3749, pruned_loss=0.1698, over 3699725.64 frames. ], batch size: 52, lr: 4.06e-02, grad_scale: 8.0 +2023-03-27 17:11:49,093 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. 
limit=2.0 +2023-03-27 17:11:56,599 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4282.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:12:08,129 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4288.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:12:42,156 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+02 7.145e+02 8.933e+02 1.130e+03 1.909e+03, threshold=1.787e+03, percent-clipped=2.0 +2023-03-27 17:12:57,543 INFO [train.py:892] (1/4) Epoch 3, batch 600, loss[loss=0.3169, simple_loss=0.3572, pruned_loss=0.1382, over 19914.00 frames. ], tot_loss[loss=0.3566, simple_loss=0.3744, pruned_loss=0.1694, over 3755289.76 frames. ], batch size: 53, lr: 4.05e-02, grad_scale: 8.0 +2023-03-27 17:13:41,298 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4332.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 17:13:52,415 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.7610, 6.0911, 6.0909, 5.8448, 6.0244, 6.0619, 5.3568, 5.3634], + device='cuda:1'), covar=tensor([0.0301, 0.0199, 0.0404, 0.0277, 0.0357, 0.0403, 0.0413, 0.0837], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0096, 0.0135, 0.0105, 0.0112, 0.0090, 0.0123, 0.0162], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 17:14:16,455 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4349.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:14:38,642 INFO [train.py:892] (1/4) Epoch 3, batch 650, loss[loss=0.3225, simple_loss=0.3393, pruned_loss=0.1529, over 19750.00 frames. ], tot_loss[loss=0.3541, simple_loss=0.3724, pruned_loss=0.1678, over 3798077.80 frames. ], batch size: 139, lr: 4.04e-02, grad_scale: 8.0 +2023-03-27 17:15:21,794 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4380.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 17:16:08,632 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+02 7.551e+02 9.171e+02 1.079e+03 2.038e+03, threshold=1.834e+03, percent-clipped=1.0 +2023-03-27 17:16:24,581 INFO [train.py:892] (1/4) Epoch 3, batch 700, loss[loss=0.3256, simple_loss=0.3558, pruned_loss=0.1476, over 19597.00 frames. ], tot_loss[loss=0.3562, simple_loss=0.3738, pruned_loss=0.1693, over 3830426.59 frames. 
], batch size: 42, lr: 4.03e-02, grad_scale: 8.0 +2023-03-27 17:16:58,912 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7450, 2.4002, 3.0940, 3.1392, 4.0852, 3.3950, 3.7282, 3.6228], + device='cuda:1'), covar=tensor([0.0362, 0.1368, 0.0591, 0.1246, 0.0371, 0.0368, 0.0221, 0.0285], + device='cuda:1'), in_proj_covar=tensor([0.0063, 0.0096, 0.0066, 0.0109, 0.0048, 0.0066, 0.0049, 0.0053], + device='cuda:1'), out_proj_covar=tensor([6.5313e-05, 9.1447e-05, 6.6768e-05, 1.0878e-04, 5.2631e-05, 6.4927e-05, + 4.5955e-05, 5.0376e-05], device='cuda:1') +2023-03-27 17:17:04,818 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4795, 3.0435, 2.3133, 3.5422, 2.8695, 3.0563, 3.3965, 2.6501], + device='cuda:1'), covar=tensor([0.0786, 0.0504, 0.1678, 0.0543, 0.0780, 0.0534, 0.0402, 0.0432], + device='cuda:1'), in_proj_covar=tensor([0.0051, 0.0043, 0.0078, 0.0045, 0.0047, 0.0041, 0.0043, 0.0038], + device='cuda:1'), out_proj_covar=tensor([6.5281e-05, 5.8314e-05, 1.1042e-04, 6.1881e-05, 6.0846e-05, 5.7160e-05, + 5.9199e-05, 5.2574e-05], device='cuda:1') +2023-03-27 17:17:07,081 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-27 17:17:26,189 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.72 vs. limit=2.0 +2023-03-27 17:17:36,234 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9309, 1.5892, 3.0494, 2.9571, 2.8734, 2.8230, 2.8150, 2.7577], + device='cuda:1'), covar=tensor([0.0243, 0.2157, 0.0217, 0.0209, 0.0276, 0.0258, 0.0302, 0.0535], + device='cuda:1'), in_proj_covar=tensor([0.0053, 0.0113, 0.0058, 0.0052, 0.0049, 0.0056, 0.0050, 0.0058], + device='cuda:1'), out_proj_covar=tensor([6.5238e-05, 1.3171e-04, 6.7842e-05, 6.3629e-05, 6.4970e-05, 6.7973e-05, + 6.5350e-05, 7.8656e-05], device='cuda:1') +2023-03-27 17:18:06,992 INFO [train.py:892] (1/4) Epoch 3, batch 750, loss[loss=0.3531, simple_loss=0.3712, pruned_loss=0.1675, over 19883.00 frames. ], tot_loss[loss=0.352, simple_loss=0.3711, pruned_loss=0.1664, over 3858022.38 frames. ], batch size: 47, lr: 4.02e-02, grad_scale: 8.0 +2023-03-27 17:19:22,228 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4497.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:19:34,746 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+02 7.939e+02 9.251e+02 1.113e+03 1.728e+03, threshold=1.850e+03, percent-clipped=0.0 +2023-03-27 17:19:41,681 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.39 vs. limit=5.0 +2023-03-27 17:19:48,316 INFO [train.py:892] (1/4) Epoch 3, batch 800, loss[loss=0.3464, simple_loss=0.3656, pruned_loss=0.1636, over 19787.00 frames. ], tot_loss[loss=0.3524, simple_loss=0.3717, pruned_loss=0.1666, over 3877333.00 frames. ], batch size: 91, lr: 4.01e-02, grad_scale: 8.0 +2023-03-27 17:19:57,914 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6180, 4.2352, 4.5014, 4.0422, 4.1988, 4.5254, 4.2261, 4.7729], + device='cuda:1'), covar=tensor([0.1664, 0.0273, 0.0294, 0.0330, 0.0370, 0.0228, 0.0253, 0.0194], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0108, 0.0097, 0.0095, 0.0103, 0.0090, 0.0086, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 17:21:33,148 INFO [train.py:892] (1/4) Epoch 3, batch 850, loss[loss=0.3255, simple_loss=0.3608, pruned_loss=0.1451, over 19640.00 frames. 
], tot_loss[loss=0.3504, simple_loss=0.3704, pruned_loss=0.1652, over 3893357.40 frames. ], batch size: 72, lr: 4.00e-02, grad_scale: 8.0 +2023-03-27 17:22:03,303 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1201, 2.9736, 3.8670, 4.2052, 2.9742, 3.6395, 3.4620, 2.2762], + device='cuda:1'), covar=tensor([0.0206, 0.2537, 0.0456, 0.0102, 0.1747, 0.0256, 0.0449, 0.2104], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0213, 0.0109, 0.0081, 0.0179, 0.0092, 0.0111, 0.0177], + device='cuda:1'), out_proj_covar=tensor([9.0100e-05, 2.0357e-04, 1.0534e-04, 7.1485e-05, 1.6480e-04, 8.2652e-05, + 1.0586e-04, 1.6388e-04], device='cuda:1') +2023-03-27 17:22:13,955 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4582.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:22:48,585 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-27 17:22:58,519 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+02 6.963e+02 8.935e+02 1.119e+03 2.174e+03, threshold=1.787e+03, percent-clipped=2.0 +2023-03-27 17:23:13,497 INFO [train.py:892] (1/4) Epoch 3, batch 900, loss[loss=0.3264, simple_loss=0.3581, pruned_loss=0.1474, over 19780.00 frames. ], tot_loss[loss=0.3487, simple_loss=0.3691, pruned_loss=0.1641, over 3907524.17 frames. ], batch size: 215, lr: 3.99e-02, grad_scale: 8.0 +2023-03-27 17:23:53,311 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4630.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:23:56,251 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.00 vs. limit=5.0 +2023-03-27 17:24:23,400 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4644.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:24:56,909 INFO [train.py:892] (1/4) Epoch 3, batch 950, loss[loss=0.3444, simple_loss=0.3643, pruned_loss=0.1622, over 19762.00 frames. ], tot_loss[loss=0.3481, simple_loss=0.3687, pruned_loss=0.1638, over 3917100.62 frames. ], batch size: 155, lr: 3.98e-02, grad_scale: 8.0 +2023-03-27 17:25:02,106 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. limit=2.0 +2023-03-27 17:25:18,613 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-27 17:26:26,124 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 7.071e+02 8.023e+02 9.803e+02 2.669e+03, threshold=1.605e+03, percent-clipped=2.0 +2023-03-27 17:26:42,509 INFO [train.py:892] (1/4) Epoch 3, batch 1000, loss[loss=0.3329, simple_loss=0.3499, pruned_loss=0.1579, over 19816.00 frames. ], tot_loss[loss=0.3505, simple_loss=0.3706, pruned_loss=0.1652, over 3922506.24 frames. ], batch size: 40, lr: 3.97e-02, grad_scale: 8.0 +2023-03-27 17:26:43,689 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. 
limit=2.0 +2023-03-27 17:27:01,231 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4915, 1.3007, 2.6216, 2.4697, 2.4199, 2.5369, 2.6784, 2.4812], + device='cuda:1'), covar=tensor([0.0276, 0.2159, 0.0277, 0.0260, 0.0330, 0.0242, 0.0229, 0.0421], + device='cuda:1'), in_proj_covar=tensor([0.0055, 0.0117, 0.0060, 0.0052, 0.0050, 0.0056, 0.0049, 0.0059], + device='cuda:1'), out_proj_covar=tensor([6.9568e-05, 1.3914e-04, 7.3012e-05, 6.7229e-05, 6.9499e-05, 7.0473e-05, + 6.5382e-05, 8.1245e-05], device='cuda:1') +2023-03-27 17:28:22,560 INFO [train.py:892] (1/4) Epoch 3, batch 1050, loss[loss=0.3628, simple_loss=0.3735, pruned_loss=0.1761, over 19812.00 frames. ], tot_loss[loss=0.3483, simple_loss=0.3688, pruned_loss=0.1639, over 3928338.14 frames. ], batch size: 72, lr: 3.96e-02, grad_scale: 8.0 +2023-03-27 17:29:37,163 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4797.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:29:50,536 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.737e+02 7.391e+02 9.077e+02 1.100e+03 2.003e+03, threshold=1.815e+03, percent-clipped=3.0 +2023-03-27 17:30:03,722 INFO [train.py:892] (1/4) Epoch 3, batch 1100, loss[loss=0.3284, simple_loss=0.3541, pruned_loss=0.1513, over 19760.00 frames. ], tot_loss[loss=0.3478, simple_loss=0.3686, pruned_loss=0.1635, over 3933706.03 frames. ], batch size: 88, lr: 3.95e-02, grad_scale: 8.0 +2023-03-27 17:30:04,672 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3136, 4.0305, 2.5918, 4.5656, 4.5149, 1.6333, 3.6228, 3.9043], + device='cuda:1'), covar=tensor([0.0165, 0.0329, 0.1552, 0.0075, 0.0058, 0.1997, 0.0335, 0.0150], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0090, 0.0130, 0.0050, 0.0048, 0.0132, 0.0091, 0.0067], + device='cuda:1'), out_proj_covar=tensor([7.7723e-05, 9.4187e-05, 1.2594e-04, 5.1732e-05, 4.7916e-05, 1.2324e-04, + 9.0295e-05, 6.2117e-05], device='cuda:1') +2023-03-27 17:31:15,744 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4845.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:31:50,562 INFO [train.py:892] (1/4) Epoch 3, batch 1150, loss[loss=0.3372, simple_loss=0.3586, pruned_loss=0.1579, over 19839.00 frames. ], tot_loss[loss=0.349, simple_loss=0.3694, pruned_loss=0.1643, over 3937335.53 frames. ], batch size: 75, lr: 3.95e-02, grad_scale: 8.0 +2023-03-27 17:33:20,479 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+02 7.167e+02 9.190e+02 1.175e+03 2.796e+03, threshold=1.838e+03, percent-clipped=7.0 +2023-03-27 17:33:33,649 INFO [train.py:892] (1/4) Epoch 3, batch 1200, loss[loss=0.3208, simple_loss=0.3509, pruned_loss=0.1454, over 19663.00 frames. ], tot_loss[loss=0.3469, simple_loss=0.3685, pruned_loss=0.1627, over 3938057.63 frames. 
], batch size: 67, lr: 3.94e-02, grad_scale: 8.0 +2023-03-27 17:33:34,722 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0114, 2.8799, 2.5622, 3.3342, 3.2154, 3.4067, 3.4833, 3.1709], + device='cuda:1'), covar=tensor([0.0555, 0.0562, 0.1261, 0.0697, 0.0392, 0.0698, 0.0587, 0.0255], + device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0048, 0.0083, 0.0050, 0.0051, 0.0043, 0.0049, 0.0042], + device='cuda:1'), out_proj_covar=tensor([8.1592e-05, 7.2043e-05, 1.2246e-04, 7.6505e-05, 7.2998e-05, 6.6270e-05, + 7.2989e-05, 6.4210e-05], device='cuda:1') +2023-03-27 17:33:51,467 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.4886, 2.4065, 1.5976, 2.2055, 1.7922, 1.4980, 1.9107, 2.5657], + device='cuda:1'), covar=tensor([0.0985, 0.0310, 0.0609, 0.0258, 0.0367, 0.0861, 0.0711, 0.0481], + device='cuda:1'), in_proj_covar=tensor([0.0026, 0.0027, 0.0031, 0.0027, 0.0027, 0.0031, 0.0032, 0.0028], + device='cuda:1'), out_proj_covar=tensor([4.0215e-05, 3.9904e-05, 4.8077e-05, 4.1268e-05, 4.3187e-05, 4.6959e-05, + 4.8666e-05, 4.3946e-05], device='cuda:1') +2023-03-27 17:34:25,053 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6363, 2.6712, 3.0577, 2.6011, 2.9718, 2.3390, 2.6479, 3.3271], + device='cuda:1'), covar=tensor([0.0921, 0.0257, 0.0502, 0.0440, 0.0520, 0.0474, 0.0480, 0.0319], + device='cuda:1'), in_proj_covar=tensor([0.0039, 0.0032, 0.0037, 0.0044, 0.0037, 0.0031, 0.0031, 0.0030], + device='cuda:1'), out_proj_covar=tensor([6.2384e-05, 5.0045e-05, 5.7039e-05, 6.3639e-05, 5.9384e-05, 5.2378e-05, + 5.0204e-05, 4.7990e-05], device='cuda:1') +2023-03-27 17:34:43,347 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4944.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:35:18,120 INFO [train.py:892] (1/4) Epoch 3, batch 1250, loss[loss=0.3508, simple_loss=0.3652, pruned_loss=0.1682, over 19755.00 frames. ], tot_loss[loss=0.347, simple_loss=0.3689, pruned_loss=0.1626, over 3938283.60 frames. ], batch size: 205, lr: 3.93e-02, grad_scale: 8.0 +2023-03-27 17:36:20,281 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4992.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:36:47,192 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.843e+02 6.781e+02 8.883e+02 1.088e+03 2.699e+03, threshold=1.777e+03, percent-clipped=1.0 +2023-03-27 17:36:55,790 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3891, 5.5002, 5.8287, 5.6273, 5.7173, 5.3717, 5.4021, 5.3484], + device='cuda:1'), covar=tensor([0.1067, 0.0670, 0.0848, 0.0524, 0.0585, 0.0833, 0.1762, 0.2493], + device='cuda:1'), in_proj_covar=tensor([0.0171, 0.0127, 0.0197, 0.0143, 0.0156, 0.0146, 0.0171, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 17:37:00,619 INFO [train.py:892] (1/4) Epoch 3, batch 1300, loss[loss=0.3842, simple_loss=0.4133, pruned_loss=0.1776, over 19674.00 frames. ], tot_loss[loss=0.3474, simple_loss=0.3694, pruned_loss=0.1627, over 3940956.62 frames. 
], batch size: 51, lr: 3.92e-02, grad_scale: 8.0 +2023-03-27 17:37:10,634 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5292, 3.2514, 4.2716, 3.4395, 3.6137, 4.3756, 4.2734, 3.9082], + device='cuda:1'), covar=tensor([0.0211, 0.0520, 0.0121, 0.2089, 0.0211, 0.0122, 0.0141, 0.0129], + device='cuda:1'), in_proj_covar=tensor([0.0049, 0.0052, 0.0048, 0.0127, 0.0046, 0.0043, 0.0051, 0.0045], + device='cuda:1'), out_proj_covar=tensor([9.6480e-05, 1.1523e-04, 9.0440e-05, 2.3950e-04, 8.9294e-05, 9.7526e-05, + 1.0481e-04, 9.2218e-05], device='cuda:1') +2023-03-27 17:38:02,551 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1945, 4.8699, 5.1226, 4.7326, 4.5259, 4.9404, 4.7002, 5.4588], + device='cuda:1'), covar=tensor([0.1481, 0.0247, 0.0299, 0.0268, 0.0303, 0.0195, 0.0238, 0.0178], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0110, 0.0106, 0.0099, 0.0105, 0.0094, 0.0090, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 17:38:43,018 INFO [train.py:892] (1/4) Epoch 3, batch 1350, loss[loss=0.3178, simple_loss=0.3394, pruned_loss=0.1481, over 19792.00 frames. ], tot_loss[loss=0.3441, simple_loss=0.3669, pruned_loss=0.1606, over 3943864.87 frames. ], batch size: 120, lr: 3.91e-02, grad_scale: 8.0 +2023-03-27 17:40:10,989 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 6.737e+02 7.871e+02 9.316e+02 1.602e+03, threshold=1.574e+03, percent-clipped=0.0 +2023-03-27 17:40:26,684 INFO [train.py:892] (1/4) Epoch 3, batch 1400, loss[loss=0.3115, simple_loss=0.3432, pruned_loss=0.1399, over 19789.00 frames. ], tot_loss[loss=0.3451, simple_loss=0.3678, pruned_loss=0.1612, over 3944339.71 frames. ], batch size: 87, lr: 3.90e-02, grad_scale: 8.0 +2023-03-27 17:42:06,531 INFO [train.py:892] (1/4) Epoch 3, batch 1450, loss[loss=0.3219, simple_loss=0.3567, pruned_loss=0.1435, over 19889.00 frames. ], tot_loss[loss=0.3453, simple_loss=0.3678, pruned_loss=0.1614, over 3944450.27 frames. ], batch size: 47, lr: 3.89e-02, grad_scale: 8.0 +2023-03-27 17:43:35,887 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+02 7.354e+02 9.251e+02 1.233e+03 2.096e+03, threshold=1.850e+03, percent-clipped=6.0 +2023-03-27 17:43:49,221 INFO [train.py:892] (1/4) Epoch 3, batch 1500, loss[loss=0.3159, simple_loss=0.3403, pruned_loss=0.1458, over 19872.00 frames. ], tot_loss[loss=0.3405, simple_loss=0.3643, pruned_loss=0.1583, over 3944132.66 frames. ], batch size: 138, lr: 3.88e-02, grad_scale: 8.0 +2023-03-27 17:44:42,896 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4604, 2.2050, 1.5894, 2.5617, 2.5977, 2.5638, 2.6133, 2.4082], + device='cuda:1'), covar=tensor([0.0611, 0.0477, 0.1644, 0.0455, 0.0332, 0.0438, 0.0414, 0.0343], + device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0049, 0.0081, 0.0050, 0.0048, 0.0042, 0.0048, 0.0044], + device='cuda:1'), out_proj_covar=tensor([8.6434e-05, 7.6739e-05, 1.2391e-04, 8.0247e-05, 7.3103e-05, 6.8110e-05, + 7.6659e-05, 7.0971e-05], device='cuda:1') +2023-03-27 17:45:34,252 INFO [train.py:892] (1/4) Epoch 3, batch 1550, loss[loss=0.3023, simple_loss=0.3391, pruned_loss=0.1327, over 19860.00 frames. ], tot_loss[loss=0.3417, simple_loss=0.3651, pruned_loss=0.1592, over 3945368.90 frames. ], batch size: 106, lr: 3.87e-02, grad_scale: 8.0 +2023-03-27 17:46:15,229 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.44 vs. 
limit=5.0 +2023-03-27 17:47:02,176 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.559e+02 6.524e+02 7.733e+02 9.539e+02 1.385e+03, threshold=1.547e+03, percent-clipped=0.0 +2023-03-27 17:47:15,532 INFO [train.py:892] (1/4) Epoch 3, batch 1600, loss[loss=0.3305, simple_loss=0.3712, pruned_loss=0.1449, over 19881.00 frames. ], tot_loss[loss=0.3394, simple_loss=0.3631, pruned_loss=0.1578, over 3948296.90 frames. ], batch size: 84, lr: 3.86e-02, grad_scale: 8.0 +2023-03-27 17:47:44,582 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1644, 3.2155, 3.8401, 4.3275, 2.9124, 3.7112, 3.5411, 2.5623], + device='cuda:1'), covar=tensor([0.0206, 0.2229, 0.0380, 0.0103, 0.1791, 0.0261, 0.0434, 0.1724], + device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0255, 0.0128, 0.0087, 0.0192, 0.0102, 0.0126, 0.0183], + device='cuda:1'), out_proj_covar=tensor([1.1020e-04, 2.4957e-04, 1.3228e-04, 8.1926e-05, 1.8462e-04, 9.8003e-05, + 1.2666e-04, 1.7785e-04], device='cuda:1') +2023-03-27 17:48:27,107 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7956, 2.2452, 1.3643, 2.7633, 1.9472, 1.2543, 2.0333, 2.3690], + device='cuda:1'), covar=tensor([0.0468, 0.0376, 0.0575, 0.0161, 0.0339, 0.0876, 0.0455, 0.0865], + device='cuda:1'), in_proj_covar=tensor([0.0028, 0.0030, 0.0034, 0.0029, 0.0029, 0.0034, 0.0033, 0.0030], + device='cuda:1'), out_proj_covar=tensor([4.6170e-05, 4.7102e-05, 5.5506e-05, 4.5861e-05, 5.0122e-05, 5.5145e-05, + 5.3964e-05, 5.0897e-05], device='cuda:1') +2023-03-27 17:48:38,994 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.81 vs. limit=2.0 +2023-03-27 17:48:56,942 INFO [train.py:892] (1/4) Epoch 3, batch 1650, loss[loss=0.2924, simple_loss=0.3333, pruned_loss=0.1258, over 19857.00 frames. ], tot_loss[loss=0.3363, simple_loss=0.3608, pruned_loss=0.1559, over 3948176.65 frames. ], batch size: 112, lr: 3.85e-02, grad_scale: 8.0 +2023-03-27 17:50:27,886 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.447e+02 7.036e+02 8.570e+02 1.037e+03 1.553e+03, threshold=1.714e+03, percent-clipped=1.0 +2023-03-27 17:50:41,759 INFO [train.py:892] (1/4) Epoch 3, batch 1700, loss[loss=0.3158, simple_loss=0.3555, pruned_loss=0.138, over 19828.00 frames. ], tot_loss[loss=0.3371, simple_loss=0.3612, pruned_loss=0.1565, over 3949674.55 frames. ], batch size: 52, lr: 3.84e-02, grad_scale: 8.0 +2023-03-27 17:51:11,706 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.97 vs. limit=2.0 +2023-03-27 17:51:28,939 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.87 vs. limit=2.0 +2023-03-27 17:51:59,933 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.73 vs. limit=2.0 +2023-03-27 17:52:19,755 INFO [train.py:892] (1/4) Epoch 3, batch 1750, loss[loss=0.339, simple_loss=0.368, pruned_loss=0.1551, over 19772.00 frames. ], tot_loss[loss=0.3375, simple_loss=0.3614, pruned_loss=0.1568, over 3950550.31 frames. 
], batch size: 198, lr: 3.83e-02, grad_scale: 8.0 +2023-03-27 17:53:28,758 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1801, 2.0892, 2.5888, 2.3141, 2.5614, 2.0069, 2.2935, 2.5587], + device='cuda:1'), covar=tensor([0.0709, 0.0344, 0.0460, 0.0447, 0.0362, 0.0504, 0.0411, 0.0451], + device='cuda:1'), in_proj_covar=tensor([0.0043, 0.0033, 0.0038, 0.0049, 0.0040, 0.0034, 0.0033, 0.0032], + device='cuda:1'), out_proj_covar=tensor([7.6186e-05, 5.8625e-05, 6.6411e-05, 7.7739e-05, 7.0489e-05, 6.3044e-05, + 5.8704e-05, 5.7496e-05], device='cuda:1') +2023-03-27 17:53:34,942 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.884e+02 6.493e+02 7.809e+02 9.948e+02 1.676e+03, threshold=1.562e+03, percent-clipped=0.0 +2023-03-27 17:53:46,190 INFO [train.py:892] (1/4) Epoch 3, batch 1800, loss[loss=0.3624, simple_loss=0.3732, pruned_loss=0.1757, over 19736.00 frames. ], tot_loss[loss=0.3364, simple_loss=0.3606, pruned_loss=0.1561, over 3951392.24 frames. ], batch size: 259, lr: 3.82e-02, grad_scale: 16.0 +2023-03-27 17:55:08,421 INFO [train.py:892] (1/4) Epoch 3, batch 1850, loss[loss=0.3421, simple_loss=0.3796, pruned_loss=0.1524, over 19854.00 frames. ], tot_loss[loss=0.3348, simple_loss=0.3607, pruned_loss=0.1544, over 3951626.37 frames. ], batch size: 58, lr: 3.81e-02, grad_scale: 16.0 +2023-03-27 17:56:05,354 INFO [train.py:892] (1/4) Epoch 4, batch 0, loss[loss=0.3434, simple_loss=0.3652, pruned_loss=0.1608, over 19912.00 frames. ], tot_loss[loss=0.3434, simple_loss=0.3652, pruned_loss=0.1608, over 19912.00 frames. ], batch size: 45, lr: 3.56e-02, grad_scale: 16.0 +2023-03-27 17:56:05,355 INFO [train.py:917] (1/4) Computing validation loss +2023-03-27 17:56:21,539 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5534, 3.6079, 4.4864, 4.9519, 3.5829, 4.0287, 4.0486, 3.1473], + device='cuda:1'), covar=tensor([0.0273, 0.2403, 0.0423, 0.0093, 0.1865, 0.0374, 0.0459, 0.1861], + device='cuda:1'), in_proj_covar=tensor([0.0110, 0.0263, 0.0133, 0.0088, 0.0189, 0.0103, 0.0128, 0.0184], + device='cuda:1'), out_proj_covar=tensor([1.1510e-04, 2.5885e-04, 1.3836e-04, 8.4456e-05, 1.8556e-04, 9.9827e-05, + 1.2950e-04, 1.8085e-04], device='cuda:1') +2023-03-27 17:56:31,644 INFO [train.py:926] (1/4) Epoch 4, validation: loss=0.2293, simple_loss=0.3025, pruned_loss=0.07807, over 2883724.00 frames. +2023-03-27 17:56:31,645 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22502MB +2023-03-27 17:57:43,654 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4852, 4.2906, 4.6754, 4.6413, 4.6829, 4.0286, 4.3778, 4.3880], + device='cuda:1'), covar=tensor([0.0906, 0.0899, 0.0838, 0.0462, 0.0732, 0.1063, 0.1473, 0.2047], + device='cuda:1'), in_proj_covar=tensor([0.0174, 0.0138, 0.0206, 0.0155, 0.0160, 0.0153, 0.0189, 0.0243], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 17:57:55,094 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 7.146e+02 8.558e+02 9.901e+02 2.056e+03, threshold=1.712e+03, percent-clipped=2.0 +2023-03-27 17:58:21,201 INFO [train.py:892] (1/4) Epoch 4, batch 50, loss[loss=0.2916, simple_loss=0.323, pruned_loss=0.1301, over 19733.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.3524, pruned_loss=0.1497, over 891264.03 frames. ], batch size: 118, lr: 3.55e-02, grad_scale: 16.0 +2023-03-27 17:59:09,659 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. 
limit=2.0 +2023-03-27 17:59:36,002 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5650.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:00:10,209 INFO [train.py:892] (1/4) Epoch 4, batch 100, loss[loss=0.2741, simple_loss=0.3046, pruned_loss=0.1218, over 19873.00 frames. ], tot_loss[loss=0.325, simple_loss=0.3517, pruned_loss=0.1491, over 1570237.28 frames. ], batch size: 92, lr: 3.54e-02, grad_scale: 16.0 +2023-03-27 18:00:50,336 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-27 18:01:15,456 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0772, 4.7872, 5.0487, 4.7050, 4.3595, 4.8547, 4.8409, 5.2798], + device='cuda:1'), covar=tensor([0.1568, 0.0214, 0.0227, 0.0222, 0.0419, 0.0227, 0.0223, 0.0195], + device='cuda:1'), in_proj_covar=tensor([0.0170, 0.0116, 0.0108, 0.0105, 0.0114, 0.0103, 0.0094, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 18:01:31,612 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+02 7.031e+02 8.200e+02 9.251e+02 1.434e+03, threshold=1.640e+03, percent-clipped=0.0 +2023-03-27 18:01:46,543 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5711.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 18:01:55,779 INFO [train.py:892] (1/4) Epoch 4, batch 150, loss[loss=0.2786, simple_loss=0.3224, pruned_loss=0.1174, over 19800.00 frames. ], tot_loss[loss=0.3277, simple_loss=0.3546, pruned_loss=0.1504, over 2097819.55 frames. ], batch size: 47, lr: 3.54e-02, grad_scale: 16.0 +2023-03-27 18:01:58,711 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8837, 3.5389, 3.5411, 3.8233, 3.5134, 3.7418, 3.8263, 3.9913], + device='cuda:1'), covar=tensor([0.0370, 0.0294, 0.0378, 0.0263, 0.0466, 0.0309, 0.0260, 0.0328], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0096, 0.0105, 0.0093, 0.0094, 0.0075, 0.0102, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-27 18:02:30,166 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8901, 4.4790, 4.7601, 4.4713, 4.1425, 4.5738, 4.4328, 4.9595], + device='cuda:1'), covar=tensor([0.1580, 0.0250, 0.0293, 0.0241, 0.0497, 0.0264, 0.0282, 0.0211], + device='cuda:1'), in_proj_covar=tensor([0.0168, 0.0117, 0.0109, 0.0106, 0.0117, 0.0103, 0.0094, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-03-27 18:03:18,810 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5755.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:03:37,798 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7295, 2.2253, 2.1115, 2.3023, 1.8409, 1.7303, 1.8628, 2.4339], + device='cuda:1'), covar=tensor([0.0731, 0.0337, 0.0321, 0.0274, 0.0265, 0.0630, 0.0600, 0.0377], + device='cuda:1'), in_proj_covar=tensor([0.0026, 0.0029, 0.0031, 0.0026, 0.0027, 0.0031, 0.0034, 0.0028], + device='cuda:1'), out_proj_covar=tensor([4.5568e-05, 4.8529e-05, 5.2656e-05, 4.4392e-05, 4.8606e-05, 5.2649e-05, + 5.7971e-05, 4.9029e-05], device='cuda:1') +2023-03-27 18:03:42,972 INFO [train.py:892] (1/4) Epoch 4, batch 200, loss[loss=0.3248, simple_loss=0.3548, pruned_loss=0.1473, over 19820.00 frames. ], tot_loss[loss=0.3246, simple_loss=0.3527, pruned_loss=0.1482, over 2510437.17 frames. 
], batch size: 128, lr: 3.53e-02, grad_scale: 16.0 +2023-03-27 18:05:07,192 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.424e+02 6.665e+02 7.849e+02 9.625e+02 1.890e+03, threshold=1.570e+03, percent-clipped=1.0 +2023-03-27 18:05:18,006 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9982, 4.5642, 4.5082, 4.8939, 4.6403, 4.9915, 4.8190, 5.1142], + device='cuda:1'), covar=tensor([0.0330, 0.0236, 0.0295, 0.0190, 0.0378, 0.0149, 0.0266, 0.0287], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0095, 0.0103, 0.0092, 0.0091, 0.0076, 0.0100, 0.0116], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-27 18:05:31,201 INFO [train.py:892] (1/4) Epoch 4, batch 250, loss[loss=0.2637, simple_loss=0.3099, pruned_loss=0.1087, over 19826.00 frames. ], tot_loss[loss=0.3281, simple_loss=0.3558, pruned_loss=0.1502, over 2828674.64 frames. ], batch size: 75, lr: 3.52e-02, grad_scale: 16.0 +2023-03-27 18:05:32,095 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5816.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:06:59,809 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5858.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:07:17,596 INFO [train.py:892] (1/4) Epoch 4, batch 300, loss[loss=0.3372, simple_loss=0.365, pruned_loss=0.1547, over 19818.00 frames. ], tot_loss[loss=0.3292, simple_loss=0.3563, pruned_loss=0.151, over 3077294.95 frames. ], batch size: 121, lr: 3.51e-02, grad_scale: 16.0 +2023-03-27 18:07:34,150 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.6121, 1.9330, 1.7060, 1.7803, 1.6000, 1.5660, 1.5832, 1.7280], + device='cuda:1'), covar=tensor([0.0387, 0.0343, 0.0439, 0.0347, 0.0501, 0.0577, 0.0529, 0.0660], + device='cuda:1'), in_proj_covar=tensor([0.0026, 0.0028, 0.0031, 0.0027, 0.0028, 0.0030, 0.0033, 0.0028], + device='cuda:1'), out_proj_covar=tensor([4.6337e-05, 4.7133e-05, 5.3680e-05, 4.5106e-05, 4.9905e-05, 5.1677e-05, + 5.6928e-05, 4.9543e-05], device='cuda:1') +2023-03-27 18:08:37,830 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.638e+02 6.552e+02 7.972e+02 9.789e+02 1.946e+03, threshold=1.594e+03, percent-clipped=3.0 +2023-03-27 18:09:05,485 INFO [train.py:892] (1/4) Epoch 4, batch 350, loss[loss=0.3173, simple_loss=0.3432, pruned_loss=0.1457, over 19780.00 frames. ], tot_loss[loss=0.3283, simple_loss=0.3557, pruned_loss=0.1505, over 3270966.87 frames. ], batch size: 191, lr: 3.50e-02, grad_scale: 16.0 +2023-03-27 18:09:12,523 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5919.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:09:21,831 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0150, 4.5043, 4.7221, 4.3107, 4.2779, 4.6924, 4.3483, 5.0172], + device='cuda:1'), covar=tensor([0.1343, 0.0236, 0.0311, 0.0255, 0.0392, 0.0205, 0.0266, 0.0177], + device='cuda:1'), in_proj_covar=tensor([0.0171, 0.0118, 0.0113, 0.0109, 0.0117, 0.0107, 0.0097, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 18:09:30,275 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.86 vs. limit=2.0 +2023-03-27 18:10:48,452 INFO [train.py:892] (1/4) Epoch 4, batch 400, loss[loss=0.4006, simple_loss=0.412, pruned_loss=0.1946, over 19737.00 frames. 
], tot_loss[loss=0.3283, simple_loss=0.3555, pruned_loss=0.1505, over 3421794.60 frames. ], batch size: 259, lr: 3.49e-02, grad_scale: 16.0 +2023-03-27 18:11:25,071 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0518, 3.0539, 4.2917, 3.6516, 3.8816, 4.4486, 3.1092, 3.0209], + device='cuda:1'), covar=tensor([0.0497, 0.3806, 0.0260, 0.0532, 0.0990, 0.0209, 0.0772, 0.1299], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0238, 0.0112, 0.0120, 0.0193, 0.0098, 0.0126, 0.0146], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 18:11:41,717 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5990.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:11:53,010 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2053, 1.6362, 2.4750, 2.1155, 2.3107, 1.8200, 2.0714, 2.5017], + device='cuda:1'), covar=tensor([0.0617, 0.0372, 0.0355, 0.0519, 0.0424, 0.0402, 0.0395, 0.0222], + device='cuda:1'), in_proj_covar=tensor([0.0042, 0.0034, 0.0037, 0.0051, 0.0037, 0.0033, 0.0034, 0.0030], + device='cuda:1'), out_proj_covar=tensor([8.0297e-05, 6.3890e-05, 6.9097e-05, 8.5273e-05, 6.9553e-05, 6.5631e-05, + 6.5449e-05, 5.7463e-05], device='cuda:1') +2023-03-27 18:12:12,456 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.258e+02 6.552e+02 7.732e+02 9.637e+02 1.932e+03, threshold=1.546e+03, percent-clipped=3.0 +2023-03-27 18:12:17,322 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6006.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:12:36,862 INFO [train.py:892] (1/4) Epoch 4, batch 450, loss[loss=0.2934, simple_loss=0.337, pruned_loss=0.1248, over 19832.00 frames. ], tot_loss[loss=0.3276, simple_loss=0.355, pruned_loss=0.1501, over 3539567.99 frames. ], batch size: 76, lr: 3.48e-02, grad_scale: 16.0 +2023-03-27 18:13:33,594 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6043.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:13:50,962 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6051.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:14:22,272 INFO [train.py:892] (1/4) Epoch 4, batch 500, loss[loss=0.2837, simple_loss=0.3202, pruned_loss=0.1236, over 19803.00 frames. ], tot_loss[loss=0.3266, simple_loss=0.3544, pruned_loss=0.1494, over 3630597.79 frames. ], batch size: 111, lr: 3.47e-02, grad_scale: 16.0 +2023-03-27 18:14:31,595 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-27 18:15:43,600 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.065e+02 6.254e+02 7.910e+02 9.308e+02 1.450e+03, threshold=1.582e+03, percent-clipped=0.0 +2023-03-27 18:15:44,530 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6104.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:15:58,572 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6111.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:16:07,747 INFO [train.py:892] (1/4) Epoch 4, batch 550, loss[loss=0.2721, simple_loss=0.3148, pruned_loss=0.1147, over 19696.00 frames. ], tot_loss[loss=0.327, simple_loss=0.3551, pruned_loss=0.1494, over 3699910.67 frames. 
], batch size: 45, lr: 3.47e-02, grad_scale: 16.0 +2023-03-27 18:16:51,689 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6135.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:16:55,566 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.4385, 1.9340, 1.6296, 0.8369, 1.5662, 1.4378, 1.5600, 1.7737], + device='cuda:1'), covar=tensor([0.0279, 0.0215, 0.0204, 0.0850, 0.0407, 0.0320, 0.0226, 0.0245], + device='cuda:1'), in_proj_covar=tensor([0.0028, 0.0027, 0.0028, 0.0042, 0.0040, 0.0030, 0.0024, 0.0029], + device='cuda:1'), out_proj_covar=tensor([4.8717e-05, 4.7370e-05, 4.6577e-05, 7.4776e-05, 7.0273e-05, 5.1576e-05, + 4.5337e-05, 5.0775e-05], device='cuda:1') +2023-03-27 18:17:55,485 INFO [train.py:892] (1/4) Epoch 4, batch 600, loss[loss=0.3326, simple_loss=0.3617, pruned_loss=0.1517, over 19893.00 frames. ], tot_loss[loss=0.3266, simple_loss=0.3548, pruned_loss=0.1492, over 3753783.77 frames. ], batch size: 94, lr: 3.46e-02, grad_scale: 16.0 +2023-03-27 18:18:03,555 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6170.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:18:56,317 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6196.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:15,162 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6203.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:16,015 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.339e+02 6.549e+02 8.017e+02 9.808e+02 1.883e+03, threshold=1.603e+03, percent-clipped=1.0 +2023-03-27 18:19:30,036 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6211.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:37,392 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6214.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:40,659 INFO [train.py:892] (1/4) Epoch 4, batch 650, loss[loss=0.3378, simple_loss=0.3626, pruned_loss=0.1565, over 19837.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3538, pruned_loss=0.1486, over 3797339.86 frames. ], batch size: 239, lr: 3.45e-02, grad_scale: 16.0 +2023-03-27 18:20:03,956 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.14 vs. 
limit=5.0 +2023-03-27 18:20:11,578 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6231.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:20:39,975 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8855, 2.3019, 1.4646, 1.3855, 1.6334, 2.0284, 1.9211, 1.8283], + device='cuda:1'), covar=tensor([0.0293, 0.0257, 0.0316, 0.0737, 0.0685, 0.0350, 0.0309, 0.0410], + device='cuda:1'), in_proj_covar=tensor([0.0029, 0.0028, 0.0028, 0.0043, 0.0041, 0.0030, 0.0025, 0.0029], + device='cuda:1'), out_proj_covar=tensor([5.0861e-05, 5.0177e-05, 4.7574e-05, 7.6663e-05, 7.3992e-05, 5.2194e-05, + 4.6029e-05, 5.1572e-05], device='cuda:1') +2023-03-27 18:21:03,671 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0905, 2.4402, 3.3463, 2.8093, 2.8806, 3.5330, 2.3804, 2.2514], + device='cuda:1'), covar=tensor([0.0627, 0.3430, 0.0346, 0.0608, 0.1219, 0.0294, 0.0842, 0.1616], + device='cuda:1'), in_proj_covar=tensor([0.0165, 0.0243, 0.0116, 0.0126, 0.0201, 0.0102, 0.0131, 0.0150], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-27 18:21:22,754 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6264.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:21:25,687 INFO [train.py:892] (1/4) Epoch 4, batch 700, loss[loss=0.3069, simple_loss=0.3406, pruned_loss=0.1366, over 19830.00 frames. ], tot_loss[loss=0.3256, simple_loss=0.3542, pruned_loss=0.1485, over 3831703.98 frames. ], batch size: 76, lr: 3.44e-02, grad_scale: 16.0 +2023-03-27 18:21:26,836 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6266.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:21:41,917 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6272.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:21:57,053 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4250, 2.6126, 3.7664, 2.9988, 3.1612, 3.8309, 2.4703, 2.4530], + device='cuda:1'), covar=tensor([0.0507, 0.3061, 0.0299, 0.0506, 0.0893, 0.0264, 0.0800, 0.1331], + device='cuda:1'), in_proj_covar=tensor([0.0162, 0.0240, 0.0116, 0.0124, 0.0197, 0.0100, 0.0128, 0.0148], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-27 18:22:47,948 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.869e+02 6.690e+02 8.159e+02 9.859e+02 1.679e+03, threshold=1.632e+03, percent-clipped=2.0 +2023-03-27 18:22:52,789 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6306.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 18:23:12,539 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2741, 2.0262, 2.1813, 2.0632, 2.5842, 2.1403, 2.4575, 2.1517], + device='cuda:1'), covar=tensor([0.0608, 0.0423, 0.0459, 0.0530, 0.0354, 0.0298, 0.0330, 0.0303], + device='cuda:1'), in_proj_covar=tensor([0.0042, 0.0035, 0.0038, 0.0051, 0.0039, 0.0032, 0.0034, 0.0031], + device='cuda:1'), out_proj_covar=tensor([8.1718e-05, 6.9469e-05, 7.3707e-05, 8.8717e-05, 7.4769e-05, 6.5083e-05, + 6.7505e-05, 6.2242e-05], device='cuda:1') +2023-03-27 18:23:15,297 INFO [train.py:892] (1/4) Epoch 4, batch 750, loss[loss=0.3069, simple_loss=0.3457, pruned_loss=0.134, over 19763.00 frames. ], tot_loss[loss=0.3245, simple_loss=0.3538, pruned_loss=0.1476, over 3857594.43 frames. 
], batch size: 110, lr: 3.43e-02, grad_scale: 8.0 +2023-03-27 18:23:38,590 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6327.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:24:16,293 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6346.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:24:34,072 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6354.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:24:57,112 INFO [train.py:892] (1/4) Epoch 4, batch 800, loss[loss=0.3395, simple_loss=0.3566, pruned_loss=0.1611, over 19882.00 frames. ], tot_loss[loss=0.3244, simple_loss=0.3535, pruned_loss=0.1476, over 3879529.53 frames. ], batch size: 158, lr: 3.42e-02, grad_scale: 8.0 +2023-03-27 18:25:03,927 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1537, 2.2587, 1.8298, 1.3609, 1.8820, 1.8737, 2.1338, 2.2084], + device='cuda:1'), covar=tensor([0.0204, 0.0321, 0.0357, 0.0878, 0.0466, 0.0487, 0.0470, 0.0276], + device='cuda:1'), in_proj_covar=tensor([0.0029, 0.0029, 0.0027, 0.0043, 0.0040, 0.0029, 0.0025, 0.0029], + device='cuda:1'), out_proj_covar=tensor([5.1425e-05, 5.2612e-05, 4.6955e-05, 7.7920e-05, 7.2514e-05, 5.2011e-05, + 4.6692e-05, 5.1658e-05], device='cuda:1') +2023-03-27 18:26:05,876 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6399.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:26:17,458 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.240e+02 6.935e+02 8.296e+02 1.069e+03 2.251e+03, threshold=1.659e+03, percent-clipped=2.0 +2023-03-27 18:26:29,654 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6411.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:26:38,529 INFO [train.py:892] (1/4) Epoch 4, batch 850, loss[loss=0.3039, simple_loss=0.3381, pruned_loss=0.1349, over 19653.00 frames. ], tot_loss[loss=0.3216, simple_loss=0.3521, pruned_loss=0.1455, over 3893891.13 frames. ], batch size: 69, lr: 3.42e-02, grad_scale: 8.0 +2023-03-27 18:28:08,149 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6459.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:28:14,548 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6462.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:28:23,855 INFO [train.py:892] (1/4) Epoch 4, batch 900, loss[loss=0.3035, simple_loss=0.3285, pruned_loss=0.1392, over 19735.00 frames. ], tot_loss[loss=0.3182, simple_loss=0.3491, pruned_loss=0.1436, over 3907841.84 frames. 
], batch size: 134, lr: 3.41e-02, grad_scale: 8.0 +2023-03-27 18:28:26,607 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6467.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:28:57,903 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2295, 3.7632, 3.9608, 4.3141, 4.0354, 4.2899, 4.2261, 4.4850], + device='cuda:1'), covar=tensor([0.0635, 0.0357, 0.0393, 0.0248, 0.0443, 0.0219, 0.0336, 0.0299], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0101, 0.0116, 0.0101, 0.0102, 0.0083, 0.0111, 0.0119], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 18:29:14,974 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6491.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:29:44,757 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 6.152e+02 8.046e+02 9.426e+02 1.704e+03, threshold=1.609e+03, percent-clipped=1.0 +2023-03-27 18:30:02,700 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6514.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:30:05,645 INFO [train.py:892] (1/4) Epoch 4, batch 950, loss[loss=0.3482, simple_loss=0.3673, pruned_loss=0.1645, over 19804.00 frames. ], tot_loss[loss=0.3215, simple_loss=0.3517, pruned_loss=0.1457, over 3915633.42 frames. ], batch size: 148, lr: 3.40e-02, grad_scale: 8.0 +2023-03-27 18:30:19,446 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6523.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:30:24,500 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6526.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:30:28,580 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6528.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:31:32,879 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6559.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:31:38,669 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6562.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:31:46,914 INFO [train.py:892] (1/4) Epoch 4, batch 1000, loss[loss=0.3096, simple_loss=0.3395, pruned_loss=0.1398, over 19862.00 frames. ], tot_loss[loss=0.3199, simple_loss=0.3506, pruned_loss=0.1446, over 3924379.97 frames. 
], batch size: 104, lr: 3.39e-02, grad_scale: 8.0 +2023-03-27 18:31:49,603 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6567.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:31:52,044 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1360, 2.2234, 2.5331, 2.3175, 2.4978, 1.9450, 2.5830, 2.5473], + device='cuda:1'), covar=tensor([0.0686, 0.0306, 0.0253, 0.0448, 0.0324, 0.0399, 0.0235, 0.0220], + device='cuda:1'), in_proj_covar=tensor([0.0040, 0.0033, 0.0036, 0.0050, 0.0035, 0.0031, 0.0031, 0.0029], + device='cuda:1'), out_proj_covar=tensor([7.9885e-05, 6.8110e-05, 7.1893e-05, 8.8438e-05, 7.0172e-05, 6.4413e-05, + 6.4779e-05, 5.8574e-05], device='cuda:1') +2023-03-27 18:32:26,953 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3268, 2.6840, 3.1804, 3.6145, 2.3359, 2.8457, 2.8061, 2.0410], + device='cuda:1'), covar=tensor([0.0377, 0.2563, 0.0700, 0.0181, 0.2094, 0.0449, 0.0713, 0.2233], + device='cuda:1'), in_proj_covar=tensor([0.0117, 0.0280, 0.0151, 0.0092, 0.0199, 0.0110, 0.0141, 0.0189], + device='cuda:1'), out_proj_covar=tensor([1.2980e-04, 2.8176e-04, 1.6086e-04, 9.3235e-05, 2.0247e-04, 1.1349e-04, + 1.4774e-04, 1.9423e-04], device='cuda:1') +2023-03-27 18:32:52,340 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8280, 3.6886, 4.7831, 3.9699, 4.1940, 4.5103, 4.5461, 4.5554], + device='cuda:1'), covar=tensor([0.0157, 0.0343, 0.0097, 0.1380, 0.0116, 0.0163, 0.0111, 0.0118], + device='cuda:1'), in_proj_covar=tensor([0.0053, 0.0058, 0.0050, 0.0134, 0.0049, 0.0050, 0.0052, 0.0048], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0003, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 18:33:08,466 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.155e+02 6.376e+02 7.596e+02 9.023e+02 1.357e+03, threshold=1.519e+03, percent-clipped=0.0 +2023-03-27 18:33:31,583 INFO [train.py:892] (1/4) Epoch 4, batch 1050, loss[loss=0.2705, simple_loss=0.3281, pruned_loss=0.1065, over 19609.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3495, pruned_loss=0.1438, over 3930267.48 frames. ], batch size: 51, lr: 3.38e-02, grad_scale: 8.0 +2023-03-27 18:33:45,127 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6622.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:33:57,486 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.97 vs. limit=2.0 +2023-03-27 18:33:59,065 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6629.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:34:04,935 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-27 18:34:08,499 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2829, 3.3039, 4.2448, 3.6055, 3.6161, 4.0766, 3.9056, 4.0505], + device='cuda:1'), covar=tensor([0.0140, 0.0389, 0.0089, 0.1231, 0.0135, 0.0139, 0.0157, 0.0107], + device='cuda:1'), in_proj_covar=tensor([0.0054, 0.0057, 0.0050, 0.0133, 0.0049, 0.0049, 0.0051, 0.0048], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0003, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 18:34:34,397 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6646.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:34:47,727 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. 
limit=2.0 +2023-03-27 18:35:14,543 INFO [train.py:892] (1/4) Epoch 4, batch 1100, loss[loss=0.3233, simple_loss=0.3651, pruned_loss=0.1407, over 19727.00 frames. ], tot_loss[loss=0.3189, simple_loss=0.3501, pruned_loss=0.1439, over 3934973.29 frames. ], batch size: 50, lr: 3.37e-02, grad_scale: 8.0 +2023-03-27 18:36:06,444 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6690.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:36:14,094 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6694.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:36:23,783 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6699.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:36:34,437 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.080e+02 6.475e+02 7.910e+02 9.923e+02 2.054e+03, threshold=1.582e+03, percent-clipped=5.0 +2023-03-27 18:36:57,952 INFO [train.py:892] (1/4) Epoch 4, batch 1150, loss[loss=0.2968, simple_loss=0.3284, pruned_loss=0.1325, over 19764.00 frames. ], tot_loss[loss=0.3189, simple_loss=0.3498, pruned_loss=0.144, over 3938450.99 frames. ], batch size: 179, lr: 3.37e-02, grad_scale: 8.0 +2023-03-27 18:38:01,836 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6747.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:38:36,144 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2486, 2.2472, 1.6482, 1.4879, 1.9141, 1.9815, 2.0772, 2.0793], + device='cuda:1'), covar=tensor([0.0213, 0.0275, 0.0231, 0.0801, 0.0409, 0.0416, 0.0268, 0.0420], + device='cuda:1'), in_proj_covar=tensor([0.0030, 0.0028, 0.0027, 0.0042, 0.0040, 0.0028, 0.0024, 0.0029], + device='cuda:1'), out_proj_covar=tensor([5.4371e-05, 5.3830e-05, 4.8681e-05, 7.7997e-05, 7.4023e-05, 5.1508e-05, + 4.7350e-05, 5.4189e-05], device='cuda:1') +2023-03-27 18:38:41,094 INFO [train.py:892] (1/4) Epoch 4, batch 1200, loss[loss=0.3029, simple_loss=0.3491, pruned_loss=0.1284, over 19732.00 frames. ], tot_loss[loss=0.3195, simple_loss=0.3506, pruned_loss=0.1442, over 3940252.80 frames. 
], batch size: 54, lr: 3.36e-02, grad_scale: 8.0 +2023-03-27 18:39:03,422 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.5686, 1.4906, 1.4031, 1.4460, 1.2051, 1.4367, 1.4841, 1.8935], + device='cuda:1'), covar=tensor([0.0311, 0.0387, 0.0531, 0.0437, 0.0503, 0.0557, 0.0763, 0.0324], + device='cuda:1'), in_proj_covar=tensor([0.0030, 0.0030, 0.0032, 0.0029, 0.0029, 0.0032, 0.0037, 0.0030], + device='cuda:1'), out_proj_covar=tensor([5.5201e-05, 5.4512e-05, 5.9007e-05, 5.2561e-05, 5.5761e-05, 5.8894e-05, + 6.7873e-05, 5.6843e-05], device='cuda:1') +2023-03-27 18:39:32,822 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5666, 4.7905, 4.8884, 4.8303, 4.7544, 4.8251, 4.2883, 4.3488], + device='cuda:1'), covar=tensor([0.0371, 0.0415, 0.0687, 0.0450, 0.0531, 0.0593, 0.0722, 0.1240], + device='cuda:1'), in_proj_covar=tensor([0.0118, 0.0115, 0.0165, 0.0126, 0.0122, 0.0110, 0.0145, 0.0179], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 18:39:32,923 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6791.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:39:59,924 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.559e+02 6.894e+02 8.217e+02 1.051e+03 2.121e+03, threshold=1.643e+03, percent-clipped=4.0 +2023-03-27 18:40:22,610 INFO [train.py:892] (1/4) Epoch 4, batch 1250, loss[loss=0.3132, simple_loss=0.3445, pruned_loss=0.141, over 19798.00 frames. ], tot_loss[loss=0.3174, simple_loss=0.349, pruned_loss=0.1429, over 3943278.24 frames. ], batch size: 211, lr: 3.35e-02, grad_scale: 8.0 +2023-03-27 18:40:26,927 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6818.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 18:40:36,521 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6823.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:40:42,323 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6826.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:41:10,821 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6839.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:41:50,031 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6859.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:41:57,286 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3796, 2.1434, 1.4447, 2.4747, 2.5181, 2.5948, 2.4735, 2.2555], + device='cuda:1'), covar=tensor([0.0640, 0.0622, 0.1408, 0.0634, 0.0541, 0.0413, 0.0615, 0.0527], + device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0067, 0.0099, 0.0071, 0.0068, 0.0057, 0.0064, 0.0067], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 18:42:04,720 INFO [train.py:892] (1/4) Epoch 4, batch 1300, loss[loss=0.2703, simple_loss=0.3074, pruned_loss=0.1166, over 19716.00 frames. ], tot_loss[loss=0.3168, simple_loss=0.3485, pruned_loss=0.1426, over 3944700.86 frames. 
], batch size: 104, lr: 3.34e-02, grad_scale: 8.0 +2023-03-27 18:42:07,245 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6867.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:42:21,480 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6874.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:42:29,201 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6878.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:42:42,894 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4816, 3.4103, 4.1929, 5.0370, 3.0628, 3.9161, 3.4419, 2.4132], + device='cuda:1'), covar=tensor([0.0304, 0.3308, 0.0613, 0.0072, 0.2089, 0.0385, 0.0719, 0.2307], + device='cuda:1'), in_proj_covar=tensor([0.0122, 0.0282, 0.0154, 0.0090, 0.0202, 0.0111, 0.0141, 0.0186], + device='cuda:1'), out_proj_covar=tensor([1.3685e-04, 2.8607e-04, 1.6547e-04, 9.2019e-05, 2.0782e-04, 1.1538e-04, + 1.4745e-04, 1.9294e-04], device='cuda:1') +2023-03-27 18:43:01,172 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4235, 2.8104, 2.9485, 2.1583, 2.8181, 1.4153, 2.2783, 2.9812], + device='cuda:1'), covar=tensor([0.0921, 0.0329, 0.0486, 0.0641, 0.0513, 0.0943, 0.0431, 0.0248], + device='cuda:1'), in_proj_covar=tensor([0.0045, 0.0037, 0.0038, 0.0056, 0.0042, 0.0036, 0.0036, 0.0033], + device='cuda:1'), out_proj_covar=tensor([9.3167e-05, 7.7854e-05, 7.9656e-05, 1.0339e-04, 8.5860e-05, 7.6824e-05, + 7.7961e-05, 6.9450e-05], device='cuda:1') +2023-03-27 18:43:17,694 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.81 vs. limit=2.0 +2023-03-27 18:43:24,638 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.185e+02 7.634e+02 1.003e+03 1.964e+03, threshold=1.527e+03, percent-clipped=1.0 +2023-03-27 18:43:29,134 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6907.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:43:34,568 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6909.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:43:45,439 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6915.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:43:46,683 INFO [train.py:892] (1/4) Epoch 4, batch 1350, loss[loss=0.2983, simple_loss=0.329, pruned_loss=0.1338, over 19795.00 frames. ], tot_loss[loss=0.3171, simple_loss=0.3494, pruned_loss=0.1424, over 3944697.69 frames. ], batch size: 120, lr: 3.33e-02, grad_scale: 8.0 +2023-03-27 18:43:58,840 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6922.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:44:32,672 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6939.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 18:45:27,465 INFO [train.py:892] (1/4) Epoch 4, batch 1400, loss[loss=0.3001, simple_loss=0.3295, pruned_loss=0.1353, over 19771.00 frames. ], tot_loss[loss=0.3162, simple_loss=0.3486, pruned_loss=0.1419, over 3946188.09 frames. 
], batch size: 182, lr: 3.33e-02, grad_scale: 8.0 +2023-03-27 18:45:33,588 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6250, 4.0338, 4.2875, 4.0548, 3.8760, 4.2590, 4.0407, 4.4465], + device='cuda:1'), covar=tensor([0.1525, 0.0302, 0.0312, 0.0278, 0.0545, 0.0313, 0.0270, 0.0251], + device='cuda:1'), in_proj_covar=tensor([0.0184, 0.0128, 0.0122, 0.0123, 0.0128, 0.0117, 0.0104, 0.0107], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 18:45:37,282 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:45:37,553 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:45:52,329 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9159, 2.3532, 2.9176, 2.2488, 2.6724, 2.7800, 2.7915, 2.8461], + device='cuda:1'), covar=tensor([0.0188, 0.0403, 0.0136, 0.1079, 0.0144, 0.0204, 0.0212, 0.0150], + device='cuda:1'), in_proj_covar=tensor([0.0054, 0.0057, 0.0051, 0.0134, 0.0050, 0.0050, 0.0053, 0.0048], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0003, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 18:46:09,134 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6985.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:46:30,360 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-27 18:46:49,379 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.908e+02 6.198e+02 7.481e+02 9.100e+02 1.903e+03, threshold=1.496e+03, percent-clipped=2.0 +2023-03-27 18:47:13,285 INFO [train.py:892] (1/4) Epoch 4, batch 1450, loss[loss=0.2988, simple_loss=0.3465, pruned_loss=0.1255, over 19736.00 frames. ], tot_loss[loss=0.3163, simple_loss=0.3486, pruned_loss=0.142, over 3947776.19 frames. ], batch size: 92, lr: 3.32e-02, grad_scale: 8.0 +2023-03-27 18:48:17,448 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7048.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 18:48:54,063 INFO [train.py:892] (1/4) Epoch 4, batch 1500, loss[loss=0.3077, simple_loss=0.3342, pruned_loss=0.1406, over 19752.00 frames. ], tot_loss[loss=0.3161, simple_loss=0.3484, pruned_loss=0.1419, over 3947362.57 frames. ], batch size: 209, lr: 3.31e-02, grad_scale: 8.0 +2023-03-27 18:49:02,295 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7874, 4.7008, 5.1959, 5.0102, 5.1746, 4.6019, 4.8739, 4.7734], + device='cuda:1'), covar=tensor([0.1172, 0.0792, 0.0834, 0.0680, 0.0620, 0.0850, 0.1692, 0.2333], + device='cuda:1'), in_proj_covar=tensor([0.0178, 0.0149, 0.0213, 0.0165, 0.0164, 0.0159, 0.0194, 0.0243], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 18:50:13,930 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 6.331e+02 7.635e+02 9.535e+02 1.700e+03, threshold=1.527e+03, percent-clipped=1.0 +2023-03-27 18:50:22,657 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7109.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 18:50:36,753 INFO [train.py:892] (1/4) Epoch 4, batch 1550, loss[loss=0.2579, simple_loss=0.299, pruned_loss=0.1084, over 19793.00 frames. 
], tot_loss[loss=0.314, simple_loss=0.347, pruned_loss=0.1405, over 3949089.59 frames. ], batch size: 83, lr: 3.30e-02, grad_scale: 8.0 +2023-03-27 18:50:41,089 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7118.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 18:50:51,558 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7123.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:52:15,307 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7163.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:52:19,903 INFO [train.py:892] (1/4) Epoch 4, batch 1600, loss[loss=0.3318, simple_loss=0.3451, pruned_loss=0.1593, over 19866.00 frames. ], tot_loss[loss=0.3154, simple_loss=0.3477, pruned_loss=0.1415, over 3949613.70 frames. ], batch size: 154, lr: 3.30e-02, grad_scale: 8.0 +2023-03-27 18:52:20,690 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7166.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:52:31,403 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7171.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:53:22,273 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5184, 3.8511, 4.1273, 3.7434, 3.7046, 3.9294, 3.8295, 4.2198], + device='cuda:1'), covar=tensor([0.1296, 0.0308, 0.0296, 0.0352, 0.0644, 0.0353, 0.0285, 0.0259], + device='cuda:1'), in_proj_covar=tensor([0.0178, 0.0125, 0.0121, 0.0120, 0.0128, 0.0112, 0.0103, 0.0106], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 18:53:39,224 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.151e+02 6.559e+02 7.813e+02 1.032e+03 1.739e+03, threshold=1.563e+03, percent-clipped=2.0 +2023-03-27 18:54:01,350 INFO [train.py:892] (1/4) Epoch 4, batch 1650, loss[loss=0.3129, simple_loss=0.3455, pruned_loss=0.1401, over 19786.00 frames. ], tot_loss[loss=0.3153, simple_loss=0.348, pruned_loss=0.1413, over 3948289.94 frames. ], batch size: 178, lr: 3.29e-02, grad_scale: 8.0 +2023-03-27 18:54:08,538 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. limit=2.0 +2023-03-27 18:54:09,943 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3252, 3.9931, 4.6585, 4.4676, 4.6179, 3.9075, 4.3885, 4.2813], + device='cuda:1'), covar=tensor([0.1116, 0.1027, 0.0874, 0.0757, 0.0716, 0.1119, 0.1521, 0.2321], + device='cuda:1'), in_proj_covar=tensor([0.0179, 0.0146, 0.0214, 0.0165, 0.0166, 0.0158, 0.0196, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 18:54:18,783 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7224.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:54:39,115 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7234.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:55:42,214 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7265.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:55:43,212 INFO [train.py:892] (1/4) Epoch 4, batch 1700, loss[loss=0.3087, simple_loss=0.3357, pruned_loss=0.1409, over 19805.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3493, pruned_loss=0.1426, over 3948809.28 frames. 
], batch size: 195, lr: 3.28e-02, grad_scale: 8.0 +2023-03-27 18:55:53,834 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9750, 2.4087, 3.1411, 2.6763, 2.7188, 3.4300, 2.0361, 2.1175], + device='cuda:1'), covar=tensor([0.0660, 0.2571, 0.0356, 0.0514, 0.1148, 0.0250, 0.0931, 0.1381], + device='cuda:1'), in_proj_covar=tensor([0.0187, 0.0255, 0.0136, 0.0135, 0.0221, 0.0117, 0.0146, 0.0158], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 18:55:58,296 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-03-27 18:56:02,927 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7275.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:56:23,763 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7285.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:57:00,734 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.185e+02 6.685e+02 7.833e+02 9.153e+02 1.678e+03, threshold=1.567e+03, percent-clipped=4.0 +2023-03-27 18:57:20,326 INFO [train.py:892] (1/4) Epoch 4, batch 1750, loss[loss=0.2848, simple_loss=0.3223, pruned_loss=0.1237, over 19785.00 frames. ], tot_loss[loss=0.3147, simple_loss=0.3469, pruned_loss=0.1413, over 3950605.99 frames. ], batch size: 120, lr: 3.27e-02, grad_scale: 8.0 +2023-03-27 18:57:51,810 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7333.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:57:57,590 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7336.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:58:01,116 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9149, 2.9439, 4.0846, 3.3171, 3.6040, 4.2375, 2.6159, 2.6744], + device='cuda:1'), covar=tensor([0.0513, 0.3073, 0.0310, 0.0604, 0.1041, 0.0296, 0.0977, 0.1671], + device='cuda:1'), in_proj_covar=tensor([0.0195, 0.0267, 0.0142, 0.0142, 0.0232, 0.0123, 0.0153, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 18:58:49,021 INFO [train.py:892] (1/4) Epoch 4, batch 1800, loss[loss=0.3114, simple_loss=0.3415, pruned_loss=0.1407, over 19833.00 frames. ], tot_loss[loss=0.3164, simple_loss=0.3487, pruned_loss=0.142, over 3946867.08 frames. ], batch size: 177, lr: 3.27e-02, grad_scale: 8.0 +2023-03-27 18:59:17,839 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7929, 3.2085, 4.5354, 3.7537, 4.0995, 4.3807, 4.6048, 4.3646], + device='cuda:1'), covar=tensor([0.0111, 0.0475, 0.0099, 0.1425, 0.0116, 0.0168, 0.0136, 0.0110], + device='cuda:1'), in_proj_covar=tensor([0.0053, 0.0058, 0.0052, 0.0131, 0.0049, 0.0053, 0.0053, 0.0048], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0003, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 18:59:46,667 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.55 vs. 
limit=5.0 +2023-03-27 18:59:52,483 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7404.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 18:59:53,635 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 6.051e+02 7.381e+02 9.131e+02 1.849e+03, threshold=1.476e+03, percent-clipped=3.0 +2023-03-27 18:59:57,509 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7407.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:00:11,535 INFO [train.py:892] (1/4) Epoch 4, batch 1850, loss[loss=0.3341, simple_loss=0.3828, pruned_loss=0.1427, over 19686.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.351, pruned_loss=0.1417, over 3946322.43 frames. ], batch size: 56, lr: 3.26e-02, grad_scale: 8.0 +2023-03-27 19:01:08,059 INFO [train.py:892] (1/4) Epoch 5, batch 0, loss[loss=0.2949, simple_loss=0.3322, pruned_loss=0.1288, over 19727.00 frames. ], tot_loss[loss=0.2949, simple_loss=0.3322, pruned_loss=0.1288, over 19727.00 frames. ], batch size: 99, lr: 3.03e-02, grad_scale: 8.0 +2023-03-27 19:01:08,059 INFO [train.py:917] (1/4) Computing validation loss +2023-03-27 19:01:34,467 INFO [train.py:926] (1/4) Epoch 5, validation: loss=0.2154, simple_loss=0.2917, pruned_loss=0.06955, over 2883724.00 frames. +2023-03-27 19:01:34,468 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22502MB +2023-03-27 19:02:08,097 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6074, 1.5148, 3.0992, 3.2227, 3.3046, 3.3890, 3.4685, 3.3354], + device='cuda:1'), covar=tensor([0.0526, 0.2325, 0.0340, 0.0255, 0.0247, 0.0159, 0.0167, 0.0311], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0150, 0.0088, 0.0079, 0.0069, 0.0072, 0.0064, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 19:03:19,074 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7468.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:03:24,016 INFO [train.py:892] (1/4) Epoch 5, batch 50, loss[loss=0.243, simple_loss=0.2959, pruned_loss=0.09508, over 19716.00 frames. ], tot_loss[loss=0.3049, simple_loss=0.3369, pruned_loss=0.1365, over 892058.55 frames. ], batch size: 81, lr: 3.03e-02, grad_scale: 8.0 +2023-03-27 19:04:36,703 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+02 6.285e+02 7.498e+02 8.995e+02 1.568e+03, threshold=1.500e+03, percent-clipped=1.0 +2023-03-27 19:05:06,305 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7519.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 19:05:09,589 INFO [train.py:892] (1/4) Epoch 5, batch 100, loss[loss=0.2904, simple_loss=0.3435, pruned_loss=0.1187, over 19841.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.3377, pruned_loss=0.1329, over 1570912.97 frames. ], batch size: 58, lr: 3.02e-02, grad_scale: 8.0 +2023-03-27 19:05:37,897 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7534.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 19:06:29,075 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.22 vs. 
limit=5.0 +2023-03-27 19:06:37,694 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8964, 2.2297, 2.9683, 3.0480, 3.5639, 3.1600, 3.8175, 3.9769], + device='cuda:1'), covar=tensor([0.0414, 0.1614, 0.0934, 0.1470, 0.0932, 0.0893, 0.0240, 0.0337], + device='cuda:1'), in_proj_covar=tensor([0.0128, 0.0158, 0.0142, 0.0178, 0.0138, 0.0147, 0.0096, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 19:06:43,531 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7565.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:06:53,851 INFO [train.py:892] (1/4) Epoch 5, batch 150, loss[loss=0.3024, simple_loss=0.3464, pruned_loss=0.1292, over 19675.00 frames. ], tot_loss[loss=0.3085, simple_loss=0.343, pruned_loss=0.137, over 2095793.99 frames. ], batch size: 49, lr: 3.01e-02, grad_scale: 8.0 +2023-03-27 19:07:19,287 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7582.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 19:08:06,881 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 5.876e+02 7.241e+02 9.334e+02 1.719e+03, threshold=1.448e+03, percent-clipped=1.0 +2023-03-27 19:08:24,634 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7613.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:08:42,168 INFO [train.py:892] (1/4) Epoch 5, batch 200, loss[loss=0.3303, simple_loss=0.3583, pruned_loss=0.1512, over 19677.00 frames. ], tot_loss[loss=0.3046, simple_loss=0.3404, pruned_loss=0.1344, over 2507818.56 frames. ], batch size: 64, lr: 3.01e-02, grad_scale: 8.0 +2023-03-27 19:08:49,078 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7624.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:09:02,475 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7631.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:09:53,833 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.43 vs. limit=5.0 +2023-03-27 19:10:28,104 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0 +2023-03-27 19:10:28,551 INFO [train.py:892] (1/4) Epoch 5, batch 250, loss[loss=0.2671, simple_loss=0.3034, pruned_loss=0.1154, over 19773.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3381, pruned_loss=0.1332, over 2828043.52 frames. ], batch size: 116, lr: 3.00e-02, grad_scale: 8.0 +2023-03-27 19:10:46,993 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.73 vs. limit=2.0 +2023-03-27 19:10:55,899 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7685.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:11:37,161 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7704.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 19:11:38,143 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.842e+02 5.939e+02 7.915e+02 9.500e+02 2.384e+03, threshold=1.583e+03, percent-clipped=4.0 +2023-03-27 19:12:10,438 INFO [train.py:892] (1/4) Epoch 5, batch 300, loss[loss=0.2746, simple_loss=0.3124, pruned_loss=0.1184, over 19769.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3386, pruned_loss=0.1326, over 3076130.24 frames. 
], batch size: 198, lr: 2.99e-02, grad_scale: 8.0 +2023-03-27 19:13:00,787 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7743.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:13:18,446 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7752.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 19:13:42,649 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7763.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:13:50,048 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8699, 2.3805, 1.7323, 1.3799, 1.8084, 2.3344, 2.0877, 2.1071], + device='cuda:1'), covar=tensor([0.0347, 0.0353, 0.0306, 0.0826, 0.0589, 0.0294, 0.0249, 0.0355], + device='cuda:1'), in_proj_covar=tensor([0.0035, 0.0031, 0.0032, 0.0047, 0.0048, 0.0032, 0.0026, 0.0030], + device='cuda:1'), out_proj_covar=tensor([6.8825e-05, 6.2574e-05, 6.2907e-05, 9.4228e-05, 9.6226e-05, 6.3672e-05, + 5.2814e-05, 6.0843e-05], device='cuda:1') +2023-03-27 19:13:58,912 INFO [train.py:892] (1/4) Epoch 5, batch 350, loss[loss=0.2929, simple_loss=0.3362, pruned_loss=0.1248, over 19797.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3389, pruned_loss=0.1333, over 3270193.87 frames. ], batch size: 67, lr: 2.98e-02, grad_scale: 8.0 +2023-03-27 19:15:04,609 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7802.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:15:08,570 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7804.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:15:09,451 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+02 6.339e+02 7.549e+02 9.099e+02 1.830e+03, threshold=1.510e+03, percent-clipped=2.0 +2023-03-27 19:15:38,920 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7819.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 19:15:42,005 INFO [train.py:892] (1/4) Epoch 5, batch 400, loss[loss=0.2679, simple_loss=0.3155, pruned_loss=0.1101, over 19741.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3379, pruned_loss=0.133, over 3421597.36 frames. ], batch size: 92, lr: 2.98e-02, grad_scale: 8.0 +2023-03-27 19:17:11,390 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7863.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:17:19,250 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7867.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:17:26,095 INFO [train.py:892] (1/4) Epoch 5, batch 450, loss[loss=0.3176, simple_loss=0.3577, pruned_loss=0.1387, over 19778.00 frames. ], tot_loss[loss=0.3028, simple_loss=0.3389, pruned_loss=0.1334, over 3537997.22 frames. 
], batch size: 70, lr: 2.97e-02, grad_scale: 8.0 +2023-03-27 19:18:37,149 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.709e+02 6.480e+02 7.693e+02 9.079e+02 2.029e+03, threshold=1.539e+03, percent-clipped=2.0 +2023-03-27 19:18:38,134 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0459, 2.7251, 1.7984, 1.5396, 2.0651, 2.3963, 2.2517, 2.3355], + device='cuda:1'), covar=tensor([0.0301, 0.0346, 0.0277, 0.0744, 0.0481, 0.0339, 0.0192, 0.0321], + device='cuda:1'), in_proj_covar=tensor([0.0036, 0.0031, 0.0033, 0.0047, 0.0049, 0.0032, 0.0026, 0.0030], + device='cuda:1'), out_proj_covar=tensor([7.0889e-05, 6.3583e-05, 6.3952e-05, 9.4986e-05, 9.7956e-05, 6.4348e-05, + 5.3853e-05, 6.0801e-05], device='cuda:1') +2023-03-27 19:19:12,466 INFO [train.py:892] (1/4) Epoch 5, batch 500, loss[loss=0.3073, simple_loss=0.3506, pruned_loss=0.132, over 19641.00 frames. ], tot_loss[loss=0.3024, simple_loss=0.3382, pruned_loss=0.1333, over 3630057.75 frames. ], batch size: 72, lr: 2.96e-02, grad_scale: 8.0 +2023-03-27 19:19:17,692 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7330, 1.8278, 2.0343, 1.8734, 2.6330, 2.3147, 2.4951, 2.5462], + device='cuda:1'), covar=tensor([0.0631, 0.1526, 0.1288, 0.1620, 0.0739, 0.0895, 0.0441, 0.0444], + device='cuda:1'), in_proj_covar=tensor([0.0131, 0.0159, 0.0150, 0.0182, 0.0148, 0.0151, 0.0099, 0.0102], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 19:19:34,595 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7931.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:20:12,390 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.98 vs. limit=2.0 +2023-03-27 19:20:57,025 INFO [train.py:892] (1/4) Epoch 5, batch 550, loss[loss=0.2636, simple_loss=0.3008, pruned_loss=0.1132, over 19817.00 frames. ], tot_loss[loss=0.3036, simple_loss=0.339, pruned_loss=0.1341, over 3700630.39 frames. ], batch size: 123, lr: 2.96e-02, grad_scale: 8.0 +2023-03-27 19:21:13,918 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7979.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:21:15,781 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7980.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:22:12,797 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.154e+02 6.076e+02 7.580e+02 9.097e+02 2.073e+03, threshold=1.516e+03, percent-clipped=3.0 +2023-03-27 19:22:44,351 INFO [train.py:892] (1/4) Epoch 5, batch 600, loss[loss=0.4056, simple_loss=0.4153, pruned_loss=0.198, over 19589.00 frames. ], tot_loss[loss=0.3029, simple_loss=0.339, pruned_loss=0.1334, over 3755410.37 frames. ], batch size: 376, lr: 2.95e-02, grad_scale: 8.0 +2023-03-27 19:24:11,332 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8063.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:24:28,883 INFO [train.py:892] (1/4) Epoch 5, batch 650, loss[loss=0.3004, simple_loss=0.3417, pruned_loss=0.1296, over 19812.00 frames. ], tot_loss[loss=0.3009, simple_loss=0.3373, pruned_loss=0.1322, over 3799493.10 frames. ], batch size: 132, lr: 2.94e-02, grad_scale: 8.0 +2023-03-27 19:25:10,579 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. 
limit=2.0 +2023-03-27 19:25:31,083 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8099.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:25:43,549 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.014e+02 6.583e+02 7.852e+02 9.012e+02 1.490e+03, threshold=1.570e+03, percent-clipped=0.0 +2023-03-27 19:25:56,287 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8111.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:26:16,745 INFO [train.py:892] (1/4) Epoch 5, batch 700, loss[loss=0.3057, simple_loss=0.3482, pruned_loss=0.1316, over 19707.00 frames. ], tot_loss[loss=0.3005, simple_loss=0.3376, pruned_loss=0.1317, over 3833148.40 frames. ], batch size: 109, lr: 2.94e-02, grad_scale: 8.0 +2023-03-27 19:27:38,094 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8158.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:28:05,582 INFO [train.py:892] (1/4) Epoch 5, batch 750, loss[loss=0.313, simple_loss=0.345, pruned_loss=0.1404, over 19734.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3364, pruned_loss=0.131, over 3859740.75 frames. ], batch size: 269, lr: 2.93e-02, grad_scale: 8.0 +2023-03-27 19:29:16,506 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.022e+02 6.531e+02 7.679e+02 9.153e+02 1.674e+03, threshold=1.536e+03, percent-clipped=1.0 +2023-03-27 19:29:50,732 INFO [train.py:892] (1/4) Epoch 5, batch 800, loss[loss=0.3098, simple_loss=0.3424, pruned_loss=0.1385, over 19759.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3371, pruned_loss=0.1315, over 3878846.03 frames. ], batch size: 182, lr: 2.92e-02, grad_scale: 8.0 +2023-03-27 19:29:54,013 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. 
limit=2.0 +2023-03-27 19:30:04,745 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7582, 4.5212, 5.0813, 4.8323, 5.0063, 4.0564, 4.6803, 4.6510], + device='cuda:1'), covar=tensor([0.1168, 0.1007, 0.0890, 0.0886, 0.0740, 0.1237, 0.2015, 0.2351], + device='cuda:1'), in_proj_covar=tensor([0.0195, 0.0166, 0.0231, 0.0188, 0.0182, 0.0176, 0.0216, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 19:30:40,788 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0119, 4.5347, 4.5450, 5.0634, 4.6631, 5.1704, 4.9531, 5.2654], + device='cuda:1'), covar=tensor([0.0528, 0.0262, 0.0342, 0.0183, 0.0463, 0.0144, 0.0299, 0.0238], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0111, 0.0128, 0.0110, 0.0108, 0.0088, 0.0112, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 19:31:21,399 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0131, 5.0235, 5.4803, 5.2408, 5.2480, 4.9258, 5.1086, 5.0462], + device='cuda:1'), covar=tensor([0.1020, 0.1031, 0.0899, 0.0778, 0.0728, 0.1016, 0.1778, 0.1910], + device='cuda:1'), in_proj_covar=tensor([0.0193, 0.0165, 0.0229, 0.0186, 0.0180, 0.0175, 0.0216, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 19:31:25,601 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8265.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:31:36,268 INFO [train.py:892] (1/4) Epoch 5, batch 850, loss[loss=0.2664, simple_loss=0.3188, pruned_loss=0.107, over 19794.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.3357, pruned_loss=0.13, over 3895515.35 frames. ], batch size: 40, lr: 2.92e-02, grad_scale: 8.0 +2023-03-27 19:31:57,902 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8280.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:32:49,195 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.707e+02 5.455e+02 6.919e+02 8.386e+02 1.759e+03, threshold=1.384e+03, percent-clipped=2.0 +2023-03-27 19:33:06,168 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.03 vs. limit=5.0 +2023-03-27 19:33:13,984 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7302, 1.9396, 1.8331, 1.6385, 1.2344, 1.6422, 1.7335, 2.1279], + device='cuda:1'), covar=tensor([0.0305, 0.0338, 0.0292, 0.0408, 0.0456, 0.0424, 0.0458, 0.0396], + device='cuda:1'), in_proj_covar=tensor([0.0030, 0.0032, 0.0032, 0.0029, 0.0031, 0.0032, 0.0039, 0.0029], + device='cuda:1'), out_proj_covar=tensor([6.2449e-05, 6.3373e-05, 6.4600e-05, 5.8563e-05, 6.5348e-05, 6.5766e-05, + 7.7468e-05, 6.0534e-05], device='cuda:1') +2023-03-27 19:33:22,236 INFO [train.py:892] (1/4) Epoch 5, batch 900, loss[loss=0.3085, simple_loss=0.3527, pruned_loss=0.1322, over 19777.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3358, pruned_loss=0.1305, over 3908562.40 frames. 
], batch size: 70, lr: 2.91e-02, grad_scale: 16.0 +2023-03-27 19:33:34,334 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8326.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:33:37,971 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8328.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:34:47,428 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8782, 4.0781, 4.0971, 4.0601, 3.8837, 4.0049, 3.6636, 3.6682], + device='cuda:1'), covar=tensor([0.0452, 0.0465, 0.0789, 0.0547, 0.0724, 0.0722, 0.0677, 0.1139], + device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0127, 0.0187, 0.0140, 0.0136, 0.0123, 0.0155, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 19:35:08,190 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-03-27 19:35:10,580 INFO [train.py:892] (1/4) Epoch 5, batch 950, loss[loss=0.3051, simple_loss=0.3448, pruned_loss=0.1327, over 19739.00 frames. ], tot_loss[loss=0.2996, simple_loss=0.3368, pruned_loss=0.1312, over 3917772.97 frames. ], batch size: 92, lr: 2.91e-02, grad_scale: 16.0 +2023-03-27 19:35:56,555 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0475, 4.3274, 4.3938, 4.3693, 4.0783, 4.2635, 3.9151, 3.9560], + device='cuda:1'), covar=tensor([0.0446, 0.0470, 0.0759, 0.0439, 0.0713, 0.0679, 0.0608, 0.1172], + device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0127, 0.0186, 0.0139, 0.0137, 0.0122, 0.0157, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 19:36:10,079 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8399.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:36:23,194 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 6.186e+02 7.459e+02 8.919e+02 2.032e+03, threshold=1.492e+03, percent-clipped=3.0 +2023-03-27 19:36:55,733 INFO [train.py:892] (1/4) Epoch 5, batch 1000, loss[loss=0.2845, simple_loss=0.3242, pruned_loss=0.1224, over 19780.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3391, pruned_loss=0.1329, over 3923930.14 frames. ], batch size: 52, lr: 2.90e-02, grad_scale: 16.0 +2023-03-27 19:37:53,240 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8447.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:37:59,328 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5924, 3.5737, 2.4316, 4.0209, 4.1249, 1.7275, 3.4116, 3.2156], + device='cuda:1'), covar=tensor([0.0549, 0.0771, 0.2158, 0.0320, 0.0133, 0.3195, 0.0825, 0.0457], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0148, 0.0175, 0.0100, 0.0072, 0.0173, 0.0171, 0.0109], + device='cuda:1'), out_proj_covar=tensor([1.5031e-04, 1.7020e-04, 1.9190e-04, 1.2230e-04, 8.6286e-05, 1.8421e-04, + 1.9215e-04, 1.2069e-04], device='cuda:1') +2023-03-27 19:38:15,142 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8458.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:38:41,303 INFO [train.py:892] (1/4) Epoch 5, batch 1050, loss[loss=0.2924, simple_loss=0.3369, pruned_loss=0.124, over 19865.00 frames. ], tot_loss[loss=0.3036, simple_loss=0.3398, pruned_loss=0.1337, over 3929853.87 frames. 
], batch size: 51, lr: 2.89e-02, grad_scale: 16.0 +2023-03-27 19:39:06,906 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1883, 3.1926, 4.7983, 3.6102, 4.0746, 4.7248, 2.7943, 2.9335], + device='cuda:1'), covar=tensor([0.0505, 0.3004, 0.0249, 0.0587, 0.1164, 0.0382, 0.0921, 0.1354], + device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0275, 0.0158, 0.0154, 0.0248, 0.0139, 0.0172, 0.0182], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 19:39:52,156 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.605e+02 6.353e+02 7.329e+02 8.982e+02 1.532e+03, threshold=1.466e+03, percent-clipped=3.0 +2023-03-27 19:39:54,938 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8506.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:40:27,780 INFO [train.py:892] (1/4) Epoch 5, batch 1100, loss[loss=0.2537, simple_loss=0.2967, pruned_loss=0.1053, over 19911.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3381, pruned_loss=0.132, over 3934480.19 frames. ], batch size: 45, lr: 2.89e-02, grad_scale: 16.0 +2023-03-27 19:42:10,186 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-27 19:42:10,852 INFO [train.py:892] (1/4) Epoch 5, batch 1150, loss[loss=0.3025, simple_loss=0.335, pruned_loss=0.135, over 19753.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3382, pruned_loss=0.1323, over 3938995.84 frames. ], batch size: 139, lr: 2.88e-02, grad_scale: 16.0 +2023-03-27 19:42:37,258 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9517, 3.0735, 3.8868, 3.4164, 3.6411, 3.6581, 3.6666, 3.7281], + device='cuda:1'), covar=tensor([0.0139, 0.0391, 0.0092, 0.0983, 0.0128, 0.0171, 0.0166, 0.0112], + device='cuda:1'), in_proj_covar=tensor([0.0056, 0.0064, 0.0056, 0.0134, 0.0051, 0.0057, 0.0057, 0.0049], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0003, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-27 19:42:56,336 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0143, 4.5960, 4.8592, 4.6568, 4.9526, 3.5187, 3.9906, 3.8555], + device='cuda:1'), covar=tensor([0.0171, 0.0130, 0.0110, 0.0130, 0.0091, 0.0544, 0.0890, 0.0487], + device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0072, 0.0070, 0.0080, 0.0072, 0.0095, 0.0107, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 19:43:21,778 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 6.114e+02 7.456e+02 8.704e+02 1.962e+03, threshold=1.491e+03, percent-clipped=1.0 +2023-03-27 19:43:56,816 INFO [train.py:892] (1/4) Epoch 5, batch 1200, loss[loss=0.2859, simple_loss=0.3243, pruned_loss=0.1238, over 19713.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3359, pruned_loss=0.1308, over 3942387.18 frames. ], batch size: 109, lr: 2.87e-02, grad_scale: 16.0 +2023-03-27 19:43:57,702 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8621.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:45:38,933 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.16 vs. limit=5.0 +2023-03-27 19:45:41,181 INFO [train.py:892] (1/4) Epoch 5, batch 1250, loss[loss=0.2658, simple_loss=0.3102, pruned_loss=0.1107, over 19784.00 frames. ], tot_loss[loss=0.2993, simple_loss=0.3363, pruned_loss=0.1312, over 3943083.35 frames. 
], batch size: 91, lr: 2.87e-02, grad_scale: 16.0 +2023-03-27 19:46:53,152 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.783e+02 6.822e+02 8.338e+02 1.055e+03 2.150e+03, threshold=1.668e+03, percent-clipped=7.0 +2023-03-27 19:47:25,002 INFO [train.py:892] (1/4) Epoch 5, batch 1300, loss[loss=0.2514, simple_loss=0.3009, pruned_loss=0.1009, over 19806.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.3343, pruned_loss=0.1291, over 3945869.84 frames. ], batch size: 98, lr: 2.86e-02, grad_scale: 16.0 +2023-03-27 19:48:46,406 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0750, 2.1567, 1.9369, 1.3434, 2.0428, 2.1954, 2.1398, 1.8266], + device='cuda:1'), covar=tensor([0.0260, 0.0311, 0.0217, 0.0615, 0.0405, 0.0335, 0.0167, 0.0388], + device='cuda:1'), in_proj_covar=tensor([0.0037, 0.0033, 0.0035, 0.0050, 0.0051, 0.0034, 0.0027, 0.0032], + device='cuda:1'), out_proj_covar=tensor([7.6466e-05, 7.0320e-05, 7.0637e-05, 1.0414e-04, 1.0457e-04, 7.2083e-05, + 5.7183e-05, 6.6489e-05], device='cuda:1') +2023-03-27 19:48:48,220 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8760.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:49:09,912 INFO [train.py:892] (1/4) Epoch 5, batch 1350, loss[loss=0.2411, simple_loss=0.2855, pruned_loss=0.0983, over 19731.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.3341, pruned_loss=0.1284, over 3947650.00 frames. ], batch size: 95, lr: 2.86e-02, grad_scale: 16.0 +2023-03-27 19:50:22,595 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.065e+02 5.700e+02 7.084e+02 8.473e+02 1.458e+03, threshold=1.417e+03, percent-clipped=0.0 +2023-03-27 19:50:58,497 INFO [train.py:892] (1/4) Epoch 5, batch 1400, loss[loss=0.32, simple_loss=0.3466, pruned_loss=0.1467, over 19798.00 frames. ], tot_loss[loss=0.2942, simple_loss=0.3334, pruned_loss=0.1276, over 3948508.41 frames. ], batch size: 224, lr: 2.85e-02, grad_scale: 16.0 +2023-03-27 19:50:59,479 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8821.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:52:37,520 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4397, 5.4648, 5.9199, 5.6678, 5.6222, 5.4117, 5.5454, 5.4649], + device='cuda:1'), covar=tensor([0.1138, 0.0895, 0.0789, 0.0852, 0.0608, 0.0886, 0.1612, 0.1945], + device='cuda:1'), in_proj_covar=tensor([0.0204, 0.0173, 0.0237, 0.0196, 0.0187, 0.0181, 0.0226, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-27 19:52:40,302 INFO [train.py:892] (1/4) Epoch 5, batch 1450, loss[loss=0.3503, simple_loss=0.3755, pruned_loss=0.1625, over 19788.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.3343, pruned_loss=0.1283, over 3949155.58 frames. ], batch size: 263, lr: 2.84e-02, grad_scale: 16.0 +2023-03-27 19:53:52,855 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+02 6.588e+02 7.767e+02 1.033e+03 1.538e+03, threshold=1.553e+03, percent-clipped=3.0 +2023-03-27 19:54:26,576 INFO [train.py:892] (1/4) Epoch 5, batch 1500, loss[loss=0.2855, simple_loss=0.3259, pruned_loss=0.1226, over 19817.00 frames. ], tot_loss[loss=0.2968, simple_loss=0.3351, pruned_loss=0.1293, over 3949147.06 frames. 
], batch size: 133, lr: 2.84e-02, grad_scale: 16.0 +2023-03-27 19:54:27,568 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8921.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:54:40,841 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8927.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:56:10,557 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8969.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:56:13,867 INFO [train.py:892] (1/4) Epoch 5, batch 1550, loss[loss=0.3141, simple_loss=0.3329, pruned_loss=0.1477, over 19813.00 frames. ], tot_loss[loss=0.296, simple_loss=0.3341, pruned_loss=0.129, over 3949033.48 frames. ], batch size: 148, lr: 2.83e-02, grad_scale: 16.0 +2023-03-27 19:56:19,445 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-27 19:56:45,336 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8986.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:56:50,728 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8988.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:57:26,094 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.861e+02 6.933e+02 8.857e+02 2.450e+03, threshold=1.387e+03, percent-clipped=2.0 +2023-03-27 19:57:35,349 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-03-27 19:57:42,854 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9013.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:57:57,832 INFO [train.py:892] (1/4) Epoch 5, batch 1600, loss[loss=0.3487, simple_loss=0.3739, pruned_loss=0.1617, over 19632.00 frames. ], tot_loss[loss=0.2946, simple_loss=0.3336, pruned_loss=0.1278, over 3949733.43 frames. ], batch size: 351, lr: 2.83e-02, grad_scale: 16.0 +2023-03-27 19:58:43,579 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1708, 4.7535, 4.8139, 5.3466, 4.8207, 5.5473, 5.2141, 5.6079], + device='cuda:1'), covar=tensor([0.0686, 0.0298, 0.0419, 0.0218, 0.0437, 0.0163, 0.0361, 0.0247], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0113, 0.0133, 0.0115, 0.0109, 0.0089, 0.0117, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 19:58:52,904 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9047.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:59:40,275 INFO [train.py:892] (1/4) Epoch 5, batch 1650, loss[loss=0.3339, simple_loss=0.36, pruned_loss=0.1539, over 19767.00 frames. ], tot_loss[loss=0.2948, simple_loss=0.3334, pruned_loss=0.1281, over 3949250.96 frames. 
], batch size: 241, lr: 2.82e-02, grad_scale: 16.0 +2023-03-27 19:59:48,630 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9074.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:00:05,733 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.5184, 1.8631, 1.4812, 1.0566, 1.5677, 2.0382, 1.6163, 1.7404], + device='cuda:1'), covar=tensor([0.0303, 0.0298, 0.0255, 0.0783, 0.0586, 0.0257, 0.0184, 0.0294], + device='cuda:1'), in_proj_covar=tensor([0.0037, 0.0034, 0.0035, 0.0051, 0.0052, 0.0034, 0.0027, 0.0032], + device='cuda:1'), out_proj_covar=tensor([7.7534e-05, 7.4023e-05, 7.2949e-05, 1.0785e-04, 1.0863e-04, 7.3213e-05, + 5.9801e-05, 6.9441e-05], device='cuda:1') +2023-03-27 20:00:12,917 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6069, 2.5376, 1.4378, 3.2748, 2.8071, 3.1746, 3.1783, 2.5355], + device='cuda:1'), covar=tensor([0.0734, 0.0598, 0.1708, 0.0395, 0.0548, 0.0385, 0.0497, 0.0658], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0080, 0.0110, 0.0089, 0.0077, 0.0067, 0.0078, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:00:51,669 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.078e+02 6.548e+02 7.778e+02 9.936e+02 1.704e+03, threshold=1.556e+03, percent-clipped=1.0 +2023-03-27 20:01:16,365 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9116.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:01:25,604 INFO [train.py:892] (1/4) Epoch 5, batch 1700, loss[loss=0.2703, simple_loss=0.3239, pruned_loss=0.1084, over 19883.00 frames. ], tot_loss[loss=0.2959, simple_loss=0.3348, pruned_loss=0.1285, over 3948311.85 frames. ], batch size: 77, lr: 2.81e-02, grad_scale: 16.0 +2023-03-27 20:01:26,552 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0909, 2.4939, 3.6511, 3.6742, 4.0714, 4.3760, 4.2880, 4.2959], + device='cuda:1'), covar=tensor([0.0530, 0.1825, 0.0395, 0.0296, 0.0298, 0.0129, 0.0170, 0.0206], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0152, 0.0098, 0.0093, 0.0076, 0.0075, 0.0069, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 20:03:04,921 INFO [train.py:892] (1/4) Epoch 5, batch 1750, loss[loss=0.2869, simple_loss=0.3281, pruned_loss=0.1229, over 19800.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3359, pruned_loss=0.1298, over 3949173.94 frames. ], batch size: 150, lr: 2.81e-02, grad_scale: 16.0 +2023-03-27 20:03:41,338 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-27 20:03:51,448 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1644, 3.0826, 4.2038, 3.5776, 3.6573, 4.1766, 3.8810, 4.2563], + device='cuda:1'), covar=tensor([0.0191, 0.0488, 0.0125, 0.1165, 0.0139, 0.0176, 0.0170, 0.0097], + device='cuda:1'), in_proj_covar=tensor([0.0057, 0.0064, 0.0055, 0.0129, 0.0050, 0.0056, 0.0057, 0.0049], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0003, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-27 20:04:05,088 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+02 6.292e+02 7.596e+02 9.035e+02 1.874e+03, threshold=1.519e+03, percent-clipped=3.0 +2023-03-27 20:04:33,322 INFO [train.py:892] (1/4) Epoch 5, batch 1800, loss[loss=0.3013, simple_loss=0.3379, pruned_loss=0.1323, over 19811.00 frames. 
], tot_loss[loss=0.2994, simple_loss=0.3373, pruned_loss=0.1307, over 3945138.12 frames. ], batch size: 72, lr: 2.80e-02, grad_scale: 16.0 +2023-03-27 20:05:15,969 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7971, 2.4233, 3.3763, 2.3882, 3.1683, 2.3032, 2.6285, 3.4790], + device='cuda:1'), covar=tensor([0.0599, 0.0330, 0.0370, 0.0505, 0.0360, 0.0281, 0.0318, 0.0179], + device='cuda:1'), in_proj_covar=tensor([0.0045, 0.0041, 0.0043, 0.0062, 0.0043, 0.0037, 0.0039, 0.0035], + device='cuda:1'), out_proj_covar=tensor([1.0815e-04, 1.0083e-04, 1.0447e-04, 1.3699e-04, 1.0399e-04, 9.2557e-05, + 9.8880e-05, 8.5450e-05], device='cuda:1') +2023-03-27 20:06:00,371 INFO [train.py:892] (1/4) Epoch 5, batch 1850, loss[loss=0.3343, simple_loss=0.3728, pruned_loss=0.1479, over 19822.00 frames. ], tot_loss[loss=0.3004, simple_loss=0.3388, pruned_loss=0.131, over 3944860.15 frames. ], batch size: 57, lr: 2.80e-02, grad_scale: 16.0 +2023-03-27 20:06:59,490 INFO [train.py:892] (1/4) Epoch 6, batch 0, loss[loss=0.264, simple_loss=0.3105, pruned_loss=0.1088, over 19793.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3105, pruned_loss=0.1088, over 19793.00 frames. ], batch size: 83, lr: 2.61e-02, grad_scale: 16.0 +2023-03-27 20:06:59,491 INFO [train.py:917] (1/4) Computing validation loss +2023-03-27 20:07:11,669 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4984, 4.2688, 4.2868, 4.1158, 4.5206, 3.2610, 3.5582, 3.5638], + device='cuda:1'), covar=tensor([0.0151, 0.0151, 0.0153, 0.0159, 0.0116, 0.0712, 0.0938, 0.0442], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0075, 0.0075, 0.0083, 0.0075, 0.0098, 0.0111, 0.0093], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:07:25,896 INFO [train.py:926] (1/4) Epoch 6, validation: loss=0.2048, simple_loss=0.2829, pruned_loss=0.06328, over 2883724.00 frames. +2023-03-27 20:07:25,897 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22502MB +2023-03-27 20:07:45,227 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9283.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:08:30,072 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 6.201e+02 7.498e+02 9.060e+02 1.792e+03, threshold=1.500e+03, percent-clipped=2.0 +2023-03-27 20:09:01,904 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0470, 1.8191, 2.1728, 1.9495, 1.4837, 1.7627, 1.6627, 2.0574], + device='cuda:1'), covar=tensor([0.0242, 0.0560, 0.0330, 0.0262, 0.0396, 0.0442, 0.0580, 0.0463], + device='cuda:1'), in_proj_covar=tensor([0.0031, 0.0035, 0.0034, 0.0027, 0.0033, 0.0034, 0.0041, 0.0032], + device='cuda:1'), out_proj_covar=tensor([6.6695e-05, 7.2076e-05, 6.9564e-05, 5.7719e-05, 7.0987e-05, 7.1434e-05, + 8.5292e-05, 6.8765e-05], device='cuda:1') +2023-03-27 20:09:13,758 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1503, 3.1598, 3.5707, 4.5357, 2.4621, 3.1931, 2.9000, 2.0548], + device='cuda:1'), covar=tensor([0.0358, 0.3441, 0.0802, 0.0137, 0.2612, 0.0705, 0.1064, 0.2408], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0311, 0.0183, 0.0101, 0.0215, 0.0135, 0.0162, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:09:14,661 INFO [train.py:892] (1/4) Epoch 6, batch 50, loss[loss=0.2846, simple_loss=0.3318, pruned_loss=0.1188, over 19895.00 frames. 
], tot_loss[loss=0.274, simple_loss=0.3179, pruned_loss=0.115, over 891967.88 frames. ], batch size: 87, lr: 2.60e-02, grad_scale: 16.0 +2023-03-27 20:09:48,468 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9342.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:10:40,270 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7020, 3.5681, 4.0268, 5.1459, 3.1199, 3.6561, 3.2259, 2.5502], + device='cuda:1'), covar=tensor([0.0302, 0.3031, 0.0670, 0.0079, 0.2134, 0.0580, 0.0923, 0.2092], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0309, 0.0180, 0.0100, 0.0213, 0.0134, 0.0162, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:10:46,095 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9369.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:10:59,010 INFO [train.py:892] (1/4) Epoch 6, batch 100, loss[loss=0.2563, simple_loss=0.3089, pruned_loss=0.1019, over 19791.00 frames. ], tot_loss[loss=0.2753, simple_loss=0.3197, pruned_loss=0.1155, over 1570400.49 frames. ], batch size: 74, lr: 2.60e-02, grad_scale: 16.0 +2023-03-27 20:12:00,064 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.626e+02 6.778e+02 8.697e+02 1.693e+03, threshold=1.356e+03, percent-clipped=2.0 +2023-03-27 20:12:20,614 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9414.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:12:24,240 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9416.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:12:43,793 INFO [train.py:892] (1/4) Epoch 6, batch 150, loss[loss=0.2767, simple_loss=0.3303, pruned_loss=0.1116, over 19863.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.324, pruned_loss=0.1192, over 2095268.35 frames. ], batch size: 48, lr: 2.59e-02, grad_scale: 16.0 +2023-03-27 20:14:08,397 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9464.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:14:32,807 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9475.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:14:33,667 INFO [train.py:892] (1/4) Epoch 6, batch 200, loss[loss=0.2457, simple_loss=0.303, pruned_loss=0.09419, over 19826.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.325, pruned_loss=0.1207, over 2505963.74 frames. ], batch size: 76, lr: 2.59e-02, grad_scale: 16.0 +2023-03-27 20:14:50,259 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6165, 4.0345, 4.3797, 4.0075, 3.9540, 4.1748, 4.1070, 4.4455], + device='cuda:1'), covar=tensor([0.1638, 0.0335, 0.0375, 0.0381, 0.0608, 0.0436, 0.0313, 0.0404], + device='cuda:1'), in_proj_covar=tensor([0.0197, 0.0146, 0.0137, 0.0138, 0.0143, 0.0135, 0.0123, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:14:58,390 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. 
limit=2.0 +2023-03-27 20:15:34,377 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.804e+02 5.747e+02 7.100e+02 9.043e+02 1.475e+03, threshold=1.420e+03, percent-clipped=5.0 +2023-03-27 20:15:52,692 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8153, 3.0132, 3.3874, 3.8637, 2.4361, 3.2749, 2.7267, 2.2754], + device='cuda:1'), covar=tensor([0.0380, 0.2748, 0.0701, 0.0125, 0.2420, 0.0414, 0.0895, 0.1878], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0308, 0.0183, 0.0101, 0.0220, 0.0134, 0.0166, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:16:16,356 INFO [train.py:892] (1/4) Epoch 6, batch 250, loss[loss=0.2417, simple_loss=0.2906, pruned_loss=0.09637, over 19730.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.325, pruned_loss=0.1204, over 2826405.87 frames. ], batch size: 118, lr: 2.58e-02, grad_scale: 16.0 +2023-03-27 20:18:00,755 INFO [train.py:892] (1/4) Epoch 6, batch 300, loss[loss=0.2828, simple_loss=0.3193, pruned_loss=0.1231, over 19826.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3254, pruned_loss=0.1208, over 3076790.61 frames. ], batch size: 121, lr: 2.58e-02, grad_scale: 16.0 +2023-03-27 20:18:19,811 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9583.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:18:21,717 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9584.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:18:33,339 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1969, 2.3431, 3.4978, 2.8189, 2.9680, 3.6296, 2.1042, 2.1858], + device='cuda:1'), covar=tensor([0.0703, 0.3075, 0.0464, 0.0654, 0.1255, 0.0344, 0.1184, 0.1826], + device='cuda:1'), in_proj_covar=tensor([0.0231, 0.0286, 0.0176, 0.0165, 0.0264, 0.0152, 0.0187, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:19:02,337 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.913e+02 5.609e+02 7.067e+02 9.111e+02 1.524e+03, threshold=1.413e+03, percent-clipped=2.0 +2023-03-27 20:19:33,200 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-27 20:19:49,311 INFO [train.py:892] (1/4) Epoch 6, batch 350, loss[loss=0.3029, simple_loss=0.3386, pruned_loss=0.1336, over 19823.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3272, pruned_loss=0.1225, over 3270373.66 frames. ], batch size: 72, lr: 2.57e-02, grad_scale: 16.0 +2023-03-27 20:19:52,506 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-03-27 20:20:00,209 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9631.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:20:06,338 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0683, 1.5067, 2.1535, 2.5647, 2.6961, 2.9192, 2.8362, 2.8421], + device='cuda:1'), covar=tensor([0.0984, 0.2173, 0.0715, 0.0446, 0.0374, 0.0194, 0.0256, 0.0289], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0156, 0.0102, 0.0096, 0.0079, 0.0076, 0.0071, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 20:20:22,990 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9642.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:20:29,131 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9645.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:20:57,554 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9658.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:21:18,811 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9669.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:21:32,097 INFO [train.py:892] (1/4) Epoch 6, batch 400, loss[loss=0.2576, simple_loss=0.2948, pruned_loss=0.1102, over 19800.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.327, pruned_loss=0.1216, over 3421575.96 frames. ], batch size: 148, lr: 2.57e-02, grad_scale: 16.0 +2023-03-27 20:21:32,847 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1429, 4.2185, 4.5323, 4.4130, 4.5225, 4.0132, 4.1843, 4.1215], + device='cuda:1'), covar=tensor([0.1249, 0.0939, 0.1039, 0.0815, 0.0817, 0.1192, 0.2089, 0.2239], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0176, 0.0251, 0.0197, 0.0190, 0.0190, 0.0237, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-27 20:22:03,525 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9690.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:22:32,718 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 5.768e+02 7.171e+02 9.130e+02 1.746e+03, threshold=1.434e+03, percent-clipped=4.0 +2023-03-27 20:22:58,077 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9717.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:23:02,367 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9719.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:23:16,571 INFO [train.py:892] (1/4) Epoch 6, batch 450, loss[loss=0.2689, simple_loss=0.3264, pruned_loss=0.1057, over 19600.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3277, pruned_loss=0.1218, over 3537839.23 frames. 
], batch size: 50, lr: 2.56e-02, grad_scale: 16.0 +2023-03-27 20:23:24,994 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1458, 2.8916, 1.8348, 3.9599, 3.2169, 3.9050, 4.0322, 3.1979], + device='cuda:1'), covar=tensor([0.0744, 0.0778, 0.1651, 0.0578, 0.0833, 0.0470, 0.0464, 0.0755], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0085, 0.0115, 0.0093, 0.0082, 0.0072, 0.0081, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:23:54,108 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9743.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:24:50,382 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9770.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:24:59,404 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7188, 1.7804, 2.9505, 3.1702, 3.4627, 3.5991, 3.5265, 3.7758], + device='cuda:1'), covar=tensor([0.0695, 0.2384, 0.0570, 0.0388, 0.0312, 0.0152, 0.0200, 0.0233], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0161, 0.0105, 0.0098, 0.0083, 0.0078, 0.0074, 0.0078], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 20:25:02,470 INFO [train.py:892] (1/4) Epoch 6, batch 500, loss[loss=0.3235, simple_loss=0.3553, pruned_loss=0.1458, over 19788.00 frames. ], tot_loss[loss=0.2853, simple_loss=0.3273, pruned_loss=0.1217, over 3628243.74 frames. ], batch size: 280, lr: 2.56e-02, grad_scale: 16.0 +2023-03-27 20:26:01,155 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9804.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:26:01,917 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.739e+02 6.410e+02 7.467e+02 9.319e+02 1.836e+03, threshold=1.493e+03, percent-clipped=5.0 +2023-03-27 20:26:44,456 INFO [train.py:892] (1/4) Epoch 6, batch 550, loss[loss=0.2707, simple_loss=0.3143, pruned_loss=0.1136, over 19817.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3282, pruned_loss=0.1224, over 3700585.09 frames. ], batch size: 147, lr: 2.55e-02, grad_scale: 16.0 +2023-03-27 20:27:51,360 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4344, 2.8916, 1.7840, 3.9321, 3.5612, 3.9717, 3.8451, 3.0401], + device='cuda:1'), covar=tensor([0.0407, 0.0484, 0.1514, 0.0363, 0.0441, 0.0360, 0.0498, 0.0505], + device='cuda:1'), in_proj_covar=tensor([0.0095, 0.0084, 0.0112, 0.0092, 0.0079, 0.0069, 0.0079, 0.0093], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:28:14,180 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-27 20:28:30,687 INFO [train.py:892] (1/4) Epoch 6, batch 600, loss[loss=0.2476, simple_loss=0.3041, pruned_loss=0.09551, over 19777.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3266, pruned_loss=0.121, over 3757003.67 frames. ], batch size: 46, lr: 2.54e-02, grad_scale: 16.0 +2023-03-27 20:29:32,378 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.880e+02 5.938e+02 7.019e+02 8.775e+02 1.647e+03, threshold=1.404e+03, percent-clipped=2.0 +2023-03-27 20:30:16,208 INFO [train.py:892] (1/4) Epoch 6, batch 650, loss[loss=0.2404, simple_loss=0.2931, pruned_loss=0.09387, over 19736.00 frames. 
], tot_loss[loss=0.2843, simple_loss=0.3266, pruned_loss=0.121, over 3798788.97 frames. ], batch size: 99, lr: 2.54e-02, grad_scale: 16.0 +2023-03-27 20:30:45,581 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9940.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:32:00,352 INFO [train.py:892] (1/4) Epoch 6, batch 700, loss[loss=0.3137, simple_loss=0.3684, pruned_loss=0.1294, over 19831.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.326, pruned_loss=0.1198, over 3833158.20 frames. ], batch size: 57, lr: 2.53e-02, grad_scale: 16.0 +2023-03-27 20:32:09,856 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-27 20:33:05,039 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.264e+02 6.107e+02 7.493e+02 9.243e+02 1.709e+03, threshold=1.499e+03, percent-clipped=3.0 +2023-03-27 20:33:24,843 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10014.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:33:49,834 INFO [train.py:892] (1/4) Epoch 6, batch 750, loss[loss=0.2891, simple_loss=0.3438, pruned_loss=0.1173, over 19770.00 frames. ], tot_loss[loss=0.2838, simple_loss=0.3265, pruned_loss=0.1205, over 3859390.81 frames. ], batch size: 70, lr: 2.53e-02, grad_scale: 16.0 +2023-03-27 20:33:54,543 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9734, 5.3069, 5.2704, 5.2751, 5.0165, 5.1550, 4.7176, 4.8191], + device='cuda:1'), covar=tensor([0.0359, 0.0302, 0.0485, 0.0370, 0.0414, 0.0572, 0.0506, 0.0915], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0139, 0.0197, 0.0154, 0.0144, 0.0135, 0.0168, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 20:34:04,142 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.99 vs. limit=2.0 +2023-03-27 20:34:17,087 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10039.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:35:04,881 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10062.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 20:35:23,465 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10070.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:35:34,018 INFO [train.py:892] (1/4) Epoch 6, batch 800, loss[loss=0.2567, simple_loss=0.314, pruned_loss=0.09972, over 19897.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3262, pruned_loss=0.1198, over 3878434.75 frames. 
], batch size: 91, lr: 2.52e-02, grad_scale: 16.0 +2023-03-27 20:35:57,149 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6031, 2.6232, 3.8516, 2.9849, 3.3558, 4.0239, 2.2529, 2.1451], + device='cuda:1'), covar=tensor([0.0548, 0.2694, 0.0330, 0.0498, 0.1060, 0.0293, 0.1137, 0.1908], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0293, 0.0184, 0.0171, 0.0273, 0.0159, 0.0200, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:36:08,407 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8704, 1.9186, 3.1703, 3.1153, 3.6454, 3.9063, 3.9460, 3.9939], + device='cuda:1'), covar=tensor([0.0656, 0.2245, 0.0575, 0.0463, 0.0273, 0.0126, 0.0152, 0.0252], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0155, 0.0106, 0.0098, 0.0080, 0.0077, 0.0072, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 20:36:21,629 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10099.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:36:23,967 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10100.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 20:36:35,376 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 5.825e+02 7.423e+02 9.383e+02 2.108e+03, threshold=1.485e+03, percent-clipped=8.0 +2023-03-27 20:37:01,065 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10118.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:37:13,106 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10123.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 20:37:17,776 INFO [train.py:892] (1/4) Epoch 6, batch 850, loss[loss=0.315, simple_loss=0.3396, pruned_loss=0.1452, over 19807.00 frames. ], tot_loss[loss=0.2832, simple_loss=0.3265, pruned_loss=0.1199, over 3894831.46 frames. ], batch size: 181, lr: 2.52e-02, grad_scale: 16.0 +2023-03-27 20:38:37,695 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1419, 2.1034, 2.0751, 1.9411, 1.7291, 1.8782, 1.6965, 2.0986], + device='cuda:1'), covar=tensor([0.0296, 0.0265, 0.0373, 0.0340, 0.0349, 0.0347, 0.0698, 0.0470], + device='cuda:1'), in_proj_covar=tensor([0.0031, 0.0034, 0.0034, 0.0028, 0.0035, 0.0035, 0.0042, 0.0034], + device='cuda:1'), out_proj_covar=tensor([6.6306e-05, 7.2557e-05, 7.2430e-05, 6.0583e-05, 7.4836e-05, 7.5754e-05, + 8.9497e-05, 7.2868e-05], device='cuda:1') +2023-03-27 20:39:04,579 INFO [train.py:892] (1/4) Epoch 6, batch 900, loss[loss=0.2801, simple_loss=0.3306, pruned_loss=0.1147, over 19648.00 frames. ], tot_loss[loss=0.283, simple_loss=0.3262, pruned_loss=0.1198, over 3906667.38 frames. ], batch size: 57, lr: 2.51e-02, grad_scale: 16.0 +2023-03-27 20:40:03,583 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 5.709e+02 6.647e+02 8.458e+02 1.607e+03, threshold=1.329e+03, percent-clipped=2.0 +2023-03-27 20:40:48,910 INFO [train.py:892] (1/4) Epoch 6, batch 950, loss[loss=0.2696, simple_loss=0.3043, pruned_loss=0.1174, over 19765.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3272, pruned_loss=0.1213, over 3917207.96 frames. 
], batch size: 125, lr: 2.51e-02, grad_scale: 16.0 +2023-03-27 20:41:19,015 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10240.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:42:33,725 INFO [train.py:892] (1/4) Epoch 6, batch 1000, loss[loss=0.2625, simple_loss=0.3224, pruned_loss=0.1013, over 19647.00 frames. ], tot_loss[loss=0.2831, simple_loss=0.3261, pruned_loss=0.1201, over 3923983.38 frames. ], batch size: 66, lr: 2.50e-02, grad_scale: 16.0 +2023-03-27 20:43:00,910 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10288.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:43:34,971 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.462e+02 6.368e+02 7.709e+02 9.244e+02 1.500e+03, threshold=1.542e+03, percent-clipped=2.0 +2023-03-27 20:43:54,970 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10314.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:44:20,094 INFO [train.py:892] (1/4) Epoch 6, batch 1050, loss[loss=0.2375, simple_loss=0.2997, pruned_loss=0.08769, over 19776.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3269, pruned_loss=0.1204, over 3928266.93 frames. ], batch size: 52, lr: 2.50e-02, grad_scale: 16.0 +2023-03-27 20:45:36,762 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10362.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:46:05,028 INFO [train.py:892] (1/4) Epoch 6, batch 1100, loss[loss=0.2608, simple_loss=0.3066, pruned_loss=0.1075, over 19877.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3267, pruned_loss=0.1203, over 3932715.25 frames. ], batch size: 92, lr: 2.49e-02, grad_scale: 16.0 +2023-03-27 20:46:30,890 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1187, 3.2700, 3.6251, 4.3286, 2.9698, 3.4407, 2.8574, 2.4357], + device='cuda:1'), covar=tensor([0.0395, 0.2552, 0.0827, 0.0153, 0.2116, 0.0510, 0.0971, 0.2065], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0310, 0.0190, 0.0107, 0.0216, 0.0138, 0.0169, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:46:32,642 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7354, 2.7429, 2.9732, 2.0347, 3.1184, 2.3015, 2.7443, 3.3724], + device='cuda:1'), covar=tensor([0.0638, 0.0346, 0.0419, 0.0768, 0.0498, 0.0426, 0.0480, 0.0205], + device='cuda:1'), in_proj_covar=tensor([0.0046, 0.0043, 0.0048, 0.0069, 0.0046, 0.0041, 0.0042, 0.0037], + device='cuda:1'), out_proj_covar=tensor([1.1699e-04, 1.1082e-04, 1.2266e-04, 1.6122e-04, 1.1429e-04, 1.0727e-04, + 1.1214e-04, 9.5438e-05], device='cuda:1') +2023-03-27 20:46:47,209 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10395.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 20:46:55,572 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10399.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:47:09,520 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.844e+02 6.468e+02 7.471e+02 8.887e+02 1.453e+03, threshold=1.494e+03, percent-clipped=0.0 +2023-03-27 20:47:16,078 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10409.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:47:22,476 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7434, 1.7216, 1.8254, 1.8563, 1.6455, 1.7377, 1.7098, 2.0047], + device='cuda:1'), covar=tensor([0.0282, 0.0210, 
0.0218, 0.0216, 0.0300, 0.0294, 0.0462, 0.0251], + device='cuda:1'), in_proj_covar=tensor([0.0031, 0.0033, 0.0033, 0.0027, 0.0035, 0.0035, 0.0042, 0.0033], + device='cuda:1'), out_proj_covar=tensor([6.7374e-05, 7.1181e-05, 7.1521e-05, 5.8833e-05, 7.5366e-05, 7.5744e-05, + 8.9861e-05, 7.0250e-05], device='cuda:1') +2023-03-27 20:47:35,383 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10418.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 20:47:51,082 INFO [train.py:892] (1/4) Epoch 6, batch 1150, loss[loss=0.3454, simple_loss=0.3666, pruned_loss=0.1621, over 19689.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3266, pruned_loss=0.1207, over 3936477.08 frames. ], batch size: 265, lr: 2.49e-02, grad_scale: 16.0 +2023-03-27 20:48:33,385 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. limit=5.0 +2023-03-27 20:48:36,942 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10447.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:48:51,876 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10454.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:49:03,573 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.6542, 1.9658, 1.3883, 1.0787, 1.7017, 2.0402, 1.7460, 1.8975], + device='cuda:1'), covar=tensor([0.0284, 0.0294, 0.0258, 0.0648, 0.0348, 0.0232, 0.0171, 0.0194], + device='cuda:1'), in_proj_covar=tensor([0.0040, 0.0038, 0.0039, 0.0054, 0.0053, 0.0037, 0.0030, 0.0035], + device='cuda:1'), out_proj_covar=tensor([8.5694e-05, 8.2949e-05, 8.3829e-05, 1.2048e-04, 1.1658e-04, 8.1882e-05, + 6.7487e-05, 7.6041e-05], device='cuda:1') +2023-03-27 20:49:20,795 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1713, 5.5047, 5.4940, 5.4983, 5.1556, 5.4540, 4.7942, 4.9875], + device='cuda:1'), covar=tensor([0.0333, 0.0354, 0.0619, 0.0316, 0.0626, 0.0591, 0.0703, 0.0847], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0142, 0.0200, 0.0156, 0.0149, 0.0136, 0.0172, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 20:49:24,652 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10470.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:49:35,664 INFO [train.py:892] (1/4) Epoch 6, batch 1200, loss[loss=0.2699, simple_loss=0.3148, pruned_loss=0.1125, over 19784.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3258, pruned_loss=0.1201, over 3939870.98 frames. ], batch size: 236, lr: 2.49e-02, grad_scale: 8.0 +2023-03-27 20:50:01,891 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2137, 3.3698, 4.8130, 3.5092, 4.0804, 4.7616, 2.6429, 2.7544], + device='cuda:1'), covar=tensor([0.0543, 0.2773, 0.0256, 0.0550, 0.1160, 0.0294, 0.1084, 0.1792], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0296, 0.0188, 0.0172, 0.0274, 0.0166, 0.0202, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:50:41,367 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. 
limit=2.0 +2023-03-27 20:50:41,659 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 5.589e+02 6.626e+02 8.703e+02 1.747e+03, threshold=1.325e+03, percent-clipped=3.0 +2023-03-27 20:50:58,511 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10515.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 20:51:21,752 INFO [train.py:892] (1/4) Epoch 6, batch 1250, loss[loss=0.2614, simple_loss=0.3049, pruned_loss=0.1089, over 19754.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3235, pruned_loss=0.1185, over 3943018.55 frames. ], batch size: 110, lr: 2.48e-02, grad_scale: 8.0 +2023-03-27 20:53:08,394 INFO [train.py:892] (1/4) Epoch 6, batch 1300, loss[loss=0.2835, simple_loss=0.3267, pruned_loss=0.1202, over 19731.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3245, pruned_loss=0.1191, over 3944101.91 frames. ], batch size: 63, lr: 2.48e-02, grad_scale: 8.0 +2023-03-27 20:54:11,557 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.477e+02 5.973e+02 6.933e+02 8.304e+02 2.000e+03, threshold=1.387e+03, percent-clipped=2.0 +2023-03-27 20:54:51,973 INFO [train.py:892] (1/4) Epoch 6, batch 1350, loss[loss=0.2726, simple_loss=0.32, pruned_loss=0.1126, over 19922.00 frames. ], tot_loss[loss=0.279, simple_loss=0.3229, pruned_loss=0.1175, over 3947278.81 frames. ], batch size: 45, lr: 2.47e-02, grad_scale: 8.0 +2023-03-27 20:55:16,378 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8465, 5.2094, 5.1384, 5.1718, 4.9278, 5.0614, 4.6684, 4.7045], + device='cuda:1'), covar=tensor([0.0332, 0.0341, 0.0516, 0.0392, 0.0454, 0.0569, 0.0546, 0.0793], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0144, 0.0198, 0.0159, 0.0151, 0.0140, 0.0174, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 20:55:24,250 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-27 20:55:57,628 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0244, 4.3863, 4.4874, 5.0925, 4.5608, 5.0771, 4.9748, 5.2567], + device='cuda:1'), covar=tensor([0.0585, 0.0315, 0.0472, 0.0241, 0.0527, 0.0190, 0.0369, 0.0267], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0119, 0.0141, 0.0121, 0.0114, 0.0095, 0.0117, 0.0135], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:56:39,339 INFO [train.py:892] (1/4) Epoch 6, batch 1400, loss[loss=0.2443, simple_loss=0.2937, pruned_loss=0.09748, over 19872.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.324, pruned_loss=0.1187, over 3948023.87 frames. 
], batch size: 108, lr: 2.47e-02, grad_scale: 8.0 +2023-03-27 20:57:21,127 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10695.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:57:46,681 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.801e+02 6.022e+02 7.479e+02 9.663e+02 1.752e+03, threshold=1.496e+03, percent-clipped=3.0 +2023-03-27 20:58:01,557 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7927, 4.3443, 4.2837, 5.0171, 4.5658, 5.2088, 4.7771, 4.9067], + device='cuda:1'), covar=tensor([0.0840, 0.0494, 0.0704, 0.0334, 0.0683, 0.0304, 0.0592, 0.0863], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0119, 0.0142, 0.0122, 0.0115, 0.0096, 0.0118, 0.0136], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 20:58:11,722 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10718.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 20:58:26,793 INFO [train.py:892] (1/4) Epoch 6, batch 1450, loss[loss=0.2966, simple_loss=0.336, pruned_loss=0.1286, over 19746.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.3235, pruned_loss=0.1179, over 3947934.62 frames. ], batch size: 221, lr: 2.46e-02, grad_scale: 8.0 +2023-03-27 20:59:03,309 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10743.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:59:06,524 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10744.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:59:50,207 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10765.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 20:59:52,113 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10766.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 21:00:09,432 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10774.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:00:12,528 INFO [train.py:892] (1/4) Epoch 6, batch 1500, loss[loss=0.2393, simple_loss=0.295, pruned_loss=0.09185, over 19795.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3225, pruned_loss=0.117, over 3947372.95 frames. ], batch size: 83, lr: 2.46e-02, grad_scale: 8.0 +2023-03-27 21:00:44,235 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0192, 5.3351, 5.3221, 5.3022, 5.0026, 5.2363, 4.7837, 4.7917], + device='cuda:1'), covar=tensor([0.0314, 0.0292, 0.0469, 0.0366, 0.0517, 0.0489, 0.0458, 0.0903], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0141, 0.0194, 0.0155, 0.0150, 0.0138, 0.0172, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 21:01:14,062 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10805.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:01:16,853 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 5.698e+02 7.148e+02 8.817e+02 1.909e+03, threshold=1.430e+03, percent-clipped=1.0 +2023-03-27 21:01:25,503 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10810.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 21:01:57,121 INFO [train.py:892] (1/4) Epoch 6, batch 1550, loss[loss=0.2946, simple_loss=0.3445, pruned_loss=0.1223, over 19820.00 frames. 
], tot_loss[loss=0.2776, simple_loss=0.3223, pruned_loss=0.1164, over 3948198.88 frames. ], batch size: 93, lr: 2.45e-02, grad_scale: 8.0 +2023-03-27 21:02:16,510 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10835.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:02:35,619 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-27 21:03:41,429 INFO [train.py:892] (1/4) Epoch 6, batch 1600, loss[loss=0.3201, simple_loss=0.3519, pruned_loss=0.1441, over 19698.00 frames. ], tot_loss[loss=0.2774, simple_loss=0.3221, pruned_loss=0.1164, over 3949070.91 frames. ], batch size: 295, lr: 2.45e-02, grad_scale: 8.0 +2023-03-27 21:04:01,459 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10885.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 21:04:14,594 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1221, 3.0653, 3.5026, 4.2053, 2.7807, 3.2025, 2.7967, 2.3600], + device='cuda:1'), covar=tensor([0.0367, 0.3013, 0.0843, 0.0136, 0.2118, 0.0533, 0.1074, 0.2061], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0318, 0.0193, 0.0108, 0.0223, 0.0142, 0.0175, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:04:26,807 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-27 21:04:44,770 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.321e+02 5.550e+02 6.735e+02 8.191e+02 1.432e+03, threshold=1.347e+03, percent-clipped=1.0 +2023-03-27 21:05:25,980 INFO [train.py:892] (1/4) Epoch 6, batch 1650, loss[loss=0.3921, simple_loss=0.4106, pruned_loss=0.1868, over 19632.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3224, pruned_loss=0.1166, over 3949219.51 frames. 
], batch size: 359, lr: 2.44e-02, grad_scale: 8.0 +2023-03-27 21:05:40,619 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2386, 4.2731, 2.9117, 4.7154, 5.0107, 2.0541, 3.9094, 3.8776], + device='cuda:1'), covar=tensor([0.0511, 0.0636, 0.2071, 0.0471, 0.0128, 0.2877, 0.0891, 0.0483], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0174, 0.0189, 0.0130, 0.0092, 0.0190, 0.0190, 0.0124], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-27 21:05:51,926 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9198, 2.8035, 3.3565, 2.1769, 3.2346, 2.4354, 2.6904, 3.3508], + device='cuda:1'), covar=tensor([0.0734, 0.0303, 0.0377, 0.0626, 0.0289, 0.0364, 0.0377, 0.0327], + device='cuda:1'), in_proj_covar=tensor([0.0047, 0.0045, 0.0050, 0.0069, 0.0046, 0.0042, 0.0042, 0.0038], + device='cuda:1'), out_proj_covar=tensor([1.2143e-04, 1.1941e-04, 1.2849e-04, 1.6598e-04, 1.1717e-04, 1.1106e-04, + 1.1406e-04, 9.9055e-05], device='cuda:1') +2023-03-27 21:06:07,952 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10946.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 21:06:45,981 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9767, 2.9259, 1.8174, 3.7215, 3.2734, 3.5685, 3.7311, 2.7593], + device='cuda:1'), covar=tensor([0.0593, 0.0622, 0.1338, 0.0522, 0.0513, 0.0432, 0.0517, 0.0688], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0092, 0.0117, 0.0098, 0.0084, 0.0075, 0.0089, 0.0101], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:07:11,130 INFO [train.py:892] (1/4) Epoch 6, batch 1700, loss[loss=0.2605, simple_loss=0.3078, pruned_loss=0.1067, over 19837.00 frames. ], tot_loss[loss=0.2771, simple_loss=0.3223, pruned_loss=0.1159, over 3949130.42 frames. ], batch size: 101, lr: 2.44e-02, grad_scale: 8.0 +2023-03-27 21:07:29,329 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.6823, 6.0327, 6.0116, 5.9188, 5.7723, 6.0005, 5.2025, 5.2726], + device='cuda:1'), covar=tensor([0.0281, 0.0300, 0.0559, 0.0360, 0.0515, 0.0503, 0.0583, 0.0922], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0144, 0.0192, 0.0153, 0.0147, 0.0136, 0.0170, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 21:08:17,553 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.223e+02 5.843e+02 7.049e+02 9.611e+02 2.273e+03, threshold=1.410e+03, percent-clipped=6.0 +2023-03-27 21:08:26,405 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-27 21:08:51,987 INFO [train.py:892] (1/4) Epoch 6, batch 1750, loss[loss=0.2462, simple_loss=0.2942, pruned_loss=0.09904, over 19751.00 frames. ], tot_loss[loss=0.2756, simple_loss=0.3209, pruned_loss=0.1152, over 3950138.56 frames. 
], batch size: 129, lr: 2.43e-02, grad_scale: 8.0 +2023-03-27 21:09:32,804 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2898, 3.2574, 2.1145, 3.4918, 3.5226, 1.6217, 2.8791, 2.8812], + device='cuda:1'), covar=tensor([0.0574, 0.0694, 0.2322, 0.0420, 0.0234, 0.2749, 0.0880, 0.0496], + device='cuda:1'), in_proj_covar=tensor([0.0154, 0.0177, 0.0193, 0.0133, 0.0095, 0.0194, 0.0195, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-27 21:10:03,017 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11065.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:10:03,131 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.9666, 2.3596, 1.4901, 1.3542, 1.9533, 2.3972, 2.2350, 2.1017], + device='cuda:1'), covar=tensor([0.0258, 0.0321, 0.0318, 0.0721, 0.0389, 0.0224, 0.0186, 0.0261], + device='cuda:1'), in_proj_covar=tensor([0.0042, 0.0041, 0.0042, 0.0058, 0.0057, 0.0038, 0.0034, 0.0036], + device='cuda:1'), out_proj_covar=tensor([9.1076e-05, 9.1622e-05, 8.9969e-05, 1.3129e-04, 1.2572e-04, 8.7033e-05, + 7.6673e-05, 7.9508e-05], device='cuda:1') +2023-03-27 21:10:21,318 INFO [train.py:892] (1/4) Epoch 6, batch 1800, loss[loss=0.2536, simple_loss=0.305, pruned_loss=0.1011, over 19639.00 frames. ], tot_loss[loss=0.2749, simple_loss=0.3199, pruned_loss=0.115, over 3950029.65 frames. ], batch size: 72, lr: 2.43e-02, grad_scale: 8.0 +2023-03-27 21:10:32,658 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0612, 2.9242, 1.4695, 3.9606, 3.2909, 3.7321, 3.8404, 2.9294], + device='cuda:1'), covar=tensor([0.0539, 0.0440, 0.1695, 0.0290, 0.0473, 0.0269, 0.0357, 0.0647], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0091, 0.0116, 0.0097, 0.0084, 0.0075, 0.0088, 0.0101], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:11:02,760 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11100.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:11:13,682 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.857e+02 5.952e+02 7.096e+02 9.277e+02 1.982e+03, threshold=1.419e+03, percent-clipped=4.0 +2023-03-27 21:11:19,006 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11110.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 21:11:23,874 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11113.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:11:44,939 INFO [train.py:892] (1/4) Epoch 6, batch 1850, loss[loss=0.2765, simple_loss=0.3345, pruned_loss=0.1093, over 19698.00 frames. ], tot_loss[loss=0.2761, simple_loss=0.322, pruned_loss=0.1151, over 3948758.36 frames. 
], batch size: 56, lr: 2.42e-02, grad_scale: 8.0 +2023-03-27 21:11:49,866 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0572, 2.8188, 3.2910, 2.0862, 3.3823, 2.6253, 2.3254, 3.6792], + device='cuda:1'), covar=tensor([0.0627, 0.0294, 0.0633, 0.0729, 0.0285, 0.0312, 0.0828, 0.0213], + device='cuda:1'), in_proj_covar=tensor([0.0048, 0.0045, 0.0050, 0.0070, 0.0046, 0.0042, 0.0043, 0.0038], + device='cuda:1'), out_proj_covar=tensor([1.2523e-04, 1.2059e-04, 1.3118e-04, 1.6950e-04, 1.1899e-04, 1.1251e-04, + 1.1643e-04, 9.8713e-05], device='cuda:1') +2023-03-27 21:12:41,543 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11130.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:12:42,797 INFO [train.py:892] (1/4) Epoch 7, batch 0, loss[loss=0.2561, simple_loss=0.3214, pruned_loss=0.0954, over 19728.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3214, pruned_loss=0.0954, over 19728.00 frames. ], batch size: 51, lr: 2.27e-02, grad_scale: 8.0 +2023-03-27 21:12:42,798 INFO [train.py:917] (1/4) Computing validation loss +2023-03-27 21:13:10,592 INFO [train.py:926] (1/4) Epoch 7, validation: loss=0.1961, simple_loss=0.2755, pruned_loss=0.05831, over 2883724.00 frames. +2023-03-27 21:13:10,593 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-27 21:14:09,092 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11158.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:14:09,334 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9519, 2.7284, 1.3147, 3.4733, 3.1507, 3.3782, 3.4765, 2.7900], + device='cuda:1'), covar=tensor([0.0513, 0.0579, 0.1957, 0.0423, 0.0437, 0.0327, 0.0551, 0.0653], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0092, 0.0118, 0.0099, 0.0085, 0.0077, 0.0089, 0.0102], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:14:42,637 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11174.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:14:56,450 INFO [train.py:892] (1/4) Epoch 7, batch 50, loss[loss=0.3015, simple_loss=0.338, pruned_loss=0.1325, over 19717.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3147, pruned_loss=0.1129, over 891252.93 frames. ], batch size: 291, lr: 2.27e-02, grad_scale: 8.0 +2023-03-27 21:15:51,181 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.005e+02 5.596e+02 6.713e+02 8.168e+02 2.476e+03, threshold=1.343e+03, percent-clipped=3.0 +2023-03-27 21:16:43,172 INFO [train.py:892] (1/4) Epoch 7, batch 100, loss[loss=0.2552, simple_loss=0.2945, pruned_loss=0.108, over 19745.00 frames. ], tot_loss[loss=0.2706, simple_loss=0.3157, pruned_loss=0.1128, over 1569058.05 frames. ], batch size: 179, lr: 2.26e-02, grad_scale: 8.0 +2023-03-27 21:16:52,197 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11235.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:17:05,964 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11241.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 21:18:30,197 INFO [train.py:892] (1/4) Epoch 7, batch 150, loss[loss=0.3032, simple_loss=0.3423, pruned_loss=0.1321, over 19702.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.316, pruned_loss=0.1117, over 2096659.33 frames. 
], batch size: 283, lr: 2.26e-02, grad_scale: 8.0 +2023-03-27 21:18:31,815 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.08 vs. limit=5.0 +2023-03-27 21:18:48,418 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2329, 3.2543, 3.7372, 4.5082, 2.6483, 3.3831, 2.9855, 2.5617], + device='cuda:1'), covar=tensor([0.0385, 0.3060, 0.0788, 0.0142, 0.2494, 0.0586, 0.0980, 0.1880], + device='cuda:1'), in_proj_covar=tensor([0.0157, 0.0323, 0.0195, 0.0109, 0.0226, 0.0145, 0.0174, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:19:26,070 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 5.370e+02 6.417e+02 7.584e+02 2.097e+03, threshold=1.283e+03, percent-clipped=1.0 +2023-03-27 21:20:18,350 INFO [train.py:892] (1/4) Epoch 7, batch 200, loss[loss=0.2924, simple_loss=0.3353, pruned_loss=0.1247, over 19705.00 frames. ], tot_loss[loss=0.2684, simple_loss=0.3154, pruned_loss=0.1107, over 2508398.34 frames. ], batch size: 265, lr: 2.26e-02, grad_scale: 8.0 +2023-03-27 21:22:03,988 INFO [train.py:892] (1/4) Epoch 7, batch 250, loss[loss=0.2504, simple_loss=0.2971, pruned_loss=0.1018, over 19841.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.314, pruned_loss=0.1093, over 2828500.56 frames. ], batch size: 161, lr: 2.25e-02, grad_scale: 8.0 +2023-03-27 21:22:44,475 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11400.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:22:44,984 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-27 21:22:57,029 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.370e+02 5.501e+02 6.755e+02 8.647e+02 2.452e+03, threshold=1.351e+03, percent-clipped=4.0 +2023-03-27 21:23:29,242 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8943, 2.4130, 2.0465, 2.2395, 1.7565, 1.6629, 1.8718, 2.0927], + device='cuda:1'), covar=tensor([0.0273, 0.0359, 0.0491, 0.0274, 0.0381, 0.0532, 0.0595, 0.0539], + device='cuda:1'), in_proj_covar=tensor([0.0033, 0.0035, 0.0037, 0.0029, 0.0038, 0.0037, 0.0047, 0.0034], + device='cuda:1'), out_proj_covar=tensor([7.1814e-05, 7.7004e-05, 7.9389e-05, 6.4568e-05, 8.1748e-05, 8.0399e-05, + 9.9821e-05, 7.3753e-05], device='cuda:1') +2023-03-27 21:23:45,424 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11430.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:23:46,584 INFO [train.py:892] (1/4) Epoch 7, batch 300, loss[loss=0.24, simple_loss=0.2959, pruned_loss=0.09207, over 19796.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3157, pruned_loss=0.1102, over 3075863.60 frames. ], batch size: 45, lr: 2.25e-02, grad_scale: 8.0 +2023-03-27 21:24:10,767 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. 
limit=2.0 +2023-03-27 21:24:25,383 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11448.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:24:50,416 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1098, 2.6647, 3.4107, 2.3146, 3.2893, 2.3795, 2.5642, 3.3847], + device='cuda:1'), covar=tensor([0.0553, 0.0425, 0.0328, 0.0689, 0.0269, 0.0455, 0.0424, 0.0285], + device='cuda:1'), in_proj_covar=tensor([0.0047, 0.0046, 0.0049, 0.0071, 0.0046, 0.0042, 0.0042, 0.0039], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 21:25:28,286 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11478.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:25:34,599 INFO [train.py:892] (1/4) Epoch 7, batch 350, loss[loss=0.2901, simple_loss=0.3304, pruned_loss=0.1249, over 19837.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3173, pruned_loss=0.1116, over 3268752.82 frames. ], batch size: 239, lr: 2.24e-02, grad_scale: 8.0 +2023-03-27 21:25:58,036 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2339, 4.1631, 4.6010, 4.4724, 4.5207, 4.1244, 4.3027, 4.1909], + device='cuda:1'), covar=tensor([0.1056, 0.1124, 0.0940, 0.0854, 0.0756, 0.0915, 0.1776, 0.2014], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0191, 0.0261, 0.0207, 0.0193, 0.0189, 0.0247, 0.0288], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-27 21:26:12,177 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9634, 4.0993, 2.4816, 4.4762, 4.7732, 1.8994, 3.8167, 3.6260], + device='cuda:1'), covar=tensor([0.0485, 0.0640, 0.2327, 0.0415, 0.0160, 0.3027, 0.0928, 0.0495], + device='cuda:1'), in_proj_covar=tensor([0.0157, 0.0181, 0.0196, 0.0138, 0.0099, 0.0195, 0.0198, 0.0130], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-27 21:26:29,989 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.848e+02 5.697e+02 6.987e+02 8.639e+02 1.327e+03, threshold=1.397e+03, percent-clipped=1.0 +2023-03-27 21:26:33,234 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. 
limit=2.0 +2023-03-27 21:27:01,453 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9699, 3.8131, 4.2194, 4.1054, 4.1701, 3.5965, 3.9706, 3.8351], + device='cuda:1'), covar=tensor([0.1255, 0.1359, 0.1071, 0.1017, 0.1000, 0.1201, 0.2003, 0.2453], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0193, 0.0262, 0.0208, 0.0194, 0.0189, 0.0250, 0.0289], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-27 21:27:05,435 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3246, 2.5366, 3.5099, 3.9033, 4.0116, 4.6034, 4.3837, 4.4120], + device='cuda:1'), covar=tensor([0.0615, 0.2057, 0.0645, 0.0344, 0.0280, 0.0123, 0.0282, 0.0311], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0160, 0.0112, 0.0103, 0.0084, 0.0082, 0.0079, 0.0080], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 21:27:18,063 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11530.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:27:19,366 INFO [train.py:892] (1/4) Epoch 7, batch 400, loss[loss=0.2775, simple_loss=0.3101, pruned_loss=0.1225, over 19782.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3163, pruned_loss=0.1113, over 3419457.39 frames. ], batch size: 191, lr: 2.24e-02, grad_scale: 8.0 +2023-03-27 21:27:22,513 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11532.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:27:23,248 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-03-27 21:27:28,322 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1779, 2.2309, 2.3850, 1.8051, 2.4310, 1.9882, 2.2417, 2.3636], + device='cuda:1'), covar=tensor([0.0431, 0.0323, 0.0372, 0.0706, 0.0264, 0.0346, 0.0290, 0.0226], + device='cuda:1'), in_proj_covar=tensor([0.0048, 0.0047, 0.0051, 0.0073, 0.0047, 0.0044, 0.0043, 0.0040], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 21:27:41,066 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11541.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 21:29:06,951 INFO [train.py:892] (1/4) Epoch 7, batch 450, loss[loss=0.2496, simple_loss=0.3078, pruned_loss=0.09574, over 19673.00 frames. ], tot_loss[loss=0.2709, simple_loss=0.3177, pruned_loss=0.1121, over 3537479.50 frames. ], batch size: 73, lr: 2.23e-02, grad_scale: 8.0 +2023-03-27 21:29:23,366 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11589.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 21:29:31,249 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11593.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:30:00,687 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 5.488e+02 6.753e+02 8.459e+02 1.411e+03, threshold=1.351e+03, percent-clipped=1.0 +2023-03-27 21:30:52,140 INFO [train.py:892] (1/4) Epoch 7, batch 500, loss[loss=0.2985, simple_loss=0.3405, pruned_loss=0.1283, over 19739.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3182, pruned_loss=0.1124, over 3627102.34 frames. ], batch size: 276, lr: 2.23e-02, grad_scale: 8.0 +2023-03-27 21:30:53,438 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-03-27 21:31:00,992 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11635.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:32:37,372 INFO [train.py:892] (1/4) Epoch 7, batch 550, loss[loss=0.256, simple_loss=0.2976, pruned_loss=0.1072, over 19870.00 frames. ], tot_loss[loss=0.2732, simple_loss=0.3196, pruned_loss=0.1135, over 3695982.93 frames. ], batch size: 165, lr: 2.23e-02, grad_scale: 8.0 +2023-03-27 21:32:44,582 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-27 21:33:09,723 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11696.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:33:15,380 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11699.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:33:15,481 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9060, 2.7143, 3.0185, 3.0073, 3.2465, 3.2083, 3.6175, 3.8444], + device='cuda:1'), covar=tensor([0.0438, 0.1222, 0.0998, 0.1359, 0.1310, 0.1009, 0.0312, 0.0377], + device='cuda:1'), in_proj_covar=tensor([0.0172, 0.0191, 0.0192, 0.0210, 0.0219, 0.0196, 0.0134, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:33:24,637 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.03 vs. limit=5.0 +2023-03-27 21:33:31,007 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.036e+02 6.098e+02 7.200e+02 8.839e+02 1.494e+03, threshold=1.440e+03, percent-clipped=2.0 +2023-03-27 21:34:01,964 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9591, 2.3167, 3.0099, 2.5324, 2.6099, 3.3232, 1.8222, 2.0499], + device='cuda:1'), covar=tensor([0.0625, 0.1926, 0.0433, 0.0596, 0.1095, 0.0391, 0.1271, 0.1594], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0296, 0.0202, 0.0177, 0.0276, 0.0178, 0.0215, 0.0215], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:34:22,374 INFO [train.py:892] (1/4) Epoch 7, batch 600, loss[loss=0.2536, simple_loss=0.2965, pruned_loss=0.1053, over 19852.00 frames. ], tot_loss[loss=0.2729, simple_loss=0.319, pruned_loss=0.1134, over 3753699.85 frames. 
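Each [optim.py:368] line summarizes the distribution of recently observed gradient norms: the five numbers read as the 0/25/50/75/100 percentiles, and with Clipping_scale=2.0 the clipping threshold is twice the logged median, e.g. 2.0 * 7.200e+02 = 1.440e+03 in the entry just above; percent-clipped is presumably the share of recent steps whose norm exceeded that threshold. A hedged sketch of the computation (an assumed helper, not the optimizer's exact code):

```python
import torch

def clipping_stats(recent_norms: torch.Tensor, clipping_scale: float = 2.0):
    # recent_norms: 1-D float tensor of gradient norms from recent steps
    quartiles = torch.quantile(
        recent_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0])
    )
    # Threshold appears to be clipping_scale times the median norm
    threshold = clipping_scale * quartiles[2]
    percent_clipped = 100.0 * (recent_norms > threshold).float().mean()
    return quartiles, threshold, percent_clipped
```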
], batch size: 137, lr: 2.22e-02, grad_scale: 8.0 +2023-03-27 21:34:54,411 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7947, 2.1343, 1.6665, 1.3397, 2.0238, 2.3148, 2.2771, 2.1859], + device='cuda:1'), covar=tensor([0.0253, 0.0283, 0.0231, 0.0635, 0.0372, 0.0180, 0.0150, 0.0218], + device='cuda:1'), in_proj_covar=tensor([0.0043, 0.0040, 0.0043, 0.0060, 0.0058, 0.0039, 0.0033, 0.0036], + device='cuda:1'), out_proj_covar=tensor([9.5448e-05, 9.0508e-05, 9.3807e-05, 1.3677e-04, 1.2916e-04, 8.8887e-05, + 7.7273e-05, 7.9967e-05], device='cuda:1') +2023-03-27 21:35:06,859 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7450, 3.7881, 2.2677, 4.2312, 4.3188, 1.7926, 3.3986, 3.4249], + device='cuda:1'), covar=tensor([0.0555, 0.0651, 0.2213, 0.0369, 0.0162, 0.2883, 0.0919, 0.0514], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0184, 0.0197, 0.0142, 0.0100, 0.0196, 0.0205, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:35:24,227 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11760.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:36:07,573 INFO [train.py:892] (1/4) Epoch 7, batch 650, loss[loss=0.2348, simple_loss=0.2858, pruned_loss=0.09191, over 19846.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3161, pruned_loss=0.1113, over 3798167.59 frames. ], batch size: 115, lr: 2.22e-02, grad_scale: 8.0 +2023-03-27 21:36:34,701 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1324, 3.1111, 1.7875, 4.1062, 3.5968, 3.9797, 4.0841, 3.1399], + device='cuda:1'), covar=tensor([0.0616, 0.0537, 0.1584, 0.0507, 0.0552, 0.0476, 0.0529, 0.0688], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0094, 0.0118, 0.0099, 0.0086, 0.0079, 0.0093, 0.0106], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:37:02,909 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.631e+02 5.681e+02 6.679e+02 8.355e+02 2.061e+03, threshold=1.336e+03, percent-clipped=4.0 +2023-03-27 21:37:22,516 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11816.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:37:52,416 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11830.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:37:53,595 INFO [train.py:892] (1/4) Epoch 7, batch 700, loss[loss=0.2201, simple_loss=0.2723, pruned_loss=0.08394, over 19695.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.3169, pruned_loss=0.1114, over 3830671.22 frames. ], batch size: 46, lr: 2.21e-02, grad_scale: 8.0 +2023-03-27 21:38:25,430 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11846.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:38:28,096 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0 +2023-03-27 21:39:32,233 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11877.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:39:34,210 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11878.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:39:39,320 INFO [train.py:892] (1/4) Epoch 7, batch 750, loss[loss=0.2535, simple_loss=0.2927, pruned_loss=0.1072, over 19781.00 frames. 
], tot_loss[loss=0.2689, simple_loss=0.3159, pruned_loss=0.1109, over 3858152.10 frames. ], batch size: 182, lr: 2.21e-02, grad_scale: 8.0 +2023-03-27 21:39:53,454 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11888.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:40:11,166 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-27 21:40:34,149 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.808e+02 5.639e+02 6.770e+02 8.403e+02 1.981e+03, threshold=1.354e+03, percent-clipped=4.0 +2023-03-27 21:40:35,166 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11907.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:41:02,213 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11920.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:41:22,938 INFO [train.py:892] (1/4) Epoch 7, batch 800, loss[loss=0.266, simple_loss=0.3123, pruned_loss=0.1098, over 19795.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3153, pruned_loss=0.1102, over 3878008.72 frames. ], batch size: 185, lr: 2.21e-02, grad_scale: 8.0 +2023-03-27 21:41:49,909 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.63 vs. limit=2.0 +2023-03-27 21:41:59,572 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11948.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:43:08,375 INFO [train.py:892] (1/4) Epoch 7, batch 850, loss[loss=0.2921, simple_loss=0.3457, pruned_loss=0.1193, over 19530.00 frames. ], tot_loss[loss=0.2696, simple_loss=0.3167, pruned_loss=0.1113, over 3892734.60 frames. ], batch size: 54, lr: 2.20e-02, grad_scale: 8.0 +2023-03-27 21:43:09,387 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11981.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:43:14,913 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11984.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:43:29,710 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11991.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:44:07,071 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 6.093e+02 7.463e+02 9.130e+02 1.696e+03, threshold=1.493e+03, percent-clipped=3.0 +2023-03-27 21:44:11,952 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12009.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:44:56,862 INFO [train.py:892] (1/4) Epoch 7, batch 900, loss[loss=0.2555, simple_loss=0.2942, pruned_loss=0.1084, over 19826.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3167, pruned_loss=0.1111, over 3904913.40 frames. 
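The [zipformer.py:625] lines trace Zipformer's stochastic layer dropout: each encoder stack has its own warmup window in batches (warmup_begin/warmup_end), and on each step a random subset of its layers may be skipped (num_to_drop, layers_to_drop). By this point (batch_count around 12000, far past every window) the entries are almost always num_to_drop=0, with an occasional single drop further down (e.g. batch_count=13119). A rough sketch of one plausible annealing rule; only the logged field names are real, the constants and the shape of the schedule are guesses:

```python
import random

def choose_layers_to_drop(batch_count: float, warmup_begin: float,
                          warmup_end: float, num_layers: int,
                          initial_prob: float = 0.5,
                          final_prob: float = 0.05) -> set:
    # Anneal the per-layer drop probability across the warmup window; a small
    # residual probability seems to remain afterwards, since the log still
    # shows an occasional num_to_drop=1 long after warmup_end.
    if batch_count >= warmup_end:
        p = final_prob
    elif batch_count <= warmup_begin:
        p = initial_prob
    else:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        p = initial_prob + frac * (final_prob - initial_prob)
    return {i for i in range(num_layers) if random.random() < p}
```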
], batch size: 123, lr: 2.20e-02, grad_scale: 8.0 +2023-03-27 21:45:11,575 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9241, 2.6826, 1.5188, 3.4907, 3.4284, 3.4088, 3.5774, 2.8247], + device='cuda:1'), covar=tensor([0.0463, 0.0516, 0.1619, 0.0413, 0.0280, 0.0268, 0.0350, 0.0542], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0093, 0.0118, 0.0098, 0.0084, 0.0078, 0.0092, 0.0103], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:45:27,394 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12045.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:45:48,833 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12055.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:46:32,866 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12076.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:46:44,682 INFO [train.py:892] (1/4) Epoch 7, batch 950, loss[loss=0.236, simple_loss=0.2885, pruned_loss=0.09178, over 19893.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3173, pruned_loss=0.1113, over 3913506.55 frames. ], batch size: 71, lr: 2.19e-02, grad_scale: 8.0 +2023-03-27 21:47:39,037 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.928e+02 5.727e+02 6.871e+02 8.158e+02 1.418e+03, threshold=1.374e+03, percent-clipped=0.0 +2023-03-27 21:48:29,697 INFO [train.py:892] (1/4) Epoch 7, batch 1000, loss[loss=0.2733, simple_loss=0.307, pruned_loss=0.1198, over 19836.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3176, pruned_loss=0.1117, over 3921098.09 frames. ], batch size: 143, lr: 2.19e-02, grad_scale: 8.0 +2023-03-27 21:48:43,786 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12137.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:49:40,256 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3629, 2.7340, 1.9851, 1.6859, 2.1859, 2.6318, 2.8125, 2.6390], + device='cuda:1'), covar=tensor([0.0217, 0.0367, 0.0222, 0.0631, 0.0392, 0.0301, 0.0116, 0.0294], + device='cuda:1'), in_proj_covar=tensor([0.0043, 0.0042, 0.0042, 0.0059, 0.0059, 0.0040, 0.0033, 0.0037], + device='cuda:1'), out_proj_covar=tensor([9.7439e-05, 9.4556e-05, 9.3118e-05, 1.3597e-04, 1.3253e-04, 9.1883e-05, + 7.7363e-05, 8.3421e-05], device='cuda:1') +2023-03-27 21:49:55,688 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12172.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:50:13,720 INFO [train.py:892] (1/4) Epoch 7, batch 1050, loss[loss=0.2658, simple_loss=0.3153, pruned_loss=0.1081, over 19855.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3167, pruned_loss=0.1112, over 3928439.45 frames. ], batch size: 104, lr: 2.19e-02, grad_scale: 8.0 +2023-03-27 21:50:31,435 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12188.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:50:59,951 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12202.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:51:00,538 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. 
limit=2.0 +2023-03-27 21:51:09,336 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.917e+02 5.376e+02 7.198e+02 8.445e+02 1.495e+03, threshold=1.440e+03, percent-clipped=1.0 +2023-03-27 21:52:00,178 INFO [train.py:892] (1/4) Epoch 7, batch 1100, loss[loss=0.2804, simple_loss=0.3212, pruned_loss=0.1198, over 19776.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3157, pruned_loss=0.1104, over 3933380.97 frames. ], batch size: 193, lr: 2.18e-02, grad_scale: 8.0 +2023-03-27 21:52:12,247 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12236.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:53:23,500 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.05 vs. limit=5.0 +2023-03-27 21:53:35,988 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12276.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:53:45,159 INFO [train.py:892] (1/4) Epoch 7, batch 1150, loss[loss=0.2465, simple_loss=0.3235, pruned_loss=0.08473, over 19900.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3147, pruned_loss=0.1096, over 3936688.12 frames. ], batch size: 50, lr: 2.18e-02, grad_scale: 8.0 +2023-03-27 21:54:06,673 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12291.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:54:34,562 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12304.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:54:41,311 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.471e+02 5.763e+02 6.995e+02 9.054e+02 1.717e+03, threshold=1.399e+03, percent-clipped=2.0 +2023-03-27 21:55:31,484 INFO [train.py:892] (1/4) Epoch 7, batch 1200, loss[loss=0.2671, simple_loss=0.3307, pruned_loss=0.1017, over 19617.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3154, pruned_loss=0.1099, over 3939055.76 frames. ], batch size: 52, lr: 2.18e-02, grad_scale: 8.0 +2023-03-27 21:55:48,755 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12339.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:55:50,660 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12340.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:56:13,758 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8617, 2.0213, 3.2036, 3.3649, 3.8779, 4.2201, 4.2287, 4.2763], + device='cuda:1'), covar=tensor([0.0778, 0.2333, 0.0734, 0.0457, 0.0254, 0.0140, 0.0192, 0.0277], + device='cuda:1'), in_proj_covar=tensor([0.0115, 0.0162, 0.0123, 0.0109, 0.0089, 0.0086, 0.0080, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 21:56:14,241 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. 
limit=2.0 +2023-03-27 21:56:21,400 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12355.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:56:51,782 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8941, 2.3167, 3.0525, 2.5265, 2.5643, 3.1630, 1.7787, 2.0533], + device='cuda:1'), covar=tensor([0.0603, 0.1826, 0.0434, 0.0506, 0.1110, 0.0375, 0.1242, 0.1525], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0306, 0.0210, 0.0178, 0.0285, 0.0189, 0.0223, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 21:57:17,159 INFO [train.py:892] (1/4) Epoch 7, batch 1250, loss[loss=0.2877, simple_loss=0.3312, pruned_loss=0.1221, over 19652.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3163, pruned_loss=0.1109, over 3938833.31 frames. ], batch size: 66, lr: 2.17e-02, grad_scale: 8.0 +2023-03-27 21:57:49,646 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-27 21:58:02,728 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12403.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:58:09,913 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.119e+02 6.346e+02 7.494e+02 8.891e+02 1.193e+03, threshold=1.499e+03, percent-clipped=0.0 +2023-03-27 21:59:01,226 INFO [train.py:892] (1/4) Epoch 7, batch 1300, loss[loss=0.2277, simple_loss=0.2912, pruned_loss=0.0821, over 19390.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3157, pruned_loss=0.1099, over 3941521.97 frames. ], batch size: 40, lr: 2.17e-02, grad_scale: 8.0 +2023-03-27 21:59:03,858 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12432.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:59:33,330 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12446.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:59:53,511 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-03-27 22:00:17,351 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7705, 2.1761, 2.9080, 3.4698, 3.8538, 4.1448, 4.0636, 4.2287], + device='cuda:1'), covar=tensor([0.0831, 0.2135, 0.0913, 0.0396, 0.0243, 0.0113, 0.0229, 0.0225], + device='cuda:1'), in_proj_covar=tensor([0.0117, 0.0164, 0.0125, 0.0111, 0.0090, 0.0087, 0.0083, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-27 22:00:27,428 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12472.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:00:45,675 INFO [train.py:892] (1/4) Epoch 7, batch 1350, loss[loss=0.2631, simple_loss=0.3063, pruned_loss=0.11, over 19826.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3155, pruned_loss=0.1099, over 3943238.21 frames. 
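The [scaling.py:679] lines are periodic diagnostics from a whitening regularizer: activations are split into num_groups groups along the num_channels dimension, each group's covariance is compared with a scaled identity, and the measured metric is printed against the limit at which the whitening penalty engages. The ratio below equals 1.0 for perfectly white (isotropic) features and grows with anisotropy; this mirrors the idea rather than the exact implementation:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (num_frames, num_channels), num_channels divisible by num_groups
    n, c = x.shape
    d = c // num_groups
    xg = x.reshape(n, num_groups, d).transpose(0, 1)   # (groups, n, d)
    cov = xg.transpose(1, 2) @ xg / n                  # per-group covariance
    mean_eig = cov.diagonal(dim1=1, dim2=2).sum(-1) / d     # tr(C) / d
    mean_eig_sq = (cov * cov).sum(dim=(1, 2)) / d           # tr(C^2) / d
    # mean(lambda^2) / mean(lambda)^2 >= 1, with equality iff C = c * I
    return (mean_eig_sq / mean_eig.clamp(min=1e-20) ** 2).mean()
```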
], batch size: 229, lr: 2.16e-02, grad_scale: 16.0 +2023-03-27 22:01:29,004 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12502.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:01:39,459 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.642e+02 6.155e+02 7.029e+02 9.046e+02 1.832e+03, threshold=1.406e+03, percent-clipped=3.0 +2023-03-27 22:01:40,590 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12507.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:01:50,415 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4980, 3.5874, 3.9230, 3.6142, 3.4093, 3.8110, 3.6430, 4.0148], + device='cuda:1'), covar=tensor([0.1171, 0.0375, 0.0344, 0.0337, 0.0940, 0.0420, 0.0341, 0.0289], + device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0166, 0.0156, 0.0161, 0.0161, 0.0155, 0.0146, 0.0142], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:02:07,088 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12520.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:02:29,813 INFO [train.py:892] (1/4) Epoch 7, batch 1400, loss[loss=0.4926, simple_loss=0.4895, pruned_loss=0.2478, over 19201.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3166, pruned_loss=0.1105, over 3942905.04 frames. ], batch size: 452, lr: 2.16e-02, grad_scale: 16.0 +2023-03-27 22:03:08,821 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12550.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:04:04,095 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12576.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:04:14,041 INFO [train.py:892] (1/4) Epoch 7, batch 1450, loss[loss=0.2633, simple_loss=0.3138, pruned_loss=0.1064, over 19878.00 frames. ], tot_loss[loss=0.2676, simple_loss=0.3156, pruned_loss=0.1098, over 3944760.30 frames. ], batch size: 92, lr: 2.16e-02, grad_scale: 16.0 +2023-03-27 22:05:02,498 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12604.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:05:07,603 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.016e+02 5.644e+02 6.580e+02 8.199e+02 1.502e+03, threshold=1.316e+03, percent-clipped=1.0 +2023-03-27 22:05:31,630 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. limit=2.0 +2023-03-27 22:05:43,405 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12624.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:05:57,452 INFO [train.py:892] (1/4) Epoch 7, batch 1500, loss[loss=0.3036, simple_loss=0.3293, pruned_loss=0.139, over 19779.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3144, pruned_loss=0.1099, over 3945723.49 frames. 
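grad_scale switches from 8.0 to 16.0 on this stretch (and reaches 32.0 further down), the signature of a dynamic fp16 loss scaler doubling its scale after a long run of overflow-free steps. A minimal sketch with the stock torch.cuda.amp API; the training loop may well wrap this differently:

```python
import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=8.0,        # matches the scale logged earlier in the epoch
    growth_factor=2.0,     # the scale doubles ...
    growth_interval=2000,  # ... after this many consecutive clean steps
)

# Schematic training step:
#   with torch.cuda.amp.autocast():
#       loss = compute_loss(batch)      # hypothetical helper
#   scaler.scale(loss).backward()
#   scaler.step(optimizer)              # skipped if the gradients overflowed
#   scaler.update()                     # grows or backs off the scale
```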
], batch size: 182, lr: 2.15e-02, grad_scale: 16.0 +2023-03-27 22:06:16,736 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12640.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:06:16,907 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.6574, 1.9764, 1.6233, 1.1585, 1.7396, 1.8911, 1.8339, 1.8051], + device='cuda:1'), covar=tensor([0.0270, 0.0226, 0.0219, 0.0588, 0.0413, 0.0188, 0.0186, 0.0181], + device='cuda:1'), in_proj_covar=tensor([0.0046, 0.0044, 0.0046, 0.0063, 0.0062, 0.0041, 0.0036, 0.0040], + device='cuda:1'), out_proj_covar=tensor([1.0504e-04, 1.0083e-04, 1.0102e-04, 1.4553e-04, 1.4052e-04, 9.5309e-05, + 8.3815e-05, 8.9735e-05], device='cuda:1') +2023-03-27 22:06:41,614 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12652.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:07:05,720 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-27 22:07:41,429 INFO [train.py:892] (1/4) Epoch 7, batch 1550, loss[loss=0.2723, simple_loss=0.3278, pruned_loss=0.1084, over 19665.00 frames. ], tot_loss[loss=0.266, simple_loss=0.3138, pruned_loss=0.1092, over 3946876.42 frames. ], batch size: 50, lr: 2.15e-02, grad_scale: 16.0 +2023-03-27 22:07:56,845 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12688.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:08:35,757 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.812e+02 6.854e+02 8.280e+02 1.889e+03, threshold=1.371e+03, percent-clipped=7.0 +2023-03-27 22:09:26,220 INFO [train.py:892] (1/4) Epoch 7, batch 1600, loss[loss=0.2174, simple_loss=0.2696, pruned_loss=0.08257, over 19729.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3139, pruned_loss=0.1082, over 3946812.48 frames. ], batch size: 47, lr: 2.15e-02, grad_scale: 16.0 +2023-03-27 22:09:28,857 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12732.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:10:32,904 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12763.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:10:51,544 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4710, 4.3749, 4.8641, 4.7150, 4.7463, 4.2384, 4.5598, 4.4780], + device='cuda:1'), covar=tensor([0.1263, 0.1180, 0.0978, 0.1011, 0.0856, 0.0997, 0.1837, 0.2144], + device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0202, 0.0268, 0.0210, 0.0198, 0.0198, 0.0255, 0.0294], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-27 22:11:06,776 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12780.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:11:08,038 INFO [train.py:892] (1/4) Epoch 7, batch 1650, loss[loss=0.276, simple_loss=0.3185, pruned_loss=0.1167, over 19801.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3143, pruned_loss=0.1087, over 3947966.59 frames. 
], batch size: 224, lr: 2.14e-02, grad_scale: 16.0 +2023-03-27 22:11:26,604 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6635, 2.3114, 2.9186, 2.8200, 3.0045, 2.9757, 3.5690, 3.6733], + device='cuda:1'), covar=tensor([0.0474, 0.1496, 0.1069, 0.1412, 0.1359, 0.1085, 0.0325, 0.0351], + device='cuda:1'), in_proj_covar=tensor([0.0174, 0.0193, 0.0198, 0.0213, 0.0226, 0.0198, 0.0138, 0.0140], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:11:54,399 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12802.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:12:04,857 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.717e+02 5.657e+02 6.905e+02 8.764e+02 1.774e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-03-27 22:12:22,200 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3618, 3.5345, 3.7897, 3.5600, 3.4628, 3.7744, 3.5125, 3.8792], + device='cuda:1'), covar=tensor([0.1256, 0.0353, 0.0396, 0.0313, 0.0847, 0.0384, 0.0357, 0.0330], + device='cuda:1'), in_proj_covar=tensor([0.0229, 0.0173, 0.0163, 0.0166, 0.0162, 0.0160, 0.0151, 0.0148], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:12:43,796 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12824.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 22:12:56,374 INFO [train.py:892] (1/4) Epoch 7, batch 1700, loss[loss=0.2995, simple_loss=0.3403, pruned_loss=0.1294, over 19825.00 frames. ], tot_loss[loss=0.2669, simple_loss=0.3149, pruned_loss=0.1094, over 3948469.77 frames. ], batch size: 76, lr: 2.14e-02, grad_scale: 16.0 +2023-03-27 22:12:57,427 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8036, 3.1986, 2.0613, 1.9784, 2.3825, 2.7770, 2.9517, 2.7977], + device='cuda:1'), covar=tensor([0.0141, 0.0222, 0.0227, 0.0516, 0.0395, 0.0252, 0.0134, 0.0228], + device='cuda:1'), in_proj_covar=tensor([0.0046, 0.0044, 0.0047, 0.0061, 0.0062, 0.0042, 0.0035, 0.0040], + device='cuda:1'), out_proj_covar=tensor([1.0445e-04, 9.9936e-05, 1.0315e-04, 1.4197e-04, 1.4057e-04, 9.8078e-05, + 8.2784e-05, 9.1121e-05], device='cuda:1') +2023-03-27 22:13:42,994 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8142, 1.9605, 2.1128, 1.8996, 1.6039, 1.8945, 1.8429, 2.0719], + device='cuda:1'), covar=tensor([0.0220, 0.0226, 0.0209, 0.0217, 0.0366, 0.0350, 0.0429, 0.0255], + device='cuda:1'), in_proj_covar=tensor([0.0036, 0.0038, 0.0040, 0.0033, 0.0042, 0.0040, 0.0053, 0.0037], + device='cuda:1'), out_proj_covar=tensor([8.1221e-05, 8.3934e-05, 8.7840e-05, 7.2887e-05, 9.2865e-05, 8.9653e-05, + 1.1538e-04, 8.2479e-05], device='cuda:1') +2023-03-27 22:14:36,925 INFO [train.py:892] (1/4) Epoch 7, batch 1750, loss[loss=0.2129, simple_loss=0.268, pruned_loss=0.07887, over 19717.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3127, pruned_loss=0.1083, over 3948578.25 frames. 
], batch size: 104, lr: 2.14e-02, grad_scale: 16.0 +2023-03-27 22:15:06,716 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7514, 2.3535, 2.9458, 2.7962, 3.0658, 3.0149, 3.6486, 3.6835], + device='cuda:1'), covar=tensor([0.0466, 0.1619, 0.1114, 0.1599, 0.1390, 0.1342, 0.0361, 0.0335], + device='cuda:1'), in_proj_covar=tensor([0.0177, 0.0197, 0.0202, 0.0215, 0.0228, 0.0204, 0.0142, 0.0142], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:15:23,467 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.556e+02 6.737e+02 8.077e+02 1.296e+03, threshold=1.347e+03, percent-clipped=0.0 +2023-03-27 22:16:05,793 INFO [train.py:892] (1/4) Epoch 7, batch 1800, loss[loss=0.2934, simple_loss=0.3376, pruned_loss=0.1245, over 19893.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3128, pruned_loss=0.1084, over 3949548.71 frames. ], batch size: 62, lr: 2.13e-02, grad_scale: 16.0 +2023-03-27 22:16:44,475 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2497, 5.6372, 5.7437, 5.6089, 5.3888, 5.3483, 5.2917, 5.3469], + device='cuda:1'), covar=tensor([0.1202, 0.0801, 0.0855, 0.0836, 0.0716, 0.0758, 0.1973, 0.1857], + device='cuda:1'), in_proj_covar=tensor([0.0232, 0.0210, 0.0277, 0.0216, 0.0207, 0.0202, 0.0263, 0.0306], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-27 22:17:15,763 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-27 22:17:27,338 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-27 22:17:28,656 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1810, 2.3911, 2.6791, 2.4968, 1.8899, 2.2931, 2.0754, 2.4528], + device='cuda:1'), covar=tensor([0.0265, 0.0300, 0.0217, 0.0212, 0.0387, 0.0381, 0.0395, 0.0645], + device='cuda:1'), in_proj_covar=tensor([0.0037, 0.0038, 0.0040, 0.0033, 0.0042, 0.0040, 0.0053, 0.0038], + device='cuda:1'), out_proj_covar=tensor([8.2514e-05, 8.3952e-05, 8.7704e-05, 7.3142e-05, 9.2428e-05, 8.9375e-05, + 1.1507e-04, 8.4193e-05], device='cuda:1') +2023-03-27 22:17:29,659 INFO [train.py:892] (1/4) Epoch 7, batch 1850, loss[loss=0.2334, simple_loss=0.2957, pruned_loss=0.08558, over 19812.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.314, pruned_loss=0.1073, over 3949115.56 frames. ], batch size: 57, lr: 2.13e-02, grad_scale: 16.0 +2023-03-27 22:18:27,965 INFO [train.py:892] (1/4) Epoch 8, batch 0, loss[loss=0.2311, simple_loss=0.2791, pruned_loss=0.0916, over 19836.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2791, pruned_loss=0.0916, over 19836.00 frames. ], batch size: 171, lr: 2.00e-02, grad_scale: 16.0 +2023-03-27 22:18:27,965 INFO [train.py:917] (1/4) Computing validation loss +2023-03-27 22:18:55,073 INFO [train.py:926] (1/4) Epoch 8, validation: loss=0.189, simple_loss=0.2688, pruned_loss=0.05453, over 2883724.00 frames. 
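Two things line up at the epoch boundary above: validation loss is recomputed over the same fixed 2883724.00 dev frames, and the lr column steps from 2.13e-02 late in epoch 7 down to 2.00e-02 at epoch 8, batch 0. The slow within-epoch drift plus a step at each new epoch is consistent with icefall's Eden schedule, which discounts the rate by both the global batch index and the epoch. A hedged sketch of that formula; the constants are the usual defaults, not read from this run's config:

```python
def eden_lr(base_lr: float, batch: float, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Two smooth inverse-quartic-root decay factors, one per time axis
    return (
        base_lr
        * ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        * ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    )
```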
+2023-03-27 22:18:55,075 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-27 22:19:33,658 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9206, 2.3184, 3.1772, 3.4860, 3.8860, 4.4469, 4.3180, 4.4752], + device='cuda:1'), covar=tensor([0.0715, 0.2096, 0.0907, 0.0430, 0.0254, 0.0098, 0.0171, 0.0249], + device='cuda:1'), in_proj_covar=tensor([0.0120, 0.0166, 0.0130, 0.0116, 0.0091, 0.0092, 0.0085, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:19:40,816 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.963e+02 5.840e+02 6.949e+02 8.045e+02 1.580e+03, threshold=1.390e+03, percent-clipped=1.0 +2023-03-27 22:20:44,106 INFO [train.py:892] (1/4) Epoch 8, batch 50, loss[loss=0.2808, simple_loss=0.3287, pruned_loss=0.1165, over 19750.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3004, pruned_loss=0.09958, over 891173.88 frames. ], batch size: 250, lr: 2.00e-02, grad_scale: 16.0 +2023-03-27 22:22:29,892 INFO [train.py:892] (1/4) Epoch 8, batch 100, loss[loss=0.2506, simple_loss=0.3007, pruned_loss=0.1002, over 19829.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3079, pruned_loss=0.1054, over 1568092.14 frames. ], batch size: 177, lr: 2.00e-02, grad_scale: 16.0 +2023-03-27 22:23:01,136 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6500, 2.5654, 2.6937, 2.7868, 2.0186, 2.1889, 2.1737, 2.6817], + device='cuda:1'), covar=tensor([0.0170, 0.0300, 0.0307, 0.0240, 0.0358, 0.0359, 0.0383, 0.0477], + device='cuda:1'), in_proj_covar=tensor([0.0037, 0.0039, 0.0042, 0.0033, 0.0042, 0.0040, 0.0054, 0.0037], + device='cuda:1'), out_proj_covar=tensor([8.2344e-05, 8.4907e-05, 9.0992e-05, 7.4465e-05, 9.3920e-05, 8.9761e-05, + 1.1803e-04, 8.3794e-05], device='cuda:1') +2023-03-27 22:23:02,878 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13102.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:23:03,479 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.14 vs. limit=2.0 +2023-03-27 22:23:13,343 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.875e+02 5.582e+02 6.747e+02 8.374e+02 1.388e+03, threshold=1.349e+03, percent-clipped=0.0 +2023-03-27 22:23:39,232 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=13119.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 22:24:13,116 INFO [train.py:892] (1/4) Epoch 8, batch 150, loss[loss=0.2699, simple_loss=0.3111, pruned_loss=0.1143, over 19792.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3063, pruned_loss=0.104, over 2096874.20 frames. ], batch size: 162, lr: 1.99e-02, grad_scale: 16.0 +2023-03-27 22:24:43,916 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13150.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:24:59,309 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.61 vs. limit=5.0 +2023-03-27 22:25:58,992 INFO [train.py:892] (1/4) Epoch 8, batch 200, loss[loss=0.2374, simple_loss=0.298, pruned_loss=0.08841, over 19877.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3051, pruned_loss=0.1024, over 2508696.11 frames. 
], batch size: 64, lr: 1.99e-02, grad_scale: 16.0 +2023-03-27 22:26:43,153 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 5.713e+02 7.016e+02 8.259e+02 1.478e+03, threshold=1.403e+03, percent-clipped=2.0 +2023-03-27 22:27:31,491 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-27 22:27:40,628 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0205, 3.8722, 4.3044, 4.1582, 4.2767, 3.6421, 4.0318, 3.9725], + device='cuda:1'), covar=tensor([0.1254, 0.1185, 0.0952, 0.0856, 0.0879, 0.1009, 0.1887, 0.1912], + device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0206, 0.0269, 0.0207, 0.0198, 0.0195, 0.0257, 0.0292], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-27 22:27:43,930 INFO [train.py:892] (1/4) Epoch 8, batch 250, loss[loss=0.2352, simple_loss=0.3047, pruned_loss=0.08284, over 19663.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3034, pruned_loss=0.1013, over 2827523.77 frames. ], batch size: 50, lr: 1.99e-02, grad_scale: 16.0 +2023-03-27 22:29:31,230 INFO [train.py:892] (1/4) Epoch 8, batch 300, loss[loss=0.2969, simple_loss=0.3347, pruned_loss=0.1295, over 19834.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3057, pruned_loss=0.1024, over 3077483.06 frames. ], batch size: 184, lr: 1.98e-02, grad_scale: 16.0 +2023-03-27 22:29:37,968 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2684, 4.6307, 4.5997, 4.5764, 4.2671, 4.5298, 3.9836, 4.1184], + device='cuda:1'), covar=tensor([0.0457, 0.0354, 0.0554, 0.0454, 0.0584, 0.0597, 0.0832, 0.0905], + device='cuda:1'), in_proj_covar=tensor([0.0168, 0.0160, 0.0213, 0.0171, 0.0160, 0.0157, 0.0186, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 22:30:13,181 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.556e+02 6.827e+02 8.454e+02 1.465e+03, threshold=1.365e+03, percent-clipped=2.0 +2023-03-27 22:31:14,171 INFO [train.py:892] (1/4) Epoch 8, batch 350, loss[loss=0.2349, simple_loss=0.3062, pruned_loss=0.08181, over 19735.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3073, pruned_loss=0.1037, over 3271087.89 frames. ], batch size: 51, lr: 1.98e-02, grad_scale: 16.0 +2023-03-27 22:32:48,815 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9483, 3.6477, 3.6730, 4.1527, 3.8969, 4.2293, 3.8895, 4.0383], + device='cuda:1'), covar=tensor([0.0995, 0.0566, 0.0665, 0.0394, 0.0659, 0.0319, 0.0678, 0.0820], + device='cuda:1'), in_proj_covar=tensor([0.0120, 0.0133, 0.0160, 0.0136, 0.0129, 0.0111, 0.0125, 0.0146], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:32:58,932 INFO [train.py:892] (1/4) Epoch 8, batch 400, loss[loss=0.2485, simple_loss=0.3201, pruned_loss=0.08842, over 19575.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3063, pruned_loss=0.1032, over 3422757.84 frames. 
], batch size: 53, lr: 1.98e-02, grad_scale: 16.0 +2023-03-27 22:33:07,597 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.9034, 2.4914, 1.8731, 1.3901, 2.0024, 2.2805, 2.3800, 2.3568], + device='cuda:1'), covar=tensor([0.0235, 0.0149, 0.0238, 0.0539, 0.0353, 0.0286, 0.0106, 0.0144], + device='cuda:1'), in_proj_covar=tensor([0.0046, 0.0043, 0.0049, 0.0061, 0.0062, 0.0041, 0.0036, 0.0040], + device='cuda:1'), out_proj_covar=tensor([1.0572e-04, 9.8971e-05, 1.0906e-04, 1.4372e-04, 1.4122e-04, 9.7538e-05, + 8.5484e-05, 9.1761e-05], device='cuda:1') +2023-03-27 22:33:41,530 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.749e+02 7.183e+02 8.844e+02 1.624e+03, threshold=1.437e+03, percent-clipped=4.0 +2023-03-27 22:34:07,831 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13419.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:34:07,995 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0683, 2.5451, 3.6484, 3.8012, 4.0322, 4.5401, 4.5004, 4.6862], + device='cuda:1'), covar=tensor([0.0718, 0.1970, 0.0769, 0.0368, 0.0282, 0.0115, 0.0157, 0.0229], + device='cuda:1'), in_proj_covar=tensor([0.0121, 0.0163, 0.0133, 0.0115, 0.0094, 0.0091, 0.0084, 0.0087], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:34:21,686 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-27 22:34:44,926 INFO [train.py:892] (1/4) Epoch 8, batch 450, loss[loss=0.2542, simple_loss=0.3102, pruned_loss=0.09904, over 19829.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3074, pruned_loss=0.1038, over 3539578.68 frames. ], batch size: 57, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:35:49,877 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13467.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:35:52,222 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7331, 3.1348, 4.6561, 3.6984, 4.4173, 4.4738, 4.5386, 4.2199], + device='cuda:1'), covar=tensor([0.0120, 0.0567, 0.0088, 0.1254, 0.0078, 0.0156, 0.0113, 0.0116], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0078, 0.0061, 0.0131, 0.0054, 0.0068, 0.0062, 0.0055], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:36:07,879 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8084, 5.0858, 5.0717, 5.0893, 4.6999, 5.0646, 4.5344, 4.6340], + device='cuda:1'), covar=tensor([0.0307, 0.0285, 0.0443, 0.0350, 0.0464, 0.0459, 0.0511, 0.0665], + device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0162, 0.0211, 0.0171, 0.0159, 0.0157, 0.0185, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 22:36:19,870 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-27 22:36:27,709 INFO [train.py:892] (1/4) Epoch 8, batch 500, loss[loss=0.2414, simple_loss=0.2907, pruned_loss=0.09604, over 19821.00 frames. ], tot_loss[loss=0.2567, simple_loss=0.3069, pruned_loss=0.1032, over 3631572.05 frames. 
], batch size: 50, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:37:04,929 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4440, 2.3660, 2.5333, 1.8483, 2.4195, 1.7769, 2.3975, 2.6017], + device='cuda:1'), covar=tensor([0.0390, 0.0330, 0.0518, 0.0751, 0.0349, 0.0462, 0.0303, 0.0203], + device='cuda:1'), in_proj_covar=tensor([0.0049, 0.0050, 0.0054, 0.0077, 0.0050, 0.0045, 0.0044, 0.0041], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 22:37:11,215 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.922e+02 5.157e+02 6.466e+02 8.211e+02 1.475e+03, threshold=1.293e+03, percent-clipped=2.0 +2023-03-27 22:38:12,611 INFO [train.py:892] (1/4) Epoch 8, batch 550, loss[loss=0.2425, simple_loss=0.299, pruned_loss=0.09298, over 19771.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3066, pruned_loss=0.103, over 3702983.89 frames. ], batch size: 87, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:39:58,923 INFO [train.py:892] (1/4) Epoch 8, batch 600, loss[loss=0.2643, simple_loss=0.293, pruned_loss=0.1178, over 19795.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3061, pruned_loss=0.1029, over 3757728.49 frames. ], batch size: 185, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:40:11,748 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9055, 4.0612, 4.3607, 4.0216, 3.8501, 4.2104, 4.0182, 4.4532], + device='cuda:1'), covar=tensor([0.1023, 0.0264, 0.0315, 0.0278, 0.0710, 0.0322, 0.0291, 0.0257], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0171, 0.0161, 0.0165, 0.0162, 0.0161, 0.0155, 0.0146], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:40:41,840 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.580e+02 5.730e+02 6.817e+02 8.110e+02 1.556e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-03-27 22:40:52,036 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.4629, 1.5171, 1.6271, 1.5294, 1.1378, 1.4617, 1.4279, 1.6333], + device='cuda:1'), covar=tensor([0.0198, 0.0238, 0.0262, 0.0305, 0.0407, 0.0307, 0.0424, 0.0216], + device='cuda:1'), in_proj_covar=tensor([0.0037, 0.0037, 0.0040, 0.0034, 0.0042, 0.0040, 0.0053, 0.0036], + device='cuda:1'), out_proj_covar=tensor([8.1546e-05, 8.1439e-05, 8.7077e-05, 7.5103e-05, 9.2287e-05, 8.9244e-05, + 1.1445e-04, 8.1139e-05], device='cuda:1') +2023-03-27 22:41:04,045 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6505, 4.5250, 4.9718, 4.8296, 4.8816, 4.2641, 4.6190, 4.5538], + device='cuda:1'), covar=tensor([0.1369, 0.1515, 0.1037, 0.1086, 0.0845, 0.1223, 0.2397, 0.2316], + device='cuda:1'), in_proj_covar=tensor([0.0229, 0.0210, 0.0269, 0.0210, 0.0204, 0.0195, 0.0260, 0.0292], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-27 22:41:42,664 INFO [train.py:892] (1/4) Epoch 8, batch 650, loss[loss=0.2267, simple_loss=0.2794, pruned_loss=0.08697, over 19895.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3055, pruned_loss=0.1022, over 3801472.24 frames. ], batch size: 91, lr: 1.96e-02, grad_scale: 16.0 +2023-03-27 22:43:26,157 INFO [train.py:892] (1/4) Epoch 8, batch 700, loss[loss=0.2437, simple_loss=0.3101, pruned_loss=0.08866, over 19554.00 frames. ], tot_loss[loss=0.2574, simple_loss=0.3077, pruned_loss=0.1036, over 3833342.75 frames. 
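The [zipformer.py:1454] dumps are periodic attention diagnostics: an eight-element attn_weights_entropy tensor, presumably one value per attention head, together with covariance summaries of the input/output projections. Values on this stretch range from about 1.14 (a sharply focused head) to about 5.74 (a head attending almost uniformly). A hedged sketch of the entropy part:

```python
import torch

def attention_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    # attn_weights: (num_heads, query_len, key_len), each row summing to 1
    p = attn_weights.clamp(min=1e-20)
    # Shannon entropy over key positions, averaged over queries: one value
    # per head, as in the logged tensors
    return -(p * p.log()).sum(dim=-1).mean(dim=-1)
```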
], batch size: 53, lr: 1.96e-02, grad_scale: 16.0 +2023-03-27 22:43:51,736 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.88 vs. limit=5.0 +2023-03-27 22:44:11,779 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.892e+02 5.950e+02 6.978e+02 8.185e+02 2.283e+03, threshold=1.396e+03, percent-clipped=2.0 +2023-03-27 22:45:13,773 INFO [train.py:892] (1/4) Epoch 8, batch 750, loss[loss=0.2546, simple_loss=0.3047, pruned_loss=0.1023, over 19667.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3065, pruned_loss=0.1023, over 3859990.81 frames. ], batch size: 43, lr: 1.96e-02, grad_scale: 16.0 +2023-03-27 22:45:16,631 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13737.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:46:44,561 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0033, 1.9823, 3.0277, 3.3253, 3.9604, 4.2913, 4.3524, 4.2756], + device='cuda:1'), covar=tensor([0.0841, 0.2534, 0.1119, 0.0508, 0.0309, 0.0155, 0.0165, 0.0381], + device='cuda:1'), in_proj_covar=tensor([0.0126, 0.0169, 0.0140, 0.0118, 0.0097, 0.0092, 0.0089, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:46:57,384 INFO [train.py:892] (1/4) Epoch 8, batch 800, loss[loss=0.2705, simple_loss=0.3204, pruned_loss=0.1103, over 19742.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3083, pruned_loss=0.1027, over 3876876.71 frames. ], batch size: 80, lr: 1.95e-02, grad_scale: 16.0 +2023-03-27 22:47:22,159 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13798.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 22:47:40,993 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 5.653e+02 6.887e+02 8.342e+02 1.511e+03, threshold=1.377e+03, percent-clipped=1.0 +2023-03-27 22:48:41,451 INFO [train.py:892] (1/4) Epoch 8, batch 850, loss[loss=0.2427, simple_loss=0.3005, pruned_loss=0.09242, over 19568.00 frames. ], tot_loss[loss=0.2562, simple_loss=0.3074, pruned_loss=0.1025, over 3893099.60 frames. ], batch size: 53, lr: 1.95e-02, grad_scale: 16.0 +2023-03-27 22:50:25,547 INFO [train.py:892] (1/4) Epoch 8, batch 900, loss[loss=0.2486, simple_loss=0.2999, pruned_loss=0.0987, over 19892.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3072, pruned_loss=0.1024, over 3906031.26 frames. ], batch size: 71, lr: 1.95e-02, grad_scale: 16.0 +2023-03-27 22:51:07,623 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.950e+02 5.711e+02 6.914e+02 8.522e+02 2.139e+03, threshold=1.383e+03, percent-clipped=3.0 +2023-03-27 22:51:49,483 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.95 vs. limit=5.0 +2023-03-27 22:52:07,733 INFO [train.py:892] (1/4) Epoch 8, batch 950, loss[loss=0.2368, simple_loss=0.2873, pruned_loss=0.0932, over 19857.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3066, pruned_loss=0.1013, over 3915106.33 frames. ], batch size: 112, lr: 1.94e-02, grad_scale: 16.0 +2023-03-27 22:53:29,707 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-27 22:53:53,095 INFO [train.py:892] (1/4) Epoch 8, batch 1000, loss[loss=0.2792, simple_loss=0.3335, pruned_loss=0.1124, over 19835.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3077, pruned_loss=0.1022, over 3923085.73 frames. 
], batch size: 58, lr: 1.94e-02, grad_scale: 16.0 +2023-03-27 22:54:42,905 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.428e+02 6.416e+02 7.650e+02 1.405e+03, threshold=1.283e+03, percent-clipped=1.0 +2023-03-27 22:55:01,254 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-03-27 22:55:44,629 INFO [train.py:892] (1/4) Epoch 8, batch 1050, loss[loss=0.2595, simple_loss=0.3222, pruned_loss=0.0984, over 19727.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3076, pruned_loss=0.1019, over 3927490.30 frames. ], batch size: 52, lr: 1.94e-02, grad_scale: 16.0 +2023-03-27 22:56:07,117 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9719, 3.0901, 1.7016, 3.7306, 3.4053, 3.7323, 3.6877, 2.8873], + device='cuda:1'), covar=tensor([0.0611, 0.0499, 0.1477, 0.0578, 0.0518, 0.0305, 0.0629, 0.0738], + device='cuda:1'), in_proj_covar=tensor([0.0113, 0.0102, 0.0124, 0.0108, 0.0093, 0.0087, 0.0101, 0.0113], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:57:00,397 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14072.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:57:29,680 INFO [train.py:892] (1/4) Epoch 8, batch 1100, loss[loss=0.2531, simple_loss=0.301, pruned_loss=0.1026, over 19758.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3058, pruned_loss=0.101, over 3934190.86 frames. ], batch size: 253, lr: 1.93e-02, grad_scale: 16.0 +2023-03-27 22:57:43,881 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14093.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 22:58:04,857 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14103.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:58:13,329 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.555e+02 6.523e+02 7.495e+02 1.242e+03, threshold=1.305e+03, percent-clipped=0.0 +2023-03-27 22:58:23,918 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6868, 3.5957, 2.3144, 4.0819, 4.0479, 1.7429, 3.2770, 3.2367], + device='cuda:1'), covar=tensor([0.0532, 0.0845, 0.2293, 0.0445, 0.0218, 0.3130, 0.1037, 0.0580], + device='cuda:1'), in_proj_covar=tensor([0.0170, 0.0193, 0.0203, 0.0160, 0.0115, 0.0194, 0.0205, 0.0139], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 22:58:37,620 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-27 22:58:40,439 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14120.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 22:59:06,776 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14133.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:59:11,553 INFO [train.py:892] (1/4) Epoch 8, batch 1150, loss[loss=0.243, simple_loss=0.2973, pruned_loss=0.09431, over 19709.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3053, pruned_loss=0.101, over 3937305.08 frames. 
], batch size: 85, lr: 1.93e-02, grad_scale: 16.0 +2023-03-27 23:00:12,682 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14164.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:00:49,263 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14181.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 23:00:58,070 INFO [train.py:892] (1/4) Epoch 8, batch 1200, loss[loss=0.2514, simple_loss=0.3052, pruned_loss=0.09878, over 19681.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3056, pruned_loss=0.1011, over 3940355.17 frames. ], batch size: 52, lr: 1.93e-02, grad_scale: 16.0 +2023-03-27 23:01:41,590 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 5.724e+02 6.994e+02 8.657e+02 1.468e+03, threshold=1.399e+03, percent-clipped=2.0 +2023-03-27 23:02:04,566 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8956, 4.0270, 4.3081, 3.9781, 3.7921, 4.2181, 3.9279, 4.4600], + device='cuda:1'), covar=tensor([0.1039, 0.0278, 0.0309, 0.0326, 0.0773, 0.0354, 0.0401, 0.0254], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0170, 0.0162, 0.0167, 0.0164, 0.0162, 0.0157, 0.0147], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 23:02:43,567 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. limit=2.0 +2023-03-27 23:02:44,258 INFO [train.py:892] (1/4) Epoch 8, batch 1250, loss[loss=0.2233, simple_loss=0.2782, pruned_loss=0.08421, over 19668.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3059, pruned_loss=0.1011, over 3939908.13 frames. ], batch size: 64, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:03:03,096 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8851, 2.5593, 3.9283, 3.3492, 3.7984, 3.7176, 3.6592, 3.6439], + device='cuda:1'), covar=tensor([0.0140, 0.0623, 0.0092, 0.0805, 0.0088, 0.0208, 0.0169, 0.0109], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0077, 0.0061, 0.0132, 0.0055, 0.0069, 0.0064, 0.0054], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 23:04:27,632 INFO [train.py:892] (1/4) Epoch 8, batch 1300, loss[loss=0.2577, simple_loss=0.3117, pruned_loss=0.1019, over 19725.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3054, pruned_loss=0.1009, over 3942254.82 frames. ], batch size: 63, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:05:14,654 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.149e+02 5.379e+02 6.577e+02 8.584e+02 1.412e+03, threshold=1.315e+03, percent-clipped=1.0 +2023-03-27 23:06:13,956 INFO [train.py:892] (1/4) Epoch 8, batch 1350, loss[loss=0.2571, simple_loss=0.2983, pruned_loss=0.1079, over 19776.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3044, pruned_loss=0.1001, over 3944389.68 frames. ], batch size: 198, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:07:58,354 INFO [train.py:892] (1/4) Epoch 8, batch 1400, loss[loss=0.2423, simple_loss=0.2924, pruned_loss=0.09614, over 19801.00 frames. ], tot_loss[loss=0.2542, simple_loss=0.3058, pruned_loss=0.1013, over 3944796.47 frames. 
], batch size: 149, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:08:13,085 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14393.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:08:40,814 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.328e+02 6.639e+02 8.222e+02 1.941e+03, threshold=1.328e+03, percent-clipped=1.0 +2023-03-27 23:08:52,753 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-03-27 23:08:56,021 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14413.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:09:24,622 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14428.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:09:31,899 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5307, 3.1609, 4.5701, 3.9282, 4.0834, 4.3399, 4.3426, 3.9301], + device='cuda:1'), covar=tensor([0.0093, 0.0497, 0.0060, 0.0749, 0.0091, 0.0152, 0.0105, 0.0107], + device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0080, 0.0062, 0.0134, 0.0056, 0.0070, 0.0066, 0.0056], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 23:09:41,896 INFO [train.py:892] (1/4) Epoch 8, batch 1450, loss[loss=0.2715, simple_loss=0.322, pruned_loss=0.1105, over 19767.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3059, pruned_loss=0.1014, over 3946439.86 frames. ], batch size: 256, lr: 1.91e-02, grad_scale: 16.0 +2023-03-27 23:09:52,298 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14441.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:10:23,916 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6948, 3.7421, 2.3431, 4.0543, 4.0475, 1.7104, 3.3993, 3.5027], + device='cuda:1'), covar=tensor([0.0563, 0.0781, 0.2383, 0.0501, 0.0283, 0.3148, 0.0901, 0.0470], + device='cuda:1'), in_proj_covar=tensor([0.0171, 0.0194, 0.0202, 0.0163, 0.0118, 0.0192, 0.0205, 0.0139], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 23:10:29,160 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14459.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:10:59,957 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14474.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:11:03,491 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14476.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 23:11:24,911 INFO [train.py:892] (1/4) Epoch 8, batch 1500, loss[loss=0.2429, simple_loss=0.3034, pruned_loss=0.09118, over 19791.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3043, pruned_loss=0.1006, over 3948010.52 frames. 
], batch size: 83, lr: 1.91e-02, grad_scale: 32.0 +2023-03-27 23:11:27,521 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14487.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:12:08,355 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 5.447e+02 6.433e+02 7.944e+02 1.401e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-03-27 23:12:59,477 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7777, 4.8120, 5.2223, 5.0663, 5.0690, 4.7421, 4.8426, 4.8279], + device='cuda:1'), covar=tensor([0.1305, 0.1236, 0.0906, 0.0967, 0.0762, 0.0838, 0.1981, 0.2212], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0221, 0.0281, 0.0220, 0.0214, 0.0207, 0.0271, 0.0305], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-27 23:13:06,133 INFO [train.py:892] (1/4) Epoch 8, batch 1550, loss[loss=0.3117, simple_loss=0.356, pruned_loss=0.1337, over 19696.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3053, pruned_loss=0.1015, over 3949480.02 frames. ], batch size: 337, lr: 1.91e-02, grad_scale: 16.0 +2023-03-27 23:13:33,924 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14548.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:14:23,400 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-03-27 23:14:54,530 INFO [train.py:892] (1/4) Epoch 8, batch 1600, loss[loss=0.2456, simple_loss=0.3051, pruned_loss=0.09307, over 19815.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3047, pruned_loss=0.1004, over 3950710.46 frames. ], batch size: 50, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:15:42,294 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 5.480e+02 6.486e+02 8.210e+02 1.658e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-03-27 23:16:47,970 INFO [train.py:892] (1/4) Epoch 8, batch 1650, loss[loss=0.236, simple_loss=0.3054, pruned_loss=0.08327, over 19833.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3046, pruned_loss=0.09969, over 3949451.70 frames. ], batch size: 57, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:18:02,709 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6920, 3.1551, 2.3511, 1.9478, 2.3351, 3.0844, 2.9609, 2.8192], + device='cuda:1'), covar=tensor([0.0176, 0.0212, 0.0198, 0.0518, 0.0318, 0.0155, 0.0150, 0.0188], + device='cuda:1'), in_proj_covar=tensor([0.0050, 0.0046, 0.0052, 0.0068, 0.0066, 0.0045, 0.0040, 0.0044], + device='cuda:1'), out_proj_covar=tensor([1.1557e-04, 1.0598e-04, 1.1706e-04, 1.5865e-04, 1.5307e-04, 1.0552e-04, + 9.6482e-05, 9.9738e-05], device='cuda:1') +2023-03-27 23:18:38,576 INFO [train.py:892] (1/4) Epoch 8, batch 1700, loss[loss=0.2185, simple_loss=0.2807, pruned_loss=0.0782, over 19791.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3051, pruned_loss=0.1001, over 3947983.59 frames. 
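
grad_scale is the dynamic fp16 loss scale: it doubles after a long run of overflow-free steps (16.0 to 32.0 going into batch 1500 above) and is halved as soon as an overflow is detected (back to 16.0 by batch 1550). This is standard torch.cuda.amp.GradScaler behaviour; a sketch of the update rule (helper name hypothetical; GradScaler's defaults are growth_factor=2.0, backoff_factor=0.5, growth_interval=2000):

    def update_loss_scale(scale, good_steps, found_inf, growth_interval=2000):
        # Halve the scale on overflow, double it after growth_interval
        # consecutive overflow-free optimizer steps.
        if found_inf:
            return scale * 0.5, 0
        good_steps += 1
        if good_steps == growth_interval:
            return scale * 2.0, 0
        return scale, good_steps
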
], batch size: 83, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:19:29,029 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.107e+02 6.150e+02 7.747e+02 1.488e+03, threshold=1.230e+03, percent-clipped=2.0 +2023-03-27 23:19:36,571 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1682, 3.1347, 1.6854, 4.0273, 3.6516, 3.9316, 4.0442, 3.1756], + device='cuda:1'), covar=tensor([0.0479, 0.0502, 0.1375, 0.0382, 0.0405, 0.0284, 0.0505, 0.0628], + device='cuda:1'), in_proj_covar=tensor([0.0112, 0.0105, 0.0124, 0.0110, 0.0094, 0.0088, 0.0100, 0.0114], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 23:20:11,061 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14728.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:20:25,102 INFO [train.py:892] (1/4) Epoch 8, batch 1750, loss[loss=0.2267, simple_loss=0.3001, pruned_loss=0.0767, over 19932.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3047, pruned_loss=0.1002, over 3947419.74 frames. ], batch size: 51, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:20:32,955 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14740.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:11,651 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14759.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:29,725 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14769.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:41,280 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14776.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:41,383 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14776.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 23:21:52,082 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1292, 4.1953, 2.5033, 4.5510, 4.7478, 1.9359, 3.9406, 3.5268], + device='cuda:1'), covar=tensor([0.0536, 0.0619, 0.2334, 0.0495, 0.0207, 0.2817, 0.0881, 0.0533], + device='cuda:1'), in_proj_covar=tensor([0.0173, 0.0194, 0.0202, 0.0164, 0.0121, 0.0193, 0.0208, 0.0141], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 23:22:00,364 INFO [train.py:892] (1/4) Epoch 8, batch 1800, loss[loss=0.2318, simple_loss=0.2893, pruned_loss=0.08719, over 19899.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3046, pruned_loss=0.1, over 3948221.96 frames. ], batch size: 94, lr: 1.89e-02, grad_scale: 16.0 +2023-03-27 23:22:28,467 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14801.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:22:39,828 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14807.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:22:41,056 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.298e+02 5.336e+02 6.514e+02 7.931e+02 1.246e+03, threshold=1.303e+03, percent-clipped=1.0 +2023-03-27 23:23:10,740 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14824.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 23:23:25,307 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. 
limit=2.0 +2023-03-27 23:23:30,857 INFO [train.py:892] (1/4) Epoch 8, batch 1850, loss[loss=0.2569, simple_loss=0.319, pruned_loss=0.09742, over 19601.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.305, pruned_loss=0.09885, over 3947948.89 frames. ], batch size: 53, lr: 1.89e-02, grad_scale: 16.0 +2023-03-27 23:24:36,964 INFO [train.py:892] (1/4) Epoch 9, batch 0, loss[loss=0.2145, simple_loss=0.268, pruned_loss=0.0805, over 19790.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.268, pruned_loss=0.0805, over 19790.00 frames. ], batch size: 79, lr: 1.79e-02, grad_scale: 16.0 +2023-03-27 23:24:36,965 INFO [train.py:917] (1/4) Computing validation loss +2023-03-27 23:25:11,156 INFO [train.py:926] (1/4) Epoch 9, validation: loss=0.1843, simple_loss=0.2646, pruned_loss=0.05198, over 2883724.00 frames. +2023-03-27 23:25:11,157 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-27 23:25:16,225 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14843.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:27:06,413 INFO [train.py:892] (1/4) Epoch 9, batch 50, loss[loss=0.2204, simple_loss=0.2822, pruned_loss=0.07931, over 19609.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.2934, pruned_loss=0.09442, over 890863.01 frames. ], batch size: 46, lr: 1.79e-02, grad_scale: 16.0 +2023-03-27 23:27:44,393 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.418e+02 5.041e+02 5.916e+02 7.576e+02 1.365e+03, threshold=1.183e+03, percent-clipped=1.0 +2023-03-27 23:29:00,084 INFO [train.py:892] (1/4) Epoch 9, batch 100, loss[loss=0.2408, simple_loss=0.2907, pruned_loss=0.09542, over 19784.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.2936, pruned_loss=0.09308, over 1570654.30 frames. ], batch size: 191, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:30:12,692 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6978, 3.8485, 3.9114, 3.9689, 3.6723, 3.8127, 3.5877, 3.1111], + device='cuda:1'), covar=tensor([0.1002, 0.1297, 0.1345, 0.1048, 0.1320, 0.1406, 0.1351, 0.2934], + device='cuda:1'), in_proj_covar=tensor([0.0181, 0.0175, 0.0219, 0.0178, 0.0174, 0.0162, 0.0197, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-27 23:30:48,412 INFO [train.py:892] (1/4) Epoch 9, batch 150, loss[loss=0.2777, simple_loss=0.3162, pruned_loss=0.1196, over 19874.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2977, pruned_loss=0.09504, over 2097986.26 frames. 
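
The zipformer.py:625 entries trace per-stack layer dropout (stochastic depth): each encoder stack has a warmup window in batches (warmup_begin to warmup_end), and on every step some of its layers may be skipped entirely, with num_to_drop and layers_to_drop reporting the outcome; drops become rare once batch_count is far past warmup_end, as in the lines above. The exact schedule lives in zipformer.py; the sketch below only illustrates the sampling step (the drop rate and names are assumptions):

    import random

    def pick_layers_to_drop(num_layers, drop_rate, rng=random):
        # Independently skip each layer of the stack with probability
        # drop_rate for this batch; surviving layers run as usual.
        return {i for i in range(num_layers) if rng.random() < drop_rate}

    layers_to_drop = pick_layers_to_drop(num_layers=4, drop_rate=0.05)
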
], batch size: 158, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:31:00,996 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5817, 1.6897, 2.1651, 2.9583, 3.3362, 3.4976, 3.4782, 3.4931], + device='cuda:1'), covar=tensor([0.0740, 0.2037, 0.1118, 0.0446, 0.0286, 0.0163, 0.0197, 0.0204], + device='cuda:1'), in_proj_covar=tensor([0.0124, 0.0162, 0.0139, 0.0116, 0.0099, 0.0091, 0.0086, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 23:31:28,932 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 4.885e+02 5.781e+02 7.549e+02 1.487e+03, threshold=1.156e+03, percent-clipped=3.0 +2023-03-27 23:31:59,443 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1503, 2.2299, 2.4928, 2.0929, 2.1180, 1.8310, 1.9807, 2.5020], + device='cuda:1'), covar=tensor([0.0215, 0.0238, 0.0184, 0.0268, 0.0257, 0.0327, 0.0407, 0.0157], + device='cuda:1'), in_proj_covar=tensor([0.0040, 0.0041, 0.0042, 0.0036, 0.0044, 0.0043, 0.0057, 0.0040], + device='cuda:1'), out_proj_covar=tensor([8.9936e-05, 9.1094e-05, 9.2435e-05, 8.1499e-05, 9.7930e-05, 9.6544e-05, + 1.2423e-04, 9.1331e-05], device='cuda:1') +2023-03-27 23:32:28,755 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.93 vs. limit=5.0 +2023-03-27 23:32:44,981 INFO [train.py:892] (1/4) Epoch 9, batch 200, loss[loss=0.2798, simple_loss=0.3235, pruned_loss=0.1181, over 19742.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.301, pruned_loss=0.09706, over 2508123.00 frames. ], batch size: 221, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:33:47,933 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15069.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:34:38,145 INFO [train.py:892] (1/4) Epoch 9, batch 250, loss[loss=0.2452, simple_loss=0.2978, pruned_loss=0.09636, over 19851.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.303, pruned_loss=0.09825, over 2827338.03 frames. ], batch size: 124, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:34:48,972 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15096.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:35:13,659 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.417e+02 6.236e+02 8.058e+02 1.427e+03, threshold=1.247e+03, percent-clipped=5.0 +2023-03-27 23:35:35,287 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15117.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:36:04,623 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-27 23:36:26,584 INFO [train.py:892] (1/4) Epoch 9, batch 300, loss[loss=0.2516, simple_loss=0.2936, pruned_loss=0.1048, over 19780.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3015, pruned_loss=0.09749, over 3077225.68 frames. ], batch size: 191, lr: 1.77e-02, grad_scale: 16.0 +2023-03-27 23:36:33,435 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15143.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:38:17,897 INFO [train.py:892] (1/4) Epoch 9, batch 350, loss[loss=0.2675, simple_loss=0.3125, pruned_loss=0.1113, over 19787.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3029, pruned_loss=0.09882, over 3270605.64 frames. 
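
In the scaling.py:679 entries, Whiten is a regularizer that nudges each group of channels toward a "white", identity-like covariance. The metric equals 1.0 for perfectly white activations and grows as the covariance concentrates in a few directions; a corrective gradient is applied when it exceeds the configured limit (2.0 or 5.0 above). A scale-invariant metric with those properties can be computed as below; this mirrors the idea, not the exact scaling.py code:

    import torch

    def whitening_metric(x, num_groups):
        # x: (num_frames, num_channels).  Split the channels into groups,
        # form each group's covariance, and average a statistic that is
        # 1.0 when the covariance is a multiple of the identity.
        n, c = x.shape
        d = c // num_groups
        metric = x.new_zeros(())
        for g in range(num_groups):
            f = x[:, g * d:(g + 1) * d]
            cov = (f.t() @ f) / n
            metric = metric + (cov ** 2).sum() / (cov.diag().mean() ** 2 * d)
        return metric / num_groups
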
], batch size: 241, lr: 1.77e-02, grad_scale: 16.0 +2023-03-27 23:38:18,769 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15191.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:38:27,565 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15195.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:38:52,909 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.594e+02 5.622e+02 6.774e+02 8.741e+02 1.605e+03, threshold=1.355e+03, percent-clipped=2.0 +2023-03-27 23:40:06,029 INFO [train.py:892] (1/4) Epoch 9, batch 400, loss[loss=0.278, simple_loss=0.3183, pruned_loss=0.1188, over 19825.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3027, pruned_loss=0.09867, over 3421355.86 frames. ], batch size: 288, lr: 1.77e-02, grad_scale: 16.0 +2023-03-27 23:40:40,941 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15256.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 23:41:58,970 INFO [train.py:892] (1/4) Epoch 9, batch 450, loss[loss=0.2671, simple_loss=0.3164, pruned_loss=0.1089, over 19801.00 frames. ], tot_loss[loss=0.2494, simple_loss=0.3021, pruned_loss=0.09836, over 3539131.12 frames. ], batch size: 162, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:42:37,304 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.167e+02 6.128e+02 7.422e+02 1.689e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-03-27 23:43:54,445 INFO [train.py:892] (1/4) Epoch 9, batch 500, loss[loss=0.2285, simple_loss=0.2803, pruned_loss=0.08839, over 19828.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3047, pruned_loss=0.0998, over 3628739.96 frames. ], batch size: 177, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:45:18,876 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2621, 3.0752, 3.4244, 2.6257, 3.9916, 2.7701, 2.5602, 3.6535], + device='cuda:1'), covar=tensor([0.0435, 0.0306, 0.0849, 0.0654, 0.0205, 0.0369, 0.0812, 0.0224], + device='cuda:1'), in_proj_covar=tensor([0.0052, 0.0054, 0.0056, 0.0084, 0.0054, 0.0048, 0.0048, 0.0044], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 23:45:44,801 INFO [train.py:892] (1/4) Epoch 9, batch 550, loss[loss=0.2196, simple_loss=0.2889, pruned_loss=0.07512, over 19867.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3034, pruned_loss=0.09913, over 3700176.89 frames. ], batch size: 48, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:45:55,126 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15396.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:46:09,136 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3555, 3.4849, 3.7007, 4.6248, 3.0329, 3.4986, 3.2018, 2.5860], + device='cuda:1'), covar=tensor([0.0384, 0.2711, 0.0809, 0.0141, 0.1973, 0.0669, 0.0902, 0.1775], + device='cuda:1'), in_proj_covar=tensor([0.0182, 0.0327, 0.0214, 0.0135, 0.0231, 0.0160, 0.0189, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0001, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-27 23:46:26,533 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 5.578e+02 6.357e+02 7.832e+02 1.667e+03, threshold=1.271e+03, percent-clipped=4.0 +2023-03-27 23:46:40,779 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.04 vs. 
limit=5.0 +2023-03-27 23:46:55,839 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-27 23:47:38,395 INFO [train.py:892] (1/4) Epoch 9, batch 600, loss[loss=0.2504, simple_loss=0.3042, pruned_loss=0.09828, over 19814.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3023, pruned_loss=0.09848, over 3756485.04 frames. ], batch size: 181, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:47:47,258 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15444.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:49:30,502 INFO [train.py:892] (1/4) Epoch 9, batch 650, loss[loss=0.2539, simple_loss=0.3037, pruned_loss=0.102, over 19770.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.301, pruned_loss=0.09778, over 3798556.73 frames. ], batch size: 113, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:50:08,031 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.095e+02 6.402e+02 7.692e+02 1.403e+03, threshold=1.280e+03, percent-clipped=2.0 +2023-03-27 23:50:59,476 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1114, 2.2141, 2.2721, 2.2045, 1.8559, 1.8739, 1.9212, 2.3518], + device='cuda:1'), covar=tensor([0.0217, 0.0301, 0.0260, 0.0192, 0.0339, 0.0326, 0.0398, 0.0232], + device='cuda:1'), in_proj_covar=tensor([0.0041, 0.0040, 0.0043, 0.0036, 0.0045, 0.0043, 0.0058, 0.0040], + device='cuda:1'), out_proj_covar=tensor([9.0855e-05, 9.0382e-05, 9.4083e-05, 8.2122e-05, 9.9533e-05, 9.6335e-05, + 1.2623e-04, 9.1918e-05], device='cuda:1') +2023-03-27 23:51:23,085 INFO [train.py:892] (1/4) Epoch 9, batch 700, loss[loss=0.26, simple_loss=0.3197, pruned_loss=0.1002, over 19806.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.2998, pruned_loss=0.09687, over 3834227.52 frames. ], batch size: 82, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:51:45,038 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15551.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 23:53:15,935 INFO [train.py:892] (1/4) Epoch 9, batch 750, loss[loss=0.2367, simple_loss=0.2834, pruned_loss=0.09499, over 19836.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.2996, pruned_loss=0.09705, over 3860143.91 frames. ], batch size: 171, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:53:54,155 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 5.119e+02 6.405e+02 7.720e+02 1.158e+03, threshold=1.281e+03, percent-clipped=0.0 +2023-03-27 23:55:10,137 INFO [train.py:892] (1/4) Epoch 9, batch 800, loss[loss=0.2474, simple_loss=0.3032, pruned_loss=0.09581, over 19783.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3013, pruned_loss=0.09727, over 3877176.96 frames. ], batch size: 91, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:57:05,129 INFO [train.py:892] (1/4) Epoch 9, batch 850, loss[loss=0.2493, simple_loss=0.3148, pruned_loss=0.09193, over 19647.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.3026, pruned_loss=0.09763, over 3892395.66 frames. 
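
The zipformer.py:1454 dumps are periodic diagnostics of attn_weights_entropy: the entropy of each attention head's weight distribution, printed together with its covariance and the input/output projection covariances. Entropy near zero means a head has collapsed onto single source positions; entropy near log(src_len) means it is still close to uniform. The per-head statistic itself is simple to compute (a sketch; the surrounding diagnostic plumbing is icefall-specific):

    import torch

    def attn_weights_entropy(attn):
        # attn: (num_heads, tgt_len, src_len), rows are softmax outputs.
        # Returns mean entropy per head, in nats.
        ent = -(attn * (attn + 1e-20).log()).sum(dim=-1)  # (heads, tgt_len)
        return ent.mean(dim=-1)
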
], batch size: 57, lr: 1.74e-02, grad_scale: 16.0 +2023-03-27 23:57:44,964 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.938e+02 5.306e+02 6.456e+02 8.498e+02 2.233e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-03-27 23:58:02,148 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0412, 3.1946, 3.5130, 2.4183, 3.3453, 2.7222, 2.8022, 3.3598], + device='cuda:1'), covar=tensor([0.0417, 0.0235, 0.0250, 0.0610, 0.0259, 0.0279, 0.0291, 0.0267], + device='cuda:1'), in_proj_covar=tensor([0.0053, 0.0053, 0.0055, 0.0083, 0.0053, 0.0049, 0.0047, 0.0043], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-27 23:58:58,983 INFO [train.py:892] (1/4) Epoch 9, batch 900, loss[loss=0.2232, simple_loss=0.2732, pruned_loss=0.0866, over 19795.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3037, pruned_loss=0.09897, over 3904637.18 frames. ], batch size: 185, lr: 1.74e-02, grad_scale: 16.0 +2023-03-27 23:59:20,004 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.66 vs. limit=5.0 +2023-03-28 00:00:51,465 INFO [train.py:892] (1/4) Epoch 9, batch 950, loss[loss=0.233, simple_loss=0.2786, pruned_loss=0.09374, over 19872.00 frames. ], tot_loss[loss=0.2501, simple_loss=0.3032, pruned_loss=0.09848, over 3913629.18 frames. ], batch size: 138, lr: 1.74e-02, grad_scale: 16.0 +2023-03-28 00:01:20,798 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1221, 2.8119, 4.4349, 3.7763, 4.0976, 4.2873, 4.1106, 4.1041], + device='cuda:1'), covar=tensor([0.0191, 0.0678, 0.0080, 0.0895, 0.0111, 0.0167, 0.0139, 0.0102], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0083, 0.0064, 0.0138, 0.0058, 0.0071, 0.0068, 0.0058], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 00:01:31,054 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.952e+02 6.049e+02 8.217e+02 1.611e+03, threshold=1.210e+03, percent-clipped=3.0 +2023-03-28 00:02:47,473 INFO [train.py:892] (1/4) Epoch 9, batch 1000, loss[loss=0.2527, simple_loss=0.3077, pruned_loss=0.09891, over 19750.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3038, pruned_loss=0.09856, over 3919651.57 frames. ], batch size: 209, lr: 1.74e-02, grad_scale: 16.0 +2023-03-28 00:02:50,816 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. 
limit=2.0 +2023-03-28 00:03:10,933 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15851.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:03:29,649 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4159, 3.1528, 4.7188, 3.9401, 4.5234, 4.5246, 4.4388, 4.3911], + device='cuda:1'), covar=tensor([0.0186, 0.0601, 0.0079, 0.0955, 0.0080, 0.0153, 0.0110, 0.0094], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0082, 0.0065, 0.0138, 0.0058, 0.0072, 0.0069, 0.0058], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 00:03:38,772 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0989, 5.5076, 5.4341, 5.4070, 5.1574, 5.3585, 4.7473, 4.9151], + device='cuda:1'), covar=tensor([0.0399, 0.0351, 0.0539, 0.0379, 0.0585, 0.0564, 0.0606, 0.0910], + device='cuda:1'), in_proj_covar=tensor([0.0180, 0.0173, 0.0222, 0.0181, 0.0177, 0.0165, 0.0193, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 00:04:24,312 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15883.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:04:41,510 INFO [train.py:892] (1/4) Epoch 9, batch 1050, loss[loss=0.2359, simple_loss=0.2901, pruned_loss=0.09082, over 19821.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3033, pruned_loss=0.098, over 3925346.92 frames. ], batch size: 127, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:04:58,488 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15899.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:05:18,405 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.416e+02 5.707e+02 6.582e+02 7.521e+02 1.130e+03, threshold=1.316e+03, percent-clipped=0.0 +2023-03-28 00:06:34,650 INFO [train.py:892] (1/4) Epoch 9, batch 1100, loss[loss=0.2369, simple_loss=0.2752, pruned_loss=0.09926, over 19873.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3021, pruned_loss=0.09697, over 3929855.66 frames. ], batch size: 157, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:06:42,194 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15944.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:08:26,907 INFO [train.py:892] (1/4) Epoch 9, batch 1150, loss[loss=0.2355, simple_loss=0.2838, pruned_loss=0.09365, over 19773.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3009, pruned_loss=0.09663, over 3934103.34 frames. ], batch size: 198, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:08:54,457 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16001.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:09:09,171 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.244e+02 6.208e+02 7.678e+02 1.349e+03, threshold=1.242e+03, percent-clipped=1.0 +2023-03-28 00:10:22,653 INFO [train.py:892] (1/4) Epoch 9, batch 1200, loss[loss=0.2519, simple_loss=0.3188, pruned_loss=0.09246, over 19657.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3005, pruned_loss=0.09605, over 3938269.02 frames. 
], batch size: 57, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:11:12,449 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16062.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:12:03,708 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16085.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:12:11,658 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2236, 3.4290, 3.5919, 4.3432, 3.0537, 3.4088, 2.9571, 2.4684], + device='cuda:1'), covar=tensor([0.0389, 0.2346, 0.0847, 0.0160, 0.1949, 0.0633, 0.0942, 0.1849], + device='cuda:1'), in_proj_covar=tensor([0.0181, 0.0328, 0.0216, 0.0139, 0.0231, 0.0163, 0.0190, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 00:12:15,721 INFO [train.py:892] (1/4) Epoch 9, batch 1250, loss[loss=0.2139, simple_loss=0.2725, pruned_loss=0.07771, over 19561.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.2985, pruned_loss=0.0952, over 3942108.51 frames. ], batch size: 41, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:12:49,931 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.594e+02 5.905e+02 6.787e+02 8.144e+02 1.550e+03, threshold=1.357e+03, percent-clipped=6.0 +2023-03-28 00:14:05,698 INFO [train.py:892] (1/4) Epoch 9, batch 1300, loss[loss=0.2412, simple_loss=0.2889, pruned_loss=0.09676, over 19799.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.2978, pruned_loss=0.09453, over 3944395.68 frames. ], batch size: 126, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:14:17,830 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16146.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:14:25,407 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16149.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:15:57,808 INFO [train.py:892] (1/4) Epoch 9, batch 1350, loss[loss=0.2258, simple_loss=0.2858, pruned_loss=0.08296, over 19807.00 frames. ], tot_loss[loss=0.243, simple_loss=0.2979, pruned_loss=0.0941, over 3944801.64 frames. ], batch size: 82, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:16:35,998 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.606e+02 5.207e+02 6.312e+02 7.771e+02 1.214e+03, threshold=1.262e+03, percent-clipped=0.0 +2023-03-28 00:16:41,309 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16210.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:17:50,411 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16239.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:17:53,559 INFO [train.py:892] (1/4) Epoch 9, batch 1400, loss[loss=0.3807, simple_loss=0.4077, pruned_loss=0.1768, over 19462.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.2978, pruned_loss=0.09383, over 3945330.53 frames. 
], batch size: 396, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:19:10,714 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7807, 4.9732, 5.3646, 4.9594, 4.4403, 5.0720, 5.0084, 5.5870], + device='cuda:1'), covar=tensor([0.1072, 0.0321, 0.0327, 0.0287, 0.0481, 0.0380, 0.0340, 0.0213], + device='cuda:1'), in_proj_covar=tensor([0.0239, 0.0179, 0.0170, 0.0176, 0.0169, 0.0177, 0.0167, 0.0161], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 00:19:32,351 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-28 00:19:46,206 INFO [train.py:892] (1/4) Epoch 9, batch 1450, loss[loss=0.2283, simple_loss=0.2941, pruned_loss=0.08126, over 19666.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.2967, pruned_loss=0.09274, over 3946126.29 frames. ], batch size: 43, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:20:26,647 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.895e+02 4.802e+02 5.731e+02 6.907e+02 1.143e+03, threshold=1.146e+03, percent-clipped=0.0 +2023-03-28 00:21:40,557 INFO [train.py:892] (1/4) Epoch 9, batch 1500, loss[loss=0.2546, simple_loss=0.3201, pruned_loss=0.09453, over 19664.00 frames. ], tot_loss[loss=0.2428, simple_loss=0.298, pruned_loss=0.09383, over 3946760.94 frames. ], batch size: 55, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:22:17,046 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16357.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:22:45,733 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16371.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:23:23,584 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7779, 1.9655, 2.1045, 1.9543, 1.8552, 1.9955, 1.8961, 2.1625], + device='cuda:1'), covar=tensor([0.0243, 0.0222, 0.0217, 0.0195, 0.0293, 0.0249, 0.0331, 0.0210], + device='cuda:1'), in_proj_covar=tensor([0.0043, 0.0043, 0.0046, 0.0037, 0.0048, 0.0045, 0.0059, 0.0042], + device='cuda:1'), out_proj_covar=tensor([9.5850e-05, 9.5162e-05, 1.0115e-04, 8.3760e-05, 1.0687e-04, 9.9419e-05, + 1.2933e-04, 9.5789e-05], device='cuda:1') +2023-03-28 00:23:27,960 INFO [train.py:892] (1/4) Epoch 9, batch 1550, loss[loss=0.2705, simple_loss=0.3192, pruned_loss=0.111, over 19740.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.2963, pruned_loss=0.09298, over 3947636.25 frames. ], batch size: 77, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:24:05,934 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.814e+02 5.273e+02 6.072e+02 7.301e+02 1.702e+03, threshold=1.214e+03, percent-clipped=5.0 +2023-03-28 00:24:17,776 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-28 00:24:45,400 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.01 vs. limit=5.0 +2023-03-28 00:25:00,849 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16432.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:25:19,711 INFO [train.py:892] (1/4) Epoch 9, batch 1600, loss[loss=0.2274, simple_loss=0.2926, pruned_loss=0.08113, over 19902.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2967, pruned_loss=0.09302, over 3948643.98 frames. 
], batch size: 50, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:25:20,633 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16441.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:26:48,549 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6859, 4.1811, 4.2254, 4.6203, 4.3375, 4.7593, 4.7702, 4.8963], + device='cuda:1'), covar=tensor([0.0603, 0.0300, 0.0401, 0.0270, 0.0471, 0.0232, 0.0344, 0.0217], + device='cuda:1'), in_proj_covar=tensor([0.0122, 0.0141, 0.0167, 0.0139, 0.0136, 0.0119, 0.0129, 0.0156], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 00:27:15,099 INFO [train.py:892] (1/4) Epoch 9, batch 1650, loss[loss=0.2214, simple_loss=0.279, pruned_loss=0.08186, over 19773.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.2998, pruned_loss=0.09545, over 3947929.91 frames. ], batch size: 191, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:27:46,367 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16505.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:27:52,666 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.351e+02 5.128e+02 6.235e+02 7.643e+02 1.695e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-03-28 00:28:30,821 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-28 00:29:04,325 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16539.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:29:07,614 INFO [train.py:892] (1/4) Epoch 9, batch 1700, loss[loss=0.2129, simple_loss=0.2641, pruned_loss=0.08083, over 19752.00 frames. ], tot_loss[loss=0.2426, simple_loss=0.2973, pruned_loss=0.09396, over 3949149.10 frames. ], batch size: 129, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:29:37,891 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-28 00:30:46,927 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16587.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:30:53,426 INFO [train.py:892] (1/4) Epoch 9, batch 1750, loss[loss=0.2528, simple_loss=0.3145, pruned_loss=0.09549, over 19930.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2978, pruned_loss=0.0943, over 3948790.93 frames. ], batch size: 51, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:31:28,581 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.990e+02 5.355e+02 6.431e+02 7.521e+02 1.438e+03, threshold=1.286e+03, percent-clipped=3.0 +2023-03-28 00:31:47,018 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-28 00:32:24,325 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9583, 4.9528, 5.3824, 5.1416, 5.1615, 4.5670, 4.9866, 4.8196], + device='cuda:1'), covar=tensor([0.1160, 0.1080, 0.0970, 0.0983, 0.0807, 0.1157, 0.2306, 0.2169], + device='cuda:1'), in_proj_covar=tensor([0.0239, 0.0226, 0.0286, 0.0220, 0.0218, 0.0211, 0.0278, 0.0313], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 00:32:29,171 INFO [train.py:892] (1/4) Epoch 9, batch 1800, loss[loss=0.2172, simple_loss=0.2692, pruned_loss=0.08259, over 19812.00 frames. ], tot_loss[loss=0.244, simple_loss=0.2983, pruned_loss=0.09483, over 3949426.98 frames. 
], batch size: 167, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:32:57,773 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16657.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:33:05,742 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-28 00:33:39,185 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.9935, 2.2096, 2.3877, 2.0533, 1.9977, 2.0180, 2.0341, 2.1927], + device='cuda:1'), covar=tensor([0.0262, 0.0251, 0.0202, 0.0232, 0.0312, 0.0243, 0.0378, 0.0291], + device='cuda:1'), in_proj_covar=tensor([0.0043, 0.0043, 0.0046, 0.0038, 0.0049, 0.0046, 0.0059, 0.0043], + device='cuda:1'), out_proj_covar=tensor([9.6804e-05, 9.6658e-05, 1.0096e-04, 8.6199e-05, 1.0922e-04, 1.0260e-04, + 1.3010e-04, 9.7364e-05], device='cuda:1') +2023-03-28 00:33:58,409 INFO [train.py:892] (1/4) Epoch 9, batch 1850, loss[loss=0.2116, simple_loss=0.2737, pruned_loss=0.07472, over 19821.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.2996, pruned_loss=0.09428, over 3948216.78 frames. ], batch size: 57, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:35:08,023 INFO [train.py:892] (1/4) Epoch 10, batch 0, loss[loss=0.197, simple_loss=0.2609, pruned_loss=0.06657, over 19788.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2609, pruned_loss=0.06657, over 19788.00 frames. ], batch size: 79, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:35:08,024 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 00:35:42,738 INFO [train.py:926] (1/4) Epoch 10, validation: loss=0.1801, simple_loss=0.2601, pruned_loss=0.05003, over 2883724.00 frames. +2023-03-28 00:35:42,740 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 00:36:04,278 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16705.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:36:11,165 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.977e+02 5.093e+02 5.971e+02 7.550e+02 1.362e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-03-28 00:36:55,598 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16727.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:37:30,694 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16741.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:37:43,774 INFO [train.py:892] (1/4) Epoch 10, batch 50, loss[loss=0.2094, simple_loss=0.2684, pruned_loss=0.07521, over 19594.00 frames. ], tot_loss[loss=0.241, simple_loss=0.2949, pruned_loss=0.0935, over 890484.56 frames. ], batch size: 45, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:39:23,328 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16789.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:39:38,220 INFO [train.py:892] (1/4) Epoch 10, batch 100, loss[loss=0.2735, simple_loss=0.3295, pruned_loss=0.1088, over 19719.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.2971, pruned_loss=0.09511, over 1569392.04 frames. 
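
The lr column follows icefall's Eden schedule: it decays smoothly with the global batch count and also steps down at epoch boundaries, which is why it drops from 1.70e-02 to 1.61e-02 across the Epoch 9 to Epoch 10 turnover above. A sketch of the rule as I understand it (parameter values are recipe-specific and omitted here; see icefall's optim.py for the authoritative version):

    def eden_lr(base_lr, batch, epoch, lr_batches, lr_epochs):
        # Both factors are ~1.0 early in training and decay roughly as
        # the -0.25 power once batch/epoch pass their respective knees.
        batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
        return base_lr * batch_factor * epoch_factor
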
], batch size: 62, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:39:59,334 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16805.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:40:04,290 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.348e+02 6.393e+02 7.632e+02 1.172e+03, threshold=1.279e+03, percent-clipped=0.0 +2023-03-28 00:41:31,398 INFO [train.py:892] (1/4) Epoch 10, batch 150, loss[loss=0.4395, simple_loss=0.4468, pruned_loss=0.2161, over 19164.00 frames. ], tot_loss[loss=0.2447, simple_loss=0.299, pruned_loss=0.09515, over 2096653.31 frames. ], batch size: 452, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:41:49,413 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16853.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:42:23,088 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16868.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 00:43:24,901 INFO [train.py:892] (1/4) Epoch 10, batch 200, loss[loss=0.2473, simple_loss=0.3057, pruned_loss=0.09443, over 19746.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.2961, pruned_loss=0.09252, over 2507675.98 frames. ], batch size: 256, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:43:47,944 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:43:53,275 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.558e+02 5.040e+02 6.126e+02 7.288e+02 1.628e+03, threshold=1.225e+03, percent-clipped=1.0 +2023-03-28 00:44:41,652 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16929.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 00:44:53,679 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2776, 2.4415, 3.5146, 2.7451, 3.0574, 3.3136, 1.9271, 2.0996], + device='cuda:1'), covar=tensor([0.0694, 0.2403, 0.0499, 0.0643, 0.1234, 0.0674, 0.1575, 0.1950], + device='cuda:1'), in_proj_covar=tensor([0.0291, 0.0327, 0.0252, 0.0205, 0.0321, 0.0240, 0.0266, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 00:45:17,973 INFO [train.py:892] (1/4) Epoch 10, batch 250, loss[loss=0.2254, simple_loss=0.288, pruned_loss=0.08135, over 19465.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.2942, pruned_loss=0.09137, over 2827563.49 frames. ], batch size: 43, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:46:06,342 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16967.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 00:46:11,523 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3750, 4.0938, 4.2255, 3.9719, 4.3523, 3.1375, 3.6291, 2.3025], + device='cuda:1'), covar=tensor([0.0206, 0.0209, 0.0140, 0.0172, 0.0141, 0.0740, 0.0739, 0.1232], + device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0108, 0.0093, 0.0107, 0.0096, 0.0115, 0.0129, 0.0110], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 00:47:10,533 INFO [train.py:892] (1/4) Epoch 10, batch 300, loss[loss=0.2445, simple_loss=0.2947, pruned_loss=0.09717, over 19772.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.2947, pruned_loss=0.09188, over 3076541.61 frames. 
], batch size: 198, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:47:41,236 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 5.102e+02 6.617e+02 8.376e+02 1.348e+03, threshold=1.323e+03, percent-clipped=5.0 +2023-03-28 00:48:21,936 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17027.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:49:09,400 INFO [train.py:892] (1/4) Epoch 10, batch 350, loss[loss=0.2898, simple_loss=0.3343, pruned_loss=0.1227, over 19629.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.2953, pruned_loss=0.09222, over 3269559.36 frames. ], batch size: 351, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:49:30,738 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1496, 3.2948, 3.4474, 4.1948, 2.8998, 3.0094, 2.6371, 2.4389], + device='cuda:1'), covar=tensor([0.0372, 0.2702, 0.0851, 0.0209, 0.1910, 0.0767, 0.1217, 0.1864], + device='cuda:1'), in_proj_covar=tensor([0.0184, 0.0330, 0.0216, 0.0141, 0.0234, 0.0163, 0.0192, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 00:50:14,173 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17075.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:50:50,485 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3821, 3.1822, 4.5296, 3.8404, 4.2115, 4.4205, 4.4891, 4.0719], + device='cuda:1'), covar=tensor([0.0150, 0.0521, 0.0088, 0.0838, 0.0099, 0.0150, 0.0113, 0.0116], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0082, 0.0065, 0.0137, 0.0059, 0.0071, 0.0068, 0.0059], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 00:50:59,677 INFO [train.py:892] (1/4) Epoch 10, batch 400, loss[loss=0.225, simple_loss=0.2754, pruned_loss=0.08728, over 19728.00 frames. ], tot_loss[loss=0.237, simple_loss=0.2929, pruned_loss=0.0906, over 3421368.82 frames. ], batch size: 179, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:51:07,020 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0133, 3.8096, 3.8772, 3.7064, 4.0332, 2.9163, 3.3219, 2.0382], + device='cuda:1'), covar=tensor([0.0262, 0.0218, 0.0160, 0.0181, 0.0167, 0.0829, 0.0775, 0.1392], + device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0107, 0.0093, 0.0106, 0.0095, 0.0115, 0.0127, 0.0110], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 00:51:28,647 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.864e+02 6.028e+02 7.083e+02 1.226e+03, threshold=1.206e+03, percent-clipped=0.0 +2023-03-28 00:52:51,384 INFO [train.py:892] (1/4) Epoch 10, batch 450, loss[loss=0.2519, simple_loss=0.32, pruned_loss=0.09186, over 19703.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.2963, pruned_loss=0.0922, over 3538570.67 frames. ], batch size: 48, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:53:43,206 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-28 00:53:55,075 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.94 vs. 
limit=5.0 +2023-03-28 00:54:31,158 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17189.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:54:47,745 INFO [train.py:892] (1/4) Epoch 10, batch 500, loss[loss=0.4063, simple_loss=0.4238, pruned_loss=0.1944, over 19462.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2977, pruned_loss=0.09349, over 3629171.97 frames. ], batch size: 396, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:55:18,934 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.795e+02 5.158e+02 6.137e+02 7.953e+02 1.330e+03, threshold=1.227e+03, percent-clipped=1.0 +2023-03-28 00:55:33,435 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3457, 2.6294, 4.4229, 3.7079, 4.1229, 4.3005, 4.2807, 4.0966], + device='cuda:1'), covar=tensor([0.0124, 0.0682, 0.0079, 0.0798, 0.0088, 0.0154, 0.0108, 0.0089], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0082, 0.0065, 0.0136, 0.0059, 0.0071, 0.0068, 0.0058], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 00:55:50,591 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17224.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 00:56:39,659 INFO [train.py:892] (1/4) Epoch 10, batch 550, loss[loss=0.2276, simple_loss=0.3, pruned_loss=0.07759, over 19926.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.2972, pruned_loss=0.09233, over 3699442.85 frames. ], batch size: 51, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:56:49,524 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17250.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:57:16,234 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17262.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 00:58:13,688 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6274, 4.9025, 4.9388, 4.8639, 4.6254, 4.8656, 4.3466, 4.4143], + device='cuda:1'), covar=tensor([0.0396, 0.0353, 0.0460, 0.0388, 0.0489, 0.0526, 0.0591, 0.0884], + device='cuda:1'), in_proj_covar=tensor([0.0187, 0.0184, 0.0225, 0.0190, 0.0177, 0.0168, 0.0204, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 00:58:30,077 INFO [train.py:892] (1/4) Epoch 10, batch 600, loss[loss=0.2525, simple_loss=0.3103, pruned_loss=0.09738, over 19610.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.2956, pruned_loss=0.09135, over 3755512.95 frames. ], batch size: 48, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:59:00,059 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.688e+02 5.075e+02 6.633e+02 7.997e+02 1.370e+03, threshold=1.327e+03, percent-clipped=3.0 +2023-03-28 01:00:24,360 INFO [train.py:892] (1/4) Epoch 10, batch 650, loss[loss=0.2211, simple_loss=0.2738, pruned_loss=0.08421, over 19753.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.2951, pruned_loss=0.09128, over 3798812.64 frames. 
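
tot_loss is not the loss of the current batch: it is a decayed, frame-weighted aggregate over recent batches, which is why the "over N frames" count resets at the start of each epoch, climbs through it (3629171.97 at batch 500, 3699442.85 at batch 550, 3755512.95 at batch 600 above), and plateaus just under four million. A sketch of that bookkeeping (the decay constant is an assumption, chosen to be consistent with the observed plateau at roughly 200 batches' worth of frames):

    def update_tot_loss(loss_sum, frames, batch_loss, batch_frames,
                        decay=1.0 - 1.0 / 200):
        # The printed tot_loss is loss_sum / frames, and the printed
        # "over N frames" is the decayed frame count itself.
        loss_sum = loss_sum * decay + batch_loss * batch_frames
        frames = frames * decay + batch_frames
        return loss_sum, frames
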
], batch size: 129, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:01:01,085 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17362.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:01:50,379 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17383.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:02:19,151 INFO [train.py:892] (1/4) Epoch 10, batch 700, loss[loss=0.2017, simple_loss=0.262, pruned_loss=0.07069, over 19471.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2954, pruned_loss=0.09122, over 3832303.99 frames. ], batch size: 43, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:02:48,231 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.516e+02 5.181e+02 6.486e+02 7.594e+02 1.327e+03, threshold=1.297e+03, percent-clipped=1.0 +2023-03-28 01:02:55,476 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2158, 4.2863, 2.5450, 4.6371, 4.7950, 1.8736, 3.8865, 3.6000], + device='cuda:1'), covar=tensor([0.0584, 0.0695, 0.2521, 0.0563, 0.0313, 0.3005, 0.0930, 0.0643], + device='cuda:1'), in_proj_covar=tensor([0.0183, 0.0206, 0.0206, 0.0183, 0.0141, 0.0195, 0.0215, 0.0150], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 01:03:20,633 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17423.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:03:22,849 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1615, 2.9944, 3.3533, 2.4688, 3.6229, 2.8420, 2.8827, 3.5590], + device='cuda:1'), covar=tensor([0.0409, 0.0310, 0.0258, 0.0642, 0.0218, 0.0257, 0.0401, 0.0196], + device='cuda:1'), in_proj_covar=tensor([0.0053, 0.0053, 0.0056, 0.0083, 0.0053, 0.0050, 0.0048, 0.0043], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 01:04:06,851 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17444.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:04:11,118 INFO [train.py:892] (1/4) Epoch 10, batch 750, loss[loss=0.2189, simple_loss=0.2791, pruned_loss=0.0794, over 19737.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.293, pruned_loss=0.08996, over 3859320.28 frames. ], batch size: 118, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:04:14,656 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-28 01:06:02,488 INFO [train.py:892] (1/4) Epoch 10, batch 800, loss[loss=0.2739, simple_loss=0.3228, pruned_loss=0.1125, over 19665.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.2949, pruned_loss=0.09191, over 3879333.17 frames. ], batch size: 64, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:06:32,362 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 5.370e+02 6.448e+02 7.965e+02 2.022e+03, threshold=1.290e+03, percent-clipped=3.0 +2023-03-28 01:07:07,264 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17524.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 01:07:53,870 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17545.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:07:54,956 INFO [train.py:892] (1/4) Epoch 10, batch 850, loss[loss=0.3209, simple_loss=0.3564, pruned_loss=0.1427, over 19621.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.2951, pruned_loss=0.09196, over 3894409.41 frames. 
], batch size: 351, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:08:34,139 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17562.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:08:53,874 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17572.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:09:47,781 INFO [train.py:892] (1/4) Epoch 10, batch 900, loss[loss=0.229, simple_loss=0.2781, pruned_loss=0.08994, over 19818.00 frames. ], tot_loss[loss=0.2389, simple_loss=0.2947, pruned_loss=0.09158, over 3906180.82 frames. ], batch size: 148, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:10:17,487 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.484e+02 6.655e+02 8.099e+02 1.732e+03, threshold=1.331e+03, percent-clipped=3.0 +2023-03-28 01:10:20,852 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17610.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:11:25,809 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.7318, 6.0416, 6.0209, 5.9291, 5.6959, 5.9978, 5.2594, 5.3104], + device='cuda:1'), covar=tensor([0.0310, 0.0294, 0.0508, 0.0380, 0.0553, 0.0525, 0.0625, 0.0795], + device='cuda:1'), in_proj_covar=tensor([0.0185, 0.0184, 0.0227, 0.0190, 0.0180, 0.0171, 0.0205, 0.0241], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 01:11:39,739 INFO [train.py:892] (1/4) Epoch 10, batch 950, loss[loss=0.2403, simple_loss=0.297, pruned_loss=0.09179, over 19701.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.2946, pruned_loss=0.09128, over 3915900.93 frames. ], batch size: 85, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:11:54,997 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4203, 3.4688, 4.8104, 3.5600, 4.1945, 4.4088, 2.4604, 2.8494], + device='cuda:1'), covar=tensor([0.0536, 0.2196, 0.0304, 0.0571, 0.1069, 0.0509, 0.1552, 0.1653], + device='cuda:1'), in_proj_covar=tensor([0.0288, 0.0326, 0.0251, 0.0205, 0.0316, 0.0237, 0.0266, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 01:12:06,055 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-28 01:12:14,725 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6896, 2.6798, 2.8545, 2.2171, 2.7559, 2.5477, 2.7096, 3.0120], + device='cuda:1'), covar=tensor([0.0608, 0.0384, 0.0463, 0.0768, 0.0432, 0.0341, 0.0371, 0.0222], + device='cuda:1'), in_proj_covar=tensor([0.0056, 0.0056, 0.0059, 0.0087, 0.0056, 0.0053, 0.0051, 0.0046], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-28 01:12:43,886 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2071, 5.3289, 5.7362, 5.4842, 5.4353, 5.0481, 5.3411, 5.1931], + device='cuda:1'), covar=tensor([0.1192, 0.1059, 0.0791, 0.0931, 0.0642, 0.0905, 0.1909, 0.2006], + device='cuda:1'), in_proj_covar=tensor([0.0241, 0.0235, 0.0296, 0.0225, 0.0222, 0.0211, 0.0282, 0.0317], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 01:13:32,330 INFO [train.py:892] (1/4) Epoch 10, batch 1000, loss[loss=0.2694, simple_loss=0.3212, pruned_loss=0.1088, over 19762.00 frames. 
], tot_loss[loss=0.239, simple_loss=0.2947, pruned_loss=0.09164, over 3923334.57 frames. ], batch size: 256, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:13:41,774 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17700.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:14:02,570 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.177e+02 6.007e+02 7.029e+02 1.681e+03, threshold=1.201e+03, percent-clipped=2.0 +2023-03-28 01:14:13,650 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2501, 2.7300, 3.1992, 3.1350, 3.4693, 3.5003, 4.0583, 4.3475], + device='cuda:1'), covar=tensor([0.0496, 0.1698, 0.1266, 0.1767, 0.1615, 0.1284, 0.0427, 0.0411], + device='cuda:1'), in_proj_covar=tensor([0.0197, 0.0209, 0.0227, 0.0227, 0.0249, 0.0221, 0.0168, 0.0168], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 01:14:21,013 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17718.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:14:47,938 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17730.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:15:09,070 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17739.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 01:15:26,014 INFO [train.py:892] (1/4) Epoch 10, batch 1050, loss[loss=0.2105, simple_loss=0.2858, pruned_loss=0.06764, over 19890.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.2953, pruned_loss=0.09162, over 3928647.80 frames. ], batch size: 91, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:15:57,215 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17761.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:17:06,160 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17791.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:17:16,132 INFO [train.py:892] (1/4) Epoch 10, batch 1100, loss[loss=0.2557, simple_loss=0.3108, pruned_loss=0.1003, over 19771.00 frames. ], tot_loss[loss=0.24, simple_loss=0.2963, pruned_loss=0.09188, over 3932942.92 frames. ], batch size: 280, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:17:26,301 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-28 01:17:45,688 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.952e+02 5.448e+02 6.504e+02 7.690e+02 1.433e+03, threshold=1.301e+03, percent-clipped=1.0 +2023-03-28 01:19:10,701 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17845.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:19:11,946 INFO [train.py:892] (1/4) Epoch 10, batch 1150, loss[loss=0.2516, simple_loss=0.3028, pruned_loss=0.1002, over 19872.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2963, pruned_loss=0.09207, over 3935440.42 frames. ], batch size: 138, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:19:42,944 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.10 vs. 
limit=5.0 +2023-03-28 01:20:48,227 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7303, 2.7443, 3.7364, 3.3334, 3.5475, 3.8116, 3.6819, 3.6213], + device='cuda:1'), covar=tensor([0.0162, 0.0562, 0.0088, 0.0577, 0.0099, 0.0147, 0.0129, 0.0101], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0086, 0.0067, 0.0140, 0.0061, 0.0073, 0.0070, 0.0061], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 01:20:58,762 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17893.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:20:59,506 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-28 01:21:06,393 INFO [train.py:892] (1/4) Epoch 10, batch 1200, loss[loss=0.209, simple_loss=0.2613, pruned_loss=0.07839, over 19877.00 frames. ], tot_loss[loss=0.24, simple_loss=0.2962, pruned_loss=0.09195, over 3938506.52 frames. ], batch size: 139, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:21:34,282 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.688e+02 5.021e+02 6.012e+02 7.162e+02 1.373e+03, threshold=1.202e+03, percent-clipped=1.0 +2023-03-28 01:22:56,654 INFO [train.py:892] (1/4) Epoch 10, batch 1250, loss[loss=0.2027, simple_loss=0.255, pruned_loss=0.07515, over 19884.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.2953, pruned_loss=0.09181, over 3940620.69 frames. ], batch size: 77, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:23:05,736 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5033, 4.6020, 5.0130, 4.5569, 4.1532, 4.7572, 4.6165, 5.1051], + device='cuda:1'), covar=tensor([0.0856, 0.0265, 0.0285, 0.0279, 0.0656, 0.0373, 0.0348, 0.0293], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0184, 0.0179, 0.0187, 0.0180, 0.0184, 0.0176, 0.0169], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 01:23:30,821 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17960.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:24:49,222 INFO [train.py:892] (1/4) Epoch 10, batch 1300, loss[loss=0.226, simple_loss=0.2872, pruned_loss=0.08242, over 19794.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.2944, pruned_loss=0.09102, over 3942895.32 frames. ], batch size: 105, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:25:24,401 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.596e+02 5.072e+02 5.807e+02 7.353e+02 1.557e+03, threshold=1.161e+03, percent-clipped=4.0 +2023-03-28 01:25:44,703 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18018.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:25:50,478 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18021.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:26:34,315 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18039.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 01:26:47,444 INFO [train.py:892] (1/4) Epoch 10, batch 1350, loss[loss=0.2084, simple_loss=0.2696, pruned_loss=0.07363, over 19945.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.2941, pruned_loss=0.09032, over 3944433.23 frames. 
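Annotation. The "zipformer.py:625" records track stochastic layer dropout: each encoder stack has its own warmup window, given in training batches, inside which whole layers are dropped with elevated probability, and a small residual drop probability remains afterwards (hence the occasional num_to_drop=1 at batch_count far past warmup_end). A hedged sketch; the probability values here are illustrative assumptions, not the recipe's actual constants:

import random

def pick_layers_to_drop(num_layers: int, batch_count: float,
                        warmup_begin: float, warmup_end: float,
                        p_warmup: float = 0.5, p_after: float = 0.075):
    # Higher drop probability inside the stack's warmup window.
    p = p_warmup if warmup_begin <= batch_count < warmup_end else p_after
    layers_to_drop = {i for i in range(num_layers) if random.random() < p}
    return len(layers_to_drop), layers_to_drop

num_to_drop, layers = pick_layers_to_drop(4, 17893.0, 666.7, 1333.3)
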
], batch size: 46, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:27:11,959 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18056.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:27:15,661 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3548, 2.2748, 2.3877, 1.9314, 2.4511, 2.0381, 2.3036, 2.5963], + device='cuda:1'), covar=tensor([0.0389, 0.0360, 0.0417, 0.0715, 0.0304, 0.0354, 0.0350, 0.0177], + device='cuda:1'), in_proj_covar=tensor([0.0055, 0.0056, 0.0058, 0.0086, 0.0055, 0.0053, 0.0050, 0.0046], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-28 01:27:33,202 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18066.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:27:39,995 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.75 vs. limit=5.0 +2023-03-28 01:27:52,718 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2966, 4.6163, 4.6591, 4.6137, 4.3220, 4.6120, 4.0884, 4.1712], + device='cuda:1'), covar=tensor([0.0478, 0.0467, 0.0581, 0.0442, 0.0614, 0.0585, 0.0689, 0.0916], + device='cuda:1'), in_proj_covar=tensor([0.0185, 0.0186, 0.0226, 0.0188, 0.0180, 0.0170, 0.0202, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 01:28:19,539 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18086.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:28:21,749 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18087.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:28:36,930 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7945, 2.9900, 2.2685, 1.9735, 2.6427, 3.0657, 2.8441, 2.8321], + device='cuda:1'), covar=tensor([0.0242, 0.0227, 0.0245, 0.0540, 0.0288, 0.0173, 0.0136, 0.0215], + device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0053, 0.0060, 0.0072, 0.0073, 0.0051, 0.0045, 0.0047], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 01:28:41,058 INFO [train.py:892] (1/4) Epoch 10, batch 1400, loss[loss=0.3836, simple_loss=0.425, pruned_loss=0.1711, over 19272.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.2952, pruned_loss=0.09087, over 3944864.00 frames. ], batch size: 483, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:29:09,617 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.393e+02 4.780e+02 5.656e+02 7.029e+02 1.296e+03, threshold=1.131e+03, percent-clipped=2.0 +2023-03-28 01:30:10,275 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1998, 3.2234, 4.7628, 3.4526, 3.9938, 4.1483, 2.2106, 2.6709], + device='cuda:1'), covar=tensor([0.0668, 0.2621, 0.0293, 0.0617, 0.1171, 0.0636, 0.1716, 0.1953], + device='cuda:1'), in_proj_covar=tensor([0.0292, 0.0327, 0.0256, 0.0207, 0.0317, 0.0243, 0.0270, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 01:30:37,312 INFO [train.py:892] (1/4) Epoch 10, batch 1450, loss[loss=0.2403, simple_loss=0.306, pruned_loss=0.08732, over 19760.00 frames. ], tot_loss[loss=0.236, simple_loss=0.2934, pruned_loss=0.08933, over 3946460.98 frames. 
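Annotation. The lr field decays smoothly within the epoch (1.58e-02 down to 1.55e-02 by this point) and steps down again at each epoch boundary. A reconstruction under the assumption of an Eden-style schedule with this run's settings (base_lr=0.05, lr_batches=5000, lr_epochs=3.5): it reproduces the printed values, so treat it as a consistent model of the log rather than the authoritative scheduler code.

def eden_lr(base_lr: float, batch: float, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Inverse-quartic decay in both the batch count and the finished epochs.
    batch_factor = ((batch / lr_batches) ** 2 + 1.0) ** -0.25
    epoch_factor = ((epoch / lr_epochs) ** 2 + 1.0) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(f"{eden_lr(0.05, 18100, 9):.2e}")   # ~1.55e-02, epoch 10 as above
print(f"{eden_lr(0.05, 18600, 10):.2e}")  # ~1.46e-02, epoch 11 below
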
], batch size: 49, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:31:45,230 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18174.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:31:47,569 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18175.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:32:31,755 INFO [train.py:892] (1/4) Epoch 10, batch 1500, loss[loss=0.223, simple_loss=0.2734, pruned_loss=0.08628, over 19773.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.2939, pruned_loss=0.0899, over 3945129.42 frames. ], batch size: 169, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:32:40,404 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.27 vs. limit=5.0 +2023-03-28 01:33:02,753 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 5.080e+02 6.023e+02 7.472e+02 1.411e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-03-28 01:33:24,404 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8179, 2.0288, 1.7340, 1.2470, 1.9353, 2.0035, 1.9252, 1.9721], + device='cuda:1'), covar=tensor([0.0257, 0.0202, 0.0229, 0.0469, 0.0336, 0.0187, 0.0151, 0.0157], + device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0053, 0.0060, 0.0072, 0.0073, 0.0051, 0.0045, 0.0048], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 01:34:02,773 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18235.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:34:04,844 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18236.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:34:27,239 INFO [train.py:892] (1/4) Epoch 10, batch 1550, loss[loss=0.2395, simple_loss=0.3073, pruned_loss=0.08583, over 19617.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.2939, pruned_loss=0.08952, over 3944616.33 frames. ], batch size: 65, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:36:22,677 INFO [train.py:892] (1/4) Epoch 10, batch 1600, loss[loss=0.2336, simple_loss=0.2888, pruned_loss=0.08923, over 19722.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2932, pruned_loss=0.08911, over 3944939.78 frames. ], batch size: 219, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:36:32,752 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-28 01:36:49,439 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.277e+02 5.238e+02 6.120e+02 7.230e+02 1.238e+03, threshold=1.224e+03, percent-clipped=2.0 +2023-03-28 01:37:03,865 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18316.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:37:54,944 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-28 01:38:12,210 INFO [train.py:892] (1/4) Epoch 10, batch 1650, loss[loss=0.3312, simple_loss=0.3711, pruned_loss=0.1457, over 19565.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.295, pruned_loss=0.0908, over 3945335.84 frames. 
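Annotation. The "scaling.py:679" records compare a whitening metric against a fixed limit: the metric measures how anisotropic the channel covariance of a module's activations is (1.0 for a perfectly white covariance, larger as a few directions dominate), and a corrective gradient is applied only when it exceeds the limit. A hedged sketch of one metric with those properties; the exact formulation inside the recipe may differ:

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    # x: (num_frames, num_channels); channels are split into groups.
    n, c = x.shape
    d = c // num_groups
    x = x.reshape(n, num_groups, d).transpose(0, 1)   # (groups, n, d)
    cov = torch.matmul(x.transpose(1, 2), x) / n      # per-group covariance
    trace_cov = cov.diagonal(dim1=1, dim2=2).sum(-1)  # trace(C)
    trace_cov2 = (cov * cov).sum(dim=(1, 2))          # trace(C @ C), C symmetric
    # Equals mean squared eigenvalue / squared mean eigenvalue: 1.0 iff C = s*I.
    return (trace_cov2 * d / trace_cov.pow(2)).mean().item()

print(whitening_metric(torch.randn(2000, 96), num_groups=8))  # close to 1.0
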
], batch size: 376, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:38:36,392 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18356.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:39:43,902 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18386.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:40:06,502 INFO [train.py:892] (1/4) Epoch 10, batch 1700, loss[loss=0.2517, simple_loss=0.3176, pruned_loss=0.09288, over 19823.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.2967, pruned_loss=0.09151, over 3946087.58 frames. ], batch size: 57, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:40:24,866 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18404.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:40:35,310 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 5.263e+02 6.960e+02 8.795e+02 1.250e+03, threshold=1.392e+03, percent-clipped=4.0 +2023-03-28 01:41:30,308 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18434.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:41:54,404 INFO [train.py:892] (1/4) Epoch 10, batch 1750, loss[loss=0.2546, simple_loss=0.3089, pruned_loss=0.1001, over 19825.00 frames. ], tot_loss[loss=0.238, simple_loss=0.295, pruned_loss=0.09052, over 3948295.75 frames. ], batch size: 204, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:43:20,496 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18490.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:43:30,609 INFO [train.py:892] (1/4) Epoch 10, batch 1800, loss[loss=0.2301, simple_loss=0.2885, pruned_loss=0.08589, over 19771.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.2952, pruned_loss=0.0906, over 3947367.50 frames. ], batch size: 253, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:43:51,993 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18508.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:43:53,006 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.190e+02 4.885e+02 5.842e+02 7.130e+02 1.277e+03, threshold=1.168e+03, percent-clipped=0.0 +2023-03-28 01:44:29,395 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2743, 3.4869, 1.8928, 4.3405, 3.7040, 4.3241, 4.3017, 3.1807], + device='cuda:1'), covar=tensor([0.0602, 0.0471, 0.1528, 0.0347, 0.0436, 0.0276, 0.0433, 0.0727], + device='cuda:1'), in_proj_covar=tensor([0.0119, 0.0111, 0.0129, 0.0115, 0.0103, 0.0096, 0.0110, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 01:44:30,981 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18530.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:44:32,818 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18531.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:44:58,717 INFO [train.py:892] (1/4) Epoch 10, batch 1850, loss[loss=0.2463, simple_loss=0.308, pruned_loss=0.0923, over 19669.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.2954, pruned_loss=0.08943, over 3947699.71 frames. ], batch size: 55, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:46:02,463 INFO [train.py:892] (1/4) Epoch 11, batch 0, loss[loss=0.2156, simple_loss=0.2717, pruned_loss=0.07977, over 19730.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2717, pruned_loss=0.07977, over 19730.00 frames. 
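Annotation. At batch 0 of each epoch the loop pauses to compute a validation loss over the full dev set (the same 2883724 frames every time) and reports the peak CUDA allocation, as in the Epoch 11 records just below. A minimal sketch of that step; compute_loss and valid_dl stand in for the recipe's own helpers:

import torch

def report_validation(model, valid_dl, compute_loss, device):
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_dl:
            loss, num_frames = compute_loss(model, batch)  # placeholder helper
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    max_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    return tot_loss / tot_frames, max_mb
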
], batch size: 63, lr: 1.47e-02, grad_scale: 16.0 +2023-03-28 01:46:02,464 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 01:46:36,864 INFO [train.py:926] (1/4) Epoch 11, validation: loss=0.1783, simple_loss=0.2585, pruned_loss=0.04909, over 2883724.00 frames. +2023-03-28 01:46:36,866 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 01:46:37,966 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18551.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:47:18,699 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18569.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 01:48:31,680 INFO [train.py:892] (1/4) Epoch 11, batch 50, loss[loss=0.2184, simple_loss=0.2781, pruned_loss=0.07935, over 19842.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2819, pruned_loss=0.0826, over 891899.76 frames. ], batch size: 56, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:48:47,839 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 5.161e+02 6.219e+02 7.148e+02 1.502e+03, threshold=1.244e+03, percent-clipped=1.0 +2023-03-28 01:48:49,116 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9945, 2.9376, 2.6615, 2.1508, 2.6344, 3.0811, 2.8857, 2.8925], + device='cuda:1'), covar=tensor([0.0171, 0.0266, 0.0239, 0.0547, 0.0330, 0.0296, 0.0159, 0.0189], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0055, 0.0062, 0.0075, 0.0075, 0.0053, 0.0046, 0.0050], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 01:49:05,491 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18616.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:50:21,249 INFO [train.py:892] (1/4) Epoch 11, batch 100, loss[loss=0.1873, simple_loss=0.2493, pruned_loss=0.06261, over 19946.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.284, pruned_loss=0.08243, over 1570911.13 frames. ], batch size: 46, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:50:22,353 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2617, 4.8434, 4.7850, 5.3039, 4.8671, 5.4698, 5.3674, 5.5536], + device='cuda:1'), covar=tensor([0.0559, 0.0275, 0.0371, 0.0214, 0.0496, 0.0201, 0.0301, 0.0223], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0148, 0.0176, 0.0142, 0.0145, 0.0126, 0.0133, 0.0166], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 01:50:29,497 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-28 01:50:49,852 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18664.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:50:50,660 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-03-28 01:51:14,196 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3648, 2.2165, 2.5855, 2.4978, 2.7784, 2.8125, 3.1762, 3.4866], + device='cuda:1'), covar=tensor([0.0539, 0.1562, 0.1432, 0.1753, 0.1400, 0.1214, 0.0495, 0.0441], + device='cuda:1'), in_proj_covar=tensor([0.0198, 0.0211, 0.0228, 0.0227, 0.0253, 0.0222, 0.0171, 0.0174], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 01:51:16,094 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18674.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:52:13,683 INFO [train.py:892] (1/4) Epoch 11, batch 150, loss[loss=0.2264, simple_loss=0.2819, pruned_loss=0.08548, over 19857.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.287, pruned_loss=0.08563, over 2097824.79 frames. ], batch size: 197, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:52:35,336 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 5.324e+02 6.674e+02 8.055e+02 1.622e+03, threshold=1.335e+03, percent-clipped=4.0 +2023-03-28 01:53:22,632 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6214, 3.6854, 2.2670, 3.9940, 4.1026, 1.7446, 3.3359, 3.1239], + device='cuda:1'), covar=tensor([0.0733, 0.0875, 0.2411, 0.0604, 0.0295, 0.2980, 0.1060, 0.0618], + device='cuda:1'), in_proj_covar=tensor([0.0188, 0.0212, 0.0206, 0.0193, 0.0149, 0.0196, 0.0219, 0.0152], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 01:53:31,438 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18735.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:54:09,397 INFO [train.py:892] (1/4) Epoch 11, batch 200, loss[loss=0.2369, simple_loss=0.2833, pruned_loss=0.0953, over 19740.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2884, pruned_loss=0.08608, over 2508997.31 frames. ], batch size: 140, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:55:42,444 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3792, 4.4900, 4.8699, 4.4284, 4.0439, 4.6866, 4.4704, 4.9804], + device='cuda:1'), covar=tensor([0.1006, 0.0308, 0.0356, 0.0369, 0.0794, 0.0377, 0.0374, 0.0293], + device='cuda:1'), in_proj_covar=tensor([0.0248, 0.0187, 0.0180, 0.0192, 0.0181, 0.0188, 0.0180, 0.0170], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 01:56:02,276 INFO [train.py:892] (1/4) Epoch 11, batch 250, loss[loss=0.2125, simple_loss=0.2782, pruned_loss=0.07339, over 19792.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.288, pruned_loss=0.08556, over 2828514.48 frames. 
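Annotation. The "zipformer.py:1454" records dump diagnostic tensors, among them attn_weights_entropy: the entropy of each attention head's distribution over keys, averaged across query positions, so values near log(seq_len) mean nearly uniform attention and small values mean sharply focused heads. A hedged sketch of that quantity; the (heads, queries, keys) layout is an assumption, not necessarily the module's internal shape:

import torch

def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    # attn_weights: (num_heads, num_queries, num_keys), rows sum to 1.
    p = attn_weights.clamp(min=1e-20)
    return -(p * p.log()).sum(dim=-1).mean(dim=-1)  # one value per head

w = torch.softmax(torch.randn(8, 50, 50), dim=-1)
print(attn_weights_entropy(w))  # a bit below log(50) ~ 3.9 for random logits
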
], batch size: 73, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:56:18,967 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 4.641e+02 5.456e+02 6.612e+02 1.459e+03, threshold=1.091e+03, percent-clipped=1.0 +2023-03-28 01:57:08,215 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18830.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:57:10,194 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18831.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:57:44,359 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18846.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:57:56,656 INFO [train.py:892] (1/4) Epoch 11, batch 300, loss[loss=0.2357, simple_loss=0.2973, pruned_loss=0.08706, over 19645.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.2881, pruned_loss=0.08505, over 3076086.99 frames. ], batch size: 66, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:58:25,302 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18864.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:58:56,602 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18878.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:58:58,534 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18879.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:59:31,264 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3140, 4.5288, 4.5260, 4.4678, 4.2523, 4.4815, 4.0077, 4.0350], + device='cuda:1'), covar=tensor([0.0422, 0.0424, 0.0583, 0.0463, 0.0610, 0.0630, 0.0645, 0.1021], + device='cuda:1'), in_proj_covar=tensor([0.0191, 0.0191, 0.0231, 0.0194, 0.0182, 0.0176, 0.0207, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 01:59:31,509 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2508, 3.3355, 2.1251, 3.5049, 3.5893, 1.6396, 2.9479, 2.8172], + device='cuda:1'), covar=tensor([0.0652, 0.0781, 0.2348, 0.0579, 0.0339, 0.2693, 0.1013, 0.0625], + device='cuda:1'), in_proj_covar=tensor([0.0189, 0.0212, 0.0206, 0.0193, 0.0150, 0.0196, 0.0220, 0.0154], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 01:59:48,636 INFO [train.py:892] (1/4) Epoch 11, batch 350, loss[loss=0.2143, simple_loss=0.2763, pruned_loss=0.07609, over 19750.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.2888, pruned_loss=0.08607, over 3270921.88 frames. ], batch size: 84, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:00:05,180 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 5.242e+02 6.180e+02 7.064e+02 1.171e+03, threshold=1.236e+03, percent-clipped=1.0 +2023-03-28 02:00:54,884 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18930.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:01:14,037 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.83 vs. 
limit=2.0 +2023-03-28 02:01:24,103 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9832, 3.1541, 1.7043, 3.8691, 3.5110, 3.8974, 3.9419, 2.8439], + device='cuda:1'), covar=tensor([0.0631, 0.0525, 0.1555, 0.0448, 0.0459, 0.0319, 0.0405, 0.0762], + device='cuda:1'), in_proj_covar=tensor([0.0119, 0.0112, 0.0127, 0.0116, 0.0102, 0.0096, 0.0113, 0.0120], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 02:01:38,289 INFO [train.py:892] (1/4) Epoch 11, batch 400, loss[loss=0.2488, simple_loss=0.3071, pruned_loss=0.0952, over 19772.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.2882, pruned_loss=0.08568, over 3421840.76 frames. ], batch size: 263, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:03:08,368 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18991.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:03:30,259 INFO [train.py:892] (1/4) Epoch 11, batch 450, loss[loss=0.193, simple_loss=0.2581, pruned_loss=0.0639, over 19709.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2882, pruned_loss=0.0856, over 3539567.81 frames. ], batch size: 85, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:03:47,205 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 4.905e+02 5.923e+02 7.329e+02 1.154e+03, threshold=1.185e+03, percent-clipped=0.0 +2023-03-28 02:04:35,149 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19030.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:05:18,657 INFO [train.py:892] (1/4) Epoch 11, batch 500, loss[loss=0.2377, simple_loss=0.3002, pruned_loss=0.0876, over 19715.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.2868, pruned_loss=0.08507, over 3630852.02 frames. ], batch size: 78, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:07:11,286 INFO [train.py:892] (1/4) Epoch 11, batch 550, loss[loss=0.2389, simple_loss=0.3083, pruned_loss=0.08472, over 19764.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2877, pruned_loss=0.08549, over 3701482.93 frames. ], batch size: 88, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:07:27,680 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 4.889e+02 6.159e+02 7.664e+02 1.397e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-03-28 02:08:08,160 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.78 vs. limit=5.0 +2023-03-28 02:08:16,680 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19129.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 02:08:51,802 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19146.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:08:58,514 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3900, 2.4256, 2.7028, 2.4105, 2.5376, 2.2047, 2.1568, 2.8304], + device='cuda:1'), covar=tensor([0.0178, 0.0256, 0.0192, 0.0222, 0.0228, 0.0368, 0.0383, 0.0231], + device='cuda:1'), in_proj_covar=tensor([0.0047, 0.0046, 0.0049, 0.0041, 0.0051, 0.0049, 0.0064, 0.0044], + device='cuda:1'), out_proj_covar=tensor([1.0520e-04, 1.0255e-04, 1.0837e-04, 9.1837e-05, 1.1541e-04, 1.1006e-04, + 1.4013e-04, 9.9444e-05], device='cuda:1') +2023-03-28 02:09:03,308 INFO [train.py:892] (1/4) Epoch 11, batch 600, loss[loss=0.2253, simple_loss=0.2836, pruned_loss=0.0835, over 19799.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.2868, pruned_loss=0.08507, over 3758211.85 frames. 
], batch size: 107, lr: 1.44e-02, grad_scale: 32.0 +2023-03-28 02:09:34,418 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19164.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:10:33,417 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19190.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 02:10:41,232 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19194.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:10:55,602 INFO [train.py:892] (1/4) Epoch 11, batch 650, loss[loss=0.314, simple_loss=0.379, pruned_loss=0.1245, over 18974.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2886, pruned_loss=0.08603, over 3798196.89 frames. ], batch size: 514, lr: 1.44e-02, grad_scale: 32.0 +2023-03-28 02:11:13,825 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.235e+02 5.164e+02 6.078e+02 7.650e+02 1.184e+03, threshold=1.216e+03, percent-clipped=0.0 +2023-03-28 02:11:21,850 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19212.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 02:11:51,078 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1852, 2.5367, 2.0356, 1.6671, 2.2127, 2.5978, 2.4870, 2.3647], + device='cuda:1'), covar=tensor([0.0241, 0.0228, 0.0272, 0.0545, 0.0381, 0.0203, 0.0153, 0.0216], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0056, 0.0064, 0.0076, 0.0076, 0.0053, 0.0047, 0.0050], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 02:12:45,860 INFO [train.py:892] (1/4) Epoch 11, batch 700, loss[loss=0.2263, simple_loss=0.2882, pruned_loss=0.0822, over 19764.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.2878, pruned_loss=0.08546, over 3832461.86 frames. ], batch size: 244, lr: 1.44e-02, grad_scale: 32.0 +2023-03-28 02:14:07,840 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19286.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:14:41,799 INFO [train.py:892] (1/4) Epoch 11, batch 750, loss[loss=0.2316, simple_loss=0.2753, pruned_loss=0.09399, over 19760.00 frames. ], tot_loss[loss=0.231, simple_loss=0.2891, pruned_loss=0.08644, over 3857408.64 frames. ], batch size: 155, lr: 1.44e-02, grad_scale: 16.0 +2023-03-28 02:14:51,306 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1369, 3.8935, 3.8696, 4.2257, 4.0733, 4.3321, 4.1961, 4.2423], + device='cuda:1'), covar=tensor([0.0967, 0.0534, 0.0661, 0.0400, 0.0668, 0.0457, 0.0622, 0.0884], + device='cuda:1'), in_proj_covar=tensor([0.0123, 0.0140, 0.0164, 0.0136, 0.0139, 0.0120, 0.0127, 0.0157], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 02:15:01,328 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.572e+02 4.867e+02 5.905e+02 6.947e+02 1.334e+03, threshold=1.181e+03, percent-clipped=1.0 +2023-03-28 02:15:47,108 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19330.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:16:33,340 INFO [train.py:892] (1/4) Epoch 11, batch 800, loss[loss=0.1972, simple_loss=0.2659, pruned_loss=0.06431, over 19844.00 frames. ], tot_loss[loss=0.2333, simple_loss=0.2911, pruned_loss=0.08769, over 3877330.89 frames. 
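Annotation. The grad_scale field is fp16 dynamic loss scaling: an AMP-style scaler doubles the scale after a long run of overflow-free steps and halves it when an overflow is detected, which is why the value steps up to 32.0 and back down to 16.0 across the records above. A hedged sketch using plain torch.cuda.amp; the recipe wraps this in its own training loop:

import torch

scaler = torch.cuda.amp.GradScaler(init_scale=16.0)

# Typical step (model, optimizer and compute_loss are placeholders):
#   with torch.cuda.amp.autocast():
#       loss = compute_loss(model, batch)
#   scaler.scale(loss).backward()
#   scaler.step(optimizer)
#   scaler.update()                  # grows or shrinks the scale
#   grad_scale = scaler.get_scale()  # the value printed in these records
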
], batch size: 85, lr: 1.44e-02, grad_scale: 16.0 +2023-03-28 02:17:24,840 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1138, 2.1955, 3.2454, 2.7409, 3.1296, 3.2662, 3.1475, 3.1447], + device='cuda:1'), covar=tensor([0.0243, 0.0778, 0.0093, 0.0532, 0.0104, 0.0197, 0.0161, 0.0142], + device='cuda:1'), in_proj_covar=tensor([0.0076, 0.0089, 0.0070, 0.0143, 0.0063, 0.0077, 0.0073, 0.0064], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 02:17:37,520 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19378.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:18:12,997 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19394.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:18:27,077 INFO [train.py:892] (1/4) Epoch 11, batch 850, loss[loss=0.2142, simple_loss=0.2739, pruned_loss=0.07725, over 19692.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.29, pruned_loss=0.08723, over 3893851.36 frames. ], batch size: 46, lr: 1.44e-02, grad_scale: 16.0 +2023-03-28 02:18:49,373 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.769e+02 4.779e+02 5.853e+02 6.766e+02 1.621e+03, threshold=1.171e+03, percent-clipped=3.0 +2023-03-28 02:20:21,667 INFO [train.py:892] (1/4) Epoch 11, batch 900, loss[loss=0.2577, simple_loss=0.3109, pruned_loss=0.1023, over 19749.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.2878, pruned_loss=0.08585, over 3906575.02 frames. ], batch size: 259, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:20:30,575 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19455.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:21:39,569 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19485.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:22:13,035 INFO [train.py:892] (1/4) Epoch 11, batch 950, loss[loss=0.1979, simple_loss=0.2552, pruned_loss=0.07027, over 19844.00 frames. ], tot_loss[loss=0.231, simple_loss=0.2894, pruned_loss=0.08633, over 3915782.84 frames. ], batch size: 142, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:22:30,342 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.330e+02 4.753e+02 5.819e+02 7.309e+02 1.447e+03, threshold=1.164e+03, percent-clipped=3.0 +2023-03-28 02:23:14,983 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2678, 3.4743, 3.7695, 3.4407, 3.3146, 3.7134, 3.5202, 3.8374], + device='cuda:1'), covar=tensor([0.1305, 0.0398, 0.0478, 0.0455, 0.1301, 0.0509, 0.0433, 0.0432], + device='cuda:1'), in_proj_covar=tensor([0.0248, 0.0186, 0.0182, 0.0191, 0.0184, 0.0187, 0.0183, 0.0172], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 02:24:02,923 INFO [train.py:892] (1/4) Epoch 11, batch 1000, loss[loss=0.2416, simple_loss=0.2949, pruned_loss=0.09408, over 19755.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2888, pruned_loss=0.08587, over 3922511.18 frames. 
], batch size: 253, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:24:31,387 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3685, 2.7763, 4.4380, 3.6459, 3.9957, 4.3361, 4.3172, 4.2052], + device='cuda:1'), covar=tensor([0.0162, 0.0687, 0.0090, 0.0858, 0.0118, 0.0165, 0.0106, 0.0087], + device='cuda:1'), in_proj_covar=tensor([0.0076, 0.0088, 0.0069, 0.0140, 0.0062, 0.0076, 0.0072, 0.0063], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 02:25:24,897 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19586.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:25:55,186 INFO [train.py:892] (1/4) Epoch 11, batch 1050, loss[loss=0.202, simple_loss=0.2762, pruned_loss=0.06386, over 19858.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.2893, pruned_loss=0.08629, over 3929143.67 frames. ], batch size: 81, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:26:15,113 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.901e+02 5.121e+02 6.164e+02 7.352e+02 1.446e+03, threshold=1.233e+03, percent-clipped=2.0 +2023-03-28 02:26:35,429 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19618.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:26:50,143 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9310, 4.9762, 5.3975, 5.0692, 5.0087, 4.6706, 5.0510, 4.8108], + device='cuda:1'), covar=tensor([0.1171, 0.1139, 0.0759, 0.0943, 0.0797, 0.0856, 0.1857, 0.1855], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0240, 0.0299, 0.0231, 0.0228, 0.0216, 0.0285, 0.0319], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 02:27:13,309 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19634.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:27:49,218 INFO [train.py:892] (1/4) Epoch 11, batch 1100, loss[loss=0.2493, simple_loss=0.3209, pruned_loss=0.08881, over 19533.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.2889, pruned_loss=0.08596, over 3933443.52 frames. 
], batch size: 54, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:28:10,351 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0159, 3.1252, 1.9282, 3.2453, 3.3716, 1.4464, 2.7064, 2.5413], + device='cuda:1'), covar=tensor([0.0772, 0.0840, 0.2654, 0.0652, 0.0400, 0.2710, 0.1154, 0.0774], + device='cuda:1'), in_proj_covar=tensor([0.0190, 0.0212, 0.0210, 0.0197, 0.0155, 0.0196, 0.0219, 0.0155], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 02:28:53,879 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19679.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:28:57,805 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.9826, 2.0222, 2.2443, 2.1450, 1.9071, 2.0751, 1.8397, 2.0883], + device='cuda:1'), covar=tensor([0.0227, 0.0304, 0.0220, 0.0214, 0.0316, 0.0256, 0.0465, 0.0281], + device='cuda:1'), in_proj_covar=tensor([0.0046, 0.0045, 0.0048, 0.0041, 0.0051, 0.0048, 0.0063, 0.0044], + device='cuda:1'), out_proj_covar=tensor([1.0333e-04, 1.0117e-04, 1.0628e-04, 9.1856e-05, 1.1468e-04, 1.0841e-04, + 1.3847e-04, 9.9892e-05], device='cuda:1') +2023-03-28 02:29:11,192 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2801, 3.5698, 3.6398, 4.5377, 2.8087, 3.2204, 2.8831, 2.4935], + device='cuda:1'), covar=tensor([0.0428, 0.2151, 0.0837, 0.0192, 0.2006, 0.0842, 0.1088, 0.1841], + device='cuda:1'), in_proj_covar=tensor([0.0196, 0.0330, 0.0227, 0.0146, 0.0238, 0.0173, 0.0197, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 02:29:42,841 INFO [train.py:892] (1/4) Epoch 11, batch 1150, loss[loss=0.3458, simple_loss=0.3998, pruned_loss=0.1459, over 19247.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2888, pruned_loss=0.08558, over 3936513.22 frames. ], batch size: 483, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:30:05,127 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.048e+02 4.779e+02 5.602e+02 6.982e+02 1.239e+03, threshold=1.120e+03, percent-clipped=1.0 +2023-03-28 02:31:36,114 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19750.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:31:37,412 INFO [train.py:892] (1/4) Epoch 11, batch 1200, loss[loss=0.2346, simple_loss=0.2981, pruned_loss=0.08559, over 19529.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2891, pruned_loss=0.08556, over 3939115.71 frames. ], batch size: 54, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:32:55,604 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19785.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 02:33:30,042 INFO [train.py:892] (1/4) Epoch 11, batch 1250, loss[loss=0.2174, simple_loss=0.2762, pruned_loss=0.07931, over 19780.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.2875, pruned_loss=0.08474, over 3942709.19 frames. 
], batch size: 163, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:33:35,320 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8271, 4.9577, 5.2576, 4.9250, 4.4055, 5.1574, 4.8350, 5.4652], + device='cuda:1'), covar=tensor([0.0896, 0.0229, 0.0332, 0.0254, 0.0604, 0.0316, 0.0309, 0.0245], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0189, 0.0183, 0.0193, 0.0183, 0.0191, 0.0185, 0.0174], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 02:33:49,285 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.235e+02 4.789e+02 5.822e+02 7.202e+02 1.423e+03, threshold=1.164e+03, percent-clipped=5.0 +2023-03-28 02:34:34,494 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9986, 3.9906, 3.9080, 3.7480, 4.0926, 2.9031, 3.2005, 1.9329], + device='cuda:1'), covar=tensor([0.0333, 0.0207, 0.0226, 0.0209, 0.0245, 0.0966, 0.0977, 0.1893], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0113, 0.0097, 0.0110, 0.0099, 0.0118, 0.0128, 0.0113], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 02:34:44,027 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19833.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:35:24,481 INFO [train.py:892] (1/4) Epoch 11, batch 1300, loss[loss=0.2162, simple_loss=0.2803, pruned_loss=0.07602, over 19673.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.2879, pruned_loss=0.08526, over 3944300.28 frames. ], batch size: 73, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:37:16,655 INFO [train.py:892] (1/4) Epoch 11, batch 1350, loss[loss=0.2269, simple_loss=0.3013, pruned_loss=0.07632, over 19797.00 frames. ], tot_loss[loss=0.229, simple_loss=0.288, pruned_loss=0.08495, over 3945842.04 frames. ], batch size: 51, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:37:38,421 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.602e+02 5.196e+02 5.941e+02 7.329e+02 1.053e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-03-28 02:37:47,779 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3738, 3.9099, 4.0592, 4.3924, 4.0695, 4.4802, 4.4971, 4.7051], + device='cuda:1'), covar=tensor([0.0646, 0.0372, 0.0474, 0.0270, 0.0575, 0.0377, 0.0407, 0.0258], + device='cuda:1'), in_proj_covar=tensor([0.0127, 0.0144, 0.0169, 0.0139, 0.0141, 0.0122, 0.0132, 0.0162], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 02:39:10,196 INFO [train.py:892] (1/4) Epoch 11, batch 1400, loss[loss=0.2408, simple_loss=0.2956, pruned_loss=0.09301, over 19706.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.2886, pruned_loss=0.08522, over 3944181.73 frames. 
], batch size: 305, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:39:59,618 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0996, 2.2518, 2.8555, 3.1129, 3.8766, 4.0063, 4.0211, 4.1297], + device='cuda:1'), covar=tensor([0.0621, 0.1761, 0.1104, 0.0526, 0.0288, 0.0202, 0.0202, 0.0251], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0170, 0.0156, 0.0129, 0.0107, 0.0104, 0.0096, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 02:40:05,692 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19974.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:41:09,390 INFO [train.py:892] (1/4) Epoch 11, batch 1450, loss[loss=0.2023, simple_loss=0.2668, pruned_loss=0.06893, over 19949.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2891, pruned_loss=0.08518, over 3945952.05 frames. ], batch size: 46, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:41:30,119 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.362e+02 4.773e+02 5.638e+02 7.039e+02 1.448e+03, threshold=1.128e+03, percent-clipped=1.0 +2023-03-28 02:41:54,646 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20020.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:43:00,592 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20050.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:43:01,874 INFO [train.py:892] (1/4) Epoch 11, batch 1500, loss[loss=0.2163, simple_loss=0.2723, pruned_loss=0.08011, over 19774.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.2884, pruned_loss=0.08501, over 3947046.31 frames. ], batch size: 193, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:44:11,201 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20081.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:44:30,240 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20089.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:44:50,401 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20098.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:44:52,997 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-03-28 02:44:55,654 INFO [train.py:892] (1/4) Epoch 11, batch 1550, loss[loss=0.1964, simple_loss=0.265, pruned_loss=0.06396, over 19531.00 frames. ], tot_loss[loss=0.2304, simple_loss=0.2896, pruned_loss=0.0856, over 3945148.33 frames. ], batch size: 46, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:45:16,069 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.591e+02 5.026e+02 5.881e+02 6.796e+02 1.345e+03, threshold=1.176e+03, percent-clipped=3.0 +2023-03-28 02:45:21,263 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20112.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:46:41,421 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20150.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:46:42,448 INFO [train.py:892] (1/4) Epoch 11, batch 1600, loss[loss=0.2134, simple_loss=0.2693, pruned_loss=0.07874, over 19805.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2891, pruned_loss=0.08535, over 3946116.59 frames. 
], batch size: 72, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:47:01,472 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2003, 2.5501, 2.1915, 1.6771, 2.3424, 2.5554, 2.5369, 2.4442], + device='cuda:1'), covar=tensor([0.0206, 0.0225, 0.0206, 0.0482, 0.0287, 0.0188, 0.0155, 0.0177], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0057, 0.0065, 0.0075, 0.0076, 0.0051, 0.0047, 0.0050], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 02:47:03,416 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2804, 2.1584, 2.4902, 2.2474, 2.1711, 2.2565, 2.1257, 2.4588], + device='cuda:1'), covar=tensor([0.0206, 0.0274, 0.0195, 0.0183, 0.0285, 0.0245, 0.0347, 0.0261], + device='cuda:1'), in_proj_covar=tensor([0.0047, 0.0047, 0.0048, 0.0042, 0.0052, 0.0048, 0.0064, 0.0044], + device='cuda:1'), out_proj_covar=tensor([1.0432e-04, 1.0534e-04, 1.0689e-04, 9.5611e-05, 1.1769e-04, 1.0769e-04, + 1.4220e-04, 1.0066e-04], device='cuda:1') +2023-03-28 02:47:33,540 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20173.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:48:38,473 INFO [train.py:892] (1/4) Epoch 11, batch 1650, loss[loss=0.2072, simple_loss=0.2779, pruned_loss=0.06823, over 19808.00 frames. ], tot_loss[loss=0.2313, simple_loss=0.2901, pruned_loss=0.08621, over 3946557.32 frames. ], batch size: 98, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:48:45,313 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20204.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:48:56,036 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.919e+02 4.977e+02 5.709e+02 7.210e+02 1.480e+03, threshold=1.142e+03, percent-clipped=1.0 +2023-03-28 02:49:58,992 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0 +2023-03-28 02:50:09,642 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3015, 2.5586, 3.3392, 2.9469, 3.0497, 3.2989, 1.9727, 2.0998], + device='cuda:1'), covar=tensor([0.0689, 0.2099, 0.0408, 0.0583, 0.1147, 0.0704, 0.1604, 0.2035], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0331, 0.0266, 0.0218, 0.0323, 0.0258, 0.0282, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 02:50:13,600 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0202, 2.4075, 2.0408, 1.5848, 2.1962, 2.4625, 2.4473, 2.2945], + device='cuda:1'), covar=tensor([0.0264, 0.0234, 0.0246, 0.0556, 0.0340, 0.0240, 0.0184, 0.0176], + device='cuda:1'), in_proj_covar=tensor([0.0062, 0.0057, 0.0066, 0.0076, 0.0077, 0.0052, 0.0048, 0.0050], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 02:50:27,708 INFO [train.py:892] (1/4) Epoch 11, batch 1700, loss[loss=0.2932, simple_loss=0.3411, pruned_loss=0.1227, over 19635.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.2896, pruned_loss=0.08536, over 3947571.39 frames. 
], batch size: 359, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:50:28,465 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0889, 5.1130, 5.5615, 5.2786, 5.2873, 4.8616, 5.1527, 5.0729], + device='cuda:1'), covar=tensor([0.1179, 0.0943, 0.0769, 0.0986, 0.0675, 0.0810, 0.1911, 0.1773], + device='cuda:1'), in_proj_covar=tensor([0.0245, 0.0244, 0.0300, 0.0233, 0.0227, 0.0215, 0.0286, 0.0320], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 02:50:58,750 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20265.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:51:19,816 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20274.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:51:41,426 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2335, 4.2673, 2.5903, 4.5437, 4.7634, 2.0865, 4.0046, 3.6655], + device='cuda:1'), covar=tensor([0.0528, 0.0706, 0.2587, 0.0591, 0.0383, 0.2707, 0.0893, 0.0550], + device='cuda:1'), in_proj_covar=tensor([0.0191, 0.0216, 0.0213, 0.0200, 0.0157, 0.0197, 0.0223, 0.0157], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 02:52:09,947 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8128, 2.5420, 3.0638, 2.8490, 3.1171, 3.1904, 3.5133, 4.0659], + device='cuda:1'), covar=tensor([0.0626, 0.1654, 0.1326, 0.1737, 0.1844, 0.1350, 0.0451, 0.0455], + device='cuda:1'), in_proj_covar=tensor([0.0204, 0.0213, 0.0232, 0.0229, 0.0256, 0.0223, 0.0173, 0.0178], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 02:52:10,278 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-28 02:52:12,850 INFO [train.py:892] (1/4) Epoch 11, batch 1750, loss[loss=0.223, simple_loss=0.274, pruned_loss=0.08601, over 19753.00 frames. ], tot_loss[loss=0.229, simple_loss=0.288, pruned_loss=0.08503, over 3947727.74 frames. ], batch size: 179, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:52:29,861 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.025e+02 5.710e+02 7.275e+02 1.664e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-03-28 02:52:53,414 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20322.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:53:48,115 INFO [train.py:892] (1/4) Epoch 11, batch 1800, loss[loss=0.2109, simple_loss=0.2756, pruned_loss=0.07305, over 19778.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.2874, pruned_loss=0.08503, over 3948414.59 frames. ], batch size: 46, lr: 1.40e-02, grad_scale: 16.0 +2023-03-28 02:54:35,683 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20376.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:54:57,917 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20388.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:55:18,988 INFO [train.py:892] (1/4) Epoch 11, batch 1850, loss[loss=0.2312, simple_loss=0.2979, pruned_loss=0.08227, over 19573.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2886, pruned_loss=0.08499, over 3947937.64 frames. 
], batch size: 53, lr: 1.40e-02, grad_scale: 16.0 +2023-03-28 02:56:26,114 INFO [train.py:892] (1/4) Epoch 12, batch 0, loss[loss=0.1849, simple_loss=0.2557, pruned_loss=0.05711, over 19406.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2557, pruned_loss=0.05711, over 19406.00 frames. ], batch size: 40, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 02:56:26,115 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 02:56:56,309 INFO [train.py:926] (1/4) Epoch 12, validation: loss=0.1761, simple_loss=0.2565, pruned_loss=0.0478, over 2883724.00 frames. +2023-03-28 02:56:56,310 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 02:57:06,913 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.231e+02 6.194e+02 7.302e+02 1.843e+03, threshold=1.239e+03, percent-clipped=4.0 +2023-03-28 02:57:28,068 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20418.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:58:29,457 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20445.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:58:39,898 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20449.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:58:54,031 INFO [train.py:892] (1/4) Epoch 12, batch 50, loss[loss=0.2538, simple_loss=0.2975, pruned_loss=0.1051, over 19859.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2811, pruned_loss=0.08092, over 889236.85 frames. ], batch size: 165, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 02:59:15,987 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2509, 3.2637, 3.6428, 3.3040, 3.1566, 3.5594, 3.4176, 3.6651], + device='cuda:1'), covar=tensor([0.1018, 0.0360, 0.0369, 0.0368, 0.1196, 0.0493, 0.0408, 0.0367], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0185, 0.0179, 0.0188, 0.0178, 0.0189, 0.0182, 0.0171], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 02:59:19,978 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20468.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:59:47,820 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20479.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:00:46,562 INFO [train.py:892] (1/4) Epoch 12, batch 100, loss[loss=0.2657, simple_loss=0.3213, pruned_loss=0.105, over 19689.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2831, pruned_loss=0.08207, over 1567805.13 frames. ], batch size: 315, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:00:55,044 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.431e+02 4.892e+02 6.215e+02 7.181e+02 1.654e+03, threshold=1.243e+03, percent-clipped=3.0 +2023-03-28 03:01:40,098 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6856, 2.4632, 2.9232, 2.7483, 2.9487, 3.0288, 3.4974, 3.8240], + device='cuda:1'), covar=tensor([0.0520, 0.1570, 0.1286, 0.1679, 0.1574, 0.1251, 0.0403, 0.0506], + device='cuda:1'), in_proj_covar=tensor([0.0203, 0.0214, 0.0233, 0.0229, 0.0256, 0.0223, 0.0175, 0.0179], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 03:02:39,174 INFO [train.py:892] (1/4) Epoch 12, batch 150, loss[loss=0.2394, simple_loss=0.3045, pruned_loss=0.08714, over 19720.00 frames. 
], tot_loss[loss=0.2214, simple_loss=0.2816, pruned_loss=0.08061, over 2096711.40 frames. ], batch size: 61, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:02:51,626 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20560.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:03:26,636 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3663, 3.4945, 3.6518, 4.4213, 2.9121, 3.2748, 3.0242, 2.6725], + device='cuda:1'), covar=tensor([0.0405, 0.2139, 0.0854, 0.0212, 0.2009, 0.0801, 0.1026, 0.1895], + device='cuda:1'), in_proj_covar=tensor([0.0197, 0.0328, 0.0222, 0.0148, 0.0236, 0.0173, 0.0195, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 03:03:56,830 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20588.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:04:38,604 INFO [train.py:892] (1/4) Epoch 12, batch 200, loss[loss=0.1739, simple_loss=0.2356, pruned_loss=0.05615, over 19783.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2828, pruned_loss=0.08114, over 2506974.90 frames. ], batch size: 94, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:04:47,274 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 4.571e+02 5.228e+02 6.546e+02 1.569e+03, threshold=1.046e+03, percent-clipped=1.0 +2023-03-28 03:06:25,767 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20649.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:06:39,404 INFO [train.py:892] (1/4) Epoch 12, batch 250, loss[loss=0.2403, simple_loss=0.3035, pruned_loss=0.08853, over 19721.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2825, pruned_loss=0.08124, over 2827345.73 frames. ], batch size: 62, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:06:45,939 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4514, 3.3313, 3.3504, 3.1055, 3.4271, 2.7431, 2.7826, 1.4743], + device='cuda:1'), covar=tensor([0.0242, 0.0252, 0.0152, 0.0184, 0.0172, 0.1003, 0.0745, 0.1928], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0115, 0.0096, 0.0110, 0.0100, 0.0118, 0.0128, 0.0113], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 03:07:25,033 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20676.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:08:04,259 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6316, 2.8249, 2.2476, 1.9999, 2.6004, 2.9222, 2.6379, 2.7453], + device='cuda:1'), covar=tensor([0.0198, 0.0236, 0.0264, 0.0552, 0.0319, 0.0188, 0.0190, 0.0186], + device='cuda:1'), in_proj_covar=tensor([0.0062, 0.0058, 0.0067, 0.0076, 0.0078, 0.0052, 0.0048, 0.0051], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 03:08:30,783 INFO [train.py:892] (1/4) Epoch 12, batch 300, loss[loss=0.1973, simple_loss=0.2701, pruned_loss=0.0622, over 19672.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2827, pruned_loss=0.08154, over 3077387.10 frames. 
], batch size: 49, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:08:41,181 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.455e+02 4.985e+02 6.229e+02 8.000e+02 1.241e+03, threshold=1.246e+03, percent-clipped=1.0 +2023-03-28 03:09:12,852 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20724.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:09:57,915 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20744.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:10:01,861 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20745.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:10:18,181 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6852, 2.5760, 2.8589, 2.6698, 2.9687, 3.0205, 3.4924, 3.8133], + device='cuda:1'), covar=tensor([0.0543, 0.1531, 0.1429, 0.1830, 0.1542, 0.1244, 0.0463, 0.0433], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0216, 0.0236, 0.0233, 0.0260, 0.0226, 0.0179, 0.0182], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 03:10:24,868 INFO [train.py:892] (1/4) Epoch 12, batch 350, loss[loss=0.2034, simple_loss=0.266, pruned_loss=0.07043, over 19822.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2834, pruned_loss=0.08183, over 3271130.67 frames. ], batch size: 103, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:10:52,623 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20768.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:11:06,497 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20774.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:11:52,623 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20793.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:12:20,248 INFO [train.py:892] (1/4) Epoch 12, batch 400, loss[loss=0.2926, simple_loss=0.3664, pruned_loss=0.1094, over 18756.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2845, pruned_loss=0.08204, over 3419359.03 frames. ], batch size: 564, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:12:28,480 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.077e+02 4.871e+02 5.546e+02 6.425e+02 9.713e+02, threshold=1.109e+03, percent-clipped=0.0 +2023-03-28 03:12:42,784 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20816.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:13:33,764 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-28 03:14:10,003 INFO [train.py:892] (1/4) Epoch 12, batch 450, loss[loss=0.2891, simple_loss=0.3343, pruned_loss=0.122, over 19643.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.285, pruned_loss=0.08191, over 3535526.92 frames. 
], batch size: 330, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:14:21,446 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20860.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:14:26,048 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2469, 2.2761, 2.3217, 2.2086, 1.9694, 2.3243, 2.2061, 2.4940], + device='cuda:1'), covar=tensor([0.0191, 0.0259, 0.0213, 0.0235, 0.0320, 0.0229, 0.0330, 0.0166], + device='cuda:1'), in_proj_covar=tensor([0.0047, 0.0048, 0.0048, 0.0043, 0.0053, 0.0049, 0.0064, 0.0044], + device='cuda:1'), out_proj_covar=tensor([1.0595e-04, 1.0668e-04, 1.0846e-04, 9.6875e-05, 1.1901e-04, 1.1003e-04, + 1.4071e-04, 1.0102e-04], device='cuda:1') +2023-03-28 03:14:28,181 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9603, 3.2498, 2.8749, 2.2530, 2.9293, 3.2386, 3.1072, 3.1728], + device='cuda:1'), covar=tensor([0.0203, 0.0248, 0.0194, 0.0510, 0.0277, 0.0222, 0.0157, 0.0164], + device='cuda:1'), in_proj_covar=tensor([0.0063, 0.0059, 0.0067, 0.0077, 0.0078, 0.0052, 0.0048, 0.0051], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 03:15:34,086 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-28 03:16:03,418 INFO [train.py:892] (1/4) Epoch 12, batch 500, loss[loss=0.208, simple_loss=0.2718, pruned_loss=0.07213, over 19869.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2833, pruned_loss=0.0816, over 3628056.65 frames. ], batch size: 89, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:16:08,877 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20908.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:16:11,899 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 4.795e+02 5.889e+02 7.056e+02 1.371e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-03-28 03:16:24,430 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1279, 4.1851, 4.5619, 4.4161, 4.4472, 4.0674, 4.2554, 4.1407], + device='cuda:1'), covar=tensor([0.1402, 0.1273, 0.1063, 0.1088, 0.0940, 0.1020, 0.2077, 0.1958], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0244, 0.0299, 0.0231, 0.0221, 0.0217, 0.0291, 0.0326], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 03:16:35,249 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0373, 2.7580, 4.1759, 3.6660, 3.9275, 4.1399, 4.1138, 3.9851], + device='cuda:1'), covar=tensor([0.0159, 0.0626, 0.0071, 0.0688, 0.0107, 0.0150, 0.0109, 0.0095], + device='cuda:1'), in_proj_covar=tensor([0.0078, 0.0089, 0.0070, 0.0142, 0.0064, 0.0079, 0.0073, 0.0065], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 03:17:30,614 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20944.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:17:41,560 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2117, 2.1972, 2.2853, 2.1758, 2.1075, 2.2606, 2.1419, 2.4773], + device='cuda:1'), covar=tensor([0.0245, 0.0299, 0.0232, 0.0229, 0.0272, 0.0274, 0.0340, 0.0326], + device='cuda:1'), in_proj_covar=tensor([0.0047, 0.0048, 0.0049, 0.0043, 0.0053, 0.0049, 0.0064, 0.0044], + device='cuda:1'), out_proj_covar=tensor([1.0645e-04, 1.0616e-04, 1.0873e-04, 9.7131e-05, 1.1951e-04, 1.1055e-04, + 
1.4072e-04, 1.0074e-04], device='cuda:1') +2023-03-28 03:17:57,137 INFO [train.py:892] (1/4) Epoch 12, batch 550, loss[loss=0.2513, simple_loss=0.3036, pruned_loss=0.09952, over 19778.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2825, pruned_loss=0.08087, over 3700324.77 frames. ], batch size: 263, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:19:53,205 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8881, 1.8612, 1.9761, 1.9312, 1.9177, 1.9192, 1.9292, 2.1932], + device='cuda:1'), covar=tensor([0.0179, 0.0253, 0.0229, 0.0215, 0.0252, 0.0243, 0.0348, 0.0170], + device='cuda:1'), in_proj_covar=tensor([0.0048, 0.0048, 0.0049, 0.0043, 0.0053, 0.0050, 0.0064, 0.0044], + device='cuda:1'), out_proj_covar=tensor([1.0747e-04, 1.0646e-04, 1.1003e-04, 9.7884e-05, 1.1958e-04, 1.1213e-04, + 1.4251e-04, 1.0041e-04], device='cuda:1') +2023-03-28 03:19:54,077 INFO [train.py:892] (1/4) Epoch 12, batch 600, loss[loss=0.2156, simple_loss=0.2712, pruned_loss=0.07999, over 19761.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2833, pruned_loss=0.08164, over 3755309.82 frames. ], batch size: 188, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:20:01,792 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.833e+02 4.713e+02 5.422e+02 6.814e+02 1.308e+03, threshold=1.084e+03, percent-clipped=1.0 +2023-03-28 03:20:32,269 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-28 03:21:13,008 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2387, 4.2929, 2.5804, 4.6202, 4.8151, 1.9198, 4.0412, 3.6394], + device='cuda:1'), covar=tensor([0.0531, 0.0681, 0.2349, 0.0515, 0.0245, 0.2765, 0.0819, 0.0516], + device='cuda:1'), in_proj_covar=tensor([0.0195, 0.0217, 0.0209, 0.0202, 0.0160, 0.0198, 0.0222, 0.0158], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 03:21:20,940 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21044.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:21:37,357 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21052.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:21:45,733 INFO [train.py:892] (1/4) Epoch 12, batch 650, loss[loss=0.2057, simple_loss=0.267, pruned_loss=0.07221, over 19805.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.284, pruned_loss=0.08217, over 3798375.04 frames. ], batch size: 114, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:22:28,014 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21074.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:23:09,796 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21092.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:23:10,528 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-28 03:23:39,594 INFO [train.py:892] (1/4) Epoch 12, batch 700, loss[loss=0.2545, simple_loss=0.3162, pruned_loss=0.09633, over 19772.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2852, pruned_loss=0.0829, over 3832237.14 frames. 
], batch size: 273, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:23:49,597 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.058e+02 5.405e+02 6.727e+02 8.130e+02 1.465e+03, threshold=1.345e+03, percent-clipped=5.0 +2023-03-28 03:23:56,901 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21113.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:24:07,653 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21118.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:24:18,046 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21122.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:24:41,251 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2172, 3.8264, 3.9654, 4.2411, 3.9007, 4.2567, 4.3248, 4.4739], + device='cuda:1'), covar=tensor([0.0580, 0.0383, 0.0468, 0.0288, 0.0537, 0.0361, 0.0346, 0.0266], + device='cuda:1'), in_proj_covar=tensor([0.0126, 0.0147, 0.0171, 0.0144, 0.0144, 0.0126, 0.0133, 0.0163], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 03:25:33,694 INFO [train.py:892] (1/4) Epoch 12, batch 750, loss[loss=0.2048, simple_loss=0.2678, pruned_loss=0.07095, over 19654.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2842, pruned_loss=0.08198, over 3857478.49 frames. ], batch size: 58, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:26:27,405 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21179.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:27:28,534 INFO [train.py:892] (1/4) Epoch 12, batch 800, loss[loss=0.2197, simple_loss=0.2832, pruned_loss=0.07814, over 19557.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.284, pruned_loss=0.08193, over 3878584.05 frames. ], batch size: 47, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:27:36,537 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.46 vs. limit=5.0 +2023-03-28 03:27:37,196 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 4.727e+02 5.705e+02 6.809e+02 1.528e+03, threshold=1.141e+03, percent-clipped=1.0 +2023-03-28 03:28:55,963 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21244.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:29:23,809 INFO [train.py:892] (1/4) Epoch 12, batch 850, loss[loss=0.2311, simple_loss=0.2882, pruned_loss=0.08704, over 19741.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.2858, pruned_loss=0.08318, over 3891581.92 frames. ], batch size: 62, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:30:45,261 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21292.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:31:14,750 INFO [train.py:892] (1/4) Epoch 12, batch 900, loss[loss=0.3592, simple_loss=0.3948, pruned_loss=0.1618, over 19411.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2853, pruned_loss=0.08256, over 3905316.22 frames. 
], batch size: 431, lr: 1.32e-02, grad_scale: 32.0 +2023-03-28 03:31:22,630 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.874e+02 4.590e+02 5.772e+02 7.040e+02 1.378e+03, threshold=1.154e+03, percent-clipped=2.0 +2023-03-28 03:31:27,905 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21312.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:32:07,281 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21328.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:32:11,711 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3274, 2.6184, 2.6840, 3.2370, 2.2400, 2.8894, 2.1912, 1.9371], + device='cuda:1'), covar=tensor([0.0412, 0.1437, 0.0965, 0.0292, 0.2034, 0.0526, 0.1180, 0.1718], + device='cuda:1'), in_proj_covar=tensor([0.0199, 0.0333, 0.0226, 0.0153, 0.0237, 0.0173, 0.0197, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 03:33:10,586 INFO [train.py:892] (1/4) Epoch 12, batch 950, loss[loss=0.1957, simple_loss=0.2503, pruned_loss=0.07051, over 19788.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.286, pruned_loss=0.08263, over 3914861.97 frames. ], batch size: 178, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:33:48,328 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21373.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:34:26,650 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21389.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:34:47,666 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5344, 4.2115, 4.1782, 4.0488, 4.4202, 3.0928, 3.7147, 2.2660], + device='cuda:1'), covar=tensor([0.0155, 0.0197, 0.0124, 0.0146, 0.0110, 0.0806, 0.0649, 0.1248], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0118, 0.0096, 0.0112, 0.0100, 0.0119, 0.0130, 0.0114], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 03:35:02,706 INFO [train.py:892] (1/4) Epoch 12, batch 1000, loss[loss=0.3053, simple_loss=0.3538, pruned_loss=0.1284, over 19601.00 frames. ], tot_loss[loss=0.226, simple_loss=0.2861, pruned_loss=0.08297, over 3921813.40 frames. ], batch size: 376, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:35:08,964 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21408.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:35:14,108 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.713e+02 5.169e+02 6.246e+02 8.325e+02 1.505e+03, threshold=1.249e+03, percent-clipped=4.0 +2023-03-28 03:36:53,873 INFO [train.py:892] (1/4) Epoch 12, batch 1050, loss[loss=0.2303, simple_loss=0.2914, pruned_loss=0.08458, over 19833.00 frames. ], tot_loss[loss=0.2264, simple_loss=0.2867, pruned_loss=0.08299, over 3926359.41 frames. 
], batch size: 75, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:37:32,713 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21474.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:37:39,038 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3610, 2.6764, 2.4205, 1.6717, 2.4331, 2.5254, 2.5164, 2.6001], + device='cuda:1'), covar=tensor([0.0218, 0.0198, 0.0212, 0.0608, 0.0314, 0.0182, 0.0161, 0.0151], + device='cuda:1'), in_proj_covar=tensor([0.0063, 0.0059, 0.0068, 0.0078, 0.0079, 0.0054, 0.0049, 0.0052], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 03:38:44,205 INFO [train.py:892] (1/4) Epoch 12, batch 1100, loss[loss=0.2365, simple_loss=0.2907, pruned_loss=0.09113, over 19752.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.2842, pruned_loss=0.08135, over 3933332.22 frames. ], batch size: 205, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:38:56,790 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.523e+02 4.626e+02 5.286e+02 6.822e+02 1.206e+03, threshold=1.057e+03, percent-clipped=0.0 +2023-03-28 03:39:51,149 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8605, 3.1031, 2.8006, 2.0654, 2.7115, 3.0372, 2.8692, 2.9677], + device='cuda:1'), covar=tensor([0.0179, 0.0295, 0.0201, 0.0480, 0.0316, 0.0166, 0.0184, 0.0159], + device='cuda:1'), in_proj_covar=tensor([0.0063, 0.0059, 0.0067, 0.0077, 0.0078, 0.0053, 0.0049, 0.0052], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 03:40:37,756 INFO [train.py:892] (1/4) Epoch 12, batch 1150, loss[loss=0.2542, simple_loss=0.3105, pruned_loss=0.09891, over 19716.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2832, pruned_loss=0.08117, over 3938112.53 frames. ], batch size: 269, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:42:06,917 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21595.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:42:32,687 INFO [train.py:892] (1/4) Epoch 12, batch 1200, loss[loss=0.2529, simple_loss=0.3133, pruned_loss=0.09625, over 19742.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2826, pruned_loss=0.08071, over 3940291.09 frames. 
], batch size: 291, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:42:43,483 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.726e+02 4.687e+02 5.841e+02 7.196e+02 1.649e+03, threshold=1.168e+03, percent-clipped=4.0 +2023-03-28 03:42:48,439 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21613.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:43:19,402 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0411, 4.5445, 4.7421, 5.0661, 4.6259, 5.3251, 5.2019, 5.3238], + device='cuda:1'), covar=tensor([0.0639, 0.0367, 0.0394, 0.0249, 0.0577, 0.0274, 0.0321, 0.0292], + device='cuda:1'), in_proj_covar=tensor([0.0127, 0.0148, 0.0171, 0.0143, 0.0146, 0.0128, 0.0132, 0.0165], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 03:44:01,718 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2644, 2.8210, 4.3652, 3.8079, 4.1929, 4.3227, 4.3473, 4.0542], + device='cuda:1'), covar=tensor([0.0165, 0.0676, 0.0086, 0.0790, 0.0104, 0.0190, 0.0102, 0.0101], + device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0091, 0.0073, 0.0144, 0.0066, 0.0081, 0.0074, 0.0065], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 03:44:28,813 INFO [train.py:892] (1/4) Epoch 12, batch 1250, loss[loss=0.1984, simple_loss=0.2584, pruned_loss=0.06924, over 19789.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2821, pruned_loss=0.08034, over 3941845.34 frames. ], batch size: 65, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:44:29,867 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21656.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:44:54,431 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21668.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:45:08,201 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21674.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:45:08,999 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-03-28 03:45:32,078 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21684.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:45:55,250 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4976, 4.5112, 4.9741, 4.5173, 4.0763, 4.6709, 4.5881, 5.1078], + device='cuda:1'), covar=tensor([0.0986, 0.0343, 0.0299, 0.0329, 0.0805, 0.0429, 0.0387, 0.0250], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0191, 0.0189, 0.0195, 0.0184, 0.0197, 0.0188, 0.0178], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 03:46:12,447 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6307, 2.6838, 3.8162, 3.1387, 3.4481, 3.3953, 2.1583, 2.1994], + device='cuda:1'), covar=tensor([0.0692, 0.2374, 0.0437, 0.0601, 0.0993, 0.0787, 0.1633, 0.2081], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0333, 0.0269, 0.0223, 0.0326, 0.0269, 0.0287, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 03:46:19,247 INFO [train.py:892] (1/4) Epoch 12, batch 1300, loss[loss=0.2059, simple_loss=0.2611, pruned_loss=0.07534, over 19831.00 frames. 
], tot_loss[loss=0.2231, simple_loss=0.2838, pruned_loss=0.08122, over 3942699.22 frames. ], batch size: 128, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:46:24,450 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21708.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:46:31,054 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 4.739e+02 5.550e+02 6.963e+02 1.418e+03, threshold=1.110e+03, percent-clipped=2.0 +2023-03-28 03:48:12,996 INFO [train.py:892] (1/4) Epoch 12, batch 1350, loss[loss=0.2337, simple_loss=0.2975, pruned_loss=0.08493, over 19788.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.283, pruned_loss=0.08097, over 3944575.91 frames. ], batch size: 236, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:48:13,699 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21756.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:48:27,773 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21763.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:48:52,526 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21774.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:49:05,940 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-28 03:50:04,173 INFO [train.py:892] (1/4) Epoch 12, batch 1400, loss[loss=0.2241, simple_loss=0.278, pruned_loss=0.08509, over 19781.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2815, pruned_loss=0.07993, over 3945441.78 frames. ], batch size: 131, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:50:16,313 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 4.274e+02 5.050e+02 6.143e+02 1.134e+03, threshold=1.010e+03, percent-clipped=1.0 +2023-03-28 03:50:17,313 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7970, 3.1473, 2.5536, 2.0851, 2.6203, 3.0015, 2.9218, 2.9936], + device='cuda:1'), covar=tensor([0.0190, 0.0247, 0.0222, 0.0499, 0.0291, 0.0216, 0.0160, 0.0144], + device='cuda:1'), in_proj_covar=tensor([0.0063, 0.0059, 0.0067, 0.0077, 0.0079, 0.0054, 0.0049, 0.0052], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 03:50:40,268 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21822.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:50:47,640 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21824.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:51:28,475 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-28 03:51:44,484 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21850.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:51:55,169 INFO [train.py:892] (1/4) Epoch 12, batch 1450, loss[loss=0.2223, simple_loss=0.2836, pruned_loss=0.08053, over 19754.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2815, pruned_loss=0.07931, over 3945310.87 frames. 
], batch size: 44, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:52:43,114 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9012, 5.2650, 5.2900, 5.2118, 4.8537, 5.2254, 4.6573, 4.7163], + device='cuda:1'), covar=tensor([0.0424, 0.0494, 0.0556, 0.0434, 0.0648, 0.0618, 0.0729, 0.0961], + device='cuda:1'), in_proj_covar=tensor([0.0199, 0.0206, 0.0243, 0.0206, 0.0194, 0.0190, 0.0219, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 03:53:30,223 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.69 vs. limit=5.0 +2023-03-28 03:53:47,746 INFO [train.py:892] (1/4) Epoch 12, batch 1500, loss[loss=0.2523, simple_loss=0.3101, pruned_loss=0.09727, over 19741.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2817, pruned_loss=0.07967, over 3947711.10 frames. ], batch size: 276, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:53:58,748 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.501e+02 4.832e+02 5.724e+02 6.820e+02 1.583e+03, threshold=1.145e+03, percent-clipped=5.0 +2023-03-28 03:53:59,872 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21911.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:55:29,707 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7743, 2.7786, 2.8130, 2.1317, 2.9039, 2.3647, 2.7021, 2.8520], + device='cuda:1'), covar=tensor([0.0389, 0.0292, 0.0441, 0.0703, 0.0278, 0.0332, 0.0352, 0.0215], + device='cuda:1'), in_proj_covar=tensor([0.0057, 0.0062, 0.0063, 0.0091, 0.0059, 0.0056, 0.0054, 0.0049], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-28 03:55:31,488 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21951.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:55:42,239 INFO [train.py:892] (1/4) Epoch 12, batch 1550, loss[loss=0.196, simple_loss=0.2518, pruned_loss=0.07015, over 19831.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2814, pruned_loss=0.07926, over 3947740.90 frames. ], batch size: 128, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:55:50,206 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5788, 3.6407, 3.9410, 3.6904, 3.8905, 3.2406, 3.5732, 3.4572], + device='cuda:1'), covar=tensor([0.1492, 0.1537, 0.1168, 0.1419, 0.1357, 0.1515, 0.2395, 0.2520], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0254, 0.0308, 0.0240, 0.0228, 0.0224, 0.0293, 0.0330], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 03:56:10,741 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21968.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:56:12,906 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21969.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:56:15,258 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21970.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:56:49,504 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21984.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:57:43,230 INFO [train.py:892] (1/4) Epoch 12, batch 1600, loss[loss=0.2167, simple_loss=0.2849, pruned_loss=0.0742, over 19612.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2812, pruned_loss=0.079, over 3949418.35 frames. 
], batch size: 46, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:57:54,068 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.885e+02 4.412e+02 5.565e+02 6.803e+02 1.208e+03, threshold=1.113e+03, percent-clipped=1.0 +2023-03-28 03:58:08,642 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22016.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:58:39,024 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22031.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:58:41,884 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22032.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:59:37,620 INFO [train.py:892] (1/4) Epoch 12, batch 1650, loss[loss=0.2382, simple_loss=0.2959, pruned_loss=0.09028, over 19750.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2808, pruned_loss=0.07859, over 3949441.86 frames. ], batch size: 250, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:00:16,917 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5051, 2.9523, 3.3902, 3.2458, 3.7170, 3.7959, 4.3993, 4.8287], + device='cuda:1'), covar=tensor([0.0482, 0.1588, 0.1384, 0.1908, 0.1515, 0.1158, 0.0405, 0.0430], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0215, 0.0234, 0.0232, 0.0259, 0.0226, 0.0179, 0.0186], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:01:33,623 INFO [train.py:892] (1/4) Epoch 12, batch 1700, loss[loss=0.2085, simple_loss=0.2715, pruned_loss=0.07275, over 19891.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2801, pruned_loss=0.07835, over 3950095.88 frames. ], batch size: 97, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:01:45,051 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.459e+02 5.514e+02 6.707e+02 1.132e+03, threshold=1.103e+03, percent-clipped=1.0 +2023-03-28 04:02:01,757 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22119.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:02:26,670 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22130.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:03:17,854 INFO [train.py:892] (1/4) Epoch 12, batch 1750, loss[loss=0.2252, simple_loss=0.2932, pruned_loss=0.07861, over 19606.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.28, pruned_loss=0.07823, over 3949404.93 frames. ], batch size: 48, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:03:56,037 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22176.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:03:59,867 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. 
limit=2.0 +2023-03-28 04:04:04,912 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0436, 2.4422, 2.2579, 1.5015, 2.2412, 2.5288, 2.2244, 2.5045], + device='cuda:1'), covar=tensor([0.0242, 0.0235, 0.0205, 0.0544, 0.0318, 0.0150, 0.0175, 0.0159], + device='cuda:1'), in_proj_covar=tensor([0.0064, 0.0060, 0.0068, 0.0078, 0.0080, 0.0054, 0.0051, 0.0053], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 04:04:10,320 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6058, 2.5918, 1.5605, 3.2414, 2.9132, 3.1145, 3.2155, 2.4771], + device='cuda:1'), covar=tensor([0.0602, 0.0586, 0.1486, 0.0399, 0.0463, 0.0340, 0.0467, 0.0717], + device='cuda:1'), in_proj_covar=tensor([0.0123, 0.0120, 0.0131, 0.0122, 0.0105, 0.0102, 0.0118, 0.0124], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 04:04:22,184 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22191.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:04:47,348 INFO [train.py:892] (1/4) Epoch 12, batch 1800, loss[loss=0.203, simple_loss=0.2678, pruned_loss=0.06906, over 19774.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2811, pruned_loss=0.07897, over 3949027.33 frames. ], batch size: 46, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:04:47,816 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22206.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:04:51,734 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-28 04:04:53,352 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-28 04:04:55,759 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.946e+02 4.603e+02 5.856e+02 6.792e+02 1.518e+03, threshold=1.171e+03, percent-clipped=5.0 +2023-03-28 04:05:03,073 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22215.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:05:20,848 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8777, 2.4365, 4.1762, 3.7712, 4.0641, 4.0857, 4.0410, 4.0690], + device='cuda:1'), covar=tensor([0.0243, 0.0883, 0.0094, 0.0599, 0.0096, 0.0212, 0.0137, 0.0101], + device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0089, 0.0072, 0.0141, 0.0065, 0.0079, 0.0073, 0.0064], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:05:41,077 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:05:41,121 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:06:03,967 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22251.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:06:11,654 INFO [train.py:892] (1/4) Epoch 12, batch 1850, loss[loss=0.2124, simple_loss=0.2878, pruned_loss=0.06846, over 19569.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2822, pruned_loss=0.07897, over 3948854.53 frames. ], batch size: 53, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:07:10,142 INFO [train.py:892] (1/4) Epoch 13, batch 0, loss[loss=0.2048, simple_loss=0.2689, pruned_loss=0.07034, over 19899.00 frames. 
], tot_loss[loss=0.2048, simple_loss=0.2689, pruned_loss=0.07034, over 19899.00 frames. ], batch size: 113, lr: 1.24e-02, grad_scale: 16.0 +2023-03-28 04:07:10,143 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 04:07:38,664 INFO [train.py:926] (1/4) Epoch 13, validation: loss=0.1745, simple_loss=0.2543, pruned_loss=0.04732, over 2883724.00 frames. +2023-03-28 04:07:38,665 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 04:07:56,625 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22269.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:07:59,085 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0139, 4.2992, 4.3323, 4.2426, 4.0060, 4.2209, 3.8108, 3.8122], + device='cuda:1'), covar=tensor([0.0519, 0.0505, 0.0572, 0.0502, 0.0758, 0.0692, 0.0722, 0.0971], + device='cuda:1'), in_proj_covar=tensor([0.0202, 0.0209, 0.0244, 0.0209, 0.0194, 0.0192, 0.0219, 0.0258], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 04:08:13,810 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22276.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:09:03,393 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22298.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:09:05,488 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22299.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:09:32,910 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.888e+02 4.546e+02 5.479e+02 6.348e+02 1.428e+03, threshold=1.096e+03, percent-clipped=1.0 +2023-03-28 04:09:32,943 INFO [train.py:892] (1/4) Epoch 13, batch 50, loss[loss=0.1906, simple_loss=0.2445, pruned_loss=0.06838, over 19783.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2712, pruned_loss=0.07426, over 892304.74 frames. ], batch size: 154, lr: 1.24e-02, grad_scale: 16.0 +2023-03-28 04:09:46,029 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22317.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:10:03,011 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8812, 2.1822, 2.7957, 3.3098, 3.8035, 4.0930, 4.1139, 4.2414], + device='cuda:1'), covar=tensor([0.0830, 0.1905, 0.1221, 0.0481, 0.0331, 0.0175, 0.0263, 0.0255], + device='cuda:1'), in_proj_covar=tensor([0.0136, 0.0166, 0.0158, 0.0129, 0.0111, 0.0105, 0.0097, 0.0094], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:10:08,862 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22326.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:11:26,820 INFO [train.py:892] (1/4) Epoch 13, batch 100, loss[loss=0.2156, simple_loss=0.2803, pruned_loss=0.07541, over 19728.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2726, pruned_loss=0.07373, over 1571512.31 frames. 
], batch size: 71, lr: 1.24e-02, grad_scale: 16.0 +2023-03-28 04:12:24,491 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22385.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:13:22,665 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.479e+02 4.716e+02 5.729e+02 6.753e+02 1.100e+03, threshold=1.146e+03, percent-clipped=1.0 +2023-03-28 04:13:22,706 INFO [train.py:892] (1/4) Epoch 13, batch 150, loss[loss=0.2137, simple_loss=0.2779, pruned_loss=0.07476, over 19693.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2756, pruned_loss=0.07549, over 2098368.33 frames. ], batch size: 74, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:13:38,225 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8756, 3.1370, 3.1627, 3.9480, 2.6264, 3.2279, 2.6075, 2.3514], + device='cuda:1'), covar=tensor([0.0446, 0.2460, 0.1061, 0.0243, 0.1988, 0.0627, 0.1105, 0.1778], + device='cuda:1'), in_proj_covar=tensor([0.0204, 0.0336, 0.0231, 0.0158, 0.0241, 0.0178, 0.0199, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:13:40,111 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22419.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:14:42,029 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22446.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:15:13,629 INFO [train.py:892] (1/4) Epoch 13, batch 200, loss[loss=0.1775, simple_loss=0.2452, pruned_loss=0.05491, over 19861.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2786, pruned_loss=0.07717, over 2508979.85 frames. ], batch size: 104, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:15:26,977 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22467.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:15:44,893 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0112, 4.6386, 4.7053, 4.4764, 4.9050, 3.1679, 3.8694, 2.4634], + device='cuda:1'), covar=tensor([0.0147, 0.0165, 0.0131, 0.0167, 0.0133, 0.0818, 0.0937, 0.1354], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0119, 0.0098, 0.0115, 0.0102, 0.0120, 0.0131, 0.0116], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 04:16:09,398 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22486.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:16:56,108 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22506.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:17:04,713 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.872e+02 4.579e+02 5.437e+02 6.895e+02 1.289e+03, threshold=1.087e+03, percent-clipped=2.0 +2023-03-28 04:17:04,743 INFO [train.py:892] (1/4) Epoch 13, batch 250, loss[loss=0.1913, simple_loss=0.2486, pruned_loss=0.06698, over 19794.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2786, pruned_loss=0.07717, over 2828887.15 frames. 
], batch size: 191, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:17:55,828 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22532.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:18:46,255 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22554.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:18:57,987 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8381, 1.9170, 2.0429, 1.8626, 1.8765, 1.9321, 1.8490, 2.0807], + device='cuda:1'), covar=tensor([0.0197, 0.0178, 0.0169, 0.0218, 0.0256, 0.0186, 0.0334, 0.0161], + device='cuda:1'), in_proj_covar=tensor([0.0049, 0.0048, 0.0049, 0.0045, 0.0055, 0.0050, 0.0067, 0.0044], + device='cuda:1'), out_proj_covar=tensor([1.0902e-04, 1.0665e-04, 1.1036e-04, 1.0167e-04, 1.2326e-04, 1.1305e-04, + 1.4761e-04, 9.9505e-05], device='cuda:1') +2023-03-28 04:19:01,077 INFO [train.py:892] (1/4) Epoch 13, batch 300, loss[loss=0.2803, simple_loss=0.334, pruned_loss=0.1133, over 19690.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2795, pruned_loss=0.07738, over 3075212.13 frames. ], batch size: 337, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:19:15,607 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-28 04:19:22,904 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22571.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:20:04,974 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6113, 4.6268, 5.1158, 4.6285, 4.1077, 4.8302, 4.7580, 5.2083], + device='cuda:1'), covar=tensor([0.0921, 0.0335, 0.0287, 0.0301, 0.0837, 0.0389, 0.0355, 0.0270], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0189, 0.0185, 0.0197, 0.0184, 0.0194, 0.0190, 0.0176], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:20:14,510 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22593.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:20:52,590 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 4.855e+02 5.580e+02 6.636e+02 1.112e+03, threshold=1.116e+03, percent-clipped=2.0 +2023-03-28 04:20:52,626 INFO [train.py:892] (1/4) Epoch 13, batch 350, loss[loss=0.2401, simple_loss=0.3086, pruned_loss=0.08581, over 19620.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2795, pruned_loss=0.07699, over 3268454.74 frames. ], batch size: 52, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:21:24,687 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22626.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:21:52,265 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22639.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:22:35,986 INFO [train.py:892] (1/4) Epoch 13, batch 400, loss[loss=0.2373, simple_loss=0.2979, pruned_loss=0.08838, over 19712.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2796, pruned_loss=0.07669, over 3418908.39 frames. 
], batch size: 310, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:23:05,956 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22674.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:24:00,928 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22700.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 04:24:25,274 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.862e+02 4.701e+02 5.537e+02 6.698e+02 1.146e+03, threshold=1.107e+03, percent-clipped=1.0 +2023-03-28 04:24:25,322 INFO [train.py:892] (1/4) Epoch 13, batch 450, loss[loss=0.201, simple_loss=0.2642, pruned_loss=0.06892, over 19764.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2811, pruned_loss=0.07744, over 3536587.80 frames. ], batch size: 122, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:25:36,264 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22741.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:26:18,716 INFO [train.py:892] (1/4) Epoch 13, batch 500, loss[loss=0.1952, simple_loss=0.2538, pruned_loss=0.06837, over 19783.00 frames. ], tot_loss[loss=0.218, simple_loss=0.281, pruned_loss=0.0775, over 3628362.13 frames. ], batch size: 163, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:26:27,337 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7388, 4.8530, 5.3008, 4.9033, 4.2947, 4.9917, 4.9189, 5.3942], + device='cuda:1'), covar=tensor([0.1028, 0.0280, 0.0342, 0.0288, 0.0664, 0.0367, 0.0399, 0.0285], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0194, 0.0190, 0.0201, 0.0187, 0.0198, 0.0195, 0.0182], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 04:27:17,060 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22786.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:27:33,407 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22794.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:28:12,206 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 4.675e+02 5.713e+02 7.210e+02 1.341e+03, threshold=1.143e+03, percent-clipped=1.0 +2023-03-28 04:28:12,228 INFO [train.py:892] (1/4) Epoch 13, batch 550, loss[loss=0.2393, simple_loss=0.3007, pruned_loss=0.08894, over 19728.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2808, pruned_loss=0.07831, over 3700423.52 frames. 
], batch size: 291, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:28:45,188 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22825.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:29:01,444 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6040, 3.2568, 3.3740, 3.6376, 3.4158, 3.4205, 3.7286, 3.8124], + device='cuda:1'), covar=tensor([0.0673, 0.0460, 0.0540, 0.0331, 0.0628, 0.0693, 0.0428, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0150, 0.0175, 0.0145, 0.0147, 0.0131, 0.0135, 0.0169], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 04:29:01,508 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22832.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:29:06,145 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22834.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:29:31,768 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3437, 2.7687, 3.3436, 3.1438, 3.3775, 3.5242, 4.1460, 4.5603], + device='cuda:1'), covar=tensor([0.0454, 0.1635, 0.1280, 0.1742, 0.1581, 0.1241, 0.0419, 0.0361], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0218, 0.0238, 0.0235, 0.0262, 0.0228, 0.0181, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:29:54,841 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22855.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:30:05,684 INFO [train.py:892] (1/4) Epoch 13, batch 600, loss[loss=0.2027, simple_loss=0.2749, pruned_loss=0.06524, over 19811.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2811, pruned_loss=0.07803, over 3755469.68 frames. ], batch size: 65, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:30:23,817 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2343, 5.5734, 5.5583, 5.4870, 5.2589, 5.5168, 4.8827, 4.9581], + device='cuda:1'), covar=tensor([0.0333, 0.0390, 0.0490, 0.0409, 0.0489, 0.0510, 0.0694, 0.0983], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0209, 0.0245, 0.0208, 0.0200, 0.0193, 0.0221, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 04:30:27,399 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22871.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:30:47,649 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22880.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:31:02,362 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22886.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:31:18,006 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22893.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:31:58,583 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.701e+02 5.475e+02 6.591e+02 1.011e+03, threshold=1.095e+03, percent-clipped=0.0 +2023-03-28 04:31:58,609 INFO [train.py:892] (1/4) Epoch 13, batch 650, loss[loss=0.1943, simple_loss=0.2456, pruned_loss=0.07147, over 19829.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2801, pruned_loss=0.07736, over 3796683.74 frames. 
], batch size: 127, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:32:16,564 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22919.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:33:06,184 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22941.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:33:50,903 INFO [train.py:892] (1/4) Epoch 13, batch 700, loss[loss=0.2211, simple_loss=0.2804, pruned_loss=0.08091, over 19887.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2782, pruned_loss=0.07654, over 3831839.43 frames. ], batch size: 77, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:35:06,072 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22995.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 04:35:40,981 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 4.675e+02 5.708e+02 7.114e+02 1.590e+03, threshold=1.142e+03, percent-clipped=5.0 +2023-03-28 04:35:41,053 INFO [train.py:892] (1/4) Epoch 13, batch 750, loss[loss=0.1753, simple_loss=0.242, pruned_loss=0.05431, over 19800.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2785, pruned_loss=0.0768, over 3857709.37 frames. ], batch size: 67, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:35:44,813 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.71 vs. limit=5.0 +2023-03-28 04:36:49,830 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23041.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:37:34,841 INFO [train.py:892] (1/4) Epoch 13, batch 800, loss[loss=0.2112, simple_loss=0.2807, pruned_loss=0.07083, over 19686.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2804, pruned_loss=0.07765, over 3879872.24 frames. ], batch size: 82, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:37:55,160 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5985, 2.6313, 2.7297, 1.9854, 2.7480, 2.3601, 2.5754, 2.7752], + device='cuda:1'), covar=tensor([0.0426, 0.0346, 0.0374, 0.0843, 0.0328, 0.0344, 0.0359, 0.0267], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0064, 0.0066, 0.0096, 0.0064, 0.0059, 0.0058, 0.0051], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:38:38,897 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23089.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:39:23,150 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23108.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:39:28,926 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.768e+02 5.306e+02 6.241e+02 1.399e+03, threshold=1.061e+03, percent-clipped=1.0 +2023-03-28 04:39:28,976 INFO [train.py:892] (1/4) Epoch 13, batch 850, loss[loss=0.2278, simple_loss=0.2817, pruned_loss=0.08694, over 19792.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2813, pruned_loss=0.07801, over 3894605.78 frames. 
], batch size: 193, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:40:56,385 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23150.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:41:14,582 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5012, 2.4554, 2.7362, 2.6014, 2.9970, 2.8904, 3.3822, 3.6237], + device='cuda:1'), covar=tensor([0.0620, 0.1515, 0.1395, 0.1777, 0.1356, 0.1249, 0.0477, 0.0469], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0220, 0.0238, 0.0236, 0.0264, 0.0231, 0.0185, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:41:21,516 INFO [train.py:892] (1/4) Epoch 13, batch 900, loss[loss=0.1919, simple_loss=0.2601, pruned_loss=0.06187, over 19754.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2813, pruned_loss=0.07806, over 3906435.66 frames. ], batch size: 100, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:41:39,632 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23169.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:42:09,302 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23181.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:42:21,991 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3553, 2.5445, 3.4209, 3.4831, 4.0462, 4.4899, 4.3127, 4.5668], + device='cuda:1'), covar=tensor([0.0639, 0.1683, 0.0954, 0.0477, 0.0249, 0.0134, 0.0183, 0.0280], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0168, 0.0160, 0.0132, 0.0112, 0.0106, 0.0099, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:43:15,284 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.371e+02 4.496e+02 5.311e+02 6.592e+02 1.240e+03, threshold=1.062e+03, percent-clipped=1.0 +2023-03-28 04:43:15,311 INFO [train.py:892] (1/4) Epoch 13, batch 950, loss[loss=0.2328, simple_loss=0.2963, pruned_loss=0.0846, over 19735.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2791, pruned_loss=0.07672, over 3915976.62 frames. ], batch size: 134, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:43:28,310 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23217.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:43:53,026 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23229.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:44:48,185 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7877, 2.6325, 2.9983, 2.7456, 3.1665, 3.0910, 3.5481, 3.9213], + device='cuda:1'), covar=tensor([0.0553, 0.1538, 0.1396, 0.1827, 0.1645, 0.1308, 0.0505, 0.0483], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0220, 0.0240, 0.0236, 0.0265, 0.0230, 0.0186, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:45:02,585 INFO [train.py:892] (1/4) Epoch 13, batch 1000, loss[loss=0.3233, simple_loss=0.3671, pruned_loss=0.1398, over 19440.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2798, pruned_loss=0.07758, over 3923728.81 frames. 
], batch size: 412, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:45:39,955 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23278.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:46:09,924 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23290.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:46:20,583 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23295.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:46:55,470 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 4.704e+02 5.526e+02 6.494e+02 9.984e+02, threshold=1.105e+03, percent-clipped=0.0 +2023-03-28 04:46:55,525 INFO [train.py:892] (1/4) Epoch 13, batch 1050, loss[loss=0.2102, simple_loss=0.2717, pruned_loss=0.07438, over 19822.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2802, pruned_loss=0.07798, over 3930154.03 frames. ], batch size: 202, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:48:08,227 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23343.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:48:49,792 INFO [train.py:892] (1/4) Epoch 13, batch 1100, loss[loss=0.2082, simple_loss=0.2803, pruned_loss=0.06803, over 19838.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2813, pruned_loss=0.07889, over 3934091.29 frames. ], batch size: 58, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:50:19,343 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.87 vs. limit=2.0 +2023-03-28 04:50:43,373 INFO [train.py:892] (1/4) Epoch 13, batch 1150, loss[loss=0.4135, simple_loss=0.4397, pruned_loss=0.1937, over 19216.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2806, pruned_loss=0.07837, over 3938051.55 frames. ], batch size: 452, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:50:45,468 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.703e+02 4.708e+02 5.742e+02 7.091e+02 1.301e+03, threshold=1.148e+03, percent-clipped=2.0 +2023-03-28 04:51:19,204 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-28 04:51:26,177 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2739, 5.6324, 5.8995, 5.6745, 5.5491, 5.2450, 5.3652, 5.4806], + device='cuda:1'), covar=tensor([0.1440, 0.0881, 0.0924, 0.0924, 0.0649, 0.0954, 0.2177, 0.1952], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0259, 0.0312, 0.0241, 0.0229, 0.0226, 0.0301, 0.0331], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 04:52:12,655 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23450.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:52:33,754 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4118, 4.4397, 4.8149, 4.4192, 4.1457, 4.6150, 4.4633, 4.9422], + device='cuda:1'), covar=tensor([0.0889, 0.0292, 0.0282, 0.0312, 0.0709, 0.0411, 0.0357, 0.0274], + device='cuda:1'), in_proj_covar=tensor([0.0248, 0.0188, 0.0185, 0.0193, 0.0182, 0.0192, 0.0187, 0.0175], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:52:34,810 INFO [train.py:892] (1/4) Epoch 13, batch 1200, loss[loss=0.1968, simple_loss=0.2609, pruned_loss=0.06635, over 19738.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2803, pruned_loss=0.0779, over 3940718.53 frames. 
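], batch size: 92, lr: 1.21e-02, grad_scale: 16.0

Note: the [optim.py:368] lines report five points of the recent gradient-norm distribution (min, 25%, 50%, 75%, max). The clipping threshold tracks the median: with Clipping_scale=2.0, every logged threshold is twice the middle quartile (e.g. 2.0 * 5.526e+02 ≈ 1.105e+03 in the batch-1050 line above), and "percent-clipped" is the share of recent batches whose norm exceeded it. A sketch of that bookkeeping, assuming a simple sliding window of norms rather than whatever buffer the optimizer actually keeps:

```python
import torch

def clipping_report(recent_norms: torch.Tensor, clipping_scale: float = 2.0):
    """recent_norms: 1-D float tensor of per-batch gradient norms."""
    q = torch.quantile(recent_norms,
                       torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2]  # scale times the median norm
    percent_clipped = 100.0 * (recent_norms > threshold).float().mean()
    return q, threshold, percent_clipped
```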
+2023-03-28 04:52:42,285 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23464.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:52:42,565 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8943, 3.9418, 2.2243, 4.2025, 4.2921, 1.6916, 3.4758, 3.1752], + device='cuda:1'), covar=tensor([0.0603, 0.0686, 0.2738, 0.0575, 0.0328, 0.3146, 0.1083, 0.0704], + device='cuda:1'), in_proj_covar=tensor([0.0198, 0.0223, 0.0212, 0.0211, 0.0175, 0.0198, 0.0223, 0.0163], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 04:53:21,344 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23481.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:53:21,584 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1880, 4.1648, 2.4233, 4.4241, 4.6299, 1.8514, 3.8035, 3.4290], + device='cuda:1'), covar=tensor([0.0567, 0.0728, 0.2678, 0.0646, 0.0422, 0.2924, 0.0982, 0.0667], + device='cuda:1'), in_proj_covar=tensor([0.0199, 0.0224, 0.0212, 0.0212, 0.0177, 0.0198, 0.0224, 0.0163], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 04:53:39,045 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23488.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:53:58,256 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23498.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:54:26,402 INFO [train.py:892] (1/4) Epoch 13, batch 1250, loss[loss=0.1834, simple_loss=0.2416, pruned_loss=0.06263, over 19827.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2799, pruned_loss=0.07788, over 3943073.03 frames. ], batch size: 127, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:54:28,514 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.878e+02 4.638e+02 5.638e+02 7.017e+02 1.329e+03, threshold=1.128e+03, percent-clipped=4.0 +2023-03-28 04:55:03,136 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23529.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:55:48,712 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23549.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 04:56:12,843 INFO [train.py:892] (1/4) Epoch 13, batch 1300, loss[loss=0.2041, simple_loss=0.2606, pruned_loss=0.07382, over 19774.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2782, pruned_loss=0.0768, over 3946486.87 frames.
], batch size: 213, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:56:39,788 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23573.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:56:43,676 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0701, 4.1173, 4.4741, 4.0488, 3.8996, 4.3254, 4.1154, 4.5938], + device='cuda:1'), covar=tensor([0.0933, 0.0342, 0.0350, 0.0409, 0.0853, 0.0456, 0.0436, 0.0301], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0192, 0.0190, 0.0199, 0.0186, 0.0196, 0.0191, 0.0179], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:56:43,886 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6273, 2.6635, 3.9799, 2.9398, 3.4755, 3.3891, 2.1609, 2.2285], + device='cuda:1'), covar=tensor([0.0844, 0.3039, 0.0462, 0.0857, 0.1264, 0.1060, 0.2001, 0.2389], + device='cuda:1'), in_proj_covar=tensor([0.0311, 0.0346, 0.0277, 0.0226, 0.0336, 0.0278, 0.0299, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 04:57:08,845 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23585.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:58:05,880 INFO [train.py:892] (1/4) Epoch 13, batch 1350, loss[loss=0.2039, simple_loss=0.2733, pruned_loss=0.06727, over 19891.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2787, pruned_loss=0.07685, over 3947264.62 frames. ], batch size: 47, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 04:58:08,080 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.191e+02 4.401e+02 5.493e+02 6.790e+02 1.400e+03, threshold=1.099e+03, percent-clipped=0.0 +2023-03-28 04:59:11,931 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-28 04:59:59,156 INFO [train.py:892] (1/4) Epoch 13, batch 1400, loss[loss=0.2142, simple_loss=0.2701, pruned_loss=0.0792, over 19845.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.278, pruned_loss=0.07684, over 3947424.86 frames. ], batch size: 145, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:01:48,047 INFO [train.py:892] (1/4) Epoch 13, batch 1450, loss[loss=0.1988, simple_loss=0.2694, pruned_loss=0.06409, over 19799.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.278, pruned_loss=0.07613, over 3947507.89 frames. ], batch size: 74, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:01:50,165 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+02 4.743e+02 5.551e+02 6.654e+02 1.229e+03, threshold=1.110e+03, percent-clipped=4.0 +2023-03-28 05:03:37,022 INFO [train.py:892] (1/4) Epoch 13, batch 1500, loss[loss=0.1973, simple_loss=0.257, pruned_loss=0.06881, over 19800.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2768, pruned_loss=0.07527, over 3948877.64 frames. 
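], batch size: 150, lr: 1.20e-02, grad_scale: 16.0

Note: the slow decay of "lr" within and across epochs (1.22e-02 early in epoch 13, 1.20e-02 by here) is consistent with an Eden-style schedule driven by base_lr=0.05, lr_batches=5000 and lr_epochs=3.5 from the hyperparameters, with the epoch term counting completed epochs. A sketch, assumed from those logged values rather than quoted from the recipe, that reproduces them to the printed precision:

```python
def eden_lr(base_lr: float, batch: int, epochs_done: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    """Eden-like decay in both batch count and epoch count."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epochs_done ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(f"{eden_lr(0.05, 23000, 12):.2e}")  # 1.22e-02, as logged early in epoch 13
print(f"{eden_lr(0.05, 23700, 12):.2e}")  # 1.20e-02, as logged around batch 1500
```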
+2023-03-28 05:03:45,780 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23764.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:05:16,513 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1920, 2.8455, 4.4279, 3.7617, 4.0920, 4.2943, 4.2198, 4.0471], + device='cuda:1'), covar=tensor([0.0215, 0.0719, 0.0090, 0.0923, 0.0126, 0.0204, 0.0157, 0.0130], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0090, 0.0073, 0.0143, 0.0067, 0.0082, 0.0076, 0.0067], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 05:05:29,859 INFO [train.py:892] (1/4) Epoch 13, batch 1550, loss[loss=0.2826, simple_loss=0.3399, pruned_loss=0.1127, over 19647.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2786, pruned_loss=0.07639, over 3947965.71 frames. ], batch size: 343, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:05:31,571 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 4.439e+02 5.194e+02 6.421e+02 1.590e+03, threshold=1.039e+03, percent-clipped=3.0 +2023-03-28 05:05:32,334 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23812.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:06:39,319 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23844.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 05:07:17,788 INFO [train.py:892] (1/4) Epoch 13, batch 1600, loss[loss=0.1894, simple_loss=0.2551, pruned_loss=0.06183, over 19815.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2784, pruned_loss=0.07623, over 3948898.84 frames. ], batch size: 103, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:07:30,575 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-03-28 05:07:46,748 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23873.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:08:11,984 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23885.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:09:11,730 INFO [train.py:892] (1/4) Epoch 13, batch 1650, loss[loss=0.2107, simple_loss=0.2769, pruned_loss=0.07228, over 19641.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2787, pruned_loss=0.07696, over 3947879.50 frames. ], batch size: 68, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:09:13,798 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.923e+02 4.526e+02 5.652e+02 6.938e+02 1.810e+03, threshold=1.130e+03, percent-clipped=2.0 +2023-03-28 05:09:21,877 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.06 vs.
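limit=5.0

Note: the [scaling.py:679] Whitening lines compare a whiteness statistic of a layer's activations against a limit. A natural reading, assumed here and modeled on icefall's Whiten module, is mean(eig²)/mean(eig)² of the per-group feature covariance: exactly 1.0 for perfectly white features, larger when a few directions dominate, with the penalty only engaging once the metric exceeds the logged limit. A sketch:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """x: (..., num_channels). Returns the mean over groups of
    mean(eigval^2) / mean(eigval)^2 of each group's covariance,
    computed via traces so no eigendecomposition is needed."""
    x = x.reshape(-1, x.shape[-1])
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    x = x.reshape(num_frames, num_groups, -1).transpose(0, 1)
    covar = torch.matmul(x.transpose(1, 2), x) / num_frames  # (g, c, c)
    c = covar.shape[-1]
    mean_eig = covar.diagonal(dim1=1, dim2=2).sum(-1) / c            # trace/c
    mean_eig_sq = (covar @ covar).diagonal(dim1=1, dim2=2).sum(-1) / c
    return (mean_eig_sq / mean_eig ** 2).mean()

# white noise scores ~1.0, comfortably below limit=2.0 or limit=5.0
print(whitening_metric(torch.randn(10000, 384), num_groups=1))
```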
+2023-03-28 05:09:35,331 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23921.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:10:00,326 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9744, 4.4619, 4.5622, 4.9522, 4.5701, 5.1404, 5.0678, 5.2259], + device='cuda:1'), covar=tensor([0.0632, 0.0353, 0.0408, 0.0249, 0.0526, 0.0242, 0.0302, 0.0251], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0153, 0.0178, 0.0147, 0.0149, 0.0132, 0.0139, 0.0170], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 05:10:02,312 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23933.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:10:47,123 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-28 05:11:04,338 INFO [train.py:892] (1/4) Epoch 13, batch 1700, loss[loss=0.2047, simple_loss=0.2698, pruned_loss=0.06976, over 19827.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2784, pruned_loss=0.07667, over 3948865.75 frames. ], batch size: 76, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:12:55,573 INFO [train.py:892] (1/4) Epoch 13, batch 1750, loss[loss=0.1929, simple_loss=0.261, pruned_loss=0.06241, over 19741.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2775, pruned_loss=0.07594, over 3947545.41 frames. ], batch size: 106, lr: 1.19e-02, grad_scale: 16.0 +2023-03-28 05:12:57,430 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.884e+02 4.345e+02 5.223e+02 6.698e+02 1.111e+03, threshold=1.045e+03, percent-clipped=0.0 +2023-03-28 05:13:46,294 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24037.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:13:48,359 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3683, 3.4231, 3.6695, 2.9915, 4.0360, 2.9107, 3.1510, 3.6874], + device='cuda:1'), covar=tensor([0.0679, 0.0340, 0.0636, 0.0598, 0.0289, 0.0377, 0.0376, 0.0376], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0063, 0.0065, 0.0093, 0.0061, 0.0060, 0.0057, 0.0051], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 05:14:14,816 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.6372, 1.8400, 1.6019, 1.0222, 1.7454, 1.7451, 1.7006, 1.7436], + device='cuda:1'), covar=tensor([0.0250, 0.0223, 0.0268, 0.0521, 0.0353, 0.0173, 0.0199, 0.0186], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0065, 0.0072, 0.0080, 0.0084, 0.0057, 0.0053, 0.0057], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 05:14:30,552 INFO [train.py:892] (1/4) Epoch 13, batch 1800, loss[loss=0.2401, simple_loss=0.2986, pruned_loss=0.09075, over 19817.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2793, pruned_loss=0.07684, over 3947908.06 frames. ], batch size: 229, lr: 1.19e-02, grad_scale: 16.0 +2023-03-28 05:15:06,442 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-28 05:15:37,882 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24098.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:16:01,766 INFO [train.py:892] (1/4) Epoch 13, batch 1850, loss[loss=0.2326, simple_loss=0.307, pruned_loss=0.0791, over 19854.00 frames.
], tot_loss[loss=0.2168, simple_loss=0.2806, pruned_loss=0.07648, over 3947732.81 frames. ], batch size: 58, lr: 1.19e-02, grad_scale: 16.0 +2023-03-28 05:16:03,695 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.684e+02 5.642e+02 7.102e+02 1.280e+03, threshold=1.128e+03, percent-clipped=3.0 +2023-03-28 05:17:07,742 INFO [train.py:892] (1/4) Epoch 14, batch 0, loss[loss=0.1972, simple_loss=0.267, pruned_loss=0.06374, over 19720.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.267, pruned_loss=0.06374, over 19720.00 frames. ], batch size: 104, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:17:07,742 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 05:17:42,313 INFO [train.py:926] (1/4) Epoch 14, validation: loss=0.1725, simple_loss=0.2522, pruned_loss=0.04642, over 2883724.00 frames. +2023-03-28 05:17:42,315 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 05:18:52,106 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24144.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 05:19:45,552 INFO [train.py:892] (1/4) Epoch 14, batch 50, loss[loss=0.2527, simple_loss=0.3097, pruned_loss=0.09779, over 19665.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2722, pruned_loss=0.07523, over 891950.06 frames. ], batch size: 299, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:20:44,925 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24192.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:21:30,230 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 4.338e+02 5.278e+02 6.303e+02 1.236e+03, threshold=1.056e+03, percent-clipped=1.0 +2023-03-28 05:21:38,380 INFO [train.py:892] (1/4) Epoch 14, batch 100, loss[loss=0.2026, simple_loss=0.2722, pruned_loss=0.06649, over 19879.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2747, pruned_loss=0.07496, over 1568837.63 frames. ], batch size: 84, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:21:47,273 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6035, 3.6026, 3.8040, 4.6336, 3.1773, 3.3567, 3.0510, 2.5968], + device='cuda:1'), covar=tensor([0.0422, 0.2686, 0.0921, 0.0296, 0.2088, 0.0882, 0.1144, 0.1844], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0333, 0.0229, 0.0163, 0.0242, 0.0181, 0.0201, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 05:23:30,626 INFO [train.py:892] (1/4) Epoch 14, batch 150, loss[loss=0.209, simple_loss=0.2802, pruned_loss=0.06892, over 19929.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2746, pruned_loss=0.07392, over 2095581.85 frames. 
], batch size: 51, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:24:41,495 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4096, 4.1614, 4.1655, 3.9950, 4.3056, 3.1488, 3.6698, 2.1965], + device='cuda:1'), covar=tensor([0.0210, 0.0207, 0.0154, 0.0180, 0.0180, 0.0829, 0.0699, 0.1444], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0122, 0.0102, 0.0116, 0.0104, 0.0121, 0.0130, 0.0116], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 05:25:15,400 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.679e+02 4.473e+02 5.483e+02 6.976e+02 1.116e+03, threshold=1.097e+03, percent-clipped=1.0 +2023-03-28 05:25:23,191 INFO [train.py:892] (1/4) Epoch 14, batch 200, loss[loss=0.1962, simple_loss=0.2685, pruned_loss=0.06191, over 19844.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2761, pruned_loss=0.07439, over 2507582.39 frames. ], batch size: 60, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:25:29,326 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.95 vs. limit=5.0 +2023-03-28 05:25:48,500 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0930, 2.9406, 4.3113, 3.8352, 4.1465, 4.2903, 4.2426, 4.0782], + device='cuda:1'), covar=tensor([0.0208, 0.0643, 0.0090, 0.0697, 0.0105, 0.0195, 0.0117, 0.0111], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0092, 0.0074, 0.0144, 0.0068, 0.0083, 0.0077, 0.0069], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 05:27:13,856 INFO [train.py:892] (1/4) Epoch 14, batch 250, loss[loss=0.188, simple_loss=0.2567, pruned_loss=0.05965, over 19802.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2773, pruned_loss=0.07538, over 2826793.74 frames. ], batch size: 111, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:28:15,412 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24393.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:28:53,725 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.621e+02 4.746e+02 5.640e+02 6.808e+02 1.324e+03, threshold=1.128e+03, percent-clipped=3.0 +2023-03-28 05:29:04,961 INFO [train.py:892] (1/4) Epoch 14, batch 300, loss[loss=0.1893, simple_loss=0.2564, pruned_loss=0.06111, over 19777.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2764, pruned_loss=0.07469, over 3076630.53 frames. ], batch size: 116, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:30:56,557 INFO [train.py:892] (1/4) Epoch 14, batch 350, loss[loss=0.1853, simple_loss=0.2524, pruned_loss=0.05915, over 19882.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2753, pruned_loss=0.0742, over 3271097.47 frames. ], batch size: 92, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:31:37,821 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-28 05:32:38,974 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 4.463e+02 5.276e+02 6.433e+02 1.148e+03, threshold=1.055e+03, percent-clipped=1.0 +2023-03-28 05:32:48,543 INFO [train.py:892] (1/4) Epoch 14, batch 400, loss[loss=0.1967, simple_loss=0.2627, pruned_loss=0.06534, over 19867.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2748, pruned_loss=0.07407, over 3422972.90 frames. 
], batch size: 64, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:34:00,587 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24547.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:34:16,941 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7838, 3.4331, 3.4178, 3.1420, 3.9108, 3.9363, 4.6884, 5.1386], + device='cuda:1'), covar=tensor([0.0430, 0.1379, 0.1374, 0.2099, 0.1760, 0.1217, 0.0404, 0.0355], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0224, 0.0245, 0.0240, 0.0273, 0.0236, 0.0192, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 05:34:41,088 INFO [train.py:892] (1/4) Epoch 14, batch 450, loss[loss=0.1842, simple_loss=0.2508, pruned_loss=0.05877, over 19859.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2757, pruned_loss=0.07483, over 3540662.32 frames. ], batch size: 99, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:34:48,750 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24569.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:34:54,783 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-28 05:35:16,425 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2434, 4.0803, 4.1088, 3.8522, 4.2402, 3.0473, 3.4659, 2.0944], + device='cuda:1'), covar=tensor([0.0248, 0.0221, 0.0187, 0.0208, 0.0180, 0.0884, 0.0857, 0.1667], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0124, 0.0103, 0.0118, 0.0105, 0.0123, 0.0134, 0.0119], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 05:36:16,332 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24608.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:36:24,998 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.188e+02 4.392e+02 5.020e+02 6.221e+02 1.818e+03, threshold=1.004e+03, percent-clipped=1.0 +2023-03-28 05:36:33,499 INFO [train.py:892] (1/4) Epoch 14, batch 500, loss[loss=0.1997, simple_loss=0.2583, pruned_loss=0.07056, over 19864.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2761, pruned_loss=0.07518, over 3630037.98 frames. ], batch size: 122, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:37:04,272 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24630.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:38:20,320 INFO [train.py:892] (1/4) Epoch 14, batch 550, loss[loss=0.1986, simple_loss=0.2642, pruned_loss=0.06647, over 19891.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2756, pruned_loss=0.07511, over 3700239.47 frames. 
], batch size: 47, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:39:20,330 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24693.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:39:37,833 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8643, 3.0593, 3.3025, 3.9045, 2.5525, 3.0341, 2.6548, 2.2983], + device='cuda:1'), covar=tensor([0.0521, 0.2489, 0.0992, 0.0303, 0.2279, 0.0802, 0.1240, 0.1944], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0335, 0.0231, 0.0163, 0.0239, 0.0183, 0.0204, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 05:39:57,874 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.918e+02 4.778e+02 5.297e+02 6.778e+02 1.181e+03, threshold=1.059e+03, percent-clipped=3.0 +2023-03-28 05:40:07,595 INFO [train.py:892] (1/4) Epoch 14, batch 600, loss[loss=0.1773, simple_loss=0.2509, pruned_loss=0.05184, over 19595.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2755, pruned_loss=0.07447, over 3755659.55 frames. ], batch size: 44, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:40:47,873 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5573, 2.5063, 3.9687, 2.7321, 3.2568, 3.1969, 1.9558, 2.2046], + device='cuda:1'), covar=tensor([0.1003, 0.3620, 0.0503, 0.0962, 0.1954, 0.1191, 0.2551, 0.2802], + device='cuda:1'), in_proj_covar=tensor([0.0318, 0.0350, 0.0288, 0.0232, 0.0344, 0.0288, 0.0307, 0.0282], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 05:41:01,101 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24741.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:41:54,282 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24765.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:41:55,326 INFO [train.py:892] (1/4) Epoch 14, batch 650, loss[loss=0.2048, simple_loss=0.2672, pruned_loss=0.07116, over 19869.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2754, pruned_loss=0.07448, over 3799696.10 frames. ], batch size: 157, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:42:02,096 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9449, 4.5728, 4.6910, 4.4543, 4.8967, 3.3055, 3.9717, 2.4882], + device='cuda:1'), covar=tensor([0.0154, 0.0183, 0.0128, 0.0162, 0.0109, 0.0752, 0.0776, 0.1365], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0123, 0.0103, 0.0116, 0.0104, 0.0122, 0.0133, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 05:43:34,897 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 4.693e+02 5.521e+02 6.311e+02 1.002e+03, threshold=1.104e+03, percent-clipped=0.0 +2023-03-28 05:43:42,884 INFO [train.py:892] (1/4) Epoch 14, batch 700, loss[loss=0.2186, simple_loss=0.2803, pruned_loss=0.07845, over 19840.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2767, pruned_loss=0.07515, over 3831792.58 frames. 
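], batch size: 145, lr: 1.13e-02, grad_scale: 16.0

Note: every [zipformer.py:625] line is one encoder stack deciding, per batch, whether to skip ("drop") whole layers; each stack has its own warmup window ([666.7, 1333.3] up to [3333.3, 4000.0]). With batch_count near 25000 those windows are long past, so num_to_drop is almost always 0, with an occasional single dropped layer from a small residual rate. A hypothetical sketch of that decision; the initial and residual rates below are assumed, not taken from the log:

```python
import random

def layers_to_drop(num_layers: int, batch_count: float,
                   warmup_begin: float, warmup_end: float,
                   initial_rate: float = 0.5, final_rate: float = 0.05,
                   rng: random.Random = random.Random(0)) -> set:
    """Per-batch layer-skipping decision for one encoder stack."""
    if batch_count <= warmup_begin:
        rate = initial_rate
    elif batch_count >= warmup_end:
        rate = final_rate  # small residual rate -> mostly num_to_drop=0
    else:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        rate = initial_rate + frac * (final_rate - initial_rate)
    return {i for i in range(num_layers) if rng.random() < rate}

drop = layers_to_drop(4, batch_count=24826.0,
                      warmup_begin=3333.3, warmup_end=4000.0)
print(f"num_to_drop={len(drop)}, layers_to_drop={drop or set()}")
```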
+2023-03-28 05:44:08,864 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24826.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:44:55,417 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24848.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:45:36,126 INFO [train.py:892] (1/4) Epoch 14, batch 750, loss[loss=0.1995, simple_loss=0.2647, pruned_loss=0.06719, over 19797.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2764, pruned_loss=0.07517, over 3858284.22 frames. ], batch size: 211, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:46:58,200 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24903.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:47:10,878 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24909.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:47:18,004 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.878e+02 4.189e+02 5.333e+02 6.516e+02 1.195e+03, threshold=1.067e+03, percent-clipped=1.0 +2023-03-28 05:47:26,454 INFO [train.py:892] (1/4) Epoch 14, batch 800, loss[loss=0.2718, simple_loss=0.3256, pruned_loss=0.109, over 19689.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2777, pruned_loss=0.07561, over 3879000.09 frames. ], batch size: 337, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:47:45,222 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24925.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:48:55,784 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0574, 4.6551, 4.5988, 5.0288, 4.6576, 5.2918, 5.1638, 5.3341], + device='cuda:1'), covar=tensor([0.0613, 0.0320, 0.0449, 0.0267, 0.0573, 0.0298, 0.0350, 0.0273], + device='cuda:1'), in_proj_covar=tensor([0.0131, 0.0150, 0.0176, 0.0146, 0.0147, 0.0131, 0.0135, 0.0169], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 05:48:58,331 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-28 05:49:15,225 INFO [train.py:892] (1/4) Epoch 14, batch 850, loss[loss=0.1868, simple_loss=0.2674, pruned_loss=0.0531, over 19832.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2768, pruned_loss=0.07479, over 3895523.62 frames. ], batch size: 75, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:50:22,811 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24995.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:51:00,273 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.835e+02 4.699e+02 5.581e+02 6.608e+02 1.039e+03, threshold=1.116e+03, percent-clipped=0.0 +2023-03-28 05:51:08,783 INFO [train.py:892] (1/4) Epoch 14, batch 900, loss[loss=0.2088, simple_loss=0.283, pruned_loss=0.06737, over 19571.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2758, pruned_loss=0.07436, over 3907733.07 frames. ], batch size: 53, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:52:39,954 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25056.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 05:52:57,339 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-28 05:53:00,474 INFO [train.py:892] (1/4) Epoch 14, batch 950, loss[loss=0.1944, simple_loss=0.2749, pruned_loss=0.05691, over 19602.00 frames.
], tot_loss[loss=0.2122, simple_loss=0.2759, pruned_loss=0.07424, over 3916421.86 frames. ], batch size: 50, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:53:31,967 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9728, 3.8257, 3.8008, 3.6495, 3.9070, 2.9223, 3.2374, 1.9054], + device='cuda:1'), covar=tensor([0.0191, 0.0190, 0.0144, 0.0172, 0.0140, 0.0895, 0.0764, 0.1497], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0124, 0.0103, 0.0118, 0.0105, 0.0123, 0.0133, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 05:54:42,077 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.968e+02 4.212e+02 5.107e+02 6.466e+02 1.651e+03, threshold=1.021e+03, percent-clipped=2.0 +2023-03-28 05:54:50,394 INFO [train.py:892] (1/4) Epoch 14, batch 1000, loss[loss=0.1937, simple_loss=0.2626, pruned_loss=0.06242, over 19883.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2749, pruned_loss=0.07376, over 3924728.01 frames. ], batch size: 71, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:55:04,840 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25121.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:55:54,053 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.5614, 5.0902, 5.2055, 4.9696, 5.4041, 3.4700, 4.2902, 3.1167], + device='cuda:1'), covar=tensor([0.0180, 0.0153, 0.0135, 0.0158, 0.0135, 0.0769, 0.0874, 0.1233], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0125, 0.0105, 0.0120, 0.0106, 0.0125, 0.0135, 0.0120], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 05:56:13,886 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.75 vs. limit=2.0 +2023-03-28 05:56:40,711 INFO [train.py:892] (1/4) Epoch 14, batch 1050, loss[loss=0.1901, simple_loss=0.2575, pruned_loss=0.06135, over 19788.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2761, pruned_loss=0.07416, over 3931110.61 frames. ], batch size: 105, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:58:05,320 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25203.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:58:07,510 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25204.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:58:23,811 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 4.493e+02 5.579e+02 6.984e+02 1.281e+03, threshold=1.116e+03, percent-clipped=2.0 +2023-03-28 05:58:32,623 INFO [train.py:892] (1/4) Epoch 14, batch 1100, loss[loss=0.2113, simple_loss=0.2897, pruned_loss=0.06644, over 19847.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2768, pruned_loss=0.07445, over 3935860.95 frames. ], batch size: 56, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 05:58:53,618 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25225.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:59:55,687 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25251.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:00:27,190 INFO [train.py:892] (1/4) Epoch 14, batch 1150, loss[loss=0.2939, simple_loss=0.3399, pruned_loss=0.1239, over 19646.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.277, pruned_loss=0.0754, over 3938983.63 frames. 
], batch size: 343, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 06:00:46,038 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25273.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:02:10,601 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 4.700e+02 5.345e+02 6.282e+02 1.184e+03, threshold=1.069e+03, percent-clipped=1.0 +2023-03-28 06:02:18,770 INFO [train.py:892] (1/4) Epoch 14, batch 1200, loss[loss=0.188, simple_loss=0.2543, pruned_loss=0.06082, over 19783.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2756, pruned_loss=0.07426, over 3941827.10 frames. ], batch size: 66, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 06:03:15,022 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0208, 4.0961, 2.4031, 4.4209, 4.5919, 1.8429, 3.8080, 3.2648], + device='cuda:1'), covar=tensor([0.0649, 0.0839, 0.2699, 0.0633, 0.0454, 0.3002, 0.0951, 0.0765], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0229, 0.0215, 0.0223, 0.0188, 0.0199, 0.0227, 0.0168], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 06:03:35,918 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25351.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 06:03:40,978 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-28 06:03:52,952 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4076, 3.6052, 3.7994, 4.5791, 3.0544, 3.2466, 3.1247, 2.7700], + device='cuda:1'), covar=tensor([0.0486, 0.2193, 0.0812, 0.0253, 0.2010, 0.0869, 0.0972, 0.1624], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0336, 0.0231, 0.0164, 0.0242, 0.0184, 0.0204, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:04:08,284 INFO [train.py:892] (1/4) Epoch 14, batch 1250, loss[loss=0.2266, simple_loss=0.2833, pruned_loss=0.08495, over 19764.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2752, pruned_loss=0.07383, over 3944092.44 frames. ], batch size: 244, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:04:36,602 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6356, 2.7599, 3.1288, 2.3712, 3.0238, 2.4990, 2.7740, 2.9035], + device='cuda:1'), covar=tensor([0.0525, 0.0330, 0.0305, 0.0720, 0.0319, 0.0334, 0.0400, 0.0259], + device='cuda:1'), in_proj_covar=tensor([0.0062, 0.0065, 0.0066, 0.0095, 0.0062, 0.0061, 0.0059, 0.0052], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:05:51,974 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 4.665e+02 5.361e+02 6.465e+02 1.187e+03, threshold=1.072e+03, percent-clipped=1.0 +2023-03-28 06:06:00,940 INFO [train.py:892] (1/4) Epoch 14, batch 1300, loss[loss=0.1819, simple_loss=0.2533, pruned_loss=0.05528, over 19765.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2752, pruned_loss=0.07346, over 3946153.21 frames. 
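], batch size: 119, lr: 1.12e-02, grad_scale: 32.0

Note: the step from grad_scale: 16.0 to grad_scale: 32.0 at batch 1250 above is the fp16 loss scaler at work (use_fp16=True in the hyperparameters): torch.cuda.amp.GradScaler doubles its scale after growth_interval consecutive overflow-free optimizer steps and halves it when an overflow is detected. A minimal runnable sketch; the init_scale here is illustrative, since the recipe's actual initial value and intervals are not shown in the log:

```python
import torch

model = torch.nn.Linear(10, 1).cuda()
opt = torch.optim.SGD(model.parameters(), lr=0.1)
scaler = torch.cuda.amp.GradScaler(init_scale=16.0, growth_interval=2000)

for step in range(2001):
    opt.zero_grad()
    with torch.cuda.amp.autocast():
        loss = model(torch.randn(8, 10, device="cuda")).pow(2).mean()
    scaler.scale(loss).backward()
    scaler.step(opt)   # skipped internally if grads held inf/nan
    scaler.update()    # grows or backs off the scale
print(scaler.get_scale())  # 32.0 after 2000 clean steps
```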
+2023-03-28 06:06:14,975 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25421.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:06:31,081 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25428.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:06:49,715 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2296, 1.5362, 1.8168, 2.4012, 2.7364, 2.8775, 2.7127, 2.8657], + device='cuda:1'), covar=tensor([0.0870, 0.1983, 0.1521, 0.0627, 0.0413, 0.0273, 0.0356, 0.0376], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0169, 0.0169, 0.0133, 0.0116, 0.0110, 0.0103, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:07:04,613 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5227, 4.8276, 4.8497, 4.7833, 4.5252, 4.8078, 4.2526, 4.4080], + device='cuda:1'), covar=tensor([0.0446, 0.0430, 0.0531, 0.0487, 0.0661, 0.0573, 0.0664, 0.0976], + device='cuda:1'), in_proj_covar=tensor([0.0216, 0.0219, 0.0252, 0.0216, 0.0209, 0.0201, 0.0226, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:07:56,745 INFO [train.py:892] (1/4) Epoch 14, batch 1350, loss[loss=0.2161, simple_loss=0.2771, pruned_loss=0.07759, over 19769.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2747, pruned_loss=0.07334, over 3947477.94 frames. ], batch size: 233, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:08:04,486 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25469.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:08:50,219 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25489.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 06:09:10,671 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.47 vs. limit=5.0 +2023-03-28 06:09:23,714 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25504.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:09:38,484 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.588e+02 5.403e+02 6.911e+02 1.423e+03, threshold=1.081e+03, percent-clipped=2.0 +2023-03-28 06:09:46,915 INFO [train.py:892] (1/4) Epoch 14, batch 1400, loss[loss=0.2073, simple_loss=0.2768, pruned_loss=0.0689, over 19885.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2745, pruned_loss=0.0731, over 3947630.52 frames. ], batch size: 84, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:10:42,603 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3842, 2.6913, 2.7413, 2.6273, 2.3015, 2.4971, 2.4284, 2.8169], + device='cuda:1'), covar=tensor([0.0187, 0.0202, 0.0211, 0.0152, 0.0312, 0.0265, 0.0325, 0.0263], + device='cuda:1'), in_proj_covar=tensor([0.0053, 0.0051, 0.0056, 0.0047, 0.0059, 0.0055, 0.0073, 0.0050], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-28 06:11:11,929 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25552.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:11:41,965 INFO [train.py:892] (1/4) Epoch 14, batch 1450, loss[loss=0.3562, simple_loss=0.4127, pruned_loss=0.1498, over 17982.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2762, pruned_loss=0.07403, over 3945801.92 frames.
], batch size: 633, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:12:07,338 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3683, 3.3126, 3.2573, 3.0696, 3.3329, 2.6277, 2.6311, 1.4734], + device='cuda:1'), covar=tensor([0.0250, 0.0236, 0.0182, 0.0211, 0.0179, 0.1126, 0.0807, 0.1870], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0124, 0.0102, 0.0118, 0.0105, 0.0123, 0.0133, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:12:44,127 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9031, 3.8694, 4.2227, 3.8614, 3.5989, 4.0755, 3.8889, 4.2842], + device='cuda:1'), covar=tensor([0.0885, 0.0368, 0.0377, 0.0400, 0.1176, 0.0528, 0.0486, 0.0368], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0199, 0.0196, 0.0206, 0.0192, 0.0205, 0.0205, 0.0186], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:13:26,198 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 4.450e+02 5.479e+02 6.667e+02 1.081e+03, threshold=1.096e+03, percent-clipped=1.0 +2023-03-28 06:13:32,600 INFO [train.py:892] (1/4) Epoch 14, batch 1500, loss[loss=0.1883, simple_loss=0.2598, pruned_loss=0.0584, over 19853.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2769, pruned_loss=0.0748, over 3944787.23 frames. ], batch size: 60, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 06:14:07,179 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2252, 4.8687, 4.9878, 4.7229, 5.1670, 3.3320, 4.1516, 2.6729], + device='cuda:1'), covar=tensor([0.0189, 0.0185, 0.0140, 0.0164, 0.0133, 0.0764, 0.0801, 0.1359], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0124, 0.0101, 0.0118, 0.0105, 0.0122, 0.0133, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:14:51,700 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25651.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 06:15:21,271 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5614, 2.5952, 2.8983, 2.1168, 2.8447, 2.2779, 2.5065, 2.6939], + device='cuda:1'), covar=tensor([0.0547, 0.0397, 0.0373, 0.0847, 0.0316, 0.0398, 0.0477, 0.0319], + device='cuda:1'), in_proj_covar=tensor([0.0063, 0.0065, 0.0067, 0.0096, 0.0063, 0.0061, 0.0060, 0.0053], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:15:22,146 INFO [train.py:892] (1/4) Epoch 14, batch 1550, loss[loss=0.2137, simple_loss=0.2789, pruned_loss=0.07425, over 19775.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2756, pruned_loss=0.07387, over 3946995.19 frames. ], batch size: 213, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:15:53,901 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-28 06:16:39,238 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25699.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:17:11,818 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. 
limit=2.0 +2023-03-28 06:17:12,390 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 4.271e+02 5.006e+02 6.472e+02 1.345e+03, threshold=1.001e+03, percent-clipped=3.0 +2023-03-28 06:17:19,586 INFO [train.py:892] (1/4) Epoch 14, batch 1600, loss[loss=0.2105, simple_loss=0.2829, pruned_loss=0.06907, over 19952.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2741, pruned_loss=0.07267, over 3949188.74 frames. ], batch size: 53, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:17:51,272 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1679, 3.4880, 2.7954, 2.2430, 2.7837, 3.3072, 3.2778, 3.2226], + device='cuda:1'), covar=tensor([0.0208, 0.0226, 0.0239, 0.0480, 0.0354, 0.0148, 0.0171, 0.0176], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0066, 0.0073, 0.0080, 0.0084, 0.0057, 0.0054, 0.0057], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 06:17:51,541 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-03-28 06:19:09,525 INFO [train.py:892] (1/4) Epoch 14, batch 1650, loss[loss=0.1834, simple_loss=0.2446, pruned_loss=0.06108, over 19745.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2739, pruned_loss=0.07239, over 3947959.99 frames. ], batch size: 89, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:19:49,940 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25784.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:19:52,401 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8478, 3.8945, 2.1683, 4.1281, 4.2940, 1.8319, 3.5077, 3.0934], + device='cuda:1'), covar=tensor([0.0659, 0.0729, 0.2785, 0.0660, 0.0399, 0.2933, 0.0962, 0.0725], + device='cuda:1'), in_proj_covar=tensor([0.0202, 0.0226, 0.0213, 0.0221, 0.0187, 0.0195, 0.0224, 0.0166], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 06:20:52,546 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.640e+02 5.535e+02 6.802e+02 1.148e+03, threshold=1.107e+03, percent-clipped=2.0 +2023-03-28 06:21:01,426 INFO [train.py:892] (1/4) Epoch 14, batch 1700, loss[loss=0.181, simple_loss=0.2534, pruned_loss=0.0543, over 19850.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2741, pruned_loss=0.07252, over 3948374.60 frames. ], batch size: 81, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:21:27,333 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.51 vs. 
limit=5.0 +2023-03-28 06:22:10,435 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2466, 3.5355, 3.6645, 4.3026, 2.8363, 3.3521, 2.7202, 2.5500], + device='cuda:1'), covar=tensor([0.0403, 0.2222, 0.0807, 0.0264, 0.1945, 0.0732, 0.1220, 0.1793], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0335, 0.0230, 0.0165, 0.0240, 0.0182, 0.0202, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:22:16,260 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7231, 2.6638, 3.0722, 2.2955, 3.0566, 2.5051, 2.6746, 2.9799], + device='cuda:1'), covar=tensor([0.0400, 0.0394, 0.0422, 0.0766, 0.0278, 0.0357, 0.0429, 0.0232], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0065, 0.0067, 0.0095, 0.0062, 0.0061, 0.0059, 0.0052], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:22:17,998 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4731, 5.7813, 6.0384, 5.7644, 5.6129, 5.4905, 5.5732, 5.5120], + device='cuda:1'), covar=tensor([0.1220, 0.0887, 0.0773, 0.0906, 0.0531, 0.0681, 0.1670, 0.1677], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0262, 0.0317, 0.0246, 0.0235, 0.0232, 0.0309, 0.0334], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:22:29,135 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25858.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:22:43,573 INFO [train.py:892] (1/4) Epoch 14, batch 1750, loss[loss=0.1837, simple_loss=0.2626, pruned_loss=0.05239, over 19732.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2729, pruned_loss=0.07191, over 3949765.83 frames. ], batch size: 51, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:22:48,340 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6610, 4.3500, 4.3894, 4.2380, 4.6556, 3.2302, 3.8181, 2.5810], + device='cuda:1'), covar=tensor([0.0203, 0.0215, 0.0168, 0.0174, 0.0122, 0.0797, 0.0763, 0.1248], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0125, 0.0102, 0.0119, 0.0107, 0.0123, 0.0134, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:24:15,465 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.161e+02 4.534e+02 5.430e+02 6.216e+02 1.247e+03, threshold=1.086e+03, percent-clipped=3.0 +2023-03-28 06:24:21,512 INFO [train.py:892] (1/4) Epoch 14, batch 1800, loss[loss=0.1875, simple_loss=0.2505, pruned_loss=0.06219, over 19804.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.273, pruned_loss=0.07202, over 3950741.54 frames. 
], batch size: 107, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:24:28,158 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25919.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 06:24:47,022 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2105, 3.8778, 3.8704, 4.3037, 3.9448, 4.2599, 4.4024, 4.5180], + device='cuda:1'), covar=tensor([0.0600, 0.0372, 0.0557, 0.0250, 0.0641, 0.0439, 0.0397, 0.0273], + device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0153, 0.0179, 0.0149, 0.0149, 0.0133, 0.0137, 0.0171], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 06:25:52,629 INFO [train.py:892] (1/4) Epoch 14, batch 1850, loss[loss=0.2088, simple_loss=0.2854, pruned_loss=0.06614, over 19655.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2741, pruned_loss=0.07128, over 3950412.35 frames. ], batch size: 57, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:26:59,329 INFO [train.py:892] (1/4) Epoch 15, batch 0, loss[loss=0.1899, simple_loss=0.2564, pruned_loss=0.06166, over 19538.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2564, pruned_loss=0.06166, over 19538.00 frames. ], batch size: 46, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:26:59,329 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 06:27:35,352 INFO [train.py:926] (1/4) Epoch 15, validation: loss=0.1719, simple_loss=0.2516, pruned_loss=0.0461, over 2883724.00 frames. +2023-03-28 06:27:35,353 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 06:28:31,039 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-28 06:29:05,147 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-28 06:29:17,379 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.761e+02 4.443e+02 5.358e+02 6.382e+02 1.006e+03, threshold=1.072e+03, percent-clipped=0.0 +2023-03-28 06:29:36,016 INFO [train.py:892] (1/4) Epoch 15, batch 50, loss[loss=0.2061, simple_loss=0.272, pruned_loss=0.07005, over 19806.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2662, pruned_loss=0.06807, over 889993.36 frames. ], batch size: 229, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:29:53,199 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2504, 3.0653, 3.2861, 2.9864, 3.5075, 3.3593, 4.1006, 4.5126], + device='cuda:1'), covar=tensor([0.0557, 0.1500, 0.1304, 0.1942, 0.1682, 0.1410, 0.0465, 0.0365], + device='cuda:1'), in_proj_covar=tensor([0.0224, 0.0225, 0.0249, 0.0241, 0.0274, 0.0238, 0.0194, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:31:24,906 INFO [train.py:892] (1/4) Epoch 15, batch 100, loss[loss=0.2269, simple_loss=0.2859, pruned_loss=0.08392, over 19641.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2686, pruned_loss=0.06952, over 1569941.54 frames. 
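], batch size: 299, lr: 1.07e-02, grad_scale: 16.0

Note: the [zipformer.py:1454] attn_weights_entropy tensors are a per-head health check on self-attention, plausibly the mean Shannon entropy of each head's attention distribution (this reading is assumed, not confirmed by the log). Values near 0 mean a head attends to roughly one frame; values near log(src_len) mean nearly uniform attention, so the spread from roughly 1.5 to 5.5 across heads in these lines suggests a mix of sharp and diffuse heads. A sketch:

```python
import torch

def attn_entropy_per_head(attn_weights: torch.Tensor) -> torch.Tensor:
    """attn_weights: (num_heads, batch, tgt_len, src_len), rows sum to 1.
    Returns one averaged entropy value per head, as in the log lines."""
    eps = 1.0e-20
    entropy = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return entropy.mean(dim=(1, 2))

# eight perfectly uniform heads over 50 frames -> log(50) ~ 3.91 each
uniform = torch.full((8, 4, 30, 50), 1.0 / 50.0)
print(attn_entropy_per_head(uniform))
```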
+2023-03-28 06:31:47,836 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3088, 3.8908, 3.9627, 4.3072, 3.9970, 4.3133, 4.3930, 4.5266], + device='cuda:1'), covar=tensor([0.0599, 0.0371, 0.0574, 0.0288, 0.0587, 0.0469, 0.0414, 0.0302], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0156, 0.0181, 0.0151, 0.0152, 0.0136, 0.0140, 0.0175], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 06:31:55,611 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26084.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:32:35,012 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26102.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:32:46,992 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4518, 3.4006, 3.7615, 3.3664, 3.3643, 3.6819, 3.4884, 3.8079], + device='cuda:1'), covar=tensor([0.1138, 0.0439, 0.0451, 0.0477, 0.1255, 0.0576, 0.0512, 0.0412], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0202, 0.0199, 0.0208, 0.0194, 0.0206, 0.0205, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:32:57,714 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.557e+02 4.232e+02 5.319e+02 6.491e+02 1.379e+03, threshold=1.064e+03, percent-clipped=5.0 +2023-03-28 06:33:14,687 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4871, 3.0391, 3.2851, 3.1133, 3.7451, 3.6921, 4.2993, 4.6394], + device='cuda:1'), covar=tensor([0.0469, 0.1501, 0.1490, 0.1880, 0.1465, 0.1186, 0.0466, 0.0556], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0223, 0.0246, 0.0239, 0.0271, 0.0236, 0.0193, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:33:15,685 INFO [train.py:892] (1/4) Epoch 15, batch 150, loss[loss=0.1976, simple_loss=0.2666, pruned_loss=0.06427, over 19627.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2688, pruned_loss=0.06958, over 2098760.45 frames. ], batch size: 68, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:33:41,885 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26132.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 06:34:52,942 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26163.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:35:03,860 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26168.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:35:09,206 INFO [train.py:892] (1/4) Epoch 15, batch 200, loss[loss=0.2375, simple_loss=0.2947, pruned_loss=0.09008, over 19786.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2704, pruned_loss=0.07042, over 2508032.70 frames. ], batch size: 193, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:35:48,499 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-28 06:36:12,531 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs.
limit=2.0 +2023-03-28 06:36:44,946 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 4.298e+02 5.201e+02 6.207e+02 1.187e+03, threshold=1.040e+03, percent-clipped=4.0 +2023-03-28 06:36:48,223 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26214.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 06:37:02,581 INFO [train.py:892] (1/4) Epoch 15, batch 250, loss[loss=0.2141, simple_loss=0.2719, pruned_loss=0.07821, over 19645.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.273, pruned_loss=0.07162, over 2827220.58 frames. ], batch size: 47, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:37:22,816 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1970, 3.0269, 3.6985, 2.6113, 3.7538, 2.9833, 3.1319, 3.7237], + device='cuda:1'), covar=tensor([0.0574, 0.0424, 0.0330, 0.0794, 0.0290, 0.0379, 0.0322, 0.0203], + device='cuda:1'), in_proj_covar=tensor([0.0062, 0.0065, 0.0067, 0.0096, 0.0063, 0.0062, 0.0059, 0.0052], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:37:25,029 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26229.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:38:12,378 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26250.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:38:58,596 INFO [train.py:892] (1/4) Epoch 15, batch 300, loss[loss=0.1762, simple_loss=0.2412, pruned_loss=0.05558, over 19867.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2743, pruned_loss=0.07161, over 3075565.08 frames. ], batch size: 106, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:39:51,394 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1448, 3.1260, 1.8436, 3.8714, 3.5492, 3.9796, 3.9439, 3.0060], + device='cuda:1'), covar=tensor([0.0615, 0.0593, 0.1603, 0.0641, 0.0483, 0.0305, 0.0608, 0.0760], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0127, 0.0135, 0.0131, 0.0113, 0.0109, 0.0125, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:40:33,061 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26311.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:40:36,443 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.754e+02 4.352e+02 4.963e+02 6.294e+02 9.634e+02, threshold=9.926e+02, percent-clipped=0.0 +2023-03-28 06:40:55,227 INFO [train.py:892] (1/4) Epoch 15, batch 350, loss[loss=0.2302, simple_loss=0.2914, pruned_loss=0.08454, over 19715.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2748, pruned_loss=0.07161, over 3270074.37 frames. ], batch size: 310, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:42:07,760 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.56 vs. 
limit=5.0 +2023-03-28 06:42:48,765 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9988, 4.5675, 4.6890, 4.5113, 4.9615, 3.3157, 3.9493, 2.5892], + device='cuda:1'), covar=tensor([0.0208, 0.0189, 0.0143, 0.0155, 0.0125, 0.0814, 0.0876, 0.1313], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0125, 0.0102, 0.0120, 0.0107, 0.0125, 0.0133, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:42:49,855 INFO [train.py:892] (1/4) Epoch 15, batch 400, loss[loss=0.1823, simple_loss=0.2482, pruned_loss=0.05815, over 19831.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2737, pruned_loss=0.07101, over 3419386.38 frames. ], batch size: 75, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:42:51,112 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1454, 3.2539, 2.0034, 3.2989, 3.4524, 1.5792, 2.7938, 2.7135], + device='cuda:1'), covar=tensor([0.0773, 0.0870, 0.2592, 0.0846, 0.0468, 0.2763, 0.1154, 0.0763], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0229, 0.0216, 0.0226, 0.0191, 0.0199, 0.0225, 0.0170], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 06:44:12,253 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4236, 4.6190, 2.7415, 4.7849, 5.1594, 2.1841, 4.2260, 3.7826], + device='cuda:1'), covar=tensor([0.0552, 0.0679, 0.2426, 0.0645, 0.0286, 0.2698, 0.0852, 0.0610], + device='cuda:1'), in_proj_covar=tensor([0.0203, 0.0227, 0.0213, 0.0224, 0.0189, 0.0197, 0.0222, 0.0169], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 06:44:16,174 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7994, 2.7955, 4.3382, 3.7901, 4.1367, 4.3463, 4.2131, 4.0349], + device='cuda:1'), covar=tensor([0.0272, 0.0745, 0.0093, 0.0766, 0.0110, 0.0189, 0.0133, 0.0146], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0092, 0.0075, 0.0147, 0.0069, 0.0085, 0.0078, 0.0070], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:44:23,294 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 4.352e+02 5.044e+02 6.255e+02 1.053e+03, threshold=1.009e+03, percent-clipped=1.0 +2023-03-28 06:44:28,635 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26414.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:44:43,044 INFO [train.py:892] (1/4) Epoch 15, batch 450, loss[loss=0.2015, simple_loss=0.272, pruned_loss=0.06546, over 19955.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2744, pruned_loss=0.07142, over 3536963.96 frames. ], batch size: 53, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:44:53,962 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-28 06:46:07,561 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26458.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:46:36,562 INFO [train.py:892] (1/4) Epoch 15, batch 500, loss[loss=0.4335, simple_loss=0.4429, pruned_loss=0.2121, over 19185.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2739, pruned_loss=0.07202, over 3628929.16 frames. 
], batch size: 452, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:46:46,427 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26475.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:48:13,612 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.684e+02 4.387e+02 5.126e+02 6.696e+02 1.197e+03, threshold=1.025e+03, percent-clipped=1.0 +2023-03-28 06:48:17,116 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26514.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 06:48:30,939 INFO [train.py:892] (1/4) Epoch 15, batch 550, loss[loss=0.1884, simple_loss=0.2609, pruned_loss=0.05791, over 19660.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2748, pruned_loss=0.07218, over 3699571.16 frames. ], batch size: 57, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:48:32,230 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5622, 2.8492, 3.3570, 3.2669, 3.6727, 3.5486, 4.3071, 4.6879], + device='cuda:1'), covar=tensor([0.0473, 0.1723, 0.1436, 0.1797, 0.1588, 0.1284, 0.0477, 0.0477], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0224, 0.0247, 0.0240, 0.0273, 0.0236, 0.0195, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:48:40,947 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26524.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:49:10,371 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9736, 2.9158, 1.5631, 3.5589, 3.3202, 3.5116, 3.6257, 2.8457], + device='cuda:1'), covar=tensor([0.0628, 0.0617, 0.1852, 0.0564, 0.0438, 0.0451, 0.0567, 0.0741], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0125, 0.0134, 0.0129, 0.0112, 0.0108, 0.0124, 0.0130], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:49:18,761 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9697, 3.0608, 1.9223, 3.1013, 3.2292, 1.4679, 2.6543, 2.4362], + device='cuda:1'), covar=tensor([0.0774, 0.0710, 0.2505, 0.0613, 0.0411, 0.2519, 0.0984, 0.0821], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0231, 0.0215, 0.0225, 0.0191, 0.0197, 0.0224, 0.0170], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 06:50:07,890 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26562.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:50:26,079 INFO [train.py:892] (1/4) Epoch 15, batch 600, loss[loss=0.1961, simple_loss=0.2571, pruned_loss=0.06752, over 19817.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.273, pruned_loss=0.07142, over 3755560.56 frames. 
], batch size: 133, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:50:47,420 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3937, 4.4848, 2.5627, 4.7062, 5.0273, 1.9725, 4.3120, 3.6471], + device='cuda:1'), covar=tensor([0.0590, 0.0647, 0.2587, 0.0694, 0.0332, 0.2960, 0.0757, 0.0694], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0231, 0.0217, 0.0226, 0.0192, 0.0198, 0.0226, 0.0171], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 06:51:45,291 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26606.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:52:00,972 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.578e+02 4.242e+02 5.300e+02 6.521e+02 1.210e+03, threshold=1.060e+03, percent-clipped=1.0 +2023-03-28 06:52:08,150 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7448, 2.2935, 2.6144, 3.1028, 3.5929, 3.8439, 3.8842, 3.9127], + device='cuda:1'), covar=tensor([0.0863, 0.1699, 0.1225, 0.0540, 0.0313, 0.0208, 0.0235, 0.0221], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0171, 0.0169, 0.0135, 0.0117, 0.0111, 0.0108, 0.0101], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:52:21,181 INFO [train.py:892] (1/4) Epoch 15, batch 650, loss[loss=0.2347, simple_loss=0.2919, pruned_loss=0.08874, over 19709.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.273, pruned_loss=0.07165, over 3798896.65 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:53:28,159 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3387, 2.5795, 3.5596, 2.8114, 3.0971, 3.0807, 1.9683, 2.1032], + device='cuda:1'), covar=tensor([0.0876, 0.2567, 0.0489, 0.0802, 0.1358, 0.1023, 0.2035, 0.2382], + device='cuda:1'), in_proj_covar=tensor([0.0319, 0.0348, 0.0292, 0.0236, 0.0346, 0.0296, 0.0311, 0.0282], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 06:53:37,641 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.7823, 6.0770, 6.0983, 6.0038, 5.8070, 6.0630, 5.4286, 5.4900], + device='cuda:1'), covar=tensor([0.0372, 0.0359, 0.0507, 0.0427, 0.0529, 0.0555, 0.0650, 0.0859], + device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0222, 0.0253, 0.0217, 0.0211, 0.0203, 0.0230, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 06:54:12,446 INFO [train.py:892] (1/4) Epoch 15, batch 700, loss[loss=0.1895, simple_loss=0.2566, pruned_loss=0.06117, over 19753.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2729, pruned_loss=0.07107, over 3832341.09 frames. ], batch size: 100, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:55:12,324 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. 
limit=2.0 +2023-03-28 06:55:53,045 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 4.486e+02 5.723e+02 6.688e+02 1.072e+03, threshold=1.145e+03, percent-clipped=1.0 +2023-03-28 06:55:56,516 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6252, 2.8703, 4.0383, 3.0890, 3.4535, 3.4332, 2.1465, 2.3232], + device='cuda:1'), covar=tensor([0.0855, 0.2443, 0.0450, 0.0775, 0.1335, 0.0917, 0.1985, 0.2322], + device='cuda:1'), in_proj_covar=tensor([0.0322, 0.0350, 0.0294, 0.0236, 0.0348, 0.0299, 0.0314, 0.0285], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 06:56:12,160 INFO [train.py:892] (1/4) Epoch 15, batch 750, loss[loss=0.2052, simple_loss=0.275, pruned_loss=0.06771, over 19718.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2724, pruned_loss=0.07047, over 3858595.18 frames. ], batch size: 54, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:57:34,765 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26758.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:58:01,879 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26770.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:58:03,011 INFO [train.py:892] (1/4) Epoch 15, batch 800, loss[loss=0.3098, simple_loss=0.3662, pruned_loss=0.1267, over 19578.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2739, pruned_loss=0.07129, over 3875220.90 frames. ], batch size: 376, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:59:23,221 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26806.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:59:36,952 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 4.760e+02 5.593e+02 6.609e+02 1.473e+03, threshold=1.119e+03, percent-clipped=2.0 +2023-03-28 06:59:56,353 INFO [train.py:892] (1/4) Epoch 15, batch 850, loss[loss=0.2065, simple_loss=0.2653, pruned_loss=0.07385, over 19757.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2733, pruned_loss=0.07097, over 3891942.56 frames. ], batch size: 217, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:00:03,513 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26824.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:00:34,820 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-28 07:01:47,514 INFO [train.py:892] (1/4) Epoch 15, batch 900, loss[loss=0.2144, simple_loss=0.2854, pruned_loss=0.07168, over 19803.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2729, pruned_loss=0.07064, over 3906120.01 frames. ], batch size: 67, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:01:50,725 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26872.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:03:03,245 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26904.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 07:03:07,740 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:03:23,384 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.859e+02 4.358e+02 4.979e+02 5.962e+02 9.737e+02, threshold=9.958e+02, percent-clipped=0.0 +2023-03-28 07:03:41,698 INFO [train.py:892] (1/4) Epoch 15, batch 950, loss[loss=0.2101, simple_loss=0.28, pruned_loss=0.07013, over 19851.00 frames. 
], tot_loss[loss=0.2063, simple_loss=0.2725, pruned_loss=0.07008, over 3916365.98 frames. ], batch size: 60, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:03:59,238 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4115, 2.4152, 1.4425, 2.7816, 2.6465, 2.7060, 2.8790, 2.2370], + device='cuda:1'), covar=tensor([0.0619, 0.0645, 0.1504, 0.0473, 0.0526, 0.0431, 0.0436, 0.0787], + device='cuda:1'), in_proj_covar=tensor([0.0128, 0.0125, 0.0132, 0.0129, 0.0112, 0.0108, 0.0124, 0.0129], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 07:04:47,420 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26950.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:04:57,003 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26954.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:05:01,549 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-28 07:05:18,939 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26965.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 07:05:26,195 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-28 07:05:32,172 INFO [train.py:892] (1/4) Epoch 15, batch 1000, loss[loss=0.2072, simple_loss=0.2884, pruned_loss=0.06299, over 19731.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2729, pruned_loss=0.07023, over 3923746.77 frames. ], batch size: 50, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:07:02,000 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27011.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:07:05,035 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.723e+02 5.899e+02 7.505e+02 1.756e+03, threshold=1.180e+03, percent-clipped=5.0 +2023-03-28 07:07:15,689 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0821, 3.1165, 4.4328, 3.3591, 3.7851, 3.7243, 2.3862, 2.5167], + device='cuda:1'), covar=tensor([0.0770, 0.2754, 0.0426, 0.0809, 0.1365, 0.0937, 0.2046, 0.2304], + device='cuda:1'), in_proj_covar=tensor([0.0322, 0.0353, 0.0295, 0.0238, 0.0348, 0.0299, 0.0314, 0.0285], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 07:07:24,349 INFO [train.py:892] (1/4) Epoch 15, batch 1050, loss[loss=0.2854, simple_loss=0.3413, pruned_loss=0.1148, over 19607.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2745, pruned_loss=0.07117, over 3928209.72 frames. ], batch size: 367, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:08:14,886 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1165, 3.2634, 2.0551, 3.2624, 3.3663, 1.5969, 2.8045, 2.6236], + device='cuda:1'), covar=tensor([0.0739, 0.0745, 0.2592, 0.0663, 0.0490, 0.2636, 0.1031, 0.0776], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0232, 0.0216, 0.0227, 0.0194, 0.0198, 0.0225, 0.0172], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 07:09:11,986 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27070.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:09:13,222 INFO [train.py:892] (1/4) Epoch 15, batch 1100, loss[loss=0.1902, simple_loss=0.2606, pruned_loss=0.05989, over 19882.00 frames. 
], tot_loss[loss=0.2084, simple_loss=0.2743, pruned_loss=0.07125, over 3932152.21 frames. ], batch size: 95, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:10:49,069 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.986e+02 4.421e+02 5.325e+02 6.263e+02 1.209e+03, threshold=1.065e+03, percent-clipped=1.0 +2023-03-28 07:11:00,538 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27118.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:11:06,886 INFO [train.py:892] (1/4) Epoch 15, batch 1150, loss[loss=0.1781, simple_loss=0.2553, pruned_loss=0.05047, over 19797.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2738, pruned_loss=0.07084, over 3933558.01 frames. ], batch size: 51, lr: 1.05e-02, grad_scale: 8.0 +2023-03-28 07:11:20,317 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1025, 3.1222, 4.5237, 3.3754, 3.6939, 3.7334, 2.3596, 2.5420], + device='cuda:1'), covar=tensor([0.0702, 0.2498, 0.0374, 0.0750, 0.1456, 0.0944, 0.2002, 0.2295], + device='cuda:1'), in_proj_covar=tensor([0.0319, 0.0351, 0.0295, 0.0238, 0.0347, 0.0297, 0.0314, 0.0284], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 07:12:06,835 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27148.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:12:58,788 INFO [train.py:892] (1/4) Epoch 15, batch 1200, loss[loss=0.1702, simple_loss=0.2416, pruned_loss=0.04935, over 19733.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2744, pruned_loss=0.07158, over 3937916.07 frames. ], batch size: 106, lr: 1.05e-02, grad_scale: 8.0 +2023-03-28 07:14:25,581 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27209.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:14:34,166 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.952e+02 4.645e+02 5.567e+02 6.439e+02 1.177e+03, threshold=1.113e+03, percent-clipped=3.0 +2023-03-28 07:14:49,421 INFO [train.py:892] (1/4) Epoch 15, batch 1250, loss[loss=0.1853, simple_loss=0.2445, pruned_loss=0.06307, over 19889.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2721, pruned_loss=0.07031, over 3941157.39 frames. ], batch size: 77, lr: 1.05e-02, grad_scale: 8.0 +2023-03-28 07:16:00,774 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3322, 3.4893, 1.8011, 4.2755, 3.8600, 4.0608, 4.2894, 3.2669], + device='cuda:1'), covar=tensor([0.0575, 0.0453, 0.1691, 0.0383, 0.0464, 0.0447, 0.0480, 0.0695], + device='cuda:1'), in_proj_covar=tensor([0.0128, 0.0125, 0.0135, 0.0130, 0.0113, 0.0109, 0.0126, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 07:16:16,181 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27260.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 07:16:40,207 INFO [train.py:892] (1/4) Epoch 15, batch 1300, loss[loss=0.1927, simple_loss=0.2533, pruned_loss=0.06611, over 19820.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2724, pruned_loss=0.07037, over 3943279.48 frames. 
], batch size: 121, lr: 1.05e-02, grad_scale: 8.0 +2023-03-28 07:18:00,103 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27306.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:18:17,025 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.149e+02 4.572e+02 5.387e+02 6.367e+02 1.430e+03, threshold=1.077e+03, percent-clipped=1.0 +2023-03-28 07:18:32,960 INFO [train.py:892] (1/4) Epoch 15, batch 1350, loss[loss=0.3791, simple_loss=0.413, pruned_loss=0.1726, over 19371.00 frames. ], tot_loss[loss=0.2081, simple_loss=0.2734, pruned_loss=0.07135, over 3945052.72 frames. ], batch size: 431, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:19:35,693 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-03-28 07:20:21,496 INFO [train.py:892] (1/4) Epoch 15, batch 1400, loss[loss=0.2041, simple_loss=0.2692, pruned_loss=0.06951, over 19836.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2725, pruned_loss=0.07095, over 3946436.68 frames. ], batch size: 145, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:21:44,940 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4451, 4.4985, 2.8511, 4.7814, 5.0480, 2.2761, 4.1875, 3.9337], + device='cuda:1'), covar=tensor([0.0629, 0.0802, 0.2624, 0.0758, 0.0400, 0.2967, 0.0930, 0.0635], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0232, 0.0216, 0.0227, 0.0194, 0.0196, 0.0225, 0.0171], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 07:21:59,974 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.359e+02 4.655e+02 5.654e+02 6.548e+02 1.649e+03, threshold=1.131e+03, percent-clipped=3.0 +2023-03-28 07:22:14,812 INFO [train.py:892] (1/4) Epoch 15, batch 1450, loss[loss=0.226, simple_loss=0.2827, pruned_loss=0.08463, over 19774.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.273, pruned_loss=0.07096, over 3945539.93 frames. ], batch size: 224, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:22:15,865 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4004, 3.3363, 3.7336, 3.3603, 3.2294, 3.6061, 3.4716, 3.7429], + device='cuda:1'), covar=tensor([0.0951, 0.0413, 0.0383, 0.0433, 0.1376, 0.0575, 0.0440, 0.0362], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0204, 0.0200, 0.0204, 0.0197, 0.0208, 0.0205, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 07:24:06,273 INFO [train.py:892] (1/4) Epoch 15, batch 1500, loss[loss=0.3407, simple_loss=0.4165, pruned_loss=0.1324, over 17922.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2734, pruned_loss=0.0708, over 3944851.84 frames. 
], batch size: 633, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:25:20,011 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27504.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:25:41,680 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.670e+02 4.316e+02 5.110e+02 6.263e+02 1.157e+03, threshold=1.022e+03, percent-clipped=1.0 +2023-03-28 07:25:57,339 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7562, 2.5261, 2.9136, 2.6359, 3.1018, 3.0225, 3.5644, 3.9697], + device='cuda:1'), covar=tensor([0.0604, 0.1702, 0.1422, 0.1973, 0.1550, 0.1345, 0.0532, 0.0494], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0222, 0.0245, 0.0236, 0.0269, 0.0235, 0.0195, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 07:25:58,304 INFO [train.py:892] (1/4) Epoch 15, batch 1550, loss[loss=0.2116, simple_loss=0.2732, pruned_loss=0.07499, over 19745.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2733, pruned_loss=0.07083, over 3945072.47 frames. ], batch size: 179, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:26:43,822 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.9684, 2.2912, 1.9527, 1.4471, 2.0496, 2.2396, 2.1586, 2.1952], + device='cuda:1'), covar=tensor([0.0298, 0.0201, 0.0266, 0.0471, 0.0374, 0.0188, 0.0207, 0.0216], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0068, 0.0077, 0.0084, 0.0087, 0.0060, 0.0058, 0.0060], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 07:27:27,717 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27560.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 07:27:51,006 INFO [train.py:892] (1/4) Epoch 15, batch 1600, loss[loss=0.1948, simple_loss=0.2579, pruned_loss=0.06587, over 19740.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2716, pruned_loss=0.06966, over 3947127.20 frames. ], batch size: 106, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:29:11,900 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27606.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:29:15,493 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27608.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 07:29:28,194 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.222e+02 4.211e+02 5.223e+02 6.136e+02 1.197e+03, threshold=1.045e+03, percent-clipped=1.0 +2023-03-28 07:29:43,654 INFO [train.py:892] (1/4) Epoch 15, batch 1650, loss[loss=0.2927, simple_loss=0.3372, pruned_loss=0.1241, over 19695.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2716, pruned_loss=0.06974, over 3948193.62 frames. 
], batch size: 337, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:31:01,157 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27654.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:31:05,277 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3163, 3.0430, 3.6960, 2.4666, 3.8027, 2.9267, 2.9980, 3.6033], + device='cuda:1'), covar=tensor([0.0521, 0.0399, 0.0308, 0.0786, 0.0250, 0.0339, 0.0486, 0.0243], + device='cuda:1'), in_proj_covar=tensor([0.0062, 0.0066, 0.0066, 0.0096, 0.0063, 0.0062, 0.0060, 0.0053], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 07:31:38,693 INFO [train.py:892] (1/4) Epoch 15, batch 1700, loss[loss=0.2018, simple_loss=0.2618, pruned_loss=0.07089, over 19805.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2729, pruned_loss=0.0709, over 3947973.41 frames. ], batch size: 167, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:33:14,465 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.834e+02 4.579e+02 5.497e+02 6.695e+02 1.218e+03, threshold=1.099e+03, percent-clipped=2.0 +2023-03-28 07:33:15,357 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27714.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 07:33:27,524 INFO [train.py:892] (1/4) Epoch 15, batch 1750, loss[loss=0.1914, simple_loss=0.2475, pruned_loss=0.06765, over 19898.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2724, pruned_loss=0.07052, over 3948163.55 frames. ], batch size: 116, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:33:36,325 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0664, 4.3222, 4.2890, 4.2057, 4.0309, 4.2803, 3.8111, 3.8824], + device='cuda:1'), covar=tensor([0.0482, 0.0511, 0.0643, 0.0534, 0.0727, 0.0604, 0.0756, 0.1032], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0228, 0.0257, 0.0220, 0.0216, 0.0204, 0.0230, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 07:34:29,054 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4749, 3.7450, 3.7797, 4.9258, 2.9582, 3.3751, 3.0324, 2.8332], + device='cuda:1'), covar=tensor([0.0495, 0.2411, 0.0995, 0.0213, 0.2226, 0.0917, 0.1136, 0.1817], + device='cuda:1'), in_proj_covar=tensor([0.0216, 0.0333, 0.0230, 0.0169, 0.0241, 0.0185, 0.0205, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 07:35:04,129 INFO [train.py:892] (1/4) Epoch 15, batch 1800, loss[loss=0.2037, simple_loss=0.2636, pruned_loss=0.07187, over 19877.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2725, pruned_loss=0.07045, over 3948247.81 frames. 
], batch size: 165, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:35:11,097 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27775.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 07:35:46,732 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0579, 4.0437, 4.4257, 3.9614, 3.7616, 4.2165, 4.0354, 4.5197], + device='cuda:1'), covar=tensor([0.0954, 0.0357, 0.0371, 0.0424, 0.1034, 0.0528, 0.0490, 0.0333], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0200, 0.0200, 0.0206, 0.0195, 0.0208, 0.0206, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 07:36:05,746 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27804.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:36:22,621 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.736e+02 4.432e+02 5.190e+02 6.215e+02 1.891e+03, threshold=1.038e+03, percent-clipped=1.0 +2023-03-28 07:36:34,440 INFO [train.py:892] (1/4) Epoch 15, batch 1850, loss[loss=0.1973, simple_loss=0.2742, pruned_loss=0.06017, over 19827.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2737, pruned_loss=0.07053, over 3948727.79 frames. ], batch size: 57, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:37:42,178 INFO [train.py:892] (1/4) Epoch 16, batch 0, loss[loss=0.3541, simple_loss=0.3945, pruned_loss=0.1568, over 19366.00 frames. ], tot_loss[loss=0.3541, simple_loss=0.3945, pruned_loss=0.1568, over 19366.00 frames. ], batch size: 431, lr: 1.00e-02, grad_scale: 8.0 +2023-03-28 07:37:42,179 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 07:38:12,959 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0596, 3.4699, 2.8995, 2.4126, 2.7804, 3.4142, 3.0492, 3.3069], + device='cuda:1'), covar=tensor([0.0296, 0.0195, 0.0244, 0.0428, 0.0344, 0.0233, 0.0173, 0.0173], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0070, 0.0077, 0.0083, 0.0087, 0.0060, 0.0058, 0.0060], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 07:38:14,603 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8908, 3.1993, 2.6050, 2.2155, 2.6225, 3.0226, 2.7744, 3.2154], + device='cuda:1'), covar=tensor([0.0137, 0.0249, 0.0242, 0.0493, 0.0330, 0.0205, 0.0199, 0.0069], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0070, 0.0077, 0.0083, 0.0087, 0.0060, 0.0058, 0.0060], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-28 07:38:15,255 INFO [train.py:926] (1/4) Epoch 16, validation: loss=0.1716, simple_loss=0.2504, pruned_loss=0.04639, over 2883724.00 frames. +2023-03-28 07:38:15,256 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 07:38:45,676 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.25 vs. limit=5.0 +2023-03-28 07:39:17,628 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27852.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:40:09,178 INFO [train.py:892] (1/4) Epoch 16, batch 50, loss[loss=0.1815, simple_loss=0.2559, pruned_loss=0.05349, over 19690.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2619, pruned_loss=0.06475, over 891381.71 frames. 
], batch size: 59, lr: 1.00e-02, grad_scale: 8.0 +2023-03-28 07:41:32,614 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.518e+02 4.210e+02 5.030e+02 5.753e+02 1.491e+03, threshold=1.006e+03, percent-clipped=2.0 +2023-03-28 07:42:00,004 INFO [train.py:892] (1/4) Epoch 16, batch 100, loss[loss=0.1762, simple_loss=0.2436, pruned_loss=0.05436, over 19872.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2633, pruned_loss=0.06518, over 1569788.42 frames. ], batch size: 108, lr: 1.00e-02, grad_scale: 8.0 +2023-03-28 07:42:22,908 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9239, 2.9232, 1.6888, 3.5633, 3.2618, 3.5200, 3.6481, 2.7535], + device='cuda:1'), covar=tensor([0.0646, 0.0581, 0.1780, 0.0428, 0.0506, 0.0427, 0.0398, 0.0782], + device='cuda:1'), in_proj_covar=tensor([0.0130, 0.0126, 0.0135, 0.0131, 0.0114, 0.0111, 0.0126, 0.0132], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 07:42:45,983 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2789, 2.5348, 2.7219, 3.1663, 2.2445, 2.8447, 2.1099, 2.0902], + device='cuda:1'), covar=tensor([0.0591, 0.1741, 0.1106, 0.0375, 0.2248, 0.0727, 0.1338, 0.1763], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0331, 0.0229, 0.0168, 0.0240, 0.0184, 0.0203, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 07:43:53,901 INFO [train.py:892] (1/4) Epoch 16, batch 150, loss[loss=0.1779, simple_loss=0.2436, pruned_loss=0.05614, over 19738.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.267, pruned_loss=0.06708, over 2096501.34 frames. ], batch size: 106, lr: 1.00e-02, grad_scale: 8.0 +2023-03-28 07:45:25,179 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.765e+02 5.560e+02 7.224e+02 1.633e+03, threshold=1.112e+03, percent-clipped=5.0 +2023-03-28 07:45:44,464 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.6859, 6.0737, 6.0804, 5.9372, 5.7740, 6.0565, 5.3638, 5.4064], + device='cuda:1'), covar=tensor([0.0415, 0.0365, 0.0495, 0.0422, 0.0524, 0.0543, 0.0588, 0.0930], + device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0230, 0.0256, 0.0220, 0.0215, 0.0202, 0.0230, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 07:45:54,094 INFO [train.py:892] (1/4) Epoch 16, batch 200, loss[loss=0.237, simple_loss=0.2963, pruned_loss=0.08881, over 19822.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2693, pruned_loss=0.06857, over 2507888.75 frames. ], batch size: 57, lr: 9.99e-03, grad_scale: 8.0 +2023-03-28 07:47:36,343 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28070.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 07:47:47,826 INFO [train.py:892] (1/4) Epoch 16, batch 250, loss[loss=0.2155, simple_loss=0.2695, pruned_loss=0.08073, over 19773.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2691, pruned_loss=0.06808, over 2828065.71 frames. ], batch size: 169, lr: 9.98e-03, grad_scale: 8.0 +2023-03-28 07:49:14,122 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.535e+02 4.468e+02 5.270e+02 6.395e+02 1.144e+03, threshold=1.054e+03, percent-clipped=2.0 +2023-03-28 07:49:40,343 INFO [train.py:892] (1/4) Epoch 16, batch 300, loss[loss=0.1775, simple_loss=0.2496, pruned_loss=0.05273, over 19833.00 frames. 
], tot_loss[loss=0.2024, simple_loss=0.2689, pruned_loss=0.06794, over 3077326.29 frames. ], batch size: 90, lr: 9.97e-03, grad_scale: 8.0 +2023-03-28 07:49:44,183 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0126, 3.9722, 4.3794, 4.0228, 3.8658, 4.2281, 4.0989, 4.4601], + device='cuda:1'), covar=tensor([0.0956, 0.0365, 0.0353, 0.0355, 0.0891, 0.0523, 0.0417, 0.0305], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0201, 0.0201, 0.0207, 0.0195, 0.0210, 0.0207, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 07:51:33,568 INFO [train.py:892] (1/4) Epoch 16, batch 350, loss[loss=0.1864, simple_loss=0.259, pruned_loss=0.05687, over 19830.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2677, pruned_loss=0.06747, over 3271925.78 frames. ], batch size: 75, lr: 9.96e-03, grad_scale: 8.0 +2023-03-28 07:53:01,623 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.842e+02 4.257e+02 5.267e+02 6.673e+02 1.330e+03, threshold=1.053e+03, percent-clipped=2.0 +2023-03-28 07:53:27,023 INFO [train.py:892] (1/4) Epoch 16, batch 400, loss[loss=0.1817, simple_loss=0.2425, pruned_loss=0.06051, over 19832.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2686, pruned_loss=0.06772, over 3421602.65 frames. ], batch size: 143, lr: 9.95e-03, grad_scale: 8.0 +2023-03-28 07:53:35,249 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28229.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:54:17,076 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9283, 4.0735, 2.3859, 4.2239, 4.3828, 1.8322, 3.6561, 3.2602], + device='cuda:1'), covar=tensor([0.0684, 0.0779, 0.3027, 0.0698, 0.0417, 0.2999, 0.0998, 0.0764], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0233, 0.0218, 0.0232, 0.0197, 0.0199, 0.0228, 0.0174], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 07:54:36,292 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28255.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 07:55:12,516 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-28 07:55:21,938 INFO [train.py:892] (1/4) Epoch 16, batch 450, loss[loss=0.2039, simple_loss=0.2781, pruned_loss=0.06487, over 19853.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2698, pruned_loss=0.06805, over 3536927.82 frames. ], batch size: 56, lr: 9.95e-03, grad_scale: 8.0 +2023-03-28 07:55:46,703 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-28 07:55:48,484 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28287.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:55:55,202 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28290.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:56:47,442 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.542e+02 5.285e+02 6.486e+02 1.059e+03, threshold=1.057e+03, percent-clipped=1.0 +2023-03-28 07:56:54,383 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28316.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 07:57:11,953 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.78 vs. 
limit=2.0 +2023-03-28 07:57:14,817 INFO [train.py:892] (1/4) Epoch 16, batch 500, loss[loss=0.1607, simple_loss=0.2259, pruned_loss=0.04777, over 19797.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.269, pruned_loss=0.06779, over 3628144.22 frames. ], batch size: 107, lr: 9.94e-03, grad_scale: 8.0 +2023-03-28 07:57:28,356 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7618, 2.1285, 2.5199, 3.1367, 3.4116, 3.6110, 3.4156, 3.5355], + device='cuda:1'), covar=tensor([0.0769, 0.1724, 0.1323, 0.0473, 0.0365, 0.0253, 0.0297, 0.0297], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0169, 0.0171, 0.0137, 0.0122, 0.0112, 0.0110, 0.0102], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 07:58:04,596 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28348.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:58:31,296 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-28 07:58:55,416 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28370.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 07:59:07,685 INFO [train.py:892] (1/4) Epoch 16, batch 550, loss[loss=0.1834, simple_loss=0.2536, pruned_loss=0.05662, over 19849.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2686, pruned_loss=0.06778, over 3699872.95 frames. ], batch size: 115, lr: 9.93e-03, grad_scale: 8.0 +2023-03-28 07:59:12,886 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6676, 2.0467, 2.5574, 3.0132, 3.4933, 3.6751, 3.4560, 3.6237], + device='cuda:1'), covar=tensor([0.0899, 0.1698, 0.1248, 0.0601, 0.0393, 0.0243, 0.0324, 0.0384], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0168, 0.0169, 0.0136, 0.0120, 0.0111, 0.0109, 0.0101], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 08:00:12,632 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6332, 1.9330, 2.3411, 2.9677, 3.4472, 3.6283, 3.4157, 3.6343], + device='cuda:1'), covar=tensor([0.0923, 0.1907, 0.1425, 0.0623, 0.0383, 0.0248, 0.0351, 0.0292], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0167, 0.0168, 0.0135, 0.0119, 0.0110, 0.0108, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 08:00:19,898 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.62 vs. limit=5.0 +2023-03-28 08:00:36,159 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 4.219e+02 4.967e+02 6.198e+02 1.278e+03, threshold=9.934e+02, percent-clipped=3.0 +2023-03-28 08:00:46,628 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28418.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 08:00:47,109 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-03-28 08:01:02,661 INFO [train.py:892] (1/4) Epoch 16, batch 600, loss[loss=0.1874, simple_loss=0.2551, pruned_loss=0.05988, over 19746.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2696, pruned_loss=0.06887, over 3756583.84 frames. 
], batch size: 95, lr: 9.92e-03, grad_scale: 8.0 +2023-03-28 08:01:10,602 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8301, 4.4411, 4.6251, 4.3187, 4.7630, 3.2245, 3.9300, 2.4564], + device='cuda:1'), covar=tensor([0.0156, 0.0190, 0.0124, 0.0176, 0.0112, 0.0788, 0.0732, 0.1416], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0126, 0.0101, 0.0120, 0.0107, 0.0122, 0.0131, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 08:02:32,028 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8923, 2.8558, 4.2202, 3.6882, 4.0605, 4.2529, 4.0056, 4.0669], + device='cuda:1'), covar=tensor([0.0239, 0.0689, 0.0084, 0.0700, 0.0100, 0.0181, 0.0149, 0.0111], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0092, 0.0075, 0.0144, 0.0069, 0.0084, 0.0077, 0.0070], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0004, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 08:02:53,495 INFO [train.py:892] (1/4) Epoch 16, batch 650, loss[loss=0.1922, simple_loss=0.2551, pruned_loss=0.06461, over 19742.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2683, pruned_loss=0.06821, over 3800579.92 frames. ], batch size: 134, lr: 9.91e-03, grad_scale: 8.0 +2023-03-28 08:03:15,924 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28486.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:03:31,502 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28492.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:03:52,351 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8193, 2.3563, 2.8637, 3.1890, 3.6396, 3.9329, 3.7897, 3.9728], + device='cuda:1'), covar=tensor([0.0868, 0.1574, 0.1053, 0.0552, 0.0364, 0.0222, 0.0250, 0.0286], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0165, 0.0166, 0.0135, 0.0119, 0.0110, 0.0107, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 08:04:17,635 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.821e+02 4.448e+02 5.454e+02 6.222e+02 1.151e+03, threshold=1.091e+03, percent-clipped=4.0 +2023-03-28 08:04:44,563 INFO [train.py:892] (1/4) Epoch 16, batch 700, loss[loss=0.2046, simple_loss=0.2611, pruned_loss=0.07399, over 19788.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2704, pruned_loss=0.06946, over 3833640.65 frames. ], batch size: 191, lr: 9.90e-03, grad_scale: 8.0 +2023-03-28 08:05:12,278 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8188, 3.4869, 3.5629, 3.7901, 3.5296, 3.7362, 3.9044, 4.0666], + device='cuda:1'), covar=tensor([0.0667, 0.0460, 0.0564, 0.0346, 0.0717, 0.0580, 0.0467, 0.0323], + device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0152, 0.0177, 0.0148, 0.0152, 0.0133, 0.0135, 0.0171], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 08:05:34,230 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28547.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:05:42,226 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. 
limit=2.0 +2023-03-28 08:05:45,637 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28553.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:06:37,718 INFO [train.py:892] (1/4) Epoch 16, batch 750, loss[loss=0.1816, simple_loss=0.2632, pruned_loss=0.04999, over 19783.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2697, pruned_loss=0.06874, over 3859612.79 frames. ], batch size: 53, lr: 9.89e-03, grad_scale: 8.0 +2023-03-28 08:06:59,833 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28585.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:07:59,422 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28611.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 08:08:06,519 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.992e+02 4.406e+02 5.087e+02 6.345e+02 1.381e+03, threshold=1.017e+03, percent-clipped=1.0 +2023-03-28 08:08:26,919 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1451, 3.1109, 4.6122, 3.6210, 3.8885, 3.7196, 2.3561, 2.4993], + device='cuda:1'), covar=tensor([0.0748, 0.2674, 0.0394, 0.0709, 0.1316, 0.1004, 0.2098, 0.2407], + device='cuda:1'), in_proj_covar=tensor([0.0323, 0.0354, 0.0300, 0.0242, 0.0351, 0.0305, 0.0318, 0.0290], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 08:08:34,046 INFO [train.py:892] (1/4) Epoch 16, batch 800, loss[loss=0.1841, simple_loss=0.2507, pruned_loss=0.05876, over 19793.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.27, pruned_loss=0.06881, over 3879594.65 frames. ], batch size: 162, lr: 9.89e-03, grad_scale: 8.0 +2023-03-28 08:09:04,153 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-28 08:09:12,900 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28643.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:09:13,537 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. limit=5.0 +2023-03-28 08:10:29,844 INFO [train.py:892] (1/4) Epoch 16, batch 850, loss[loss=0.1763, simple_loss=0.2427, pruned_loss=0.055, over 19789.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2697, pruned_loss=0.0682, over 3894642.55 frames. ], batch size: 163, lr: 9.88e-03, grad_scale: 8.0 +2023-03-28 08:10:33,525 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7236, 1.9972, 2.4522, 2.9247, 3.2872, 3.5095, 3.4579, 3.4817], + device='cuda:1'), covar=tensor([0.0830, 0.1766, 0.1181, 0.0600, 0.0413, 0.0236, 0.0290, 0.0348], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0164, 0.0166, 0.0134, 0.0119, 0.0110, 0.0107, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 08:11:56,544 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 4.659e+02 5.536e+02 6.490e+02 1.175e+03, threshold=1.107e+03, percent-clipped=2.0 +2023-03-28 08:12:21,631 INFO [train.py:892] (1/4) Epoch 16, batch 900, loss[loss=0.1858, simple_loss=0.2497, pruned_loss=0.06094, over 19836.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2687, pruned_loss=0.06783, over 3907686.25 frames. ], batch size: 161, lr: 9.87e-03, grad_scale: 8.0 +2023-03-28 08:12:40,986 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. 
limit=2.0 +2023-03-28 08:14:18,781 INFO [train.py:892] (1/4) Epoch 16, batch 950, loss[loss=0.1933, simple_loss=0.2567, pruned_loss=0.06499, over 19875.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2704, pruned_loss=0.06893, over 3917425.69 frames. ], batch size: 159, lr: 9.86e-03, grad_scale: 8.0 +2023-03-28 08:14:58,693 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6888, 3.8405, 2.3343, 3.9484, 4.0786, 1.8340, 3.3700, 3.1110], + device='cuda:1'), covar=tensor([0.0671, 0.0687, 0.2372, 0.0627, 0.0431, 0.2600, 0.0977, 0.0651], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0234, 0.0218, 0.0233, 0.0202, 0.0201, 0.0230, 0.0174], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 08:15:45,007 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 4.634e+02 5.431e+02 6.353e+02 1.268e+03, threshold=1.086e+03, percent-clipped=1.0 +2023-03-28 08:15:50,903 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0232, 2.7670, 3.2839, 2.4035, 3.2169, 2.6435, 2.8967, 3.3115], + device='cuda:1'), covar=tensor([0.0538, 0.0533, 0.0339, 0.0810, 0.0375, 0.0443, 0.0489, 0.0242], + device='cuda:1'), in_proj_covar=tensor([0.0065, 0.0070, 0.0069, 0.0100, 0.0067, 0.0065, 0.0063, 0.0055], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 08:16:11,369 INFO [train.py:892] (1/4) Epoch 16, batch 1000, loss[loss=0.2264, simple_loss=0.2791, pruned_loss=0.08684, over 19774.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2693, pruned_loss=0.06824, over 3926135.73 frames. ], batch size: 152, lr: 9.85e-03, grad_scale: 8.0 +2023-03-28 08:16:45,750 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28842.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:16:59,632 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28848.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:17:31,425 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28861.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:18:01,616 INFO [train.py:892] (1/4) Epoch 16, batch 1050, loss[loss=0.1939, simple_loss=0.2619, pruned_loss=0.06293, over 19805.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2695, pruned_loss=0.06808, over 3931015.27 frames. ], batch size: 148, lr: 9.84e-03, grad_scale: 8.0 +2023-03-28 08:18:23,686 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28885.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:19:23,569 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28911.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 08:19:30,256 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.700e+02 4.466e+02 5.126e+02 6.804e+02 1.153e+03, threshold=1.025e+03, percent-clipped=1.0 +2023-03-28 08:19:48,262 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28922.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:19:56,684 INFO [train.py:892] (1/4) Epoch 16, batch 1100, loss[loss=0.1956, simple_loss=0.2712, pruned_loss=0.06002, over 19853.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2703, pruned_loss=0.06818, over 3935666.23 frames. 
], batch size: 64, lr: 9.84e-03, grad_scale: 8.0
+2023-03-28 08:20:14,258 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28933.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:20:37,591 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28943.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:21:12,715 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28959.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 08:21:48,706 INFO [train.py:892] (1/4) Epoch 16, batch 1150, loss[loss=0.1979, simple_loss=0.2661, pruned_loss=0.06485, over 19858.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.27, pruned_loss=0.06796, over 3938167.15 frames. ], batch size: 118, lr: 9.83e-03, grad_scale: 8.0
+2023-03-28 08:22:23,822 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28991.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:22:50,982 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29002.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:23:15,267 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.550e+02 5.400e+02 6.345e+02 1.264e+03, threshold=1.080e+03, percent-clipped=1.0
+2023-03-28 08:23:43,740 INFO [train.py:892] (1/4) Epoch 16, batch 1200, loss[loss=0.2007, simple_loss=0.2619, pruned_loss=0.06978, over 19713.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2694, pruned_loss=0.06765, over 3940709.75 frames. ], batch size: 109, lr: 9.82e-03, grad_scale: 8.0
+2023-03-28 08:25:09,481 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29063.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:25:38,199 INFO [train.py:892] (1/4) Epoch 16, batch 1250, loss[loss=0.2252, simple_loss=0.2904, pruned_loss=0.08003, over 19760.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2681, pruned_loss=0.06718, over 3943553.97 frames. ], batch size: 276, lr: 9.81e-03, grad_scale: 8.0
+2023-03-28 08:25:47,294 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0
+2023-03-28 08:27:06,758 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 4.171e+02 4.962e+02 5.864e+02 1.336e+03, threshold=9.924e+02, percent-clipped=4.0
+2023-03-28 08:27:33,969 INFO [train.py:892] (1/4) Epoch 16, batch 1300, loss[loss=0.1735, simple_loss=0.2433, pruned_loss=0.05183, over 19836.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2683, pruned_loss=0.06706, over 3943799.84 frames. ], batch size: 171, lr: 9.80e-03, grad_scale: 16.0
+2023-03-28 08:27:43,889 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8702, 2.0882, 1.7747, 1.2354, 1.8312, 1.9483, 1.9465, 2.0363],
+ device='cuda:1'), covar=tensor([0.0291, 0.0221, 0.0293, 0.0516, 0.0401, 0.0242, 0.0203, 0.0226],
+ device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0071, 0.0080, 0.0084, 0.0088, 0.0063, 0.0059, 0.0063],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002],
+ device='cuda:1')
+2023-03-28 08:28:12,021 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29142.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:28:16,994 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0
+2023-03-28 08:28:24,949 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29148.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:29:27,195 INFO [train.py:892] (1/4) Epoch 16, batch 1350, loss[loss=0.2323, simple_loss=0.2947, pruned_loss=0.08498, over 19715.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2695, pruned_loss=0.06783, over 3943851.90 frames. ], batch size: 310, lr: 9.80e-03, grad_scale: 16.0
+2023-03-28 08:30:01,013 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29190.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:30:15,772 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29196.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:30:20,572 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0
+2023-03-28 08:30:55,176 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 4.385e+02 5.390e+02 6.526e+02 1.400e+03, threshold=1.078e+03, percent-clipped=3.0
+2023-03-28 08:31:01,929 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29217.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:31:23,392 INFO [train.py:892] (1/4) Epoch 16, batch 1400, loss[loss=0.1878, simple_loss=0.2596, pruned_loss=0.058, over 19693.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.27, pruned_loss=0.06843, over 3945855.63 frames. ], batch size: 75, lr: 9.79e-03, grad_scale: 16.0
+2023-03-28 08:31:47,958 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29236.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:33:17,624 INFO [train.py:892] (1/4) Epoch 16, batch 1450, loss[loss=0.1718, simple_loss=0.2393, pruned_loss=0.05212, over 19646.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2687, pruned_loss=0.06767, over 3947192.62 frames. ], batch size: 47, lr: 9.78e-03, grad_scale: 16.0
+2023-03-28 08:34:08,465 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29297.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:34:28,687 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0
+2023-03-28 08:34:43,984 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.554e+02 4.411e+02 5.175e+02 6.101e+02 1.424e+03, threshold=1.035e+03, percent-clipped=1.0
+2023-03-28 08:35:11,610 INFO [train.py:892] (1/4) Epoch 16, batch 1500, loss[loss=0.2135, simple_loss=0.268, pruned_loss=0.07953, over 19806.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2681, pruned_loss=0.06736, over 3946973.76 frames. ], batch size: 211, lr: 9.77e-03, grad_scale: 16.0
+2023-03-28 08:36:25,352 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29358.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:37:05,534 INFO [train.py:892] (1/4) Epoch 16, batch 1550, loss[loss=0.1928, simple_loss=0.2606, pruned_loss=0.06245, over 19706.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2681, pruned_loss=0.0675, over 3948594.33 frames. ], batch size: 101, lr: 9.76e-03, grad_scale: 16.0
+2023-03-28 08:38:32,266 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.365e+02 5.019e+02 6.130e+02 1.225e+03, threshold=1.004e+03, percent-clipped=3.0
+2023-03-28 08:39:00,873 INFO [train.py:892] (1/4) Epoch 16, batch 1600, loss[loss=0.201, simple_loss=0.2708, pruned_loss=0.06557, over 19680.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2683, pruned_loss=0.0676, over 3949425.04 frames. ], batch size: 52, lr: 9.76e-03, grad_scale: 16.0
+2023-03-28 08:40:51,227 INFO [train.py:892] (1/4) Epoch 16, batch 1650, loss[loss=0.1803, simple_loss=0.2471, pruned_loss=0.05676, over 19834.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2672, pruned_loss=0.06677, over 3948946.44 frames. ], batch size: 177, lr: 9.75e-03, grad_scale: 16.0
+2023-03-28 08:42:19,142 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.587e+02 4.298e+02 4.820e+02 5.850e+02 1.066e+03, threshold=9.641e+02, percent-clipped=1.0
+2023-03-28 08:42:27,213 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29517.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:42:45,629 INFO [train.py:892] (1/4) Epoch 16, batch 1700, loss[loss=0.2017, simple_loss=0.2743, pruned_loss=0.06454, over 19643.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.268, pruned_loss=0.06688, over 3948648.38 frames. ], batch size: 69, lr: 9.74e-03, grad_scale: 16.0
+2023-03-28 08:43:45,415 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29551.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:44:06,633 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8969, 1.9362, 2.0269, 1.9756, 1.8872, 1.9652, 1.9736, 2.1290],
+ device='cuda:1'), covar=tensor([0.0234, 0.0247, 0.0259, 0.0234, 0.0338, 0.0255, 0.0347, 0.0258],
+ device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0056, 0.0061, 0.0052, 0.0066, 0.0062, 0.0079, 0.0055],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 08:44:15,332 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29565.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:44:35,432 INFO [train.py:892] (1/4) Epoch 16, batch 1750, loss[loss=0.2058, simple_loss=0.269, pruned_loss=0.07136, over 19834.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2683, pruned_loss=0.06723, over 3947755.58 frames. ], batch size: 184, lr: 9.73e-03, grad_scale: 16.0
+2023-03-28 08:44:40,631 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2589, 4.3531, 4.7132, 4.3007, 3.9747, 4.5319, 4.3047, 4.7876],
+ device='cuda:1'), covar=tensor([0.0931, 0.0293, 0.0338, 0.0346, 0.0843, 0.0451, 0.0417, 0.0304],
+ device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0201, 0.0204, 0.0208, 0.0195, 0.0210, 0.0208, 0.0193],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 08:45:10,847 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29592.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:45:50,196 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29612.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:45:52,897 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.648e+02 4.260e+02 5.446e+02 6.419e+02 1.506e+03, threshold=1.089e+03, percent-clipped=4.0
+2023-03-28 08:46:14,358 INFO [train.py:892] (1/4) Epoch 16, batch 1800, loss[loss=0.1804, simple_loss=0.2464, pruned_loss=0.05717, over 19750.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2692, pruned_loss=0.06771, over 3948449.04 frames. ], batch size: 209, lr: 9.72e-03, grad_scale: 16.0
+2023-03-28 08:46:19,746 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0
+2023-03-28 08:47:12,701 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.26 vs. limit=5.0
+2023-03-28 08:47:16,040 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29658.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:47:23,428 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29662.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:47:46,342 INFO [train.py:892] (1/4) Epoch 16, batch 1850, loss[loss=0.2067, simple_loss=0.2813, pruned_loss=0.06603, over 19834.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2714, pruned_loss=0.06802, over 3944209.63 frames. ], batch size: 57, lr: 9.72e-03, grad_scale: 16.0
+2023-03-28 08:47:48,892 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-03-28 08:48:53,468 INFO [train.py:892] (1/4) Epoch 17, batch 0, loss[loss=0.1725, simple_loss=0.2468, pruned_loss=0.0491, over 19773.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2468, pruned_loss=0.0491, over 19773.00 frames. ], batch size: 53, lr: 9.42e-03, grad_scale: 16.0
+2023-03-28 08:48:53,469 INFO [train.py:917] (1/4) Computing validation loss
+2023-03-28 08:49:26,238 INFO [train.py:926] (1/4) Epoch 17, validation: loss=0.1709, simple_loss=0.2495, pruned_loss=0.0462, over 2883724.00 frames.
+2023-03-28 08:49:26,240 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB
+2023-03-28 08:50:27,534 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29706.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:50:46,160 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 4.092e+02 5.169e+02 6.324e+02 1.457e+03, threshold=1.034e+03, percent-clipped=3.0
+2023-03-28 08:51:08,857 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29723.0, num_to_drop=1, layers_to_drop={3}
+2023-03-28 08:51:25,315 INFO [train.py:892] (1/4) Epoch 17, batch 50, loss[loss=0.1635, simple_loss=0.2315, pruned_loss=0.04773, over 19828.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2657, pruned_loss=0.06741, over 891689.93 frames. ], batch size: 76, lr: 9.41e-03, grad_scale: 16.0
+2023-03-28 08:53:21,623 INFO [train.py:892] (1/4) Epoch 17, batch 100, loss[loss=0.2054, simple_loss=0.284, pruned_loss=0.06337, over 19634.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2666, pruned_loss=0.06643, over 1567875.95 frames. ], batch size: 72, lr: 9.41e-03, grad_scale: 16.0
+2023-03-28 08:54:13,388 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29802.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:54:32,912 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6123, 4.5882, 2.7002, 4.8841, 5.0468, 2.1206, 4.4033, 3.6908],
+ device='cuda:1'), covar=tensor([0.0508, 0.0825, 0.2477, 0.0500, 0.0392, 0.2842, 0.0696, 0.0658],
+ device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0232, 0.0216, 0.0231, 0.0202, 0.0199, 0.0225, 0.0172],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 08:54:38,218 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 4.290e+02 5.009e+02 6.167e+02 1.031e+03, threshold=1.002e+03, percent-clipped=0.0
+2023-03-28 08:55:00,943 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4172, 2.5567, 3.9916, 3.4952, 3.8288, 3.9117, 3.8229, 3.6650],
+ device='cuda:1'), covar=tensor([0.0393, 0.0823, 0.0121, 0.0675, 0.0132, 0.0239, 0.0157, 0.0163],
+ device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0092, 0.0076, 0.0146, 0.0071, 0.0085, 0.0079, 0.0071],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 08:55:19,241 INFO [train.py:892] (1/4) Epoch 17, batch 150, loss[loss=0.1895, simple_loss=0.2545, pruned_loss=0.06222, over 19796.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2666, pruned_loss=0.06537, over 2094095.03 frames. ], batch size: 173, lr: 9.40e-03, grad_scale: 16.0
+2023-03-28 08:56:29,452 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29863.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:57:07,841 INFO [train.py:892] (1/4) Epoch 17, batch 200, loss[loss=0.2134, simple_loss=0.2801, pruned_loss=0.07333, over 19716.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2672, pruned_loss=0.06578, over 2505681.24 frames. ], batch size: 62, lr: 9.39e-03, grad_scale: 16.0
+2023-03-28 08:57:33,265 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29892.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:57:37,451 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4496, 3.3073, 4.7964, 3.5528, 4.0419, 3.9969, 2.5075, 2.7191],
+ device='cuda:1'), covar=tensor([0.0630, 0.2731, 0.0384, 0.0826, 0.1384, 0.0905, 0.1977, 0.2239],
+ device='cuda:1'), in_proj_covar=tensor([0.0331, 0.0359, 0.0308, 0.0247, 0.0355, 0.0314, 0.0324, 0.0296],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 08:57:39,813 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0
+2023-03-28 08:58:05,741 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29907.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:58:19,263 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.430e+02 4.252e+02 5.177e+02 6.087e+02 1.116e+03, threshold=1.035e+03, percent-clipped=2.0
+2023-03-28 08:58:59,495 INFO [train.py:892] (1/4) Epoch 17, batch 250, loss[loss=0.2261, simple_loss=0.2886, pruned_loss=0.08177, over 19637.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2662, pruned_loss=0.06455, over 2825007.80 frames. ], batch size: 330, lr: 9.38e-03, grad_scale: 16.0
+2023-03-28 08:59:22,508 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29940.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:59:23,053 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0
+2023-03-28 09:00:54,556 INFO [train.py:892] (1/4) Epoch 17, batch 300, loss[loss=0.2029, simple_loss=0.2754, pruned_loss=0.06516, over 19840.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2664, pruned_loss=0.06447, over 3073027.65 frames. ], batch size: 58, lr: 9.37e-03, grad_scale: 16.0
+2023-03-28 09:02:13,031 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.673e+02 4.129e+02 4.962e+02 6.041e+02 9.266e+02, threshold=9.924e+02, percent-clipped=0.0
+2023-03-28 09:02:24,238 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30018.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 09:02:53,709 INFO [train.py:892] (1/4) Epoch 17, batch 350, loss[loss=0.2331, simple_loss=0.2926, pruned_loss=0.08676, over 19700.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2669, pruned_loss=0.06485, over 3267813.64 frames. ], batch size: 315, lr: 9.37e-03, grad_scale: 16.0
+2023-03-28 09:03:43,984 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2508, 2.4457, 3.7617, 3.1927, 3.6693, 3.8010, 3.5659, 3.6338],
+ device='cuda:1'), covar=tensor([0.0325, 0.0759, 0.0089, 0.0604, 0.0117, 0.0168, 0.0166, 0.0132],
+ device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0092, 0.0075, 0.0146, 0.0071, 0.0085, 0.0079, 0.0071],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 09:04:12,751 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7293, 3.2703, 3.5853, 3.2992, 3.9258, 4.0153, 4.6892, 5.1976],
+ device='cuda:1'), covar=tensor([0.0503, 0.1486, 0.1361, 0.1955, 0.1536, 0.1269, 0.0439, 0.0372],
+ device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0226, 0.0248, 0.0241, 0.0275, 0.0239, 0.0200, 0.0214],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 09:04:38,330 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8307, 3.0696, 2.3887, 2.1976, 2.6291, 2.9388, 3.0087, 2.9135],
+ device='cuda:1'), covar=tensor([0.0308, 0.0353, 0.0349, 0.0542, 0.0389, 0.0370, 0.0167, 0.0222],
+ device='cuda:1'), in_proj_covar=tensor([0.0076, 0.0073, 0.0081, 0.0085, 0.0089, 0.0063, 0.0061, 0.0063],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001],
+ device='cuda:1')
+2023-03-28 09:04:38,693 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-03-28 09:04:44,384 INFO [train.py:892] (1/4) Epoch 17, batch 400, loss[loss=0.1757, simple_loss=0.2554, pruned_loss=0.048, over 19732.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2663, pruned_loss=0.06467, over 3417971.11 frames. ], batch size: 50, lr: 9.36e-03, grad_scale: 16.0
+2023-03-28 09:05:59,372 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 4.154e+02 5.101e+02 6.100e+02 1.061e+03, threshold=1.020e+03, percent-clipped=2.0
+2023-03-28 09:06:20,075 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2572, 3.4442, 1.7346, 4.2305, 3.4368, 4.0626, 4.1583, 3.0900],
+ device='cuda:1'), covar=tensor([0.0579, 0.0487, 0.1759, 0.0409, 0.0624, 0.0351, 0.0430, 0.0757],
+ device='cuda:1'), in_proj_covar=tensor([0.0131, 0.0128, 0.0135, 0.0132, 0.0115, 0.0114, 0.0129, 0.0133],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 09:06:25,933 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30125.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:06:38,319 INFO [train.py:892] (1/4) Epoch 17, batch 450, loss[loss=0.2031, simple_loss=0.2729, pruned_loss=0.06667, over 19646.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2662, pruned_loss=0.06498, over 3535299.19 frames. ], batch size: 47, lr: 9.35e-03, grad_scale: 16.0
+2023-03-28 09:07:39,007 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8095, 2.7081, 2.6655, 2.9076, 2.8929, 3.0452, 2.8345, 2.9784],
+ device='cuda:1'), covar=tensor([0.1167, 0.0682, 0.0827, 0.0589, 0.0821, 0.0656, 0.0738, 0.0757],
+ device='cuda:1'), in_proj_covar=tensor([0.0136, 0.0155, 0.0180, 0.0151, 0.0151, 0.0134, 0.0136, 0.0171],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 09:07:40,866 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30158.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:08:24,299 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.6933, 1.7438, 1.9209, 1.8277, 1.7441, 1.7667, 1.7312, 1.8692],
+ device='cuda:1'), covar=tensor([0.0240, 0.0230, 0.0221, 0.0189, 0.0303, 0.0213, 0.0364, 0.0183],
+ device='cuda:1'), in_proj_covar=tensor([0.0058, 0.0056, 0.0060, 0.0052, 0.0065, 0.0062, 0.0078, 0.0054],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 09:08:32,580 INFO [train.py:892] (1/4) Epoch 17, batch 500, loss[loss=0.23, simple_loss=0.2923, pruned_loss=0.0838, over 19725.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2678, pruned_loss=0.06619, over 3626713.34 frames. ], batch size: 295, lr: 9.34e-03, grad_scale: 16.0
+2023-03-28 09:08:43,915 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30186.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:09:30,914 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30207.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:09:35,228 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30209.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:09:45,676 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.879e+02 4.496e+02 5.220e+02 6.735e+02 1.165e+03, threshold=1.044e+03, percent-clipped=2.0
+2023-03-28 09:10:09,897 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5410, 1.9415, 3.1177, 2.4703, 3.0804, 3.1513, 2.9180, 3.0333],
+ device='cuda:1'), covar=tensor([0.0555, 0.1093, 0.0130, 0.0573, 0.0150, 0.0239, 0.0245, 0.0182],
+ device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0094, 0.0076, 0.0149, 0.0072, 0.0086, 0.0080, 0.0073],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 09:10:18,904 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7416, 2.8083, 3.0266, 2.7518, 2.7102, 2.7127, 2.7509, 3.0550],
+ device='cuda:1'), covar=tensor([0.0238, 0.0262, 0.0201, 0.0213, 0.0311, 0.0329, 0.0318, 0.0266],
+ device='cuda:1'), in_proj_covar=tensor([0.0058, 0.0055, 0.0060, 0.0052, 0.0065, 0.0062, 0.0078, 0.0054],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 09:10:25,887 INFO [train.py:892] (1/4) Epoch 17, batch 550, loss[loss=0.1844, simple_loss=0.2632, pruned_loss=0.05276, over 19654.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2675, pruned_loss=0.06596, over 3697572.60 frames. ], batch size: 79, lr: 9.34e-03, grad_scale: 16.0
+2023-03-28 09:10:38,410 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1029, 2.5801, 2.1616, 1.5768, 2.2214, 2.4696, 2.3979, 2.4914],
+ device='cuda:1'), covar=tensor([0.0273, 0.0178, 0.0269, 0.0560, 0.0380, 0.0245, 0.0196, 0.0190],
+ device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0071, 0.0079, 0.0084, 0.0087, 0.0062, 0.0059, 0.0062],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001],
+ device='cuda:1')
+2023-03-28 09:11:16,440 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0099, 2.8604, 1.6438, 3.5730, 3.2382, 3.5731, 3.6692, 2.6935],
+ device='cuda:1'), covar=tensor([0.0573, 0.0664, 0.1787, 0.0613, 0.0604, 0.0369, 0.0508, 0.0815],
+ device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0128, 0.0134, 0.0131, 0.0115, 0.0113, 0.0129, 0.0132],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 09:11:19,690 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30255.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:11:53,985 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30270.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:12:01,975 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.5725, 1.6549, 1.7588, 1.7937, 1.5611, 1.6814, 1.6495, 1.7577],
+ device='cuda:1'), covar=tensor([0.0275, 0.0227, 0.0242, 0.0205, 0.0355, 0.0228, 0.0369, 0.0221],
+ device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0056, 0.0061, 0.0053, 0.0066, 0.0062, 0.0079, 0.0054],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 09:12:19,552 INFO [train.py:892] (1/4) Epoch 17, batch 600, loss[loss=0.1841, simple_loss=0.2441, pruned_loss=0.06203, over 19768.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2654, pruned_loss=0.06524, over 3754911.49 frames. ], batch size: 182, lr: 9.33e-03, grad_scale: 16.0
+2023-03-28 09:13:33,429 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.467e+02 5.041e+02 6.108e+02 1.228e+03, threshold=1.008e+03, percent-clipped=2.0
+2023-03-28 09:13:44,487 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30318.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 09:14:12,603 INFO [train.py:892] (1/4) Epoch 17, batch 650, loss[loss=0.1804, simple_loss=0.2359, pruned_loss=0.06246, over 19820.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2634, pruned_loss=0.06426, over 3799528.16 frames. ], batch size: 127, lr: 9.32e-03, grad_scale: 16.0
+2023-03-28 09:15:18,831 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30360.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:15:32,423 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30366.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:16:03,588 INFO [train.py:892] (1/4) Epoch 17, batch 700, loss[loss=0.1902, simple_loss=0.2652, pruned_loss=0.05757, over 19777.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2656, pruned_loss=0.065, over 3831981.80 frames. ], batch size: 53, lr: 9.31e-03, grad_scale: 16.0
+2023-03-28 09:17:22,249 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.759e+02 4.180e+02 5.300e+02 6.975e+02 1.622e+03, threshold=1.060e+03, percent-clipped=3.0
+2023-03-28 09:17:39,003 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30421.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:17:59,219 INFO [train.py:892] (1/4) Epoch 17, batch 750, loss[loss=0.2024, simple_loss=0.2826, pruned_loss=0.06116, over 19790.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2665, pruned_loss=0.06529, over 3857287.94 frames. ], batch size: 65, lr: 9.31e-03, grad_scale: 16.0
+2023-03-28 09:19:02,423 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30458.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:19:53,081 INFO [train.py:892] (1/4) Epoch 17, batch 800, loss[loss=0.2191, simple_loss=0.287, pruned_loss=0.07557, over 19835.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2668, pruned_loss=0.06598, over 3878178.48 frames. ], batch size: 115, lr: 9.30e-03, grad_scale: 16.0
+2023-03-28 09:19:54,204 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30481.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:20:52,537 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30506.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:21:10,973 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.987e+02 4.028e+02 4.946e+02 5.803e+02 1.096e+03, threshold=9.891e+02, percent-clipped=1.0
+2023-03-28 09:21:18,849 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.87 vs. limit=5.0
+2023-03-28 09:21:46,528 INFO [train.py:892] (1/4) Epoch 17, batch 850, loss[loss=0.2123, simple_loss=0.2843, pruned_loss=0.07012, over 19733.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2665, pruned_loss=0.06558, over 3895322.31 frames. ], batch size: 71, lr: 9.29e-03, grad_scale: 16.0
+2023-03-28 09:21:55,702 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0
+2023-03-28 09:22:05,879 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.71 vs. limit=5.0
+2023-03-28 09:23:05,884 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30565.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:23:40,289 INFO [train.py:892] (1/4) Epoch 17, batch 900, loss[loss=0.1879, simple_loss=0.2608, pruned_loss=0.05755, over 19755.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2662, pruned_loss=0.06574, over 3907761.30 frames. ], batch size: 110, lr: 9.28e-03, grad_scale: 16.0
+2023-03-28 09:24:26,041 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5550, 4.5472, 2.8135, 4.8483, 5.1438, 2.0148, 4.1918, 3.7563],
+ device='cuda:1'), covar=tensor([0.0583, 0.0702, 0.2399, 0.0638, 0.0351, 0.2931, 0.0959, 0.0718],
+ device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0236, 0.0218, 0.0236, 0.0205, 0.0198, 0.0228, 0.0173],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 09:24:35,961 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1202, 3.1025, 1.7556, 3.8537, 3.5311, 3.7417, 3.8896, 2.9612],
+ device='cuda:1'), covar=tensor([0.0593, 0.0688, 0.1871, 0.0482, 0.0513, 0.0396, 0.0484, 0.0758],
+ device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0130, 0.0137, 0.0133, 0.0115, 0.0115, 0.0132, 0.0134],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 09:24:54,343 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.960e+02 4.086e+02 5.123e+02 6.200e+02 1.238e+03, threshold=1.025e+03, percent-clipped=1.0
+2023-03-28 09:25:34,343 INFO [train.py:892] (1/4) Epoch 17, batch 950, loss[loss=0.1956, simple_loss=0.258, pruned_loss=0.0666, over 19760.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2671, pruned_loss=0.06564, over 3915908.99 frames. ], batch size: 188, lr: 9.28e-03, grad_scale: 16.0
+2023-03-28 09:26:06,910 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5205, 2.7255, 3.0876, 2.7419, 2.5710, 2.7414, 2.6177, 2.8660],
+ device='cuda:1'), covar=tensor([0.0288, 0.0310, 0.0212, 0.0216, 0.0342, 0.0227, 0.0311, 0.0489],
+ device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0056, 0.0060, 0.0053, 0.0066, 0.0061, 0.0078, 0.0054],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 09:26:28,415 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30655.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:27:25,984 INFO [train.py:892] (1/4) Epoch 17, batch 1000, loss[loss=0.1799, simple_loss=0.2438, pruned_loss=0.058, over 19866.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2677, pruned_loss=0.06606, over 3922122.98 frames. ], batch size: 129, lr: 9.27e-03, grad_scale: 16.0
+2023-03-28 09:28:28,073 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0
+2023-03-28 09:28:42,760 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.493e+02 4.412e+02 5.308e+02 6.255e+02 1.077e+03, threshold=1.062e+03, percent-clipped=1.0
+2023-03-28 09:28:48,058 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30716.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:28:48,271 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30716.0, num_to_drop=1, layers_to_drop={3}
+2023-03-28 09:29:20,834 INFO [train.py:892] (1/4) Epoch 17, batch 1050, loss[loss=0.2069, simple_loss=0.2813, pruned_loss=0.06624, over 19719.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2677, pruned_loss=0.06581, over 3928198.76 frames. ], batch size: 61, lr: 9.26e-03, grad_scale: 16.0
+2023-03-28 09:31:15,322 INFO [train.py:892] (1/4) Epoch 17, batch 1100, loss[loss=0.1787, simple_loss=0.2414, pruned_loss=0.05799, over 19800.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2673, pruned_loss=0.06592, over 3932182.52 frames. ], batch size: 86, lr: 9.26e-03, grad_scale: 16.0
+2023-03-28 09:31:16,155 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30781.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:32:31,408 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.889e+02 4.403e+02 5.206e+02 6.352e+02 1.143e+03, threshold=1.041e+03, percent-clipped=2.0
+2023-03-28 09:33:04,358 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30829.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:33:08,510 INFO [train.py:892] (1/4) Epoch 17, batch 1150, loss[loss=0.1957, simple_loss=0.2596, pruned_loss=0.0659, over 19842.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2678, pruned_loss=0.06671, over 3936404.64 frames. ], batch size: 197, lr: 9.25e-03, grad_scale: 16.0
+2023-03-28 09:34:17,730 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2596, 2.3370, 2.7461, 2.4319, 2.3325, 2.3951, 2.3634, 2.6486],
+ device='cuda:1'), covar=tensor([0.0280, 0.0251, 0.0198, 0.0226, 0.0341, 0.0281, 0.0393, 0.0285],
+ device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0056, 0.0060, 0.0053, 0.0067, 0.0062, 0.0079, 0.0054],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 09:34:25,851 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30865.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:35:00,476 INFO [train.py:892] (1/4) Epoch 17, batch 1200, loss[loss=0.1881, simple_loss=0.2508, pruned_loss=0.06269, over 19753.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2671, pruned_loss=0.06613, over 3939358.81 frames. ], batch size: 179, lr: 9.24e-03, grad_scale: 16.0
+2023-03-28 09:35:23,794 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8536, 2.5799, 4.5727, 3.8366, 4.2632, 4.4012, 4.3884, 4.2359],
+ device='cuda:1'), covar=tensor([0.0371, 0.0998, 0.0103, 0.0926, 0.0145, 0.0234, 0.0162, 0.0147],
+ device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0095, 0.0077, 0.0148, 0.0073, 0.0086, 0.0081, 0.0073],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 09:35:50,840 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30903.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:36:17,103 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30913.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:36:18,326 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.660e+02 3.834e+02 4.757e+02 6.314e+02 9.786e+02, threshold=9.513e+02, percent-clipped=0.0
+2023-03-28 09:36:38,793 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4304, 4.5038, 2.7180, 4.7303, 5.0048, 2.1050, 4.1775, 3.6089],
+ device='cuda:1'), covar=tensor([0.0588, 0.0638, 0.2359, 0.0678, 0.0394, 0.2837, 0.0892, 0.0723],
+ device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0236, 0.0217, 0.0235, 0.0208, 0.0200, 0.0229, 0.0175],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 09:36:42,590 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30925.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:36:53,357 INFO [train.py:892] (1/4) Epoch 17, batch 1250, loss[loss=0.1761, simple_loss=0.2461, pruned_loss=0.05305, over 19793.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2672, pruned_loss=0.06594, over 3941676.04 frames. ], batch size: 86, lr: 9.23e-03, grad_scale: 16.0
+2023-03-28 09:37:01,716 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30934.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:38:10,867 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30964.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:38:45,966 INFO [train.py:892] (1/4) Epoch 17, batch 1300, loss[loss=0.2035, simple_loss=0.272, pruned_loss=0.06748, over 19837.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2671, pruned_loss=0.06553, over 3941431.55 frames. ], batch size: 239, lr: 9.23e-03, grad_scale: 16.0
+2023-03-28 09:39:00,938 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30986.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:39:22,358 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30995.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:39:56,584 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31011.0, num_to_drop=1, layers_to_drop={3}
+2023-03-28 09:39:56,684 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31011.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 09:40:01,502 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 4.725e+02 5.560e+02 6.623e+02 9.875e+02, threshold=1.112e+03, percent-clipped=1.0
+2023-03-28 09:40:06,324 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31016.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:40:39,838 INFO [train.py:892] (1/4) Epoch 17, batch 1350, loss[loss=0.1859, simple_loss=0.251, pruned_loss=0.06037, over 19760.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2668, pruned_loss=0.06542, over 3945224.60 frames. ], batch size: 182, lr: 9.22e-03, grad_scale: 16.0
+2023-03-28 09:41:54,693 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31064.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:42:13,046 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31072.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 09:42:31,171 INFO [train.py:892] (1/4) Epoch 17, batch 1400, loss[loss=0.1889, simple_loss=0.2558, pruned_loss=0.06102, over 19740.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2664, pruned_loss=0.06521, over 3946656.63 frames. ], batch size: 95, lr: 9.21e-03, grad_scale: 16.0
+2023-03-28 09:43:40,800 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.073e+02 5.032e+02 6.013e+02 1.066e+03, threshold=1.006e+03, percent-clipped=0.0
+2023-03-28 09:43:41,651 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9770, 3.5747, 3.6950, 3.9777, 3.7054, 3.8286, 4.0784, 4.2447],
+ device='cuda:1'), covar=tensor([0.0691, 0.0421, 0.0551, 0.0326, 0.0656, 0.0586, 0.0399, 0.0274],
+ device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0158, 0.0184, 0.0153, 0.0155, 0.0139, 0.0139, 0.0175],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 09:44:19,113 INFO [train.py:892] (1/4) Epoch 17, batch 1450, loss[loss=0.1674, simple_loss=0.238, pruned_loss=0.04841, over 19796.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2671, pruned_loss=0.06514, over 3947055.54 frames. ], batch size: 45, lr: 9.20e-03, grad_scale: 32.0
+2023-03-28 09:46:17,164 INFO [train.py:892] (1/4) Epoch 17, batch 1500, loss[loss=0.1899, simple_loss=0.2618, pruned_loss=0.05903, over 19872.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2676, pruned_loss=0.06531, over 3945874.59 frames. ], batch size: 108, lr: 9.20e-03, grad_scale: 32.0
+2023-03-28 09:47:32,208 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.841e+02 4.138e+02 4.953e+02 5.911e+02 9.562e+02, threshold=9.905e+02, percent-clipped=0.0
+2023-03-28 09:48:10,829 INFO [train.py:892] (1/4) Epoch 17, batch 1550, loss[loss=0.1947, simple_loss=0.2591, pruned_loss=0.06515, over 19888.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2664, pruned_loss=0.06452, over 3946925.44 frames. ], batch size: 176, lr: 9.19e-03, grad_scale: 32.0
+2023-03-28 09:48:30,511 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1896, 2.9537, 4.6805, 3.9418, 4.4195, 4.5864, 4.5132, 4.2863],
+ device='cuda:1'), covar=tensor([0.0303, 0.0844, 0.0093, 0.1011, 0.0109, 0.0210, 0.0135, 0.0136],
+ device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0094, 0.0077, 0.0147, 0.0073, 0.0087, 0.0081, 0.0073],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 09:49:12,695 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31259.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:50:00,618 INFO [train.py:892] (1/4) Epoch 17, batch 1600, loss[loss=0.2846, simple_loss=0.346, pruned_loss=0.1116, over 19621.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2662, pruned_loss=0.06412, over 3948788.50 frames. ], batch size: 367, lr: 9.18e-03, grad_scale: 32.0
+2023-03-28 09:50:02,879 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31281.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:50:23,720 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31290.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:51:07,776 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31311.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:51:12,797 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.169e+02 4.273e+02 5.402e+02 6.708e+02 1.273e+03, threshold=1.080e+03, percent-clipped=3.0
+2023-03-28 09:51:41,149 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4981, 4.2273, 4.3358, 4.0773, 4.5072, 3.2386, 3.7693, 2.4660],
+ device='cuda:1'), covar=tensor([0.0198, 0.0202, 0.0121, 0.0151, 0.0121, 0.0777, 0.0640, 0.1294],
+ device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0130, 0.0103, 0.0124, 0.0109, 0.0125, 0.0135, 0.0119],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 09:51:55,035 INFO [train.py:892] (1/4) Epoch 17, batch 1650, loss[loss=0.2102, simple_loss=0.266, pruned_loss=0.07718, over 19828.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2665, pruned_loss=0.06477, over 3948536.96 frames. ], batch size: 184, lr: 9.18e-03, grad_scale: 32.0
+2023-03-28 09:52:57,495 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31359.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:53:13,473 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31367.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 09:53:43,708 INFO [train.py:892] (1/4) Epoch 17, batch 1700, loss[loss=0.2013, simple_loss=0.2594, pruned_loss=0.07155, over 19836.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2678, pruned_loss=0.06595, over 3949293.30 frames. ], batch size: 177, lr: 9.17e-03, grad_scale: 32.0
+2023-03-28 09:53:52,474 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9929, 4.9713, 5.4577, 4.9757, 4.3657, 5.2501, 5.1379, 5.6538],
+ device='cuda:1'), covar=tensor([0.0871, 0.0329, 0.0339, 0.0381, 0.0715, 0.0447, 0.0321, 0.0288],
+ device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0209, 0.0212, 0.0218, 0.0198, 0.0219, 0.0215, 0.0199],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 09:53:54,754 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5060, 2.6656, 3.0764, 2.7828, 2.4685, 2.7640, 2.5045, 2.9184],
+ device='cuda:1'), covar=tensor([0.0268, 0.0295, 0.0203, 0.0265, 0.0363, 0.0282, 0.0360, 0.0297],
+ device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0057, 0.0061, 0.0054, 0.0068, 0.0062, 0.0080, 0.0055],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 09:54:28,969 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0
+2023-03-28 09:54:48,861 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31409.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:54:59,294 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.015e+02 4.580e+02 5.253e+02 6.145e+02 1.351e+03, threshold=1.051e+03, percent-clipped=1.0
+2023-03-28 09:55:32,405 INFO [train.py:892] (1/4) Epoch 17, batch 1750, loss[loss=0.2569, simple_loss=0.3223, pruned_loss=0.09576, over 19596.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2664, pruned_loss=0.06508, over 3948969.21 frames. ], batch size: 376, lr: 9.16e-03, grad_scale: 32.0
+2023-03-28 09:56:48,093 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31470.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:57:07,955 INFO [train.py:892] (1/4) Epoch 17, batch 1800, loss[loss=0.188, simple_loss=0.2608, pruned_loss=0.05764, over 19759.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2667, pruned_loss=0.0657, over 3948429.31 frames. ], batch size: 49, lr: 9.15e-03, grad_scale: 32.0
+2023-03-28 09:57:26,689 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31491.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 09:57:43,839 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6502, 3.0317, 2.5565, 2.0500, 2.6122, 2.9675, 2.8142, 3.0004],
+ device='cuda:1'), covar=tensor([0.0234, 0.0201, 0.0241, 0.0476, 0.0326, 0.0195, 0.0168, 0.0176],
+ device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0073, 0.0081, 0.0086, 0.0089, 0.0064, 0.0061, 0.0064],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 09:58:07,134 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.684e+02 3.864e+02 5.000e+02 5.920e+02 9.753e+02, threshold=1.000e+03, percent-clipped=0.0
+2023-03-28 09:58:38,276 INFO [train.py:892] (1/4) Epoch 17, batch 1850, loss[loss=0.1949, simple_loss=0.2727, pruned_loss=0.05853, over 19694.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2678, pruned_loss=0.06514, over 3947553.60 frames. ], batch size: 56, lr: 9.15e-03, grad_scale: 32.0
+2023-03-28 09:59:46,588 INFO [train.py:892] (1/4) Epoch 18, batch 0, loss[loss=0.1757, simple_loss=0.2505, pruned_loss=0.0504, over 19804.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2505, pruned_loss=0.0504, over 19804.00 frames. ], batch size: 67, lr: 8.89e-03, grad_scale: 32.0
+2023-03-28 09:59:46,589 INFO [train.py:917] (1/4) Computing validation loss
+2023-03-28 10:00:15,456 INFO [train.py:926] (1/4) Epoch 18, validation: loss=0.171, simple_loss=0.2489, pruned_loss=0.04657, over 2883724.00 frames.
+2023-03-28 10:00:15,457 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB
+2023-03-28 10:00:43,162 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2564, 3.9948, 4.1256, 3.8520, 4.2704, 3.1445, 3.5136, 2.2020],
+ device='cuda:1'), covar=tensor([0.0191, 0.0217, 0.0135, 0.0173, 0.0132, 0.0788, 0.0667, 0.1391],
+ device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0130, 0.0103, 0.0124, 0.0110, 0.0124, 0.0135, 0.0118],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 10:00:52,948 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31552.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 10:01:09,729 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31559.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:02:01,512 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31581.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:02:11,871 INFO [train.py:892] (1/4) Epoch 18, batch 50, loss[loss=0.2117, simple_loss=0.2695, pruned_loss=0.0769, over 19753.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2571, pruned_loss=0.06182, over 891409.77 frames. ], batch size: 256, lr: 8.88e-03, grad_scale: 32.0
+2023-03-28 10:02:21,761 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31590.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:02:47,545 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2564, 4.8576, 4.8181, 5.2534, 4.9694, 5.4709, 5.3478, 5.5417],
+ device='cuda:1'), covar=tensor([0.0633, 0.0317, 0.0429, 0.0280, 0.0549, 0.0290, 0.0365, 0.0282],
+ device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0160, 0.0187, 0.0157, 0.0158, 0.0139, 0.0141, 0.0179],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 10:02:52,054 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9503, 3.9821, 2.4472, 4.2892, 4.4914, 1.9294, 3.5936, 3.4155],
+ device='cuda:1'), covar=tensor([0.0784, 0.1013, 0.2817, 0.0869, 0.0507, 0.3164, 0.1187, 0.0811],
+ device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0239, 0.0220, 0.0241, 0.0212, 0.0203, 0.0230, 0.0177],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 10:03:03,306 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31607.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:03:16,248 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.973e+02 4.915e+02 6.184e+02 1.019e+03, threshold=9.829e+02, percent-clipped=1.0
+2023-03-28 10:03:52,323 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31629.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:04:05,507 INFO [train.py:892] (1/4) Epoch 18, batch 100, loss[loss=0.2045, simple_loss=0.2769, pruned_loss=0.06606, over 19669.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2593, pruned_loss=0.06156, over 1569434.79 frames. ], batch size: 52, lr: 8.87e-03, grad_scale: 32.0
+2023-03-28 10:04:10,149 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31638.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:04:57,958 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2587, 4.1134, 4.6313, 4.4091, 4.5351, 3.9478, 4.3202, 4.1974],
+ device='cuda:1'), covar=tensor([0.1481, 0.1628, 0.0968, 0.1239, 0.0941, 0.1182, 0.2087, 0.2237],
+ device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0276, 0.0328, 0.0260, 0.0240, 0.0240, 0.0321, 0.0352],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 10:05:17,219 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31667.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 10:06:00,553 INFO [train.py:892] (1/4) Epoch 18, batch 150, loss[loss=0.2002, simple_loss=0.2694, pruned_loss=0.0655, over 19738.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2633, pruned_loss=0.06381, over 2097707.34 frames. ], batch size: 118, lr: 8.86e-03, grad_scale: 32.0
+2023-03-28 10:06:42,168 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31704.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:07:06,643 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 4.429e+02 5.296e+02 6.261e+02 1.038e+03, threshold=1.059e+03, percent-clipped=5.0
+2023-03-28 10:07:07,530 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31715.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 10:07:37,051 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2925, 3.1185, 3.3243, 2.4655, 3.4575, 2.8004, 3.1007, 3.4437],
+ device='cuda:1'), covar=tensor([0.0503, 0.0359, 0.0491, 0.0768, 0.0313, 0.0383, 0.0410, 0.0284],
+ device='cuda:1'), in_proj_covar=tensor([0.0065, 0.0071, 0.0069, 0.0100, 0.0066, 0.0066, 0.0065, 0.0057],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 10:07:53,045 INFO [train.py:892] (1/4) Epoch 18, batch 200, loss[loss=0.1679, simple_loss=0.2357, pruned_loss=0.05008, over 19858.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2629, pruned_loss=0.06327, over 2510110.70 frames. ], batch size: 118, lr: 8.86e-03, grad_scale: 16.0
+2023-03-28 10:09:00,549 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:09:00,708 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:09:47,222 INFO [train.py:892] (1/4) Epoch 18, batch 250, loss[loss=0.2491, simple_loss=0.314, pruned_loss=0.09212, over 19606.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2648, pruned_loss=0.06363, over 2827506.97 frames. ], batch size: 359, lr: 8.85e-03, grad_scale: 16.0
+2023-03-28 10:09:53,016 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31788.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:10:27,306 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0
+2023-03-28 10:10:52,982 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 4.013e+02 4.672e+02 5.827e+02 1.248e+03, threshold=9.344e+02, percent-clipped=1.0
+2023-03-28 10:11:14,293 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31823.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:11:20,313 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7107, 1.6642, 1.9302, 1.9598, 1.6428, 1.8516, 1.7902, 1.8644],
+ device='cuda:1'), covar=tensor([0.0261, 0.0267, 0.0248, 0.0190, 0.0372, 0.0259, 0.0354, 0.0247],
+ device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0057, 0.0061, 0.0053, 0.0067, 0.0062, 0.0079, 0.0055],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 10:11:41,802 INFO [train.py:892] (1/4) Epoch 18, batch 300, loss[loss=0.1894, simple_loss=0.2527, pruned_loss=0.06302, over 19844.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2647, pruned_loss=0.06306, over 3077719.50 frames. ], batch size: 161, lr: 8.84e-03, grad_scale: 16.0
+2023-03-28 10:12:08,476 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31847.0, num_to_drop=1, layers_to_drop={3}
+2023-03-28 10:12:13,550 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31849.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:12:34,771 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0851, 4.1895, 4.5772, 4.2040, 4.0382, 4.5049, 4.3527, 4.7114],
+ device='cuda:1'), covar=tensor([0.1222, 0.0449, 0.0533, 0.0431, 0.0793, 0.0513, 0.0534, 0.0419],
+ device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0207, 0.0209, 0.0215, 0.0193, 0.0218, 0.0214, 0.0197],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 10:12:42,743 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0292, 1.9775, 2.4447, 2.3209, 2.0876, 2.3717, 2.3186, 2.3174],
+ device='cuda:1'), covar=tensor([0.0260, 0.0271, 0.0214, 0.0193, 0.0314, 0.0187, 0.0296, 0.0274],
+ device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0056, 0.0060, 0.0052, 0.0066, 0.0061, 0.0077, 0.0054],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 10:13:18,354 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31878.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:13:32,429 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31884.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:13:35,333 INFO [train.py:892] (1/4) Epoch 18, batch 350, loss[loss=0.2034, simple_loss=0.2606, pruned_loss=0.07313, over 19866.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2648, pruned_loss=0.06316, over 3271069.20 frames. ], batch size: 154, lr: 8.84e-03, grad_scale: 16.0
+2023-03-28 10:14:41,489 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.968e+02 4.069e+02 4.792e+02 5.707e+02 1.077e+03, threshold=9.584e+02, percent-clipped=3.0
+2023-03-28 10:14:42,361 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31915.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:15:31,556 INFO [train.py:892] (1/4) Epoch 18, batch 400, loss[loss=0.192, simple_loss=0.2669, pruned_loss=0.05853, over 19729.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.265, pruned_loss=0.06319, over 3420138.84 frames. ], batch size: 71, lr: 8.83e-03, grad_scale: 16.0
+2023-03-28 10:15:39,073 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31939.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:16:04,175 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31950.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:16:11,109 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.98 vs. limit=5.0
+2023-03-28 10:17:03,336 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31976.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:17:23,541 INFO [train.py:892] (1/4) Epoch 18, batch 450, loss[loss=0.1923, simple_loss=0.2614, pruned_loss=0.06162, over 19805.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2643, pruned_loss=0.06275, over 3538839.16 frames. ], batch size: 195, lr: 8.82e-03, grad_scale: 16.0
+2023-03-28 10:18:28,588 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32011.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:18:36,656 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.048e+02 4.283e+02 4.991e+02 5.829e+02 1.431e+03, threshold=9.983e+02, percent-clipped=4.0
+2023-03-28 10:18:56,395 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7112, 3.7147, 4.0638, 3.8914, 3.9972, 3.5328, 3.8069, 3.6730],
+ device='cuda:1'), covar=tensor([0.1597, 0.1860, 0.1227, 0.1303, 0.1365, 0.1300, 0.2228, 0.2409],
+ device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0282, 0.0335, 0.0262, 0.0245, 0.0243, 0.0326, 0.0358],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 10:19:12,639 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9030, 2.2284, 3.4964, 3.0109, 3.4488, 3.5422, 3.3607, 3.3526],
+ device='cuda:1'), covar=tensor([0.0441, 0.0878, 0.0105, 0.0547, 0.0130, 0.0205, 0.0166, 0.0151],
+ device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0096, 0.0078, 0.0149, 0.0074, 0.0087, 0.0082, 0.0074],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 10:19:26,556 INFO [train.py:892] (1/4) Epoch 18, batch 500, loss[loss=0.1937, simple_loss=0.2603, pruned_loss=0.0635, over 19758.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2658, pruned_loss=0.06388, over 3627150.72 frames. ], batch size: 213, lr: 8.82e-03, grad_scale: 16.0
+2023-03-28 10:20:20,761 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32060.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:20:33,264 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32065.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:21:22,483 INFO [train.py:892] (1/4) Epoch 18, batch 550, loss[loss=0.1735, simple_loss=0.2471, pruned_loss=0.0499, over 19854.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.266, pruned_loss=0.06408, over 3698839.95 frames. ], batch size: 112, lr: 8.81e-03, grad_scale: 16.0
+2023-03-28 10:22:25,793 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32113.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:22:30,546 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 4.056e+02 4.821e+02 6.235e+02 9.910e+02, threshold=9.642e+02, percent-clipped=0.0
+2023-03-28 10:23:01,774 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0
+2023-03-28 10:23:03,490 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3340, 2.9469, 4.8467, 4.0635, 4.4521, 4.6730, 4.5994, 4.4253],
+ device='cuda:1'), covar=tensor([0.0279, 0.0828, 0.0093, 0.0943, 0.0129, 0.0227, 0.0152, 0.0118],
+ device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0095, 0.0077, 0.0147, 0.0073, 0.0086, 0.0082, 0.0074],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 10:23:18,293 INFO [train.py:892] (1/4) Epoch 18, batch 600, loss[loss=0.1778, simple_loss=0.2452, pruned_loss=0.05524, over 19844.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2668, pruned_loss=0.0645, over 3753090.32 frames. ], batch size: 115, lr: 8.80e-03, grad_scale: 16.0
+2023-03-28 10:23:38,059 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32144.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:23:44,748 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32147.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 10:24:56,554 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32179.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:25:12,441 INFO [train.py:892] (1/4) Epoch 18, batch 650, loss[loss=0.1562, simple_loss=0.2288, pruned_loss=0.04184, over 19815.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2647, pruned_loss=0.06335, over 3798209.20 frames. ], batch size: 67, lr: 8.80e-03, grad_scale: 16.0
+2023-03-28 10:25:36,497 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32195.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 10:25:39,066 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0
+2023-03-28 10:26:20,638 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.941e+02 4.475e+02 5.110e+02 5.947e+02 1.058e+03, threshold=1.022e+03, percent-clipped=2.0
+2023-03-28 10:26:32,219 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-03-28 10:26:34,260 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9697, 2.8443, 4.5460, 3.8283, 4.2167, 4.4040, 4.2590, 4.2649],
+ device='cuda:1'), covar=tensor([0.0289, 0.0810, 0.0080, 0.0860, 0.0121, 0.0205, 0.0167, 0.0114],
+ device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0096, 0.0078, 0.0149, 0.0074, 0.0087, 0.0083, 0.0075],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 10:27:06,411 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32234.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:27:09,555 INFO [train.py:892] (1/4) Epoch 18, batch 700, loss[loss=0.1743, simple_loss=0.2385, pruned_loss=0.05505, over 19824.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.265, pruned_loss=0.06327, over 3832510.86 frames. ], batch size: 195, lr: 8.79e-03, grad_scale: 16.0
+2023-03-28 10:28:31,689 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32271.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:29:04,730 INFO [train.py:892] (1/4) Epoch 18, batch 750, loss[loss=0.3417, simple_loss=0.3826, pruned_loss=0.1504, over 19392.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.265, pruned_loss=0.06284, over 3856497.21 frames. ], batch size: 431, lr: 8.78e-03, grad_scale: 16.0
+2023-03-28 10:29:52,470 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32306.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:30:09,316 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 4.187e+02 5.149e+02 5.952e+02 1.160e+03, threshold=1.030e+03, percent-clipped=2.0
+2023-03-28 10:30:12,916 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4019, 5.5513, 5.6026, 5.6244, 5.3307, 5.5724, 4.9786, 4.5752],
+ device='cuda:1'), covar=tensor([0.0753, 0.0875, 0.0908, 0.0714, 0.0982, 0.1059, 0.1255, 0.2466],
+ device='cuda:1'), in_proj_covar=tensor([0.0229, 0.0231, 0.0260, 0.0224, 0.0222, 0.0210, 0.0233, 0.0276],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 10:30:36,509 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3867, 2.6010, 2.7861, 2.7896, 2.4662, 2.3179, 2.5596, 2.8700],
+ device='cuda:1'), covar=tensor([0.0329, 0.0268, 0.0252, 0.0212, 0.0331, 0.0435, 0.0353, 0.0217],
+ device='cuda:1'), in_proj_covar=tensor([0.0062, 0.0057, 0.0062, 0.0054, 0.0068, 0.0063, 0.0080, 0.0055],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 10:30:59,311 INFO [train.py:892] (1/4) Epoch 18, batch 800, loss[loss=0.188, simple_loss=0.253, pruned_loss=0.06147, over 19753.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2648, pruned_loss=0.06299, over 3877244.48 frames. ], batch size: 139, lr: 8.78e-03, grad_scale: 16.0
+2023-03-28 10:31:51,790 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32360.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:32:44,940 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4162, 3.4977, 2.1454, 3.5599, 3.7251, 1.6949, 2.9791, 2.8229],
+ device='cuda:1'), covar=tensor([0.0760, 0.0849, 0.2681, 0.0786, 0.0487, 0.2760, 0.1232, 0.0798],
+ device='cuda:1'), in_proj_covar=tensor([0.0216, 0.0237, 0.0220, 0.0241, 0.0212, 0.0199, 0.0229, 0.0177],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 10:32:47,929 INFO [train.py:892] (1/4) Epoch 18, batch 850, loss[loss=0.168, simple_loss=0.2384, pruned_loss=0.04877, over 19804.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2646, pruned_loss=0.06292, over 3894330.18 frames. ], batch size: 98, lr: 8.77e-03, grad_scale: 16.0
+2023-03-28 10:33:20,760 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4981, 4.6940, 2.8389, 4.9581, 5.1458, 2.2483, 4.3589, 3.6028],
+ device='cuda:1'), covar=tensor([0.0619, 0.0583, 0.2405, 0.0591, 0.0379, 0.2705, 0.0807, 0.0746],
+ device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0237, 0.0219, 0.0240, 0.0212, 0.0199, 0.0228, 0.0177],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 10:33:36,800 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32408.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:33:53,381 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.074e+02 4.132e+02 4.901e+02 5.957e+02 1.046e+03, threshold=9.802e+02, percent-clipped=1.0
+2023-03-28 10:34:11,257 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0980, 2.9607, 1.7292, 3.6084, 3.3496, 3.5611, 3.6297, 2.8855],
+ device='cuda:1'), covar=tensor([0.0554, 0.0609, 0.1708, 0.0478, 0.0500, 0.0412, 0.0513, 0.0682],
+ device='cuda:1'), in_proj_covar=tensor([0.0130, 0.0129, 0.0136, 0.0134, 0.0117, 0.0115, 0.0130, 0.0134],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 10:34:39,897 INFO [train.py:892] (1/4) Epoch 18, batch 900, loss[loss=0.1879, simple_loss=0.2591, pruned_loss=0.05832, over 19817.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2643, pruned_loss=0.06293, over 3906457.10 frames. ], batch size: 133, lr: 8.76e-03, grad_scale: 16.0
+2023-03-28 10:34:40,734 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7980, 3.9408, 4.3071, 3.8839, 3.7388, 4.2102, 4.0652, 4.3955],
+ device='cuda:1'), covar=tensor([0.1188, 0.0404, 0.0471, 0.0467, 0.0978, 0.0515, 0.0480, 0.0402],
+ device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0207, 0.0207, 0.0217, 0.0196, 0.0219, 0.0215, 0.0194],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 10:34:58,633 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32444.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:36:20,293 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32479.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:36:36,737 INFO [train.py:892] (1/4) Epoch 18, batch 950, loss[loss=0.1996, simple_loss=0.2837, pruned_loss=0.05771, over 19875.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2643, pruned_loss=0.06294, over 3916442.58 frames. ], batch size: 53, lr: 8.76e-03, grad_scale: 16.0
+2023-03-28 10:36:53,389 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32492.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:37:41,380 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.554e+02 4.412e+02 5.212e+02 6.577e+02 2.177e+03, threshold=1.042e+03, percent-clipped=5.0
+2023-03-28 10:38:01,590 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.72 vs.
limit=5.0 +2023-03-28 10:38:11,326 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32527.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:38:28,447 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32534.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:38:32,216 INFO [train.py:892] (1/4) Epoch 18, batch 1000, loss[loss=0.2402, simple_loss=0.2989, pruned_loss=0.09075, over 19706.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.265, pruned_loss=0.06333, over 3923933.43 frames. ], batch size: 315, lr: 8.75e-03, grad_scale: 16.0 +2023-03-28 10:38:39,844 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2353, 4.2024, 2.5912, 4.5070, 4.7545, 2.0257, 3.9124, 3.4078], + device='cuda:1'), covar=tensor([0.0596, 0.0840, 0.2433, 0.0748, 0.0504, 0.2838, 0.0959, 0.0749], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0235, 0.0219, 0.0240, 0.0212, 0.0199, 0.0229, 0.0176], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 10:39:52,913 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32571.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:40:16,953 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32582.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:40:24,632 INFO [train.py:892] (1/4) Epoch 18, batch 1050, loss[loss=0.1862, simple_loss=0.2526, pruned_loss=0.05985, over 19738.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2654, pruned_loss=0.06369, over 3930966.63 frames. ], batch size: 219, lr: 8.74e-03, grad_scale: 16.0 +2023-03-28 10:41:12,734 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32606.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:41:30,003 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.345e+02 4.935e+02 6.065e+02 2.066e+03, threshold=9.870e+02, percent-clipped=4.0 +2023-03-28 10:41:38,234 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32619.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:42:16,178 INFO [train.py:892] (1/4) Epoch 18, batch 1100, loss[loss=0.1952, simple_loss=0.2645, pruned_loss=0.06293, over 19645.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2641, pruned_loss=0.06306, over 3937216.41 frames. 
], batch size: 67, lr: 8.74e-03, grad_scale: 16.0 +2023-03-28 10:42:54,949 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32654.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:43:19,113 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3295, 4.4259, 2.5601, 4.7728, 4.9460, 1.9892, 4.0501, 3.4125], + device='cuda:1'), covar=tensor([0.0577, 0.0613, 0.2613, 0.0525, 0.0387, 0.2842, 0.0927, 0.0816], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0239, 0.0222, 0.0242, 0.0216, 0.0201, 0.0233, 0.0178], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 10:44:04,863 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7430, 4.4501, 4.4691, 4.8318, 4.4216, 4.9742, 4.8638, 5.0665], + device='cuda:1'), covar=tensor([0.0559, 0.0346, 0.0432, 0.0288, 0.0656, 0.0318, 0.0362, 0.0275], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0157, 0.0181, 0.0153, 0.0157, 0.0137, 0.0136, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 10:44:10,626 INFO [train.py:892] (1/4) Epoch 18, batch 1150, loss[loss=0.2128, simple_loss=0.2852, pruned_loss=0.0702, over 19678.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2648, pruned_loss=0.06346, over 3939238.96 frames. ], batch size: 64, lr: 8.73e-03, grad_scale: 16.0 +2023-03-28 10:45:05,342 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5335, 3.2095, 3.3297, 3.5810, 3.3552, 3.5195, 3.6462, 3.8217], + device='cuda:1'), covar=tensor([0.0705, 0.0479, 0.0636, 0.0390, 0.0723, 0.0640, 0.0450, 0.0348], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0158, 0.0182, 0.0154, 0.0157, 0.0137, 0.0137, 0.0178], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 10:45:16,104 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.651e+02 4.519e+02 5.355e+02 6.274e+02 1.728e+03, threshold=1.071e+03, percent-clipped=4.0 +2023-03-28 10:46:02,493 INFO [train.py:892] (1/4) Epoch 18, batch 1200, loss[loss=0.2385, simple_loss=0.2989, pruned_loss=0.08909, over 19714.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2655, pruned_loss=0.06365, over 3940333.51 frames. ], batch size: 269, lr: 8.72e-03, grad_scale: 16.0 +2023-03-28 10:46:06,405 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.77 vs. limit=5.0 +2023-03-28 10:46:18,168 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.74 vs. limit=5.0 +2023-03-28 10:47:49,184 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-03-28 10:48:00,624 INFO [train.py:892] (1/4) Epoch 18, batch 1250, loss[loss=0.1786, simple_loss=0.2544, pruned_loss=0.05145, over 19901.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.266, pruned_loss=0.06352, over 3941097.38 frames. ], batch size: 91, lr: 8.72e-03, grad_scale: 16.0 +2023-03-28 10:49:03,824 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.128e+02 4.132e+02 4.830e+02 5.865e+02 8.224e+02, threshold=9.660e+02, percent-clipped=0.0 +2023-03-28 10:49:53,660 INFO [train.py:892] (1/4) Epoch 18, batch 1300, loss[loss=0.1987, simple_loss=0.2708, pruned_loss=0.06335, over 19883.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2647, pruned_loss=0.06301, over 3943613.33 frames. 
], batch size: 52, lr: 8.71e-03, grad_scale: 16.0 +2023-03-28 10:50:55,352 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-28 10:51:00,002 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-28 10:51:47,877 INFO [train.py:892] (1/4) Epoch 18, batch 1350, loss[loss=0.1953, simple_loss=0.2638, pruned_loss=0.06341, over 19715.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.265, pruned_loss=0.0629, over 3943340.24 frames. ], batch size: 109, lr: 8.71e-03, grad_scale: 16.0 +2023-03-28 10:52:29,405 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.28 vs. limit=5.0 +2023-03-28 10:52:55,320 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.479e+02 4.452e+02 5.064e+02 6.052e+02 1.021e+03, threshold=1.013e+03, percent-clipped=2.0 +2023-03-28 10:53:44,851 INFO [train.py:892] (1/4) Epoch 18, batch 1400, loss[loss=0.1708, simple_loss=0.2351, pruned_loss=0.05322, over 19866.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2653, pruned_loss=0.06317, over 3944644.35 frames. ], batch size: 46, lr: 8.70e-03, grad_scale: 16.0 +2023-03-28 10:54:40,727 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9929, 4.6090, 4.6240, 4.9677, 4.5581, 5.1797, 5.0957, 5.2691], + device='cuda:1'), covar=tensor([0.0589, 0.0327, 0.0430, 0.0256, 0.0658, 0.0289, 0.0383, 0.0272], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0160, 0.0184, 0.0155, 0.0159, 0.0139, 0.0138, 0.0178], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 10:54:52,772 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32966.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:55:38,290 INFO [train.py:892] (1/4) Epoch 18, batch 1450, loss[loss=0.1806, simple_loss=0.2631, pruned_loss=0.04904, over 19855.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2654, pruned_loss=0.06274, over 3946282.06 frames. ], batch size: 78, lr: 8.69e-03, grad_scale: 16.0 +2023-03-28 10:56:44,785 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.979e+02 4.312e+02 5.122e+02 6.077e+02 1.174e+03, threshold=1.024e+03, percent-clipped=2.0 +2023-03-28 10:57:12,143 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33027.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:57:32,880 INFO [train.py:892] (1/4) Epoch 18, batch 1500, loss[loss=0.1781, simple_loss=0.2494, pruned_loss=0.05335, over 19906.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.265, pruned_loss=0.06273, over 3947115.80 frames. 
], batch size: 113, lr: 8.69e-03, grad_scale: 16.0 +2023-03-28 10:57:50,706 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9198, 2.6821, 2.9614, 2.7526, 3.0934, 3.0650, 3.7173, 4.0679], + device='cuda:1'), covar=tensor([0.0565, 0.1750, 0.1559, 0.2022, 0.1743, 0.1529, 0.0598, 0.0536], + device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0227, 0.0248, 0.0238, 0.0275, 0.0238, 0.0202, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 10:58:45,851 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3316, 4.8351, 4.9632, 4.7600, 5.2549, 3.3987, 4.1147, 2.6440], + device='cuda:1'), covar=tensor([0.0180, 0.0193, 0.0135, 0.0162, 0.0134, 0.0730, 0.0929, 0.1450], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0132, 0.0105, 0.0125, 0.0109, 0.0126, 0.0137, 0.0119], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 10:58:45,955 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33069.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:59:25,358 INFO [train.py:892] (1/4) Epoch 18, batch 1550, loss[loss=0.1836, simple_loss=0.2506, pruned_loss=0.05826, over 19815.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2635, pruned_loss=0.06205, over 3948408.62 frames. ], batch size: 147, lr: 8.68e-03, grad_scale: 16.0 +2023-03-28 11:00:25,342 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.832e+02 4.176e+02 4.933e+02 6.239e+02 1.615e+03, threshold=9.866e+02, percent-clipped=3.0 +2023-03-28 11:00:59,199 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33130.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:01:09,752 INFO [train.py:892] (1/4) Epoch 18, batch 1600, loss[loss=0.1972, simple_loss=0.2701, pruned_loss=0.06214, over 19715.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2627, pruned_loss=0.06171, over 3951065.59 frames. ], batch size: 109, lr: 8.67e-03, grad_scale: 16.0 +2023-03-28 11:01:53,568 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33155.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:02:55,078 INFO [train.py:892] (1/4) Epoch 18, batch 1650, loss[loss=0.1619, simple_loss=0.2291, pruned_loss=0.04742, over 19732.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2625, pruned_loss=0.06183, over 3949292.07 frames. ], batch size: 118, lr: 8.67e-03, grad_scale: 16.0 +2023-03-28 11:04:01,027 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.567e+02 4.484e+02 5.220e+02 6.414e+02 1.768e+03, threshold=1.044e+03, percent-clipped=5.0 +2023-03-28 11:04:05,632 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33216.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:04:50,068 INFO [train.py:892] (1/4) Epoch 18, batch 1700, loss[loss=0.1695, simple_loss=0.2445, pruned_loss=0.0473, over 19800.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2637, pruned_loss=0.06215, over 3947691.95 frames. ], batch size: 107, lr: 8.66e-03, grad_scale: 16.0 +2023-03-28 11:06:44,510 INFO [train.py:892] (1/4) Epoch 18, batch 1750, loss[loss=0.1752, simple_loss=0.2409, pruned_loss=0.0547, over 19888.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2636, pruned_loss=0.062, over 3948462.15 frames. 
], batch size: 176, lr: 8.65e-03, grad_scale: 16.0 +2023-03-28 11:07:40,436 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.001e+02 4.864e+02 5.584e+02 1.277e+03, threshold=9.729e+02, percent-clipped=1.0 +2023-03-28 11:07:54,279 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33322.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:08:21,817 INFO [train.py:892] (1/4) Epoch 18, batch 1800, loss[loss=0.189, simple_loss=0.2687, pruned_loss=0.05464, over 19849.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2625, pruned_loss=0.06141, over 3948564.39 frames. ], batch size: 56, lr: 8.65e-03, grad_scale: 16.0 +2023-03-28 11:09:16,452 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8860, 4.6518, 4.6468, 4.9595, 4.7344, 5.3011, 4.8820, 5.1565], + device='cuda:1'), covar=tensor([0.0782, 0.0431, 0.0454, 0.0358, 0.0584, 0.0345, 0.0717, 0.0551], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0159, 0.0183, 0.0153, 0.0158, 0.0138, 0.0137, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 11:09:57,942 INFO [train.py:892] (1/4) Epoch 18, batch 1850, loss[loss=0.2225, simple_loss=0.3032, pruned_loss=0.07096, over 19823.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2656, pruned_loss=0.0625, over 3946399.08 frames. ], batch size: 57, lr: 8.64e-03, grad_scale: 16.0 +2023-03-28 11:11:02,084 INFO [train.py:892] (1/4) Epoch 19, batch 0, loss[loss=0.169, simple_loss=0.2396, pruned_loss=0.04919, over 19794.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2396, pruned_loss=0.04919, over 19794.00 frames. ], batch size: 191, lr: 8.41e-03, grad_scale: 16.0 +2023-03-28 11:11:02,084 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 11:11:35,982 INFO [train.py:926] (1/4) Epoch 19, validation: loss=0.1703, simple_loss=0.2482, pruned_loss=0.04619, over 2883724.00 frames. +2023-03-28 11:11:35,983 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 11:12:33,180 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.922e+02 3.899e+02 4.712e+02 6.072e+02 1.255e+03, threshold=9.424e+02, percent-clipped=1.0 +2023-03-28 11:12:54,560 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33425.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:13:32,572 INFO [train.py:892] (1/4) Epoch 19, batch 50, loss[loss=0.1682, simple_loss=0.2465, pruned_loss=0.04494, over 19796.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2532, pruned_loss=0.05662, over 892211.72 frames. ], batch size: 79, lr: 8.40e-03, grad_scale: 16.0 +2023-03-28 11:14:21,226 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1886, 3.0454, 3.3633, 2.4703, 3.5459, 2.8700, 3.1160, 3.4565], + device='cuda:1'), covar=tensor([0.0643, 0.0454, 0.0705, 0.0852, 0.0424, 0.0440, 0.0542, 0.0331], + device='cuda:1'), in_proj_covar=tensor([0.0065, 0.0073, 0.0072, 0.0101, 0.0068, 0.0068, 0.0066, 0.0059], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 11:14:40,666 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33472.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:15:25,835 INFO [train.py:892] (1/4) Epoch 19, batch 100, loss[loss=0.2206, simple_loss=0.2894, pruned_loss=0.07585, over 19739.00 frames. 
], tot_loss[loss=0.1877, simple_loss=0.2575, pruned_loss=0.05892, over 1570324.82 frames. ], batch size: 221, lr: 8.39e-03, grad_scale: 16.0 +2023-03-28 11:15:58,010 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5107, 2.5374, 1.5174, 2.8870, 2.7693, 2.8228, 2.9483, 2.3882], + device='cuda:1'), covar=tensor([0.0686, 0.0704, 0.1494, 0.0519, 0.0554, 0.0459, 0.0552, 0.0809], + device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0130, 0.0136, 0.0133, 0.0116, 0.0117, 0.0129, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 11:16:09,766 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33511.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:16:17,083 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.445e+02 4.091e+02 4.969e+02 5.976e+02 1.351e+03, threshold=9.939e+02, percent-clipped=3.0 +2023-03-28 11:16:59,257 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33533.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:17:16,222 INFO [train.py:892] (1/4) Epoch 19, batch 150, loss[loss=0.1773, simple_loss=0.2407, pruned_loss=0.05698, over 19763.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2601, pruned_loss=0.06044, over 2098165.68 frames. ], batch size: 125, lr: 8.39e-03, grad_scale: 16.0 +2023-03-28 11:18:57,062 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8399, 3.7977, 4.1571, 3.7818, 3.5349, 3.9996, 3.8533, 4.1933], + device='cuda:1'), covar=tensor([0.0869, 0.0379, 0.0378, 0.0419, 0.1195, 0.0567, 0.0476, 0.0377], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0209, 0.0206, 0.0219, 0.0198, 0.0222, 0.0218, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 11:19:04,060 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0101, 3.1191, 2.0027, 3.1552, 3.2573, 1.5454, 2.6645, 2.4982], + device='cuda:1'), covar=tensor([0.0759, 0.0757, 0.2398, 0.0702, 0.0498, 0.2359, 0.0995, 0.0818], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0238, 0.0222, 0.0242, 0.0216, 0.0200, 0.0229, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 11:19:06,861 INFO [train.py:892] (1/4) Epoch 19, batch 200, loss[loss=0.1677, simple_loss=0.2505, pruned_loss=0.04244, over 19806.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.261, pruned_loss=0.05984, over 2508714.79 frames. ], batch size: 50, lr: 8.38e-03, grad_scale: 16.0 +2023-03-28 11:19:12,095 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. 
limit=2.0 +2023-03-28 11:19:19,583 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9946, 3.3115, 3.5100, 3.9686, 2.7388, 3.2601, 2.6114, 2.5767], + device='cuda:1'), covar=tensor([0.0508, 0.2083, 0.0895, 0.0322, 0.2062, 0.0787, 0.1301, 0.1629], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0339, 0.0238, 0.0182, 0.0243, 0.0196, 0.0209, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 11:19:56,414 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.308e+02 4.349e+02 4.912e+02 5.878e+02 1.071e+03, threshold=9.825e+02, percent-clipped=1.0 +2023-03-28 11:19:59,574 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-28 11:20:15,557 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33622.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:20:54,822 INFO [train.py:892] (1/4) Epoch 19, batch 250, loss[loss=0.1695, simple_loss=0.2443, pruned_loss=0.04736, over 19613.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2607, pruned_loss=0.05963, over 2826851.80 frames. ], batch size: 46, lr: 8.38e-03, grad_scale: 16.0 +2023-03-28 11:21:29,772 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-28 11:21:58,897 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33670.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:22:13,461 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33677.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:22:42,180 INFO [train.py:892] (1/4) Epoch 19, batch 300, loss[loss=0.1912, simple_loss=0.2651, pruned_loss=0.05861, over 19833.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2619, pruned_loss=0.06083, over 3076092.14 frames. ], batch size: 90, lr: 8.37e-03, grad_scale: 16.0 +2023-03-28 11:23:27,191 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-28 11:23:39,643 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 4.475e+02 5.212e+02 6.221e+02 1.054e+03, threshold=1.042e+03, percent-clipped=4.0 +2023-03-28 11:24:02,913 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33725.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:24:31,920 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33738.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 11:24:38,089 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-28 11:24:38,650 INFO [train.py:892] (1/4) Epoch 19, batch 350, loss[loss=0.1712, simple_loss=0.2404, pruned_loss=0.05103, over 19385.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2619, pruned_loss=0.06087, over 3270220.12 frames. ], batch size: 40, lr: 8.36e-03, grad_scale: 32.0 +2023-03-28 11:25:51,614 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33773.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:26:29,540 INFO [train.py:892] (1/4) Epoch 19, batch 400, loss[loss=0.1863, simple_loss=0.2554, pruned_loss=0.05858, over 19770.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2612, pruned_loss=0.06071, over 3421290.76 frames. ], batch size: 217, lr: 8.36e-03, grad_scale: 32.0 +2023-03-28 11:26:33,347 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.43 vs. 
limit=5.0 +2023-03-28 11:27:17,696 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33811.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:27:26,572 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 3.967e+02 4.675e+02 5.503e+02 1.191e+03, threshold=9.350e+02, percent-clipped=1.0 +2023-03-28 11:27:57,264 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33828.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:28:24,660 INFO [train.py:892] (1/4) Epoch 19, batch 450, loss[loss=0.1929, simple_loss=0.2658, pruned_loss=0.06001, over 19791.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2627, pruned_loss=0.06117, over 3537615.23 frames. ], batch size: 224, lr: 8.35e-03, grad_scale: 16.0 +2023-03-28 11:29:08,883 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33859.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:29:13,433 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8162, 2.1996, 2.7267, 3.0998, 3.5345, 3.7807, 3.6826, 3.6932], + device='cuda:1'), covar=tensor([0.0825, 0.1679, 0.1122, 0.0606, 0.0352, 0.0185, 0.0313, 0.0395], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0169, 0.0171, 0.0143, 0.0123, 0.0116, 0.0109, 0.0106], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 11:29:36,824 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7859, 2.9190, 3.0271, 2.3985, 3.2411, 2.6586, 2.8715, 3.1611], + device='cuda:1'), covar=tensor([0.0695, 0.0339, 0.0731, 0.0761, 0.0288, 0.0384, 0.0457, 0.0297], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0073, 0.0071, 0.0100, 0.0066, 0.0068, 0.0065, 0.0059], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 11:30:18,974 INFO [train.py:892] (1/4) Epoch 19, batch 500, loss[loss=0.1739, simple_loss=0.2519, pruned_loss=0.04794, over 19777.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2623, pruned_loss=0.06123, over 3627252.09 frames. ], batch size: 66, lr: 8.35e-03, grad_scale: 16.0 +2023-03-28 11:31:15,669 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.605e+02 4.236e+02 4.841e+02 6.500e+02 9.898e+02, threshold=9.682e+02, percent-clipped=2.0 +2023-03-28 11:32:08,641 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33939.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:32:11,665 INFO [train.py:892] (1/4) Epoch 19, batch 550, loss[loss=0.1941, simple_loss=0.2718, pruned_loss=0.05824, over 19778.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.262, pruned_loss=0.06111, over 3698870.65 frames. 
], batch size: 236, lr: 8.34e-03, grad_scale: 16.0 +2023-03-28 11:32:14,513 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6395, 4.8494, 4.9027, 4.7550, 4.6024, 4.8367, 4.3032, 4.3562], + device='cuda:1'), covar=tensor([0.0392, 0.0430, 0.0484, 0.0419, 0.0551, 0.0502, 0.0641, 0.0944], + device='cuda:1'), in_proj_covar=tensor([0.0230, 0.0238, 0.0265, 0.0228, 0.0228, 0.0216, 0.0237, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 11:33:07,348 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1543, 3.2392, 1.9927, 3.8671, 3.4950, 3.8497, 3.9090, 3.0429], + device='cuda:1'), covar=tensor([0.0588, 0.0575, 0.1475, 0.0518, 0.0537, 0.0370, 0.0533, 0.0738], + device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0130, 0.0136, 0.0134, 0.0117, 0.0117, 0.0131, 0.0135], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 11:34:04,575 INFO [train.py:892] (1/4) Epoch 19, batch 600, loss[loss=0.1652, simple_loss=0.2374, pruned_loss=0.0465, over 19738.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2616, pruned_loss=0.06098, over 3755733.32 frames. ], batch size: 118, lr: 8.33e-03, grad_scale: 16.0 +2023-03-28 11:34:16,613 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6463, 3.5737, 3.5013, 3.8251, 3.5819, 3.9804, 3.7800, 3.8928], + device='cuda:1'), covar=tensor([0.0954, 0.0563, 0.0674, 0.0445, 0.0762, 0.0487, 0.0583, 0.0597], + device='cuda:1'), in_proj_covar=tensor([0.0136, 0.0158, 0.0181, 0.0152, 0.0156, 0.0136, 0.0137, 0.0174], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 11:34:31,023 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34000.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:34:35,197 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-28 11:35:01,393 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.382e+02 4.429e+02 5.203e+02 6.509e+02 1.553e+03, threshold=1.041e+03, percent-clipped=5.0 +2023-03-28 11:35:42,670 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34033.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 11:36:02,244 INFO [train.py:892] (1/4) Epoch 19, batch 650, loss[loss=0.1873, simple_loss=0.2511, pruned_loss=0.06177, over 19854.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2603, pruned_loss=0.06068, over 3800380.86 frames. 
], batch size: 122, lr: 8.33e-03, grad_scale: 16.0 +2023-03-28 11:36:10,137 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4986, 1.9614, 2.1562, 2.8401, 3.1526, 3.2179, 3.2088, 3.2337], + device='cuda:1'), covar=tensor([0.0971, 0.1767, 0.1523, 0.0635, 0.0441, 0.0309, 0.0338, 0.0371], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0170, 0.0172, 0.0144, 0.0123, 0.0117, 0.0110, 0.0106], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 11:36:10,177 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34044.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:37:08,969 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9512, 3.0948, 4.3630, 3.4182, 3.6860, 3.5675, 2.3239, 2.5897], + device='cuda:1'), covar=tensor([0.0851, 0.2600, 0.0444, 0.0867, 0.1391, 0.1129, 0.2221, 0.2419], + device='cuda:1'), in_proj_covar=tensor([0.0334, 0.0365, 0.0315, 0.0254, 0.0358, 0.0326, 0.0336, 0.0304], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 11:37:31,401 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.51 vs. limit=5.0 +2023-03-28 11:37:53,463 INFO [train.py:892] (1/4) Epoch 19, batch 700, loss[loss=0.2311, simple_loss=0.3022, pruned_loss=0.08007, over 19681.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2622, pruned_loss=0.06142, over 3831907.44 frames. ], batch size: 337, lr: 8.32e-03, grad_scale: 16.0 +2023-03-28 11:38:13,724 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1333, 4.6912, 4.7126, 5.1035, 4.7145, 5.3600, 5.2222, 5.4192], + device='cuda:1'), covar=tensor([0.0629, 0.0380, 0.0420, 0.0301, 0.0696, 0.0295, 0.0364, 0.0283], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0161, 0.0185, 0.0155, 0.0160, 0.0139, 0.0140, 0.0178], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 11:38:28,810 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34105.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:38:52,078 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.720e+02 4.319e+02 5.100e+02 5.966e+02 1.380e+03, threshold=1.020e+03, percent-clipped=2.0 +2023-03-28 11:39:10,043 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-28 11:39:19,167 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34128.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:39:45,516 INFO [train.py:892] (1/4) Epoch 19, batch 750, loss[loss=0.1666, simple_loss=0.2352, pruned_loss=0.04894, over 19737.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2638, pruned_loss=0.06208, over 3856864.12 frames. ], batch size: 134, lr: 8.32e-03, grad_scale: 16.0 +2023-03-28 11:39:57,491 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34145.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:40:52,140 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. 
limit=5.0 +2023-03-28 11:41:07,008 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34176.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:41:14,970 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8903, 2.2554, 3.4806, 3.0575, 3.4327, 3.5665, 3.2700, 3.2612], + device='cuda:1'), covar=tensor([0.0470, 0.0921, 0.0111, 0.0490, 0.0130, 0.0220, 0.0187, 0.0170], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0097, 0.0079, 0.0149, 0.0075, 0.0088, 0.0083, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 11:41:40,066 INFO [train.py:892] (1/4) Epoch 19, batch 800, loss[loss=0.2027, simple_loss=0.2743, pruned_loss=0.06556, over 19714.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2639, pruned_loss=0.0623, over 3876994.18 frames. ], batch size: 269, lr: 8.31e-03, grad_scale: 16.0 +2023-03-28 11:42:14,834 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34206.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:42:36,509 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.346e+02 5.123e+02 5.919e+02 1.290e+03, threshold=1.025e+03, percent-clipped=2.0 +2023-03-28 11:43:36,128 INFO [train.py:892] (1/4) Epoch 19, batch 850, loss[loss=0.234, simple_loss=0.307, pruned_loss=0.08048, over 19710.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2626, pruned_loss=0.06133, over 3894416.35 frames. ], batch size: 305, lr: 8.30e-03, grad_scale: 16.0 +2023-03-28 11:44:31,216 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-28 11:45:25,864 INFO [train.py:892] (1/4) Epoch 19, batch 900, loss[loss=0.1847, simple_loss=0.2676, pruned_loss=0.0509, over 19728.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2626, pruned_loss=0.06113, over 3904323.17 frames. ], batch size: 52, lr: 8.30e-03, grad_scale: 8.0 +2023-03-28 11:45:38,122 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34295.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:46:19,260 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-28 11:46:25,648 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 4.098e+02 5.258e+02 6.127e+02 1.043e+03, threshold=1.052e+03, percent-clipped=1.0 +2023-03-28 11:47:00,644 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34333.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:47:11,679 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-28 11:47:16,657 INFO [train.py:892] (1/4) Epoch 19, batch 950, loss[loss=0.1908, simple_loss=0.2575, pruned_loss=0.06208, over 19795.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2623, pruned_loss=0.06097, over 3914949.34 frames. 
], batch size: 236, lr: 8.29e-03, grad_scale: 8.0 +2023-03-28 11:47:43,674 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3774, 3.0151, 3.2083, 3.3681, 3.2481, 3.2987, 3.4928, 3.6344], + device='cuda:1'), covar=tensor([0.0734, 0.0575, 0.0587, 0.0410, 0.0712, 0.0727, 0.0504, 0.0389], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0160, 0.0183, 0.0154, 0.0158, 0.0139, 0.0139, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 11:48:24,887 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7762, 3.0914, 3.2122, 3.7255, 2.6398, 3.1102, 2.3087, 2.3210], + device='cuda:1'), covar=tensor([0.0520, 0.2087, 0.0993, 0.0371, 0.1986, 0.0741, 0.1477, 0.1803], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0332, 0.0235, 0.0181, 0.0240, 0.0192, 0.0206, 0.0214], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 11:48:49,469 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34381.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:49:08,936 INFO [train.py:892] (1/4) Epoch 19, batch 1000, loss[loss=0.1756, simple_loss=0.2477, pruned_loss=0.05175, over 19786.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2624, pruned_loss=0.06141, over 3923408.93 frames. ], batch size: 91, lr: 8.29e-03, grad_scale: 8.0 +2023-03-28 11:49:29,377 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34400.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:49:54,001 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7584, 5.0624, 5.1126, 5.0240, 4.6814, 5.0837, 4.5831, 4.6331], + device='cuda:1'), covar=tensor([0.0442, 0.0420, 0.0469, 0.0419, 0.0634, 0.0501, 0.0644, 0.0883], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0239, 0.0266, 0.0229, 0.0230, 0.0218, 0.0237, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 11:50:08,247 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.058e+02 4.216e+02 4.828e+02 6.014e+02 1.612e+03, threshold=9.655e+02, percent-clipped=1.0 +2023-03-28 11:51:00,679 INFO [train.py:892] (1/4) Epoch 19, batch 1050, loss[loss=0.1793, simple_loss=0.2543, pruned_loss=0.0522, over 19848.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2626, pruned_loss=0.06085, over 3927597.62 frames. ], batch size: 106, lr: 8.28e-03, grad_scale: 8.0 +2023-03-28 11:52:51,880 INFO [train.py:892] (1/4) Epoch 19, batch 1100, loss[loss=0.1812, simple_loss=0.2493, pruned_loss=0.05648, over 19870.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2618, pruned_loss=0.06023, over 3933164.90 frames. ], batch size: 64, lr: 8.27e-03, grad_scale: 8.0 +2023-03-28 11:52:56,499 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-28 11:53:15,468 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34501.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:53:50,393 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 4.200e+02 4.924e+02 5.856e+02 9.802e+02, threshold=9.848e+02, percent-clipped=1.0 +2023-03-28 11:54:41,951 INFO [train.py:892] (1/4) Epoch 19, batch 1150, loss[loss=0.1355, simple_loss=0.2022, pruned_loss=0.0344, over 19539.00 frames. 
], tot_loss[loss=0.1908, simple_loss=0.2612, pruned_loss=0.06026, over 3937922.51 frames. ], batch size: 46, lr: 8.27e-03, grad_scale: 8.0 +2023-03-28 11:56:34,995 INFO [train.py:892] (1/4) Epoch 19, batch 1200, loss[loss=0.1988, simple_loss=0.2663, pruned_loss=0.06564, over 19733.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2623, pruned_loss=0.06099, over 3939982.91 frames. ], batch size: 221, lr: 8.26e-03, grad_scale: 8.0 +2023-03-28 11:56:41,620 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34593.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:56:45,710 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34595.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:57:14,267 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1131, 3.0420, 3.2865, 2.6269, 3.4015, 2.8032, 3.0790, 3.4551], + device='cuda:1'), covar=tensor([0.0524, 0.0375, 0.0525, 0.0707, 0.0290, 0.0382, 0.0438, 0.0287], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0073, 0.0072, 0.0100, 0.0067, 0.0068, 0.0066, 0.0059], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 11:57:36,122 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.821e+02 4.325e+02 5.082e+02 6.313e+02 1.101e+03, threshold=1.016e+03, percent-clipped=4.0 +2023-03-28 11:57:47,153 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8177, 3.7748, 4.1604, 3.7939, 3.5833, 4.0357, 3.8725, 4.2545], + device='cuda:1'), covar=tensor([0.0981, 0.0387, 0.0388, 0.0432, 0.1115, 0.0518, 0.0444, 0.0335], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0211, 0.0204, 0.0215, 0.0197, 0.0218, 0.0214, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 11:57:49,057 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34622.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:57:57,981 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34626.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:58:30,490 INFO [train.py:892] (1/4) Epoch 19, batch 1250, loss[loss=0.1854, simple_loss=0.2508, pruned_loss=0.05996, over 19781.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.262, pruned_loss=0.0612, over 3942583.58 frames. ], batch size: 131, lr: 8.26e-03, grad_scale: 8.0 +2023-03-28 11:58:35,844 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34643.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:58:59,671 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34654.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:00:04,851 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34683.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 12:00:13,966 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34687.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:00:21,436 INFO [train.py:892] (1/4) Epoch 19, batch 1300, loss[loss=0.1538, simple_loss=0.2313, pruned_loss=0.03812, over 19778.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2602, pruned_loss=0.05994, over 3944699.14 frames. 
], batch size: 94, lr: 8.25e-03, grad_scale: 8.0 +2023-03-28 12:00:44,842 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34700.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:00:54,725 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-28 12:01:06,048 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34710.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 12:01:21,615 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.806e+02 3.918e+02 4.981e+02 5.928e+02 9.169e+02, threshold=9.963e+02, percent-clipped=0.0 +2023-03-28 12:01:26,954 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2090, 2.4930, 2.6552, 3.0243, 2.1460, 2.8135, 2.0492, 2.0473], + device='cuda:1'), covar=tensor([0.0645, 0.1351, 0.1106, 0.0525, 0.2276, 0.0722, 0.1390, 0.1686], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0333, 0.0237, 0.0181, 0.0240, 0.0193, 0.0207, 0.0215], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 12:02:16,225 INFO [train.py:892] (1/4) Epoch 19, batch 1350, loss[loss=0.1693, simple_loss=0.2497, pruned_loss=0.04447, over 19816.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2618, pruned_loss=0.06111, over 3945409.16 frames. ], batch size: 103, lr: 8.24e-03, grad_scale: 8.0 +2023-03-28 12:02:34,279 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34748.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:03:20,525 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34771.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 12:04:08,642 INFO [train.py:892] (1/4) Epoch 19, batch 1400, loss[loss=0.2286, simple_loss=0.3015, pruned_loss=0.07786, over 19652.00 frames. ], tot_loss[loss=0.192, simple_loss=0.262, pruned_loss=0.06102, over 3945696.12 frames. ], batch size: 330, lr: 8.24e-03, grad_scale: 8.0 +2023-03-28 12:04:31,240 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34801.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:05:06,143 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.813e+02 4.248e+02 4.961e+02 6.105e+02 8.838e+02, threshold=9.921e+02, percent-clipped=0.0 +2023-03-28 12:05:56,228 INFO [train.py:892] (1/4) Epoch 19, batch 1450, loss[loss=0.194, simple_loss=0.2637, pruned_loss=0.06219, over 19764.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2624, pruned_loss=0.06104, over 3947102.86 frames. ], batch size: 198, lr: 8.23e-03, grad_scale: 8.0 +2023-03-28 12:06:16,986 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34849.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:06:22,816 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-28 12:07:51,367 INFO [train.py:892] (1/4) Epoch 19, batch 1500, loss[loss=0.1779, simple_loss=0.2503, pruned_loss=0.05281, over 19751.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2614, pruned_loss=0.06057, over 3948527.06 frames. 
], batch size: 139, lr: 8.23e-03, grad_scale: 8.0 +2023-03-28 12:08:14,965 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3949, 4.7614, 5.0273, 4.8048, 5.2998, 3.2166, 4.2113, 2.8456], + device='cuda:1'), covar=tensor([0.0161, 0.0201, 0.0140, 0.0169, 0.0124, 0.0949, 0.0914, 0.1318], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0135, 0.0107, 0.0127, 0.0112, 0.0129, 0.0140, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 12:08:22,913 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1972, 3.8355, 3.9269, 4.1825, 3.8523, 4.2039, 4.3382, 4.4335], + device='cuda:1'), covar=tensor([0.0710, 0.0443, 0.0545, 0.0367, 0.0831, 0.0567, 0.0419, 0.0364], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0162, 0.0186, 0.0157, 0.0160, 0.0141, 0.0141, 0.0181], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 12:08:47,106 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.472e+02 5.197e+02 6.398e+02 1.021e+03, threshold=1.039e+03, percent-clipped=2.0 +2023-03-28 12:09:30,007 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2452, 1.7184, 1.8452, 2.5172, 2.8304, 2.8413, 2.7326, 2.9158], + device='cuda:1'), covar=tensor([0.1037, 0.1844, 0.1663, 0.0652, 0.0459, 0.0373, 0.0471, 0.0355], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0168, 0.0170, 0.0141, 0.0125, 0.0117, 0.0109, 0.0106], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 12:09:43,761 INFO [train.py:892] (1/4) Epoch 19, batch 1550, loss[loss=0.2184, simple_loss=0.291, pruned_loss=0.07293, over 19660.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2612, pruned_loss=0.06029, over 3949846.61 frames. ], batch size: 299, lr: 8.22e-03, grad_scale: 8.0 +2023-03-28 12:10:01,138 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34949.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:11:03,977 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5067, 4.1546, 4.2717, 4.0544, 4.4571, 3.1442, 3.7287, 2.1662], + device='cuda:1'), covar=tensor([0.0180, 0.0218, 0.0148, 0.0188, 0.0166, 0.0836, 0.0709, 0.1475], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0135, 0.0106, 0.0127, 0.0112, 0.0129, 0.0140, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 12:11:05,702 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34978.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 12:11:13,816 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34982.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:11:32,681 INFO [train.py:892] (1/4) Epoch 19, batch 1600, loss[loss=0.1737, simple_loss=0.248, pruned_loss=0.0497, over 19731.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.261, pruned_loss=0.06006, over 3949627.58 frames. ], batch size: 118, lr: 8.22e-03, grad_scale: 8.0 +2023-03-28 12:12:24,052 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.68 vs. 
limit=5.0
+2023-03-28 12:12:30,719 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 4.214e+02 4.806e+02 5.980e+02 1.376e+03, threshold=9.612e+02, percent-clipped=1.0
+2023-03-28 12:13:27,270 INFO [train.py:892] (1/4) Epoch 19, batch 1650, loss[loss=0.1834, simple_loss=0.2466, pruned_loss=0.0601, over 19785.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2596, pruned_loss=0.05932, over 3949834.64 frames. ], batch size: 163, lr: 8.21e-03, grad_scale: 8.0
+2023-03-28 12:14:22,962 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35066.0, num_to_drop=1, layers_to_drop={3}
+2023-03-28 12:15:21,219 INFO [train.py:892] (1/4) Epoch 19, batch 1700, loss[loss=0.1739, simple_loss=0.242, pruned_loss=0.05291, over 19845.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2601, pruned_loss=0.05915, over 3948940.37 frames. ], batch size: 109, lr: 8.20e-03, grad_scale: 8.0
+2023-03-28 12:15:25,098 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35092.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:16:20,384 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 3.844e+02 4.787e+02 6.177e+02 1.345e+03, threshold=9.574e+02, percent-clipped=3.0
+2023-03-28 12:16:30,293 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4239, 2.9204, 2.5420, 1.9361, 2.4670, 2.8526, 2.6679, 2.7433],
+ device='cuda:1'), covar=tensor([0.0334, 0.0245, 0.0259, 0.0562, 0.0381, 0.0244, 0.0239, 0.0212],
+ device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0079, 0.0086, 0.0090, 0.0095, 0.0069, 0.0066, 0.0070],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 12:16:36,993 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7755, 5.1085, 5.1627, 5.0009, 4.7825, 5.0938, 4.5728, 4.6596],
+ device='cuda:1'), covar=tensor([0.0457, 0.0421, 0.0497, 0.0440, 0.0607, 0.0533, 0.0678, 0.0896],
+ device='cuda:1'), in_proj_covar=tensor([0.0238, 0.0242, 0.0270, 0.0232, 0.0228, 0.0222, 0.0239, 0.0280],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 12:17:09,576 INFO [train.py:892] (1/4) Epoch 19, batch 1750, loss[loss=0.1997, simple_loss=0.2577, pruned_loss=0.07092, over 19767.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.26, pruned_loss=0.05929, over 3947377.91 frames. ], batch size: 155, lr: 8.20e-03, grad_scale: 8.0
+2023-03-28 12:17:32,591 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35153.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:18:09,335 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0
+2023-03-28 12:18:42,357 INFO [train.py:892] (1/4) Epoch 19, batch 1800, loss[loss=0.1736, simple_loss=0.2515, pruned_loss=0.04783, over 19804.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2598, pruned_loss=0.05942, over 3948121.36 frames. ], batch size: 74, lr: 8.19e-03, grad_scale: 8.0
+2023-03-28 12:19:30,002 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.773e+02 3.767e+02 4.672e+02 5.957e+02 1.054e+03, threshold=9.344e+02, percent-clipped=2.0
+2023-03-28 12:20:12,662 INFO [train.py:892] (1/4) Epoch 19, batch 1850, loss[loss=0.1953, simple_loss=0.2726, pruned_loss=0.05904, over 19825.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2607, pruned_loss=0.05907, over 3946502.21 frames. ], batch size: 57, lr: 8.19e-03, grad_scale: 8.0
+2023-03-28 12:21:18,508 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0
+2023-03-28 12:21:19,049 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0
+2023-03-28 12:21:19,180 INFO [train.py:892] (1/4) Epoch 20, batch 0, loss[loss=0.1604, simple_loss=0.2355, pruned_loss=0.04265, over 19613.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2355, pruned_loss=0.04265, over 19613.00 frames. ], batch size: 46, lr: 7.98e-03, grad_scale: 8.0
+2023-03-28 12:21:19,181 INFO [train.py:917] (1/4) Computing validation loss
+2023-03-28 12:21:49,713 INFO [train.py:926] (1/4) Epoch 20, validation: loss=0.1718, simple_loss=0.2485, pruned_loss=0.04755, over 2883724.00 frames.
+2023-03-28 12:21:49,714 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB
+2023-03-28 12:21:57,774 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35249.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:22:01,611 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4930, 2.7407, 2.9980, 2.7393, 2.5118, 2.5425, 2.7536, 2.9506],
+ device='cuda:1'), covar=tensor([0.0340, 0.0245, 0.0224, 0.0241, 0.0341, 0.0314, 0.0323, 0.0260],
+ device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0063, 0.0066, 0.0059, 0.0073, 0.0068, 0.0085, 0.0059],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 12:23:03,235 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35278.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 12:23:12,043 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35282.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:23:43,020 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4011, 4.3167, 4.7984, 4.3038, 4.0175, 4.6059, 4.3929, 4.8956],
+ device='cuda:1'), covar=tensor([0.0973, 0.0423, 0.0395, 0.0461, 0.0928, 0.0499, 0.0527, 0.0353],
+ device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0217, 0.0213, 0.0222, 0.0204, 0.0225, 0.0222, 0.0205],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 12:23:44,096 INFO [train.py:892] (1/4) Epoch 20, batch 50, loss[loss=0.1841, simple_loss=0.2563, pruned_loss=0.05588, over 19882.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2549, pruned_loss=0.05773, over 891245.17 frames. ], batch size: 92, lr: 7.97e-03, grad_scale: 8.0
+2023-03-28 12:23:47,062 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35297.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:24:30,959 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.131e+02 4.865e+02 6.041e+02 1.249e+03, threshold=9.730e+02, percent-clipped=3.0
+2023-03-28 12:24:53,318 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35326.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:25:03,354 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35330.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:25:04,186 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0
+2023-03-28 12:25:26,366 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0
+2023-03-28 12:25:38,641 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35345.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 12:25:39,743 INFO [train.py:892] (1/4) Epoch 20, batch 100, loss[loss=0.1808, simple_loss=0.247, pruned_loss=0.05734, over 19758.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2571, pruned_loss=0.0577, over 1568874.24 frames. ], batch size: 213, lr: 7.96e-03, grad_scale: 8.0
+2023-03-28 12:26:04,120 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1957, 3.2392, 3.5412, 2.7276, 3.7075, 2.9273, 2.9204, 3.6788],
+ device='cuda:1'), covar=tensor([0.0660, 0.0365, 0.0493, 0.0696, 0.0306, 0.0380, 0.0645, 0.0281],
+ device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0075, 0.0073, 0.0102, 0.0068, 0.0069, 0.0068, 0.0060],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 12:26:25,086 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35366.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 12:26:42,424 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35374.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:27:26,175 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.38 vs. limit=5.0
+2023-03-28 12:27:28,759 INFO [train.py:892] (1/4) Epoch 20, batch 150, loss[loss=0.1746, simple_loss=0.2472, pruned_loss=0.05096, over 19625.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2588, pruned_loss=0.05881, over 2096421.88 frames. ], batch size: 65, lr: 7.96e-03, grad_scale: 8.0
+2023-03-28 12:27:54,720 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35406.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 12:28:12,782 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35414.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 12:28:19,116 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.855e+02 4.347e+02 5.138e+02 6.356e+02 9.999e+02, threshold=1.028e+03, percent-clipped=1.0
+2023-03-28 12:28:58,726 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35435.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:29:25,292 INFO [train.py:892] (1/4) Epoch 20, batch 200, loss[loss=0.1882, simple_loss=0.2501, pruned_loss=0.06318, over 19734.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2589, pruned_loss=0.05836, over 2507084.81 frames. ], batch size: 118, lr: 7.95e-03, grad_scale: 8.0
+2023-03-28 12:29:30,502 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35448.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:31:20,195 INFO [train.py:892] (1/4) Epoch 20, batch 250, loss[loss=0.1673, simple_loss=0.226, pruned_loss=0.05434, over 19738.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2576, pruned_loss=0.05823, over 2827249.68 frames. ], batch size: 134, lr: 7.95e-03, grad_scale: 8.0
+2023-03-28 12:32:02,742 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.157e+02 5.011e+02 5.895e+02 1.207e+03, threshold=1.002e+03, percent-clipped=2.0
+2023-03-28 12:32:28,439 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6054, 4.6267, 2.7793, 4.9498, 5.0928, 2.1984, 4.2324, 3.8123],
+ device='cuda:1'), covar=tensor([0.0504, 0.0604, 0.2560, 0.0587, 0.0363, 0.2664, 0.0934, 0.0692],
+ device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0241, 0.0223, 0.0247, 0.0220, 0.0200, 0.0231, 0.0180],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 12:32:35,618 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35530.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:32:35,787 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8233, 3.8181, 2.2993, 4.0356, 4.1848, 1.8675, 3.3593, 3.2316],
+ device='cuda:1'), covar=tensor([0.0674, 0.0812, 0.2731, 0.0881, 0.0462, 0.2811, 0.1115, 0.0771],
+ device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0241, 0.0223, 0.0247, 0.0220, 0.0200, 0.0231, 0.0180],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 12:32:57,110 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4325, 2.4404, 1.5260, 2.7246, 2.6022, 2.6767, 2.7402, 2.2011],
+ device='cuda:1'), covar=tensor([0.0670, 0.0719, 0.1493, 0.0586, 0.0590, 0.0462, 0.0565, 0.0889],
+ device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0134, 0.0138, 0.0137, 0.0120, 0.0121, 0.0133, 0.0138],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 12:33:07,917 INFO [train.py:892] (1/4) Epoch 20, batch 300, loss[loss=0.1886, simple_loss=0.2525, pruned_loss=0.06241, over 19771.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2576, pruned_loss=0.05809, over 3076614.90 frames. ], batch size: 130, lr: 7.94e-03, grad_scale: 8.0
+2023-03-28 12:33:49,258 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0
+2023-03-28 12:34:49,267 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35591.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:34:58,969 INFO [train.py:892] (1/4) Epoch 20, batch 350, loss[loss=0.1814, simple_loss=0.2506, pruned_loss=0.05615, over 19560.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2588, pruned_loss=0.05852, over 3269662.83 frames. ], batch size: 53, lr: 7.94e-03, grad_scale: 8.0
+2023-03-28 12:35:32,689 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8244, 3.4665, 3.6310, 3.8295, 3.5891, 3.7341, 3.8950, 4.0635],
+ device='cuda:1'), covar=tensor([0.0660, 0.0440, 0.0522, 0.0346, 0.0701, 0.0603, 0.0454, 0.0318],
+ device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0160, 0.0184, 0.0157, 0.0159, 0.0141, 0.0139, 0.0180],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 12:35:45,656 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.727e+02 4.459e+02 5.252e+02 6.207e+02 1.161e+03, threshold=1.050e+03, percent-clipped=2.0
+2023-03-28 12:36:55,517 INFO [train.py:892] (1/4) Epoch 20, batch 400, loss[loss=0.1902, simple_loss=0.2552, pruned_loss=0.0626, over 19793.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2588, pruned_loss=0.05882, over 3420113.85 frames. ], batch size: 172, lr: 7.93e-03, grad_scale: 8.0
+2023-03-28 12:38:46,374 INFO [train.py:892] (1/4) Epoch 20, batch 450, loss[loss=0.2026, simple_loss=0.2665, pruned_loss=0.06937, over 19813.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2587, pruned_loss=0.05902, over 3537615.10 frames. ], batch size: 67, lr: 7.93e-03, grad_scale: 8.0
+2023-03-28 12:38:47,723 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.45 vs. limit=5.0
+2023-03-28 12:38:58,498 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35701.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 12:39:31,597 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.836e+02 4.301e+02 4.934e+02 5.630e+02 1.538e+03, threshold=9.868e+02, percent-clipped=1.0
+2023-03-28 12:39:58,927 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35730.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:40:31,622 INFO [train.py:892] (1/4) Epoch 20, batch 500, loss[loss=0.1839, simple_loss=0.2461, pruned_loss=0.06079, over 19762.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2587, pruned_loss=0.05905, over 3629479.89 frames. ], batch size: 125, lr: 7.92e-03, grad_scale: 8.0
+2023-03-28 12:40:37,943 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35748.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:41:26,153 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35771.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:42:19,108 INFO [train.py:892] (1/4) Epoch 20, batch 550, loss[loss=0.1792, simple_loss=0.2516, pruned_loss=0.05346, over 19860.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2573, pruned_loss=0.05857, over 3701105.45 frames. ], batch size: 106, lr: 7.92e-03, grad_scale: 8.0
+2023-03-28 12:42:19,809 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35796.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:42:51,792 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35811.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:43:04,508 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 4.108e+02 4.834e+02 5.739e+02 9.285e+02, threshold=9.669e+02, percent-clipped=0.0
+2023-03-28 12:43:11,739 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0
+2023-03-28 12:43:18,653 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0
+2023-03-28 12:43:35,070 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6632, 2.5876, 1.5147, 3.1349, 2.8322, 3.0334, 3.1225, 2.4748],
+ device='cuda:1'), covar=tensor([0.0625, 0.0749, 0.1760, 0.0497, 0.0666, 0.0454, 0.0481, 0.0830],
+ device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0134, 0.0138, 0.0136, 0.0121, 0.0121, 0.0133, 0.0137],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 12:43:39,440 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35832.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:44:08,242 INFO [train.py:892] (1/4) Epoch 20, batch 600, loss[loss=0.1819, simple_loss=0.2509, pruned_loss=0.05651, over 19854.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2581, pruned_loss=0.05843, over 3755546.74 frames. ], batch size: 197, lr: 7.91e-03, grad_scale: 8.0
+2023-03-28 12:45:06,826 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35872.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:45:36,142 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35886.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:45:55,673 INFO [train.py:892] (1/4) Epoch 20, batch 650, loss[loss=0.2198, simple_loss=0.283, pruned_loss=0.07828, over 19759.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2584, pruned_loss=0.05862, over 3799354.06 frames. ], batch size: 253, lr: 7.90e-03, grad_scale: 8.0
+2023-03-28 12:46:20,249 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2055, 4.1545, 4.5369, 4.1434, 3.8046, 4.3649, 4.2479, 4.6156],
+ device='cuda:1'), covar=tensor([0.0915, 0.0371, 0.0370, 0.0419, 0.1112, 0.0561, 0.0472, 0.0359],
+ device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0218, 0.0215, 0.0224, 0.0203, 0.0226, 0.0225, 0.0208],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 12:46:23,856 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3207, 4.2998, 4.6867, 4.4867, 4.5703, 4.1191, 4.3981, 4.2370],
+ device='cuda:1'), covar=tensor([0.1509, 0.1570, 0.0951, 0.1342, 0.0970, 0.1075, 0.1972, 0.2139],
+ device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0291, 0.0338, 0.0273, 0.0254, 0.0249, 0.0326, 0.0361],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 12:46:42,659 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.720e+02 4.236e+02 5.120e+02 6.447e+02 1.973e+03, threshold=1.024e+03, percent-clipped=4.0
+2023-03-28 12:47:47,305 INFO [train.py:892] (1/4) Epoch 20, batch 700, loss[loss=0.2044, simple_loss=0.2623, pruned_loss=0.07319, over 19762.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2584, pruned_loss=0.05892, over 3834065.41 frames. ], batch size: 198, lr: 7.90e-03, grad_scale: 8.0
+2023-03-28 12:49:38,980 INFO [train.py:892] (1/4) Epoch 20, batch 750, loss[loss=0.1876, simple_loss=0.2591, pruned_loss=0.05804, over 19785.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2583, pruned_loss=0.05879, over 3859215.55 frames. ], batch size: 73, lr: 7.89e-03, grad_scale: 8.0
+2023-03-28 12:49:40,072 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3505, 3.1899, 3.5086, 2.6106, 3.7132, 3.1316, 3.1482, 3.7698],
+ device='cuda:1'), covar=tensor([0.0657, 0.0398, 0.0642, 0.0849, 0.0369, 0.0326, 0.0497, 0.0246],
+ device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0075, 0.0073, 0.0103, 0.0069, 0.0069, 0.0068, 0.0061],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 12:49:57,172 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36001.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 12:50:31,046 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.681e+02 4.396e+02 5.340e+02 6.377e+02 1.000e+03, threshold=1.068e+03, percent-clipped=0.0
+2023-03-28 12:51:01,322 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36030.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:51:29,472 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3607, 2.3442, 1.4602, 2.5900, 2.4687, 2.5275, 2.6083, 2.1066],
+ device='cuda:1'), covar=tensor([0.0671, 0.0740, 0.1469, 0.0581, 0.0602, 0.0523, 0.0566, 0.0893],
+ device='cuda:1'), in_proj_covar=tensor([0.0135, 0.0134, 0.0139, 0.0137, 0.0121, 0.0122, 0.0134, 0.0138],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 12:51:36,006 INFO [train.py:892] (1/4) Epoch 20, batch 800, loss[loss=0.1833, simple_loss=0.2537, pruned_loss=0.05645, over 19858.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2605, pruned_loss=0.05941, over 3877352.77 frames. ], batch size: 104, lr: 7.89e-03, grad_scale: 8.0
+2023-03-28 12:51:42,562 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36049.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 12:52:48,788 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36078.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:53:26,952 INFO [train.py:892] (1/4) Epoch 20, batch 850, loss[loss=0.1912, simple_loss=0.2614, pruned_loss=0.06047, over 19737.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2596, pruned_loss=0.05877, over 3894183.74 frames. ], batch size: 76, lr: 7.88e-03, grad_scale: 8.0
+2023-03-28 12:54:15,635 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.913e+02 4.436e+02 5.161e+02 6.355e+02 1.299e+03, threshold=1.032e+03, percent-clipped=1.0
+2023-03-28 12:54:36,519 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36127.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:55:19,439 INFO [train.py:892] (1/4) Epoch 20, batch 900, loss[loss=0.1977, simple_loss=0.272, pruned_loss=0.0617, over 19961.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2593, pruned_loss=0.05837, over 3906834.08 frames. ], batch size: 53, lr: 7.88e-03, grad_scale: 8.0
+2023-03-28 12:55:48,584 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7712, 4.7506, 5.1135, 4.9569, 4.9635, 4.3983, 4.8368, 4.7098],
+ device='cuda:1'), covar=tensor([0.1370, 0.1457, 0.0947, 0.1128, 0.0803, 0.1020, 0.1803, 0.1809],
+ device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0289, 0.0337, 0.0271, 0.0252, 0.0249, 0.0325, 0.0357],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 12:56:08,562 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36167.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:56:39,274 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36181.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:56:51,673 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36186.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:57:11,809 INFO [train.py:892] (1/4) Epoch 20, batch 950, loss[loss=0.1818, simple_loss=0.2506, pruned_loss=0.05653, over 19781.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2595, pruned_loss=0.05866, over 3917239.84 frames. ], batch size: 131, lr: 7.87e-03, grad_scale: 8.0
+2023-03-28 12:57:58,662 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.907e+02 4.513e+02 5.155e+02 5.945e+02 1.102e+03, threshold=1.031e+03, percent-clipped=1.0
+2023-03-28 12:58:39,633 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36234.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:58:57,955 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36242.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 12:59:05,372 INFO [train.py:892] (1/4) Epoch 20, batch 1000, loss[loss=0.1844, simple_loss=0.2549, pruned_loss=0.05694, over 19764.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2598, pruned_loss=0.0585, over 3921849.11 frames. ], batch size: 244, lr: 7.87e-03, grad_scale: 8.0
+2023-03-28 13:00:32,288 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2794, 4.3723, 2.6327, 4.6160, 4.8243, 2.0164, 4.0894, 3.5165],
+ device='cuda:1'), covar=tensor([0.0638, 0.0736, 0.2565, 0.0824, 0.0551, 0.2919, 0.0953, 0.0793],
+ device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0238, 0.0221, 0.0248, 0.0220, 0.0198, 0.0228, 0.0180],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 13:01:03,812 INFO [train.py:892] (1/4) Epoch 20, batch 1050, loss[loss=0.1886, simple_loss=0.2626, pruned_loss=0.05731, over 19764.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2592, pruned_loss=0.0582, over 3927534.23 frames. ], batch size: 244, lr: 7.86e-03, grad_scale: 16.0
+2023-03-28 13:01:49,686 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.691e+02 4.131e+02 5.046e+02 5.738e+02 1.025e+03, threshold=1.009e+03, percent-clipped=0.0
+2023-03-28 13:02:56,420 INFO [train.py:892] (1/4) Epoch 20, batch 1100, loss[loss=0.1768, simple_loss=0.251, pruned_loss=0.0513, over 19808.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2595, pruned_loss=0.05864, over 3933164.74 frames. ], batch size: 57, lr: 7.86e-03, grad_scale: 16.0
+2023-03-28 13:03:26,764 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9851, 2.9340, 1.7248, 3.5782, 3.2203, 3.5277, 3.6212, 2.8008],
+ device='cuda:1'), covar=tensor([0.0610, 0.0637, 0.1701, 0.0494, 0.0548, 0.0363, 0.0501, 0.0760],
+ device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0134, 0.0137, 0.0136, 0.0121, 0.0120, 0.0133, 0.0137],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 13:03:44,813 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. limit=5.0
+2023-03-28 13:04:48,932 INFO [train.py:892] (1/4) Epoch 20, batch 1150, loss[loss=0.1808, simple_loss=0.2405, pruned_loss=0.06058, over 19816.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2599, pruned_loss=0.05951, over 3937024.83 frames. ], batch size: 103, lr: 7.85e-03, grad_scale: 16.0
+2023-03-28 13:05:36,784 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.688e+02 4.392e+02 5.039e+02 5.876e+02 1.119e+03, threshold=1.008e+03, percent-clipped=1.0
+2023-03-28 13:06:01,301 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36427.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:06:43,083 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6654, 2.8408, 4.0545, 3.1339, 3.3781, 3.3236, 2.2593, 2.4246],
+ device='cuda:1'), covar=tensor([0.1007, 0.3136, 0.0549, 0.0960, 0.1635, 0.1378, 0.2390, 0.2560],
+ device='cuda:1'), in_proj_covar=tensor([0.0333, 0.0367, 0.0321, 0.0258, 0.0359, 0.0333, 0.0340, 0.0309],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 13:06:43,830 INFO [train.py:892] (1/4) Epoch 20, batch 1200, loss[loss=0.1823, simple_loss=0.2493, pruned_loss=0.05768, over 19768.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2588, pruned_loss=0.05848, over 3940766.26 frames. ], batch size: 130, lr: 7.85e-03, grad_scale: 16.0
+2023-03-28 13:07:15,540 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8727, 4.5442, 4.5799, 4.8084, 4.4396, 5.0142, 4.9771, 5.1682],
+ device='cuda:1'), covar=tensor([0.0633, 0.0286, 0.0423, 0.0264, 0.0603, 0.0354, 0.0339, 0.0249],
+ device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0162, 0.0185, 0.0160, 0.0159, 0.0144, 0.0140, 0.0182],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 13:07:31,464 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36467.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:07:50,184 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36475.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:08:37,667 INFO [train.py:892] (1/4) Epoch 20, batch 1250, loss[loss=0.1645, simple_loss=0.2345, pruned_loss=0.04721, over 19794.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2579, pruned_loss=0.05791, over 3942537.47 frames. ], batch size: 86, lr: 7.84e-03, grad_scale: 16.0
+2023-03-28 13:09:22,501 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36515.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:09:27,462 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.037e+02 4.208e+02 4.891e+02 6.117e+02 9.708e+02, threshold=9.782e+02, percent-clipped=0.0
+2023-03-28 13:09:50,052 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0
+2023-03-28 13:10:11,610 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36537.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:10:28,851 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8246, 3.1727, 2.8549, 2.3989, 2.7770, 3.1232, 3.0452, 3.1669],
+ device='cuda:1'), covar=tensor([0.0293, 0.0274, 0.0244, 0.0458, 0.0330, 0.0289, 0.0223, 0.0187],
+ device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0079, 0.0087, 0.0090, 0.0093, 0.0069, 0.0068, 0.0070],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 13:10:29,806 INFO [train.py:892] (1/4) Epoch 20, batch 1300, loss[loss=0.1781, simple_loss=0.256, pruned_loss=0.05008, over 19845.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2589, pruned_loss=0.05832, over 3944178.85 frames. ], batch size: 59, lr: 7.84e-03, grad_scale: 16.0
+2023-03-28 13:11:25,961 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1874, 3.3220, 2.0418, 3.4008, 3.4954, 1.6417, 2.9059, 2.6837],
+ device='cuda:1'), covar=tensor([0.0794, 0.0852, 0.2643, 0.0812, 0.0580, 0.2620, 0.1083, 0.0860],
+ device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0241, 0.0222, 0.0250, 0.0221, 0.0200, 0.0230, 0.0181],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 13:12:24,078 INFO [train.py:892] (1/4) Epoch 20, batch 1350, loss[loss=0.1564, simple_loss=0.2372, pruned_loss=0.03783, over 19745.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2596, pruned_loss=0.05839, over 3945750.24 frames. ], batch size: 84, lr: 7.83e-03, grad_scale: 16.0
+2023-03-28 13:12:45,745 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36605.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:12:51,776 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36608.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:13:00,900 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36612.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:13:12,356 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.844e+02 4.275e+02 5.272e+02 6.307e+02 1.102e+03, threshold=1.054e+03, percent-clipped=1.0
+2023-03-28 13:14:16,053 INFO [train.py:892] (1/4) Epoch 20, batch 1400, loss[loss=0.1539, simple_loss=0.2172, pruned_loss=0.04526, over 19859.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2594, pruned_loss=0.05828, over 3947323.38 frames. ], batch size: 122, lr: 7.82e-03, grad_scale: 16.0
+2023-03-28 13:14:21,487 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8399, 2.2708, 3.4346, 2.9829, 3.4760, 3.5865, 3.3933, 3.3899],
+ device='cuda:1'), covar=tensor([0.0486, 0.0930, 0.0125, 0.0575, 0.0116, 0.0208, 0.0181, 0.0166],
+ device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0097, 0.0080, 0.0149, 0.0075, 0.0089, 0.0083, 0.0077],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 13:14:27,306 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7733, 2.2338, 2.7076, 3.1121, 3.4994, 3.7001, 3.7136, 3.7324],
+ device='cuda:1'), covar=tensor([0.0888, 0.1643, 0.1185, 0.0603, 0.0388, 0.0270, 0.0298, 0.0420],
+ device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0167, 0.0170, 0.0142, 0.0125, 0.0118, 0.0110, 0.0106],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 13:15:03,585 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36666.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:15:09,241 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36669.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:15:16,992 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36673.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:16:06,743 INFO [train.py:892] (1/4) Epoch 20, batch 1450, loss[loss=0.1951, simple_loss=0.2604, pruned_loss=0.06488, over 19880.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2585, pruned_loss=0.05768, over 3948341.14 frames. ], batch size: 84, lr: 7.82e-03, grad_scale: 16.0
+2023-03-28 13:16:17,623 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36700.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 13:16:54,919 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.742e+02 4.075e+02 4.928e+02 6.205e+02 1.251e+03, threshold=9.857e+02, percent-clipped=1.0
+2023-03-28 13:17:56,090 INFO [train.py:892] (1/4) Epoch 20, batch 1500, loss[loss=0.1659, simple_loss=0.2416, pruned_loss=0.04514, over 19880.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2593, pruned_loss=0.05833, over 3950082.97 frames. ], batch size: 92, lr: 7.81e-03, grad_scale: 16.0
+2023-03-28 13:18:05,803 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2103, 4.7961, 4.8610, 5.1398, 4.9097, 5.4365, 5.3322, 5.5248],
+ device='cuda:1'), covar=tensor([0.0581, 0.0296, 0.0387, 0.0268, 0.0589, 0.0285, 0.0333, 0.0241],
+ device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0161, 0.0184, 0.0159, 0.0158, 0.0143, 0.0141, 0.0180],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 13:18:30,403 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36761.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 13:18:40,377 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0
+2023-03-28 13:19:49,248 INFO [train.py:892] (1/4) Epoch 20, batch 1550, loss[loss=0.1649, simple_loss=0.2334, pruned_loss=0.04826, over 19888.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2586, pruned_loss=0.058, over 3951459.04 frames. ], batch size: 176, lr: 7.81e-03, grad_scale: 16.0
+2023-03-28 13:20:07,726 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1866, 3.6036, 3.1694, 2.6244, 3.2249, 3.6388, 3.4637, 3.5942],
+ device='cuda:1'), covar=tensor([0.0228, 0.0210, 0.0227, 0.0452, 0.0277, 0.0192, 0.0155, 0.0161],
+ device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0079, 0.0086, 0.0089, 0.0093, 0.0069, 0.0068, 0.0069],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 13:20:10,159 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36805.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 13:20:34,819 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3601, 4.4623, 4.7439, 4.6043, 4.6244, 4.1957, 4.4755, 4.3229],
+ device='cuda:1'), covar=tensor([0.1466, 0.1414, 0.0970, 0.1354, 0.0931, 0.1098, 0.2008, 0.2150],
+ device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0293, 0.0341, 0.0272, 0.0256, 0.0253, 0.0328, 0.0362],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 13:20:35,961 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.615e+02 4.163e+02 4.935e+02 6.337e+02 1.233e+03, threshold=9.870e+02, percent-clipped=4.0
+2023-03-28 13:21:24,657 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36837.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:21:43,514 INFO [train.py:892] (1/4) Epoch 20, batch 1600, loss[loss=0.1603, simple_loss=0.2321, pruned_loss=0.04426, over 19700.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2574, pruned_loss=0.05708, over 3951999.05 frames. ], batch size: 85, lr: 7.80e-03, grad_scale: 16.0
+2023-03-28 13:21:54,064 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4808, 4.7570, 4.7987, 4.6819, 4.4987, 4.7956, 4.2525, 4.3024],
+ device='cuda:1'), covar=tensor([0.0491, 0.0485, 0.0506, 0.0463, 0.0612, 0.0502, 0.0736, 0.0973],
+ device='cuda:1'), in_proj_covar=tensor([0.0240, 0.0250, 0.0270, 0.0236, 0.0237, 0.0226, 0.0243, 0.0286],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 13:22:33,979 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36866.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 13:23:15,934 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36885.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:23:20,515 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4000, 2.9536, 4.8137, 3.8885, 4.4786, 4.7221, 4.6471, 4.5115],
+ device='cuda:1'), covar=tensor([0.0275, 0.0845, 0.0089, 0.1010, 0.0119, 0.0180, 0.0127, 0.0136],
+ device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0098, 0.0081, 0.0150, 0.0076, 0.0089, 0.0084, 0.0078],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 13:23:38,873 INFO [train.py:892] (1/4) Epoch 20, batch 1650, loss[loss=0.2664, simple_loss=0.3275, pruned_loss=0.1027, over 19466.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2583, pruned_loss=0.05771, over 3951693.27 frames. ], batch size: 396, lr: 7.80e-03, grad_scale: 16.0
+2023-03-28 13:23:58,096 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8893, 2.3499, 2.8995, 3.1418, 3.5887, 3.9274, 3.8117, 3.9409],
+ device='cuda:1'), covar=tensor([0.0867, 0.1708, 0.1231, 0.0640, 0.0407, 0.0228, 0.0360, 0.0305],
+ device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0168, 0.0173, 0.0144, 0.0126, 0.0120, 0.0111, 0.0109],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 13:24:05,938 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2256, 3.4289, 3.6625, 4.2504, 2.7719, 3.1685, 2.8529, 2.5992],
+ device='cuda:1'), covar=tensor([0.0461, 0.2137, 0.0881, 0.0344, 0.2137, 0.0902, 0.1176, 0.1621],
+ device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0332, 0.0236, 0.0180, 0.0239, 0.0195, 0.0207, 0.0212],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 13:24:13,769 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36911.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:24:26,323 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 3.920e+02 4.713e+02 5.573e+02 1.416e+03, threshold=9.427e+02, percent-clipped=1.0
+2023-03-28 13:25:31,478 INFO [train.py:892] (1/4) Epoch 20, batch 1700, loss[loss=0.1514, simple_loss=0.2262, pruned_loss=0.03829, over 19691.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2585, pruned_loss=0.05756, over 3950451.46 frames. ], batch size: 46, lr: 7.79e-03, grad_scale: 16.0
+2023-03-28 13:25:34,567 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1812, 2.5858, 3.0455, 2.8226, 3.3662, 3.2372, 4.0001, 4.3590],
+ device='cuda:1'), covar=tensor([0.0493, 0.1877, 0.1588, 0.2009, 0.1602, 0.1558, 0.0504, 0.0515],
+ device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0233, 0.0254, 0.0246, 0.0281, 0.0246, 0.0214, 0.0231],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 13:25:58,499 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36958.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:26:04,530 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36961.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:26:10,892 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36964.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:26:21,277 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36968.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:26:29,983 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36972.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:27:17,383 INFO [train.py:892] (1/4) Epoch 20, batch 1750, loss[loss=0.1828, simple_loss=0.2539, pruned_loss=0.05587, over 19689.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2581, pruned_loss=0.05779, over 3951372.88 frames. ], batch size: 74, lr: 7.79e-03, grad_scale: 16.0
+2023-03-28 13:27:35,575 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37004.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:28:00,087 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.609e+02 4.040e+02 4.922e+02 6.023e+02 1.252e+03, threshold=9.844e+02, percent-clipped=2.0
+2023-03-28 13:28:04,722 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37019.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:28:12,987 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.98 vs. limit=5.0
+2023-03-28 13:28:46,755 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-03-28 13:28:55,527 INFO [train.py:892] (1/4) Epoch 20, batch 1800, loss[loss=0.2955, simple_loss=0.3591, pruned_loss=0.1159, over 19400.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2592, pruned_loss=0.05875, over 3950862.11 frames. ], batch size: 412, lr: 7.78e-03, grad_scale: 16.0
+2023-03-28 13:29:13,141 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37056.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 13:29:29,163 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37065.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:30:21,853 INFO [train.py:892] (1/4) Epoch 20, batch 1850, loss[loss=0.1874, simple_loss=0.2474, pruned_loss=0.06372, over 19648.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2602, pruned_loss=0.05826, over 3949189.16 frames. ], batch size: 47, lr: 7.78e-03, grad_scale: 16.0
+2023-03-28 13:31:23,898 INFO [train.py:892] (1/4) Epoch 21, batch 0, loss[loss=0.1825, simple_loss=0.2525, pruned_loss=0.05629, over 19904.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2525, pruned_loss=0.05629, over 19904.00 frames. ], batch size: 116, lr: 7.59e-03, grad_scale: 16.0
+2023-03-28 13:31:23,898 INFO [train.py:917] (1/4) Computing validation loss
+2023-03-28 13:31:56,331 INFO [train.py:926] (1/4) Epoch 21, validation: loss=0.1717, simple_loss=0.248, pruned_loss=0.04765, over 2883724.00 frames.
+2023-03-28 13:31:56,332 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB
+2023-03-28 13:32:31,704 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.162e+02 4.969e+02 6.097e+02 9.968e+02, threshold=9.939e+02, percent-clipped=2.0
+2023-03-28 13:33:08,761 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0
+2023-03-28 13:33:23,738 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9994, 2.9949, 1.8202, 3.7224, 3.3351, 3.6748, 3.6934, 2.8284],
+ device='cuda:1'), covar=tensor([0.0627, 0.0657, 0.1680, 0.0491, 0.0520, 0.0364, 0.0543, 0.0798],
+ device='cuda:1'), in_proj_covar=tensor([0.0136, 0.0135, 0.0140, 0.0139, 0.0121, 0.0121, 0.0135, 0.0137],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 13:33:48,509 INFO [train.py:892] (1/4) Epoch 21, batch 50, loss[loss=0.2144, simple_loss=0.3002, pruned_loss=0.06427, over 19819.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2621, pruned_loss=0.05985, over 891258.56 frames. ], batch size: 50, lr: 7.58e-03, grad_scale: 16.0
+2023-03-28 13:34:10,848 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37161.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 13:34:13,488 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0
+2023-03-28 13:35:40,046 INFO [train.py:892] (1/4) Epoch 21, batch 100, loss[loss=0.1613, simple_loss=0.2465, pruned_loss=0.03803, over 19829.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2581, pruned_loss=0.05696, over 1569819.57 frames. ], batch size: 93, lr: 7.58e-03, grad_scale: 16.0
+2023-03-28 13:36:14,455 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.842e+02 4.106e+02 4.671e+02 5.605e+02 1.435e+03, threshold=9.342e+02, percent-clipped=1.0
+2023-03-28 13:37:03,843 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1219, 3.0195, 2.0149, 3.7898, 3.4907, 3.6869, 3.8147, 2.9113],
+ device='cuda:1'), covar=tensor([0.0665, 0.0643, 0.1596, 0.0472, 0.0508, 0.0363, 0.0479, 0.0825],
+ device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0136, 0.0141, 0.0139, 0.0121, 0.0123, 0.0136, 0.0139],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 13:37:18,404 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-03-28 13:37:33,180 INFO [train.py:892] (1/4) Epoch 21, batch 150, loss[loss=0.1752, simple_loss=0.245, pruned_loss=0.05266, over 19764.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.256, pruned_loss=0.05644, over 2097505.65 frames. ], batch size: 122, lr: 7.57e-03, grad_scale: 16.0
+2023-03-28 13:37:57,936 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37261.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:38:04,125 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37264.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:38:10,149 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37267.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:38:14,140 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37268.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:38:14,223 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37268.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:39:01,698 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0
+2023-03-28 13:39:09,690 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0
+2023-03-28 13:39:25,427 INFO [train.py:892] (1/4) Epoch 21, batch 200, loss[loss=0.178, simple_loss=0.2516, pruned_loss=0.05217, over 19658.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2571, pruned_loss=0.05685, over 2507798.50 frames. ], batch size: 50, lr: 7.57e-03, grad_scale: 16.0
+2023-03-28 13:39:39,088 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.9910, 2.3336, 2.1603, 1.5058, 2.1760, 2.3201, 2.1715, 2.2951],
+ device='cuda:1'), covar=tensor([0.0313, 0.0299, 0.0269, 0.0562, 0.0359, 0.0241, 0.0268, 0.0213],
+ device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0079, 0.0086, 0.0089, 0.0092, 0.0069, 0.0068, 0.0069],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 13:39:45,072 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37309.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:39:51,124 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37312.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:39:55,570 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37314.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:40:00,798 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37316.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:40:02,148 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.481e+02 4.044e+02 5.052e+02 5.778e+02 1.190e+03, threshold=1.010e+03, percent-clipped=3.0
+2023-03-28 13:40:32,405 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37329.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:41:19,186 INFO [train.py:892] (1/4) Epoch 21, batch 250, loss[loss=0.1593, simple_loss=0.2373, pruned_loss=0.04067, over 19926.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2557, pruned_loss=0.05635, over 2829522.14 frames. ], batch size: 49, lr: 7.56e-03, grad_scale: 16.0
+2023-03-28 13:41:30,859 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37356.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 13:41:39,256 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37360.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:41:47,423 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37364.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:41:49,585 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1385, 4.7234, 4.7264, 5.0877, 4.7855, 5.3390, 5.2363, 5.4139],
+ device='cuda:1'), covar=tensor([0.0677, 0.0340, 0.0411, 0.0268, 0.0591, 0.0319, 0.0356, 0.0274],
+ device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0164, 0.0187, 0.0161, 0.0162, 0.0145, 0.0142, 0.0185],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 13:43:12,292 INFO [train.py:892] (1/4) Epoch 21, batch 300, loss[loss=0.2154, simple_loss=0.2772, pruned_loss=0.07678, over 19787.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2572, pruned_loss=0.0568, over 3077056.46 frames. ], batch size: 241, lr: 7.56e-03, grad_scale: 16.0
+2023-03-28 13:43:21,057 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37404.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 13:43:49,249 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 4.191e+02 4.785e+02 5.606e+02 9.286e+02, threshold=9.571e+02, percent-clipped=0.0
+2023-03-28 13:44:06,885 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37425.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:44:30,999 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0
+2023-03-28 13:45:03,287 INFO [train.py:892] (1/4) Epoch 21, batch 350, loss[loss=0.1679, simple_loss=0.2392, pruned_loss=0.0483, over 19831.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2553, pruned_loss=0.05612, over 3272474.47 frames. ], batch size: 90, lr: 7.55e-03, grad_scale: 16.0
+2023-03-28 13:45:26,335 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37461.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 13:46:54,746 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0
+2023-03-28 13:46:57,110 INFO [train.py:892] (1/4) Epoch 21, batch 400, loss[loss=0.2068, simple_loss=0.2739, pruned_loss=0.06989, over 19764.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2555, pruned_loss=0.05637, over 3422612.04 frames. ], batch size: 244, lr: 7.55e-03, grad_scale: 16.0
+2023-03-28 13:47:14,307 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37509.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 13:47:33,497 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.804e+02 3.892e+02 4.730e+02 5.808e+02 1.803e+03, threshold=9.460e+02, percent-clipped=3.0
+2023-03-28 13:48:32,439 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3228, 3.5258, 3.7771, 4.3668, 2.9511, 3.3329, 2.9349, 2.5472],
+ device='cuda:1'), covar=tensor([0.0437, 0.2106, 0.0915, 0.0326, 0.2112, 0.0927, 0.1174, 0.1814],
+ device='cuda:1'), in_proj_covar=tensor([0.0229, 0.0334, 0.0239, 0.0183, 0.0243, 0.0197, 0.0209, 0.0214],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 13:48:49,646 INFO [train.py:892] (1/4) Epoch 21, batch 450, loss[loss=0.1556, simple_loss=0.2206, pruned_loss=0.04526, over 19866.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2562, pruned_loss=0.0564, over 3537039.14 frames. ], batch size: 129, lr: 7.54e-03, grad_scale: 16.0
+2023-03-28 13:49:28,617 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37567.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:49:42,683 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7382, 2.8515, 4.2263, 3.2890, 3.5537, 3.3245, 2.3306, 2.3589],
+ device='cuda:1'), covar=tensor([0.0951, 0.3006, 0.0484, 0.0897, 0.1427, 0.1373, 0.2244, 0.2743],
+ device='cuda:1'), in_proj_covar=tensor([0.0337, 0.0367, 0.0322, 0.0261, 0.0359, 0.0340, 0.0344, 0.0315],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 13:50:01,629 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9181, 4.5526, 4.5456, 4.9280, 4.6417, 5.1416, 5.0173, 5.1859],
+ device='cuda:1'), covar=tensor([0.0622, 0.0320, 0.0408, 0.0276, 0.0515, 0.0278, 0.0349, 0.0255],
+ device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0163, 0.0188, 0.0161, 0.0162, 0.0145, 0.0142, 0.0185],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 13:50:25,379 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0
+2023-03-28 13:50:42,361 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3504, 3.0426, 3.6232, 2.7380, 3.6712, 3.0304, 3.2140, 3.5894],
+ device='cuda:1'), covar=tensor([0.0449, 0.0488, 0.0359, 0.0726, 0.0299, 0.0389, 0.0535, 0.0308],
+ device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0077, 0.0074, 0.0104, 0.0070, 0.0071, 0.0069, 0.0062],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 13:50:43,340 INFO [train.py:892] (1/4) Epoch 21, batch 500, loss[loss=0.1807, simple_loss=0.2497, pruned_loss=0.05587, over 19639.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2562, pruned_loss=0.05609, over 3626931.83 frames. ], batch size: 68, lr: 7.54e-03, grad_scale: 16.0
+2023-03-28 13:50:44,979 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0
+2023-03-28 13:50:49,821 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0
+2023-03-28 13:51:16,982 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37614.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:51:19,014 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37615.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:51:22,197 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.832e+02 3.951e+02 4.577e+02 5.485e+02 9.092e+02, threshold=9.154e+02, percent-clipped=0.0
+2023-03-28 13:51:37,837 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37624.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:52:39,177 INFO [train.py:892] (1/4) Epoch 21, batch 550, loss[loss=0.1862, simple_loss=0.2698, pruned_loss=0.05125, over 19940.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2593, pruned_loss=0.05806, over 3696476.97 frames. ], batch size: 52, lr: 7.53e-03, grad_scale: 16.0
+2023-03-28 13:52:59,467 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37660.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:53:03,628 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37662.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:53:11,365 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.78 vs. limit=5.0
+2023-03-28 13:54:24,529 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.61 vs. limit=5.0
+2023-03-28 13:54:32,540 INFO [train.py:892] (1/4) Epoch 21, batch 600, loss[loss=0.2065, simple_loss=0.2717, pruned_loss=0.0707, over 19836.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.259, pruned_loss=0.0582, over 3752045.74 frames. ], batch size: 171, lr: 7.53e-03, grad_scale: 16.0
+2023-03-28 13:54:49,156 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37708.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:55:09,545 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.449e+02 4.150e+02 4.838e+02 5.835e+02 9.838e+02, threshold=9.675e+02, percent-clipped=3.0
+2023-03-28 13:55:17,332 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37720.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 13:56:25,828 INFO [train.py:892] (1/4) Epoch 21, batch 650, loss[loss=0.2051, simple_loss=0.2702, pruned_loss=0.07, over 19672.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2574, pruned_loss=0.05732, over 3796337.83 frames. ], batch size: 64, lr: 7.52e-03, grad_scale: 16.0
+2023-03-28 13:57:29,782 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0
+2023-03-28 13:58:19,133 INFO [train.py:892] (1/4) Epoch 21, batch 700, loss[loss=0.181, simple_loss=0.2514, pruned_loss=0.05532, over 19790.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2585, pruned_loss=0.05779, over 3829747.39 frames. ], batch size: 174, lr: 7.52e-03, grad_scale: 16.0
+2023-03-28 13:58:22,440 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4325, 3.5313, 1.9747, 4.3057, 3.7509, 4.2058, 4.2663, 3.1935],
+ device='cuda:1'), covar=tensor([0.0546, 0.0497, 0.1528, 0.0505, 0.0530, 0.0402, 0.0475, 0.0754],
+ device='cuda:1'), in_proj_covar=tensor([0.0136, 0.0133, 0.0139, 0.0138, 0.0122, 0.0122, 0.0135, 0.0137],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 13:58:55,439 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.218e+02 4.924e+02 5.585e+02 8.798e+02, threshold=9.849e+02, percent-clipped=0.0
+2023-03-28 14:00:09,464 INFO [train.py:892] (1/4) Epoch 21, batch 750, loss[loss=0.1772, simple_loss=0.2379, pruned_loss=0.05827, over 19841.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2577, pruned_loss=0.05718, over 3857558.32 frames. ], batch size: 143, lr: 7.51e-03, grad_scale: 16.0
+2023-03-28 14:02:04,538 INFO [train.py:892] (1/4) Epoch 21, batch 800, loss[loss=0.1914, simple_loss=0.2594, pruned_loss=0.06175, over 19782.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2578, pruned_loss=0.05741, over 3878863.53 frames. ], batch size: 247, lr: 7.51e-03, grad_scale: 16.0
+2023-03-28 14:02:39,804 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 4.314e+02 5.039e+02 6.116e+02 1.150e+03, threshold=1.008e+03, percent-clipped=3.0
+2023-03-28 14:02:45,833 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3936, 3.7379, 3.9101, 4.4960, 2.8732, 3.4443, 2.8101, 2.8344],
+ device='cuda:1'), covar=tensor([0.0520, 0.2391, 0.0911, 0.0387, 0.2443, 0.0970, 0.1412, 0.1829],
+ device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0333, 0.0239, 0.0183, 0.0241, 0.0195, 0.0208, 0.0213],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 14:02:56,870 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37924.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:03:57,490 INFO [train.py:892] (1/4) Epoch 21, batch 850, loss[loss=0.1669, simple_loss=0.2374, pruned_loss=0.04814, over 19870.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2567, pruned_loss=0.05638, over 3893369.95 frames. ], batch size: 89, lr: 7.50e-03, grad_scale: 16.0
+2023-03-28 14:04:45,715 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37972.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:05:01,970 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37979.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:05:53,024 INFO [train.py:892] (1/4) Epoch 21, batch 900, loss[loss=0.1884, simple_loss=0.2567, pruned_loss=0.06002, over 19784.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2561, pruned_loss=0.05616, over 3906453.44 frames. ], batch size: 247, lr: 7.50e-03, grad_scale: 16.0
+2023-03-28 14:06:31,294 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.842e+02 4.121e+02 4.904e+02 6.115e+02 1.131e+03, threshold=9.807e+02, percent-clipped=1.0
+2023-03-28 14:06:38,890 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38020.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:07:21,154 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38040.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:07:46,395 INFO [train.py:892] (1/4) Epoch 21, batch 950, loss[loss=0.2209, simple_loss=0.2912, pruned_loss=0.07533, over 19674.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2573, pruned_loss=0.05674, over 3913334.92 frames. ], batch size: 337, lr: 7.49e-03, grad_scale: 16.0
+2023-03-28 14:08:23,053 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38068.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:09:34,131 INFO [train.py:892] (1/4) Epoch 21, batch 1000, loss[loss=0.1563, simple_loss=0.2316, pruned_loss=0.04046, over 19757.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2553, pruned_loss=0.05586, over 3923075.25 frames. ], batch size: 100, lr: 7.49e-03, grad_scale: 16.0
+2023-03-28 14:10:09,399 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.902e+02 4.380e+02 5.195e+02 6.424e+02 1.121e+03, threshold=1.039e+03, percent-clipped=5.0
+2023-03-28 14:11:25,969 INFO [train.py:892] (1/4) Epoch 21, batch 1050, loss[loss=0.2184, simple_loss=0.2831, pruned_loss=0.0769, over 19779.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2561, pruned_loss=0.05632, over 3929460.63 frames. ], batch size: 198, lr: 7.48e-03, grad_scale: 16.0
+2023-03-28 14:12:27,617 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0
+2023-03-28 14:13:22,262 INFO [train.py:892] (1/4) Epoch 21, batch 1100, loss[loss=0.1681, simple_loss=0.2486, pruned_loss=0.04376, over 19815.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2562, pruned_loss=0.05635, over 3935325.53 frames. ], batch size: 82, lr: 7.48e-03, grad_scale: 16.0
+2023-03-28 14:13:55,406 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.714e+02 3.883e+02 4.794e+02 6.298e+02 1.135e+03, threshold=9.588e+02, percent-clipped=2.0
+2023-03-28 14:14:45,816 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-03-28 14:15:12,023 INFO [train.py:892] (1/4) Epoch 21, batch 1150, loss[loss=0.191, simple_loss=0.2661, pruned_loss=0.05798, over 19837.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2567, pruned_loss=0.05632, over 3938886.02 frames. ], batch size: 58, lr: 7.47e-03, grad_scale: 16.0
+2023-03-28 14:15:28,863 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5000, 3.8435, 3.9409, 4.6241, 3.0781, 3.4207, 2.8411, 2.8445],
+ device='cuda:1'), covar=tensor([0.0496, 0.1770, 0.0806, 0.0314, 0.1962, 0.0926, 0.1294, 0.1663],
+ device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0332, 0.0240, 0.0184, 0.0242, 0.0197, 0.0210, 0.0213],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 14:17:02,229 INFO [train.py:892] (1/4) Epoch 21, batch 1200, loss[loss=0.1719, simple_loss=0.2403, pruned_loss=0.05176, over 19855.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2568, pruned_loss=0.05641, over 3941382.48 frames. ], batch size: 104, lr: 7.47e-03, grad_scale: 32.0
+2023-03-28 14:17:37,164 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.715e+02 4.012e+02 4.628e+02 5.373e+02 1.374e+03, threshold=9.255e+02, percent-clipped=1.0
+2023-03-28 14:18:18,203 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38335.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:18:53,060 INFO [train.py:892] (1/4) Epoch 21, batch 1250, loss[loss=0.1892, simple_loss=0.2711, pruned_loss=0.05369, over 19845.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2565, pruned_loss=0.05642, over 3942429.28 frames. ], batch size: 56, lr: 7.46e-03, grad_scale: 32.0
+2023-03-28 14:19:13,217 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38360.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:20:08,565 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0
+2023-03-28 14:20:46,453 INFO [train.py:892] (1/4) Epoch 21, batch 1300, loss[loss=0.1812, simple_loss=0.2607, pruned_loss=0.05086, over 19887.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2573, pruned_loss=0.05715, over 3942884.08 frames. ], batch size: 95, lr: 7.46e-03, grad_scale: 32.0
+2023-03-28 14:21:20,982 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.582e+02 4.064e+02 4.748e+02 5.705e+02 1.088e+03, threshold=9.497e+02, percent-clipped=1.0
+2023-03-28 14:21:32,066 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38421.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:22:38,964 INFO [train.py:892] (1/4) Epoch 21, batch 1350, loss[loss=0.1816, simple_loss=0.2483, pruned_loss=0.05742, over 19810.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2568, pruned_loss=0.05667, over 3944941.92 frames. ], batch size: 148, lr: 7.45e-03, grad_scale: 32.0
+2023-03-28 14:23:09,926 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5439, 4.4017, 4.8973, 4.5132, 4.0662, 4.6882, 4.5849, 5.0364],
+ device='cuda:1'), covar=tensor([0.0808, 0.0386, 0.0340, 0.0343, 0.0843, 0.0458, 0.0419, 0.0286],
+ device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0214, 0.0211, 0.0222, 0.0201, 0.0220, 0.0222, 0.0205],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 14:23:19,539 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-03-28 14:24:12,370 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0
+2023-03-28 14:24:34,300 INFO [train.py:892] (1/4) Epoch 21, batch 1400, loss[loss=0.1665, simple_loss=0.2368, pruned_loss=0.04809, over 19598.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2564, pruned_loss=0.0561, over 3946119.79 frames. ], batch size: 44, lr: 7.45e-03, grad_scale: 32.0
+2023-03-28 14:24:47,823 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1451, 2.5810, 3.1999, 3.3488, 3.8292, 4.3385, 4.0247, 4.2222],
+ device='cuda:1'), covar=tensor([0.0772, 0.1630, 0.1144, 0.0583, 0.0327, 0.0196, 0.0357, 0.0448],
+ device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0165, 0.0169, 0.0141, 0.0123, 0.0117, 0.0111, 0.0106],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 14:24:56,963 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0158, 3.0614, 4.4937, 3.4939, 3.6911, 3.5573, 2.4264, 2.5467],
+ device='cuda:1'), covar=tensor([0.0925, 0.2972, 0.0492, 0.0899, 0.1644, 0.1235, 0.2355, 0.2643],
+ device='cuda:1'), in_proj_covar=tensor([0.0338, 0.0369, 0.0325, 0.0261, 0.0363, 0.0340, 0.0346, 0.0315],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 14:25:09,950 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.194e+02 4.264e+02 4.951e+02 5.984e+02 1.387e+03, threshold=9.901e+02, percent-clipped=2.0
+2023-03-28 14:25:35,342 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2006, 3.0909, 4.9009, 3.4915, 3.8398, 3.6822, 2.5108, 2.7561],
+ device='cuda:1'), covar=tensor([0.0931, 0.3160, 0.0400, 0.1019, 0.1816, 0.1212, 0.2470, 0.2561],
+ device='cuda:1'), in_proj_covar=tensor([0.0337, 0.0369, 0.0324, 0.0261, 0.0362, 0.0340, 0.0346, 0.0315],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 14:26:26,404 INFO [train.py:892] (1/4) Epoch 21, batch 1450, loss[loss=0.1658, simple_loss=0.2499, pruned_loss=0.0409, over 19752.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2569, pruned_loss=0.0558, over 3947524.94 frames. ], batch size: 97, lr: 7.45e-03, grad_scale: 32.0
+2023-03-28 14:28:22,106 INFO [train.py:892] (1/4) Epoch 21, batch 1500, loss[loss=0.2057, simple_loss=0.2725, pruned_loss=0.06949, over 19694.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2553, pruned_loss=0.05499, over 3948697.06 frames. ], batch size: 75, lr: 7.44e-03, grad_scale: 32.0
+2023-03-28 14:28:27,666 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1750, 2.4766, 3.7960, 3.4688, 3.7759, 3.9565, 3.8093, 3.6685],
+ device='cuda:1'), covar=tensor([0.0430, 0.0865, 0.0112, 0.0584, 0.0138, 0.0210, 0.0167, 0.0163],
+ device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0097, 0.0082, 0.0149, 0.0077, 0.0091, 0.0085, 0.0078],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 14:28:58,330 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.180e+02 4.793e+02 6.063e+02 1.172e+03, threshold=9.587e+02, percent-clipped=1.0
+2023-03-28 14:29:40,718 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38635.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:30:15,136 INFO [train.py:892] (1/4) Epoch 21, batch 1550, loss[loss=0.1685, simple_loss=0.241, pruned_loss=0.04799, over 19687.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2564, pruned_loss=0.05574, over 3948425.36 frames. ], batch size: 74, lr: 7.44e-03, grad_scale: 32.0
+2023-03-28 14:31:30,391 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38683.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:32:09,924 INFO [train.py:892] (1/4) Epoch 21, batch 1600, loss[loss=0.1527, simple_loss=0.2338, pruned_loss=0.03579, over 19814.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2562, pruned_loss=0.05548, over 3948232.14 frames. ], batch size: 96, lr: 7.43e-03, grad_scale: 32.0
+2023-03-28 14:32:47,619 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38716.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:32:48,638 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 3.919e+02 4.752e+02 6.022e+02 1.186e+03, threshold=9.505e+02, percent-clipped=3.0
+2023-03-28 14:33:19,286 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4031, 2.4227, 1.5354, 2.6997, 2.4995, 2.6529, 2.7355, 2.1744],
+ device='cuda:1'), covar=tensor([0.0632, 0.0652, 0.1364, 0.0493, 0.0558, 0.0502, 0.0524, 0.0843],
+ device='cuda:1'), in_proj_covar=tensor([0.0135, 0.0133, 0.0139, 0.0138, 0.0121, 0.0124, 0.0135, 0.0138],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 14:34:02,111 INFO [train.py:892] (1/4) Epoch 21, batch 1650, loss[loss=0.1729, simple_loss=0.2467, pruned_loss=0.04953, over 19870.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.256, pruned_loss=0.05584, over 3947757.19 frames. ], batch size: 77, lr: 7.43e-03, grad_scale: 32.0
+2023-03-28 14:35:57,127 INFO [train.py:892] (1/4) Epoch 21, batch 1700, loss[loss=0.2921, simple_loss=0.3578, pruned_loss=0.1131, over 19255.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2572, pruned_loss=0.0566, over 3947849.63 frames.
], batch size: 483, lr: 7.42e-03, grad_scale: 32.0 +2023-03-28 14:36:32,826 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.773e+02 3.919e+02 4.516e+02 5.441e+02 1.197e+03, threshold=9.032e+02, percent-clipped=2.0 +2023-03-28 14:36:48,322 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38823.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:36:54,160 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8046, 2.7092, 1.6232, 3.1675, 2.8493, 3.2374, 3.2377, 2.5619], + device='cuda:1'), covar=tensor([0.0624, 0.0708, 0.1850, 0.0691, 0.0727, 0.0502, 0.0652, 0.0842], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0134, 0.0141, 0.0140, 0.0123, 0.0125, 0.0136, 0.0140], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 14:37:43,289 INFO [train.py:892] (1/4) Epoch 21, batch 1750, loss[loss=0.2363, simple_loss=0.3048, pruned_loss=0.08388, over 19582.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2572, pruned_loss=0.05648, over 3948471.67 frames. ], batch size: 376, lr: 7.42e-03, grad_scale: 32.0 +2023-03-28 14:38:01,155 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4615, 3.2069, 3.6136, 2.5676, 3.7313, 2.9354, 3.1844, 3.7080], + device='cuda:1'), covar=tensor([0.0608, 0.0392, 0.0340, 0.0760, 0.0347, 0.0408, 0.0464, 0.0319], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0076, 0.0074, 0.0103, 0.0071, 0.0071, 0.0069, 0.0062], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 14:38:47,619 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38884.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:38:56,566 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-28 14:39:18,389 INFO [train.py:892] (1/4) Epoch 21, batch 1800, loss[loss=0.2954, simple_loss=0.359, pruned_loss=0.1159, over 19419.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2574, pruned_loss=0.05636, over 3948373.59 frames. ], batch size: 412, lr: 7.41e-03, grad_scale: 32.0 +2023-03-28 14:39:48,601 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.425e+02 4.042e+02 5.128e+02 6.021e+02 1.055e+03, threshold=1.026e+03, percent-clipped=2.0 +2023-03-28 14:40:47,455 INFO [train.py:892] (1/4) Epoch 21, batch 1850, loss[loss=0.184, simple_loss=0.2632, pruned_loss=0.05238, over 19857.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2588, pruned_loss=0.05616, over 3948774.62 frames. ], batch size: 58, lr: 7.41e-03, grad_scale: 32.0 +2023-03-28 14:41:51,757 INFO [train.py:892] (1/4) Epoch 22, batch 0, loss[loss=0.1558, simple_loss=0.2295, pruned_loss=0.04104, over 19772.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2295, pruned_loss=0.04104, over 19772.00 frames. ], batch size: 66, lr: 7.23e-03, grad_scale: 32.0 +2023-03-28 14:41:51,757 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 14:42:30,100 INFO [train.py:926] (1/4) Epoch 22, validation: loss=0.1727, simple_loss=0.2482, pruned_loss=0.04859, over 2883724.00 frames. 
+2023-03-28 14:42:30,102 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB
+2023-03-28 14:42:36,096 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.6915, 1.9012, 1.7078, 1.0999, 1.7690, 1.8030, 1.7588, 1.8203],
+ device='cuda:1'), covar=tensor([0.0328, 0.0258, 0.0287, 0.0530, 0.0375, 0.0256, 0.0236, 0.0222],
+ device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0081, 0.0087, 0.0091, 0.0094, 0.0071, 0.0069, 0.0071],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 14:44:26,247 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6387, 4.3537, 4.3929, 4.1044, 4.6469, 3.2080, 3.8304, 2.3687],
+ device='cuda:1'), covar=tensor([0.0184, 0.0216, 0.0158, 0.0219, 0.0133, 0.0853, 0.0761, 0.1380],
+ device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0138, 0.0110, 0.0130, 0.0115, 0.0129, 0.0140, 0.0123],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 14:44:29,535 INFO [train.py:892] (1/4) Epoch 22, batch 50, loss[loss=0.2861, simple_loss=0.3445, pruned_loss=0.1138, over 19397.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.253, pruned_loss=0.0559, over 890853.31 frames. ], batch size: 412, lr: 7.23e-03, grad_scale: 32.0
+2023-03-28 14:44:52,741 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39016.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:44:53,816 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.550e+02 3.535e+02 4.354e+02 5.558e+02 1.145e+03, threshold=8.708e+02, percent-clipped=3.0
+2023-03-28 14:46:23,789 INFO [train.py:892] (1/4) Epoch 22, batch 100, loss[loss=0.178, simple_loss=0.2433, pruned_loss=0.05634, over 19764.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2536, pruned_loss=0.05433, over 1568708.10 frames. ], batch size: 198, lr: 7.22e-03, grad_scale: 32.0
+2023-03-28 14:46:43,238 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39064.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:48:04,984 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2792, 3.2508, 3.3407, 2.5136, 3.6230, 2.8448, 3.2050, 3.6816],
+ device='cuda:1'), covar=tensor([0.0598, 0.0380, 0.0597, 0.0814, 0.0338, 0.0472, 0.0542, 0.0249],
+ device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0077, 0.0075, 0.0105, 0.0072, 0.0072, 0.0070, 0.0062],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 14:48:19,912 INFO [train.py:892] (1/4) Epoch 22, batch 150, loss[loss=0.1864, simple_loss=0.2602, pruned_loss=0.05632, over 19861.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2545, pruned_loss=0.0546, over 2096937.45 frames. ], batch size: 99, lr: 7.22e-03, grad_scale: 32.0
+2023-03-28 14:48:33,073 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5263, 2.9896, 3.3143, 2.9178, 3.6947, 3.6889, 4.3291, 4.7837],
+ device='cuda:1'), covar=tensor([0.0476, 0.1742, 0.1500, 0.2309, 0.1620, 0.1377, 0.0545, 0.0429],
+ device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0235, 0.0256, 0.0247, 0.0285, 0.0248, 0.0215, 0.0237],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 14:48:48,468 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.956e+02 3.952e+02 4.652e+02 5.506e+02 8.622e+02, threshold=9.304e+02, percent-clipped=0.0
+2023-03-28 14:50:14,977 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1865, 2.9702, 3.3875, 2.5672, 3.4082, 2.7371, 3.0966, 3.3617],
+ device='cuda:1'), covar=tensor([0.0384, 0.0527, 0.0365, 0.0745, 0.0344, 0.0438, 0.0426, 0.0313],
+ device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0077, 0.0075, 0.0105, 0.0071, 0.0071, 0.0069, 0.0062],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 14:50:18,189 INFO [train.py:892] (1/4) Epoch 22, batch 200, loss[loss=0.1821, simple_loss=0.2438, pruned_loss=0.06023, over 19797.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2532, pruned_loss=0.05358, over 2507476.34 frames. ], batch size: 149, lr: 7.22e-03, grad_scale: 32.0
+2023-03-28 14:51:07,156 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39179.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:51:21,853 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39185.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:52:10,200 INFO [train.py:892] (1/4) Epoch 22, batch 250, loss[loss=0.1673, simple_loss=0.2442, pruned_loss=0.04517, over 19892.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2528, pruned_loss=0.05377, over 2828556.43 frames. ], batch size: 71, lr: 7.21e-03, grad_scale: 32.0
+2023-03-28 14:52:16,065 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4502, 4.2220, 4.2154, 3.9957, 4.4347, 3.0123, 3.6831, 2.2945],
+ device='cuda:1'), covar=tensor([0.0186, 0.0202, 0.0143, 0.0196, 0.0131, 0.0913, 0.0791, 0.1323],
+ device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0137, 0.0109, 0.0129, 0.0114, 0.0128, 0.0139, 0.0121],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 14:52:34,878 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.591e+02 3.931e+02 4.730e+02 5.720e+02 9.531e+02, threshold=9.460e+02, percent-clipped=1.0
+2023-03-28 14:53:42,687 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39246.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:54:03,337 INFO [train.py:892] (1/4) Epoch 22, batch 300, loss[loss=0.1922, simple_loss=0.2639, pruned_loss=0.06022, over 19846.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2534, pruned_loss=0.05408, over 3076460.86 frames. ], batch size: 177, lr: 7.21e-03, grad_scale: 32.0
+2023-03-28 14:55:08,585 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39284.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:55:59,327 INFO [train.py:892] (1/4) Epoch 22, batch 350, loss[loss=0.1541, simple_loss=0.2278, pruned_loss=0.04019, over 19645.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2522, pruned_loss=0.05388, over 3271812.72 frames. ], batch size: 72, lr: 7.20e-03, grad_scale: 32.0
+2023-03-28 14:56:21,664 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.483e+02 3.997e+02 4.728e+02 5.910e+02 1.079e+03, threshold=9.457e+02, percent-clipped=2.0
+2023-03-28 14:56:47,667 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3172, 4.9435, 4.9318, 5.3195, 4.9347, 5.5915, 5.4009, 5.5868],
+ device='cuda:1'), covar=tensor([0.0561, 0.0335, 0.0399, 0.0287, 0.0589, 0.0251, 0.0406, 0.0279],
+ device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0165, 0.0190, 0.0164, 0.0163, 0.0148, 0.0144, 0.0188],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 14:57:26,581 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.98 vs. limit=5.0
+2023-03-28 14:57:27,965 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39345.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 14:57:49,325 INFO [train.py:892] (1/4) Epoch 22, batch 400, loss[loss=0.1619, simple_loss=0.239, pruned_loss=0.0424, over 19843.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2529, pruned_loss=0.05421, over 3422216.87 frames. ], batch size: 161, lr: 7.20e-03, grad_scale: 32.0
+2023-03-28 14:58:58,000 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0061, 2.3059, 2.0987, 1.5222, 2.1110, 2.3243, 2.1180, 2.2135],
+ device='cuda:1'), covar=tensor([0.0349, 0.0272, 0.0299, 0.0621, 0.0416, 0.0229, 0.0282, 0.0241],
+ device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0082, 0.0088, 0.0093, 0.0095, 0.0071, 0.0071, 0.0073],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 14:59:41,763 INFO [train.py:892] (1/4) Epoch 22, batch 450, loss[loss=0.1843, simple_loss=0.2557, pruned_loss=0.05642, over 19774.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2548, pruned_loss=0.05468, over 3539275.39 frames. ], batch size: 247, lr: 7.19e-03, grad_scale: 32.0
+2023-03-28 15:00:08,524 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 3.895e+02 4.549e+02 5.283e+02 8.975e+02, threshold=9.097e+02, percent-clipped=0.0
+2023-03-28 15:01:37,219 INFO [train.py:892] (1/4) Epoch 22, batch 500, loss[loss=0.1584, simple_loss=0.2281, pruned_loss=0.0444, over 19610.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2568, pruned_loss=0.05553, over 3626916.79 frames. ], batch size: 46, lr: 7.19e-03, grad_scale: 32.0
+2023-03-28 15:01:53,527 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39463.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:02:31,979 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39479.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:03:33,880 INFO [train.py:892] (1/4) Epoch 22, batch 550, loss[loss=0.1854, simple_loss=0.2539, pruned_loss=0.05844, over 19778.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2563, pruned_loss=0.05557, over 3700194.49 frames. ], batch size: 152, lr: 7.18e-03, grad_scale: 32.0
+2023-03-28 15:03:58,571 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.760e+02 4.104e+02 4.987e+02 6.113e+02 1.669e+03, threshold=9.973e+02, percent-clipped=4.0
+2023-03-28 15:04:16,088 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39524.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:04:22,109 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39527.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:04:51,804 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39541.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:04:56,453 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4025, 1.7527, 1.9944, 2.5847, 2.8687, 3.0509, 2.9713, 2.9988],
+ device='cuda:1'), covar=tensor([0.0941, 0.1871, 0.1457, 0.0661, 0.0468, 0.0308, 0.0362, 0.0409],
+ device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0168, 0.0171, 0.0141, 0.0124, 0.0120, 0.0114, 0.0108],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 15:05:16,739 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-03-28 15:05:26,105 INFO [train.py:892] (1/4) Epoch 22, batch 600, loss[loss=0.1877, simple_loss=0.2528, pruned_loss=0.06125, over 19875.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2569, pruned_loss=0.05607, over 3756257.11 frames. ], batch size: 125, lr: 7.18e-03, grad_scale: 32.0
+2023-03-28 15:06:15,326 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6682, 3.5674, 3.5798, 3.8401, 3.6161, 4.0136, 3.7751, 3.8536],
+ device='cuda:1'), covar=tensor([0.0988, 0.0620, 0.0715, 0.0512, 0.0927, 0.0550, 0.0666, 0.0699],
+ device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0165, 0.0190, 0.0163, 0.0163, 0.0147, 0.0143, 0.0187],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 15:07:20,981 INFO [train.py:892] (1/4) Epoch 22, batch 650, loss[loss=0.154, simple_loss=0.2232, pruned_loss=0.04237, over 19895.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2554, pruned_loss=0.0552, over 3799290.34 frames. ], batch size: 91, lr: 7.17e-03, grad_scale: 32.0
+2023-03-28 15:07:45,277 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.674e+02 3.926e+02 4.968e+02 5.717e+02 7.985e+02, threshold=9.935e+02, percent-clipped=0.0
+2023-03-28 15:08:37,616 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39639.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 15:08:39,445 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39640.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:09:12,765 INFO [train.py:892] (1/4) Epoch 22, batch 700, loss[loss=0.1696, simple_loss=0.2488, pruned_loss=0.04518, over 19784.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2559, pruned_loss=0.05529, over 3833489.61 frames. ], batch size: 52, lr: 7.17e-03, grad_scale: 32.0
+2023-03-28 15:10:37,246 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9391, 4.5641, 4.6614, 4.9697, 4.5481, 5.1158, 5.0449, 5.1949],
+ device='cuda:1'), covar=tensor([0.0555, 0.0333, 0.0388, 0.0273, 0.0622, 0.0319, 0.0335, 0.0287],
+ device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0164, 0.0188, 0.0162, 0.0162, 0.0146, 0.0141, 0.0186],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 15:10:43,904 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0
+2023-03-28 15:10:53,931 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39700.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 15:11:05,445 INFO [train.py:892] (1/4) Epoch 22, batch 750, loss[loss=0.1566, simple_loss=0.2311, pruned_loss=0.04106, over 19664.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2554, pruned_loss=0.05461, over 3858806.26 frames. ], batch size: 50, lr: 7.17e-03, grad_scale: 16.0
+2023-03-28 15:11:32,108 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.919e+02 4.119e+02 5.270e+02 6.689e+02 1.138e+03, threshold=1.054e+03, percent-clipped=1.0
+2023-03-28 15:11:53,425 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0
+2023-03-28 15:13:01,462 INFO [train.py:892] (1/4) Epoch 22, batch 800, loss[loss=0.189, simple_loss=0.2592, pruned_loss=0.05934, over 19822.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2574, pruned_loss=0.05551, over 3877090.07 frames. ], batch size: 181, lr: 7.16e-03, grad_scale: 16.0
+2023-03-28 15:13:25,085 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39767.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:14:37,150 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5371, 4.4232, 4.8805, 4.4815, 4.1497, 4.6488, 4.4924, 4.9972],
+ device='cuda:1'), covar=tensor([0.0870, 0.0390, 0.0380, 0.0408, 0.0867, 0.0534, 0.0485, 0.0330],
+ device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0208, 0.0208, 0.0220, 0.0199, 0.0220, 0.0217, 0.0199],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 15:14:52,002 INFO [train.py:892] (1/4) Epoch 22, batch 850, loss[loss=0.1954, simple_loss=0.2651, pruned_loss=0.06284, over 19738.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2578, pruned_loss=0.05559, over 3892108.62 frames. ], batch size: 76, lr: 7.16e-03, grad_scale: 16.0
+2023-03-28 15:15:12,748 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39815.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:15:18,347 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.581e+02 3.870e+02 4.647e+02 5.944e+02 1.250e+03, threshold=9.293e+02, percent-clipped=1.0
+2023-03-28 15:15:21,412 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39819.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:15:42,443 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39828.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:16:09,959 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39841.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:16:42,222 INFO [train.py:892] (1/4) Epoch 22, batch 900, loss[loss=0.183, simple_loss=0.2477, pruned_loss=0.05911, over 19877.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2573, pruned_loss=0.05551, over 3901104.48 frames. ], batch size: 95, lr: 7.15e-03, grad_scale: 16.0
+2023-03-28 15:17:30,852 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39876.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:18:01,291 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39889.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:18:37,572 INFO [train.py:892] (1/4) Epoch 22, batch 950, loss[loss=0.1706, simple_loss=0.2452, pruned_loss=0.04801, over 19864.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2571, pruned_loss=0.05568, over 3912814.42 frames. ], batch size: 99, lr: 7.15e-03, grad_scale: 16.0
+2023-03-28 15:19:06,555 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.773e+02 4.111e+02 4.981e+02 6.379e+02 1.110e+03, threshold=9.962e+02, percent-clipped=1.0
+2023-03-28 15:19:32,808 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.74 vs. limit=5.0
+2023-03-28 15:19:56,011 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39940.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:20:06,233 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0
+2023-03-28 15:20:28,443 INFO [train.py:892] (1/4) Epoch 22, batch 1000, loss[loss=0.1647, simple_loss=0.2292, pruned_loss=0.05007, over 19883.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.256, pruned_loss=0.05513, over 3921048.70 frames. ], batch size: 134, lr: 7.14e-03, grad_scale: 16.0
+2023-03-28 15:20:48,807 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0
+2023-03-28 15:21:33,815 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0
+2023-03-28 15:21:42,491 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39988.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:21:59,457 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39995.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 15:22:03,449 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9874, 4.0658, 2.3500, 4.2361, 4.4258, 1.8844, 3.5865, 3.2842],
+ device='cuda:1'), covar=tensor([0.0647, 0.0809, 0.2736, 0.0728, 0.0489, 0.2890, 0.1008, 0.0803],
+ device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0243, 0.0223, 0.0255, 0.0229, 0.0198, 0.0230, 0.0184],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 15:22:27,058 INFO [train.py:892] (1/4) Epoch 22, batch 1050, loss[loss=0.2188, simple_loss=0.2878, pruned_loss=0.07487, over 19630.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2569, pruned_loss=0.05574, over 3925185.46 frames. ], batch size: 351, lr: 7.14e-03, grad_scale: 16.0
+2023-03-28 15:22:53,663 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.755e+02 4.195e+02 4.862e+02 6.171e+02 1.256e+03, threshold=9.724e+02, percent-clipped=2.0
+2023-03-28 15:23:38,679 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7753, 3.6900, 4.0473, 3.6845, 3.5386, 3.9532, 3.7961, 4.1111],
+ device='cuda:1'), covar=tensor([0.0794, 0.0353, 0.0358, 0.0403, 0.1047, 0.0480, 0.0429, 0.0327],
+ device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0212, 0.0212, 0.0223, 0.0202, 0.0224, 0.0221, 0.0202],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 15:24:16,359 INFO [train.py:892] (1/4) Epoch 22, batch 1100, loss[loss=0.1917, simple_loss=0.2741, pruned_loss=0.05465, over 19727.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2555, pruned_loss=0.0551, over 3932386.31 frames. ], batch size: 50, lr: 7.13e-03, grad_scale: 16.0
+2023-03-28 15:24:32,094 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6343, 2.0545, 2.4253, 2.9245, 3.2112, 3.4340, 3.3034, 3.2855],
+ device='cuda:1'), covar=tensor([0.0924, 0.1746, 0.1276, 0.0630, 0.0450, 0.0276, 0.0390, 0.0544],
+ device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0166, 0.0170, 0.0141, 0.0123, 0.0119, 0.0112, 0.0107],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 15:25:52,591 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0310, 2.4071, 3.0671, 3.2038, 3.6662, 4.2319, 4.0579, 4.1641],
+ device='cuda:1'), covar=tensor([0.0913, 0.1828, 0.1257, 0.0656, 0.0442, 0.0226, 0.0313, 0.0351],
+ device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0168, 0.0171, 0.0142, 0.0124, 0.0120, 0.0113, 0.0108],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 15:26:09,789 INFO [train.py:892] (1/4) Epoch 22, batch 1150, loss[loss=0.1842, simple_loss=0.2511, pruned_loss=0.05868, over 19802.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2568, pruned_loss=0.05554, over 3933232.42 frames. ], batch size: 98, lr: 7.13e-03, grad_scale: 16.0
+2023-03-28 15:26:19,076 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0745, 2.7829, 3.2972, 3.4285, 3.7829, 4.3696, 4.2141, 4.3454],
+ device='cuda:1'), covar=tensor([0.0910, 0.1660, 0.1261, 0.0591, 0.0410, 0.0207, 0.0291, 0.0329],
+ device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0169, 0.0172, 0.0143, 0.0125, 0.0120, 0.0114, 0.0109],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 15:26:33,620 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 4.168e+02 4.947e+02 6.030e+02 1.277e+03, threshold=9.894e+02, percent-clipped=1.0
+2023-03-28 15:26:37,564 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40119.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:26:46,627 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40123.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:28:02,040 INFO [train.py:892] (1/4) Epoch 22, batch 1200, loss[loss=0.1628, simple_loss=0.2352, pruned_loss=0.04525, over 19674.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2575, pruned_loss=0.05583, over 3933977.28 frames. ], batch size: 55, lr: 7.13e-03, grad_scale: 16.0
+2023-03-28 15:28:26,263 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40167.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:28:35,098 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40171.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:29:02,477 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0
+2023-03-28 15:29:32,152 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40195.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:29:54,930 INFO [train.py:892] (1/4) Epoch 22, batch 1250, loss[loss=0.1576, simple_loss=0.2285, pruned_loss=0.04333, over 19854.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2561, pruned_loss=0.05502, over 3938956.28 frames. ], batch size: 104, lr: 7.12e-03, grad_scale: 16.0
+2023-03-28 15:30:21,957 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 3.858e+02 4.574e+02 5.518e+02 1.243e+03, threshold=9.148e+02, percent-clipped=1.0
+2023-03-28 15:30:23,168 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40218.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:30:32,066 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0727, 3.9494, 3.9453, 3.6895, 4.0912, 3.0554, 3.4080, 2.0792],
+ device='cuda:1'), covar=tensor([0.0214, 0.0216, 0.0152, 0.0206, 0.0142, 0.0856, 0.0669, 0.1521],
+ device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0137, 0.0110, 0.0130, 0.0114, 0.0130, 0.0139, 0.0123],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 15:31:43,895 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0699, 2.9566, 3.3620, 2.5233, 3.3606, 2.7565, 3.0758, 3.4387],
+ device='cuda:1'), covar=tensor([0.0495, 0.0484, 0.0476, 0.0742, 0.0296, 0.0460, 0.0426, 0.0263],
+ device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0078, 0.0074, 0.0104, 0.0071, 0.0072, 0.0070, 0.0062],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 15:31:49,164 INFO [train.py:892] (1/4) Epoch 22, batch 1300, loss[loss=0.1713, simple_loss=0.2342, pruned_loss=0.05419, over 19801.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2555, pruned_loss=0.05474, over 3942978.20 frames. ], batch size: 126, lr: 7.12e-03, grad_scale: 16.0
+2023-03-28 15:31:50,199 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40256.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:32:26,086 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40272.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:32:41,677 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40279.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:33:20,647 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40295.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 15:33:30,601 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4725, 2.4958, 2.7020, 2.4868, 2.5396, 2.6639, 2.5090, 2.6236],
+ device='cuda:1'), covar=tensor([0.0274, 0.0297, 0.0230, 0.0282, 0.0372, 0.0273, 0.0374, 0.0320],
+ device='cuda:1'), in_proj_covar=tensor([0.0071, 0.0065, 0.0069, 0.0062, 0.0075, 0.0069, 0.0087, 0.0061],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 15:33:43,546 INFO [train.py:892] (1/4) Epoch 22, batch 1350, loss[loss=0.1559, simple_loss=0.2291, pruned_loss=0.04132, over 19748.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2554, pruned_loss=0.05444, over 3943915.74 frames. ], batch size: 44, lr: 7.11e-03, grad_scale: 16.0
+2023-03-28 15:34:11,582 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.667e+02 4.340e+02 4.978e+02 5.730e+02 1.244e+03, threshold=9.955e+02, percent-clipped=1.0
+2023-03-28 15:34:39,950 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40330.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:34:46,758 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40333.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:34:51,851 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-03-28 15:34:56,493 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-03-28 15:35:09,341 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40343.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 15:35:35,540 INFO [train.py:892] (1/4) Epoch 22, batch 1400, loss[loss=0.1679, simple_loss=0.2412, pruned_loss=0.04727, over 19653.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2549, pruned_loss=0.05423, over 3945088.90 frames. ], batch size: 72, lr: 7.11e-03, grad_scale: 16.0
+2023-03-28 15:36:59,042 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40391.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:37:29,878 INFO [train.py:892] (1/4) Epoch 22, batch 1450, loss[loss=0.1917, simple_loss=0.253, pruned_loss=0.06524, over 19794.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2555, pruned_loss=0.05463, over 3945254.65 frames. ], batch size: 236, lr: 7.10e-03, grad_scale: 16.0
+2023-03-28 15:37:57,986 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.955e+02 4.197e+02 4.850e+02 6.324e+02 1.307e+03, threshold=9.700e+02, percent-clipped=2.0
+2023-03-28 15:38:11,822 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40423.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:38:22,273 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1795, 4.8879, 4.9212, 5.2181, 4.9010, 5.4722, 5.3746, 5.5304],
+ device='cuda:1'), covar=tensor([0.0627, 0.0297, 0.0392, 0.0290, 0.0602, 0.0286, 0.0347, 0.0280],
+ device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0165, 0.0189, 0.0164, 0.0162, 0.0147, 0.0142, 0.0187],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 15:38:22,381 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0453, 2.6509, 3.0988, 3.2416, 3.7448, 4.4418, 4.1168, 4.2947],
+ device='cuda:1'), covar=tensor([0.0856, 0.1612, 0.1287, 0.0625, 0.0350, 0.0169, 0.0311, 0.0332],
+ device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0169, 0.0173, 0.0144, 0.0126, 0.0121, 0.0115, 0.0110],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 15:39:08,781 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3559, 4.5369, 2.6821, 4.7439, 4.9779, 2.0984, 4.1541, 3.4369],
+ device='cuda:1'), covar=tensor([0.0617, 0.0668, 0.2590, 0.0643, 0.0400, 0.2821, 0.0884, 0.0882],
+ device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0243, 0.0223, 0.0253, 0.0229, 0.0196, 0.0230, 0.0184],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 15:39:20,808 INFO [train.py:892] (1/4) Epoch 22, batch 1500, loss[loss=0.1735, simple_loss=0.2403, pruned_loss=0.05331, over 19806.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.255, pruned_loss=0.05441, over 3946057.38 frames. ], batch size: 211, lr: 7.10e-03, grad_scale: 16.0
+2023-03-28 15:39:55,918 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40471.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:39:56,125 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40471.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:40:41,480 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40492.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:40:59,922 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0
+2023-03-28 15:41:12,878 INFO [train.py:892] (1/4) Epoch 22, batch 1550, loss[loss=0.1781, simple_loss=0.2596, pruned_loss=0.04832, over 19831.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2554, pruned_loss=0.0545, over 3946172.93 frames. ], batch size: 57, lr: 7.10e-03, grad_scale: 16.0
+2023-03-28 15:41:32,430 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0
+2023-03-28 15:41:39,850 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.411e+02 3.822e+02 4.612e+02 5.584e+02 1.112e+03, threshold=9.223e+02, percent-clipped=2.0
+2023-03-28 15:41:43,101 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40519.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:42:25,301 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8736, 3.5929, 3.7223, 3.9668, 3.5904, 3.9996, 3.9575, 4.1662],
+ device='cuda:1'), covar=tensor([0.0847, 0.0587, 0.0656, 0.0469, 0.0882, 0.0708, 0.0733, 0.0501],
+ device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0165, 0.0189, 0.0164, 0.0163, 0.0147, 0.0142, 0.0187],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 15:42:55,088 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40551.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:42:59,667 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9827, 3.9724, 3.8866, 3.7240, 4.0983, 2.9920, 3.2952, 2.0559],
+ device='cuda:1'), covar=tensor([0.0344, 0.0268, 0.0237, 0.0248, 0.0220, 0.1132, 0.1015, 0.1860],
+ device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0136, 0.0109, 0.0128, 0.0113, 0.0128, 0.0138, 0.0121],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 15:42:59,726 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40553.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:43:05,469 INFO [train.py:892] (1/4) Epoch 22, batch 1600, loss[loss=0.1828, simple_loss=0.2576, pruned_loss=0.05403, over 19707.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2546, pruned_loss=0.05389, over 3947947.86 frames. ], batch size: 60, lr: 7.09e-03, grad_scale: 16.0
+2023-03-28 15:43:46,441 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40574.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:44:42,788 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7237, 2.8284, 4.3702, 3.7794, 4.1504, 4.3823, 4.1737, 4.0590],
+ device='cuda:1'), covar=tensor([0.0360, 0.0770, 0.0094, 0.0649, 0.0129, 0.0183, 0.0154, 0.0145],
+ device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0098, 0.0081, 0.0148, 0.0078, 0.0091, 0.0085, 0.0079],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 15:44:55,482 INFO [train.py:892] (1/4) Epoch 22, batch 1650, loss[loss=0.1895, simple_loss=0.2742, pruned_loss=0.05237, over 19724.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2532, pruned_loss=0.05331, over 3949696.57 frames. ], batch size: 50, lr: 7.09e-03, grad_scale: 16.0
+2023-03-28 15:45:24,164 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.699e+02 4.293e+02 4.863e+02 5.850e+02 9.887e+02, threshold=9.726e+02, percent-clipped=2.0
+2023-03-28 15:45:46,628 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40628.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:46:18,469 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0
+2023-03-28 15:46:34,580 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40651.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:46:47,033 INFO [train.py:892] (1/4) Epoch 22, batch 1700, loss[loss=0.1735, simple_loss=0.241, pruned_loss=0.05297, over 19754.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2529, pruned_loss=0.05334, over 3949566.77 frames. ], batch size: 182, lr: 7.08e-03, grad_scale: 16.0
+2023-03-28 15:46:51,293 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.23 vs. limit=5.0
+2023-03-28 15:47:52,431 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40686.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:48:31,898 INFO [train.py:892] (1/4) Epoch 22, batch 1750, loss[loss=0.1514, simple_loss=0.2208, pruned_loss=0.04102, over 19848.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2526, pruned_loss=0.05297, over 3948006.17 frames. ], batch size: 104, lr: 7.08e-03, grad_scale: 16.0
+2023-03-28 15:48:45,028 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40712.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:48:56,709 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.471e+02 3.805e+02 4.645e+02 5.444e+02 1.360e+03, threshold=9.290e+02, percent-clipped=2.0
+2023-03-28 15:49:31,327 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8069, 2.9012, 3.1068, 3.0113, 2.7192, 2.9174, 2.7078, 3.0833],
+ device='cuda:1'), covar=tensor([0.0246, 0.0327, 0.0241, 0.0198, 0.0343, 0.0241, 0.0356, 0.0312],
+ device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0067, 0.0070, 0.0064, 0.0076, 0.0070, 0.0088, 0.0062],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 15:50:07,516 INFO [train.py:892] (1/4) Epoch 22, batch 1800, loss[loss=0.1427, simple_loss=0.2144, pruned_loss=0.03547, over 19796.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2524, pruned_loss=0.05264, over 3947953.65 frames. ], batch size: 105, lr: 7.07e-03, grad_scale: 16.0
+2023-03-28 15:50:48,873 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0
+2023-03-28 15:51:02,603 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.39 vs. limit=5.0
+2023-03-28 15:51:37,504 INFO [train.py:892] (1/4) Epoch 22, batch 1850, loss[loss=0.1798, simple_loss=0.2701, pruned_loss=0.04475, over 19829.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2539, pruned_loss=0.05262, over 3948254.65 frames. ], batch size: 57, lr: 7.07e-03, grad_scale: 16.0
+2023-03-28 15:52:42,438 INFO [train.py:892] (1/4) Epoch 23, batch 0, loss[loss=0.1621, simple_loss=0.2348, pruned_loss=0.04469, over 19855.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2348, pruned_loss=0.04469, over 19855.00 frames. ], batch size: 122, lr: 6.91e-03, grad_scale: 16.0
+2023-03-28 15:52:42,439 INFO [train.py:917] (1/4) Computing validation loss
+2023-03-28 15:53:15,677 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8665, 3.7147, 3.7277, 3.9567, 3.8040, 3.6895, 3.9745, 4.1494],
+ device='cuda:1'), covar=tensor([0.0605, 0.0391, 0.0454, 0.0306, 0.0552, 0.0593, 0.0403, 0.0260],
+ device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0166, 0.0190, 0.0164, 0.0163, 0.0148, 0.0143, 0.0188],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-28 15:53:19,198 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0548, 2.7901, 3.4646, 3.3535, 3.7864, 4.1839, 4.1638, 4.1138],
+ device='cuda:1'), covar=tensor([0.0755, 0.1493, 0.0926, 0.0583, 0.0309, 0.0204, 0.0260, 0.0540],
+ device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0168, 0.0173, 0.0144, 0.0125, 0.0121, 0.0115, 0.0109],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 15:53:21,300 INFO [train.py:926] (1/4) Epoch 23, validation: loss=0.1723, simple_loss=0.2475, pruned_loss=0.04853, over 2883724.00 frames.
+2023-03-28 15:53:21,301 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB
+2023-03-28 15:53:38,484 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.877e+02 3.829e+02 4.262e+02 4.921e+02 1.071e+03, threshold=8.525e+02, percent-clipped=1.0
+2023-03-28 15:53:57,335 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.60 vs. limit=5.0
+2023-03-28 15:54:46,293 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0
+2023-03-28 15:54:49,822 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40848.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:54:56,845 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40851.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:55:18,671 INFO [train.py:892] (1/4) Epoch 23, batch 50, loss[loss=0.1678, simple_loss=0.24, pruned_loss=0.04778, over 19709.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.248, pruned_loss=0.05133, over 892663.27 frames. ], batch size: 101, lr: 6.91e-03, grad_scale: 16.0
+2023-03-28 15:55:28,668 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40865.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:55:32,951 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0306, 3.9235, 4.3327, 3.9642, 3.7870, 4.2361, 4.0373, 4.4404],
+ device='cuda:1'), covar=tensor([0.0941, 0.0420, 0.0421, 0.0411, 0.0998, 0.0591, 0.0527, 0.0347],
+ device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0219, 0.0218, 0.0228, 0.0206, 0.0230, 0.0227, 0.0207],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 15:55:48,762 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40874.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:56:49,861 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40899.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:56:52,213 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40900.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:57:16,972 INFO [train.py:892] (1/4) Epoch 23, batch 100, loss[loss=0.1538, simple_loss=0.2361, pruned_loss=0.03577, over 19744.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2496, pruned_loss=0.05232, over 1571267.13 frames. ], batch size: 102, lr: 6.90e-03, grad_scale: 16.0
+2023-03-28 15:57:32,532 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 4.075e+02 4.882e+02 5.777e+02 1.089e+03, threshold=9.764e+02, percent-clipped=3.0
+2023-03-28 15:57:44,687 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40922.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:57:54,359 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40926.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:57:58,547 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40928.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 15:58:26,484 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1523, 3.4286, 3.5707, 4.3009, 2.7641, 3.4503, 2.8201, 2.6009],
+ device='cuda:1'), covar=tensor([0.0588, 0.2067, 0.1034, 0.0377, 0.2253, 0.0952, 0.1365, 0.1831],
+ device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0326, 0.0238, 0.0183, 0.0239, 0.0195, 0.0207, 0.0209],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 15:58:38,369 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8375, 4.8053, 5.3544, 4.9419, 4.4280, 5.1976, 4.9656, 5.5914],
+ device='cuda:1'), covar=tensor([0.1076, 0.0404, 0.0443, 0.0364, 0.0761, 0.0480, 0.0497, 0.0326],
+ device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0218, 0.0217, 0.0227, 0.0206, 0.0230, 0.0227, 0.0207],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 15:59:13,792 INFO [train.py:892] (1/4) Epoch 23, batch 150, loss[loss=0.1531, simple_loss=0.2269, pruned_loss=0.03965, over 19909.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2531, pruned_loss=0.05444, over 2097683.48 frames. ], batch size: 53, lr: 6.90e-03, grad_scale: 16.0
+2023-03-28 15:59:14,813 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40961.0, num_to_drop=1, layers_to_drop={3}
+2023-03-28 15:59:46,971 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40976.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:00:11,769 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40986.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:00:40,985 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40997.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:00:41,025 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8606, 2.0164, 1.7257, 1.1749, 1.8061, 1.9445, 1.8354, 1.9620],
+ device='cuda:1'), covar=tensor([0.0302, 0.0256, 0.0306, 0.0541, 0.0368, 0.0258, 0.0246, 0.0239],
+ device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0082, 0.0088, 0.0093, 0.0095, 0.0072, 0.0072, 0.0073],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 16:01:02,010 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41007.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:01:09,373 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0
+2023-03-28 16:01:09,827 INFO [train.py:892] (1/4) Epoch 23, batch 200, loss[loss=0.175, simple_loss=0.248, pruned_loss=0.05095, over 19546.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2537, pruned_loss=0.0545, over 2509092.32 frames. ], batch size: 41, lr: 6.89e-03, grad_scale: 16.0
+2023-03-28 16:01:24,986 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.565e+02 4.412e+02 5.143e+02 6.492e+02 1.088e+03, threshold=1.029e+03, percent-clipped=2.0
+2023-03-28 16:01:45,471 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0
+2023-03-28 16:02:04,212 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7092, 2.7840, 2.9379, 2.7634, 2.5672, 2.7733, 2.7052, 2.8357],
+ device='cuda:1'), covar=tensor([0.0327, 0.0279, 0.0256, 0.0287, 0.0368, 0.0259, 0.0355, 0.0314],
+ device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0067, 0.0071, 0.0064, 0.0077, 0.0071, 0.0089, 0.0063],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-28 16:02:08,348 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41034.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:02:16,522 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6059, 4.3741, 4.3527, 4.1588, 4.5866, 3.2105, 3.8491, 2.2853],
+ device='cuda:1'), covar=tensor([0.0184, 0.0223, 0.0144, 0.0172, 0.0125, 0.0833, 0.0695, 0.1416],
+ device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0140, 0.0111, 0.0130, 0.0115, 0.0130, 0.0140, 0.0124],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 16:02:30,365 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41045.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:03:00,085 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41058.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:03:06,047 INFO [train.py:892] (1/4) Epoch 23, batch 250, loss[loss=0.1842, simple_loss=0.2712, pruned_loss=0.04863, over 19592.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2535, pruned_loss=0.05411, over 2829199.66 frames. ], batch size: 49, lr: 6.89e-03, grad_scale: 16.0
+2023-03-28 16:04:44,544 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41102.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:04:55,311 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41106.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:05:07,179 INFO [train.py:892] (1/4) Epoch 23, batch 300, loss[loss=0.1613, simple_loss=0.2435, pruned_loss=0.03955, over 19760.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2558, pruned_loss=0.05467, over 3076819.44 frames. ], batch size: 119, lr: 6.89e-03, grad_scale: 16.0
+2023-03-28 16:05:23,593 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.930e+02 3.981e+02 4.919e+02 5.993e+02 1.063e+03, threshold=9.839e+02, percent-clipped=1.0
+2023-03-28 16:06:24,562 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4083, 5.7094, 5.9829, 5.8648, 5.6505, 5.4722, 5.6614, 5.5727],
+ device='cuda:1'), covar=tensor([0.1436, 0.1101, 0.0870, 0.1034, 0.0677, 0.0782, 0.1933, 0.1966],
+ device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0306, 0.0353, 0.0284, 0.0264, 0.0263, 0.0340, 0.0369],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-28 16:06:37,345 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41148.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:07:05,667 INFO [train.py:892] (1/4) Epoch 23, batch 350, loss[loss=0.1653, simple_loss=0.2386, pruned_loss=0.04599, over 19785.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2557, pruned_loss=0.0544, over 3270707.64 frames. ], batch size: 52, lr: 6.88e-03, grad_scale: 16.0
+2023-03-28 16:07:11,233 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41163.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:08:27,652 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41196.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:08:57,538 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8011, 3.1645, 2.6877, 2.3037, 2.8129, 3.1235, 3.0499, 3.0356],
+ device='cuda:1'), covar=tensor([0.0240, 0.0226, 0.0234, 0.0483, 0.0287, 0.0215, 0.0180, 0.0205],
+ device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0084, 0.0090, 0.0094, 0.0097, 0.0073, 0.0074, 0.0075],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 16:09:03,402 INFO [train.py:892] (1/4) Epoch 23, batch 400, loss[loss=0.1482, simple_loss=0.2238, pruned_loss=0.03627, over 19535.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2543, pruned_loss=0.0534, over 3420363.60 frames. ], batch size: 46, lr: 6.88e-03, grad_scale: 16.0
+2023-03-28 16:09:22,953 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.551e+02 4.181e+02 5.035e+02 6.093e+02 9.382e+02, threshold=1.007e+03, percent-clipped=0.0
+2023-03-28 16:09:31,475 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41221.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:09:34,732 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0
+2023-03-28 16:10:58,653 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41256.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 16:11:08,170 INFO [train.py:892] (1/4) Epoch 23, batch 450, loss[loss=0.1686, simple_loss=0.2423, pruned_loss=0.0475, over 19841.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2548, pruned_loss=0.05343, over 3536403.03 frames. ], batch size: 190, lr: 6.87e-03, grad_scale: 16.0
+2023-03-28 16:12:51,599 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1033, 2.6447, 3.8727, 3.5406, 3.8050, 3.8829, 3.7092, 3.5734],
+ device='cuda:1'), covar=tensor([0.0449, 0.0790, 0.0109, 0.0513, 0.0127, 0.0227, 0.0171, 0.0191],
+ device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0098, 0.0081, 0.0148, 0.0078, 0.0091, 0.0086, 0.0079],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 16:12:57,665 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41307.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:13:05,239 INFO [train.py:892] (1/4) Epoch 23, batch 500, loss[loss=0.1595, simple_loss=0.2293, pruned_loss=0.04484, over 19768.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2543, pruned_loss=0.05337, over 3628994.86 frames. ], batch size: 116, lr: 6.87e-03, grad_scale: 16.0
+2023-03-28 16:13:09,771 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.77 vs. limit=5.0
+2023-03-28 16:13:15,382 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8180, 3.2283, 2.7067, 2.2964, 2.7425, 3.0909, 2.9900, 3.1388],
+ device='cuda:1'), covar=tensor([0.0321, 0.0287, 0.0283, 0.0614, 0.0375, 0.0258, 0.0197, 0.0227],
+ device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0085, 0.0091, 0.0095, 0.0098, 0.0074, 0.0074, 0.0076],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 16:13:24,678 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.561e+02 3.916e+02 4.619e+02 5.416e+02 1.072e+03, threshold=9.239e+02, percent-clipped=2.0
+2023-03-28 16:14:51,538 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41353.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:14:56,268 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41355.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:15:11,428 INFO [train.py:892] (1/4) Epoch 23, batch 550, loss[loss=0.22, simple_loss=0.2922, pruned_loss=0.07385, over 19694.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2542, pruned_loss=0.05303, over 3699930.21 frames. ], batch size: 337, lr: 6.87e-03, grad_scale: 16.0
+2023-03-28 16:15:36,887 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5141, 2.5394, 2.6718, 2.0913, 2.6987, 2.2636, 2.6132, 2.8017],
+ device='cuda:1'), covar=tensor([0.0484, 0.0433, 0.0427, 0.0819, 0.0387, 0.0557, 0.0439, 0.0315],
+ device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0078, 0.0075, 0.0105, 0.0072, 0.0073, 0.0071, 0.0063],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-28 16:16:51,539 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41401.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:17:14,941 INFO [train.py:892] (1/4) Epoch 23, batch 600, loss[loss=0.1872, simple_loss=0.2533, pruned_loss=0.06058, over 19821.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2526, pruned_loss=0.05238, over 3755677.91 frames. ], batch size: 202, lr: 6.86e-03, grad_scale: 16.0
+2023-03-28 16:17:30,476 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.341e+02 3.739e+02 4.752e+02 5.799e+02 9.691e+02, threshold=9.504e+02, percent-clipped=1.0
+2023-03-28 16:18:10,108 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6173, 2.2009, 3.4358, 2.8238, 3.4348, 3.5013, 3.2870, 3.3062],
+ device='cuda:1'), covar=tensor([0.0633, 0.1017, 0.0121, 0.0471, 0.0137, 0.0233, 0.0189, 0.0191],
+ device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0098, 0.0081, 0.0149, 0.0078, 0.0092, 0.0086, 0.0080],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-28 16:19:02,765 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41458.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 16:19:09,278 INFO [train.py:892] (1/4) Epoch 23, batch 650, loss[loss=0.2029, simple_loss=0.2565, pruned_loss=0.07463, over 19811.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2522, pruned_loss=0.05275, over 3797854.91 frames. ], batch size: 132, lr: 6.86e-03, grad_scale: 16.0
+2023-03-28 16:20:37,893 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs.
limit=2.0 +2023-03-28 16:21:07,565 INFO [train.py:892] (1/4) Epoch 23, batch 700, loss[loss=0.1623, simple_loss=0.2342, pruned_loss=0.04515, over 19762.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2517, pruned_loss=0.05254, over 3831264.46 frames. ], batch size: 102, lr: 6.85e-03, grad_scale: 16.0 +2023-03-28 16:21:25,091 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.559e+02 3.930e+02 4.652e+02 5.850e+02 9.862e+02, threshold=9.304e+02, percent-clipped=2.0 +2023-03-28 16:21:31,976 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41521.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:22:57,189 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41556.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 16:23:06,385 INFO [train.py:892] (1/4) Epoch 23, batch 750, loss[loss=0.197, simple_loss=0.2657, pruned_loss=0.06411, over 19812.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2504, pruned_loss=0.05152, over 3858577.16 frames. ], batch size: 224, lr: 6.85e-03, grad_scale: 16.0 +2023-03-28 16:23:22,558 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9196, 2.6460, 2.8040, 2.9006, 2.8615, 2.9652, 2.9947, 3.1701], + device='cuda:1'), covar=tensor([0.0847, 0.0580, 0.0612, 0.0489, 0.0801, 0.0668, 0.0538, 0.0411], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0168, 0.0190, 0.0165, 0.0166, 0.0149, 0.0144, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 16:23:24,374 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41569.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:24:47,246 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41604.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:25:00,967 INFO [train.py:892] (1/4) Epoch 23, batch 800, loss[loss=0.1512, simple_loss=0.2265, pruned_loss=0.03794, over 19704.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2506, pruned_loss=0.05157, over 3879082.30 frames. ], batch size: 101, lr: 6.85e-03, grad_scale: 16.0 +2023-03-28 16:25:18,863 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.557e+02 3.999e+02 4.823e+02 6.128e+02 1.113e+03, threshold=9.646e+02, percent-clipped=2.0 +2023-03-28 16:25:52,416 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0715, 2.9506, 4.9492, 4.2017, 4.5349, 4.8504, 4.7778, 4.5747], + device='cuda:1'), covar=tensor([0.0359, 0.0816, 0.0076, 0.0876, 0.0117, 0.0156, 0.0118, 0.0111], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0099, 0.0082, 0.0150, 0.0078, 0.0093, 0.0086, 0.0080], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 16:26:41,299 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41653.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:26:45,857 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41654.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:27:02,059 INFO [train.py:892] (1/4) Epoch 23, batch 850, loss[loss=0.1482, simple_loss=0.22, pruned_loss=0.03823, over 19817.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2512, pruned_loss=0.05182, over 3893677.41 frames. 
], batch size: 103, lr: 6.84e-03, grad_scale: 16.0 +2023-03-28 16:28:19,042 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0467, 3.0254, 3.2037, 2.5600, 3.3477, 2.8048, 3.0495, 3.3543], + device='cuda:1'), covar=tensor([0.0566, 0.0481, 0.0679, 0.0793, 0.0357, 0.0486, 0.0488, 0.0256], + device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0078, 0.0076, 0.0105, 0.0072, 0.0074, 0.0071, 0.0063], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 16:28:35,347 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41701.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:28:35,575 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41701.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:28:59,723 INFO [train.py:892] (1/4) Epoch 23, batch 900, loss[loss=0.2058, simple_loss=0.2619, pruned_loss=0.07484, over 19789.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2507, pruned_loss=0.05157, over 3906846.40 frames. ], batch size: 174, lr: 6.84e-03, grad_scale: 16.0 +2023-03-28 16:29:10,697 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41715.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 16:29:18,881 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.536e+02 3.998e+02 4.733e+02 5.736e+02 9.757e+02, threshold=9.466e+02, percent-clipped=2.0 +2023-03-28 16:30:31,526 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41749.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:30:54,573 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4735, 2.6855, 4.3657, 3.8153, 4.1730, 4.3296, 4.2012, 4.0357], + device='cuda:1'), covar=tensor([0.0463, 0.0895, 0.0114, 0.0744, 0.0142, 0.0203, 0.0159, 0.0167], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0099, 0.0081, 0.0149, 0.0078, 0.0092, 0.0086, 0.0080], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 16:30:54,586 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41758.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:31:00,153 INFO [train.py:892] (1/4) Epoch 23, batch 950, loss[loss=0.1859, simple_loss=0.255, pruned_loss=0.05834, over 19797.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2512, pruned_loss=0.05187, over 3916629.02 frames. ], batch size: 149, lr: 6.83e-03, grad_scale: 16.0 +2023-03-28 16:31:09,300 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-03-28 16:31:28,100 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.80 vs. limit=5.0 +2023-03-28 16:32:53,026 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41806.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:33:02,650 INFO [train.py:892] (1/4) Epoch 23, batch 1000, loss[loss=0.185, simple_loss=0.2466, pruned_loss=0.06169, over 19800.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2513, pruned_loss=0.05132, over 3923478.88 frames. 
], batch size: 191, lr: 6.83e-03, grad_scale: 16.0 +2023-03-28 16:33:15,270 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4285, 2.8299, 2.3437, 1.9187, 2.4981, 2.7305, 2.6802, 2.7300], + device='cuda:1'), covar=tensor([0.0289, 0.0272, 0.0293, 0.0571, 0.0335, 0.0221, 0.0240, 0.0219], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0084, 0.0090, 0.0094, 0.0097, 0.0074, 0.0074, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 16:33:19,751 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.497e+02 4.077e+02 4.736e+02 5.828e+02 1.008e+03, threshold=9.473e+02, percent-clipped=1.0 +2023-03-28 16:34:37,232 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7831, 2.9688, 4.7901, 4.1129, 4.5445, 4.7265, 4.5386, 4.3835], + device='cuda:1'), covar=tensor([0.0425, 0.0852, 0.0085, 0.0821, 0.0112, 0.0164, 0.0146, 0.0131], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0099, 0.0082, 0.0149, 0.0078, 0.0092, 0.0086, 0.0080], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 16:35:00,325 INFO [train.py:892] (1/4) Epoch 23, batch 1050, loss[loss=0.1784, simple_loss=0.2459, pruned_loss=0.05539, over 19837.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.253, pruned_loss=0.05241, over 3929516.36 frames. ], batch size: 166, lr: 6.83e-03, grad_scale: 16.0 +2023-03-28 16:35:34,716 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7360, 3.7573, 3.6384, 3.4893, 3.8255, 2.8299, 3.0397, 1.8270], + device='cuda:1'), covar=tensor([0.0362, 0.0289, 0.0247, 0.0266, 0.0251, 0.1195, 0.1085, 0.2056], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0138, 0.0109, 0.0129, 0.0113, 0.0129, 0.0139, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 16:35:59,208 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5636, 3.6587, 2.2512, 4.3769, 3.7745, 4.3576, 4.4149, 3.3948], + device='cuda:1'), covar=tensor([0.0481, 0.0469, 0.1347, 0.0387, 0.0543, 0.0272, 0.0408, 0.0624], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0135, 0.0139, 0.0141, 0.0125, 0.0123, 0.0135, 0.0139], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 16:36:49,569 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:37:01,114 INFO [train.py:892] (1/4) Epoch 23, batch 1100, loss[loss=0.1582, simple_loss=0.2297, pruned_loss=0.04334, over 19812.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2523, pruned_loss=0.05223, over 3934374.26 frames. ], batch size: 166, lr: 6.82e-03, grad_scale: 16.0 +2023-03-28 16:37:20,550 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.656e+02 4.250e+02 5.076e+02 6.187e+02 1.225e+03, threshold=1.015e+03, percent-clipped=1.0 +2023-03-28 16:37:22,314 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.89 vs. limit=5.0 +2023-03-28 16:38:31,238 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41948.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 16:38:58,869 INFO [train.py:892] (1/4) Epoch 23, batch 1150, loss[loss=0.1792, simple_loss=0.2644, pruned_loss=0.04698, over 19681.00 frames. 
], tot_loss[loss=0.1779, simple_loss=0.2517, pruned_loss=0.05203, over 3939026.33 frames. ], batch size: 49, lr: 6.82e-03, grad_scale: 16.0 +2023-03-28 16:39:13,993 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41967.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:40:58,973 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42009.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 16:41:01,182 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42010.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 16:41:03,125 INFO [train.py:892] (1/4) Epoch 23, batch 1200, loss[loss=0.2312, simple_loss=0.3025, pruned_loss=0.0799, over 19640.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2526, pruned_loss=0.05218, over 3941726.60 frames. ], batch size: 330, lr: 6.81e-03, grad_scale: 16.0 +2023-03-28 16:41:23,906 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.867e+02 3.967e+02 4.687e+02 5.246e+02 9.664e+02, threshold=9.374e+02, percent-clipped=0.0 +2023-03-28 16:42:28,556 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8266, 4.4904, 4.5912, 4.3290, 4.8069, 3.2507, 3.9429, 2.3833], + device='cuda:1'), covar=tensor([0.0179, 0.0216, 0.0136, 0.0186, 0.0128, 0.0846, 0.0846, 0.1456], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0139, 0.0110, 0.0130, 0.0114, 0.0130, 0.0140, 0.0123], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 16:43:04,837 INFO [train.py:892] (1/4) Epoch 23, batch 1250, loss[loss=0.1618, simple_loss=0.2446, pruned_loss=0.03948, over 19809.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2516, pruned_loss=0.0516, over 3944171.37 frames. ], batch size: 96, lr: 6.81e-03, grad_scale: 16.0 +2023-03-28 16:43:39,239 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9088, 2.7540, 2.9415, 2.2272, 3.0546, 2.5432, 2.8336, 3.0282], + device='cuda:1'), covar=tensor([0.0476, 0.0516, 0.0612, 0.0904, 0.0418, 0.0499, 0.0502, 0.0344], + device='cuda:1'), in_proj_covar=tensor([0.0071, 0.0079, 0.0076, 0.0106, 0.0073, 0.0074, 0.0071, 0.0064], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 16:44:00,079 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9391, 3.8076, 4.2455, 3.8881, 3.6055, 4.1090, 3.9548, 4.2837], + device='cuda:1'), covar=tensor([0.0761, 0.0335, 0.0319, 0.0339, 0.1066, 0.0477, 0.0413, 0.0318], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0216, 0.0214, 0.0226, 0.0203, 0.0229, 0.0226, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 16:44:39,605 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.19 vs. limit=5.0 +2023-03-28 16:44:59,362 INFO [train.py:892] (1/4) Epoch 23, batch 1300, loss[loss=0.2783, simple_loss=0.3481, pruned_loss=0.1043, over 19594.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2524, pruned_loss=0.05214, over 3945783.55 frames. ], batch size: 376, lr: 6.81e-03, grad_scale: 16.0 +2023-03-28 16:45:16,141 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.506e+02 3.507e+02 4.427e+02 5.595e+02 1.023e+03, threshold=8.855e+02, percent-clipped=1.0 +2023-03-28 16:45:32,621 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.56 vs. 
limit=5.0 +2023-03-28 16:45:45,589 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1887, 3.1065, 4.7024, 3.4796, 3.8386, 3.6136, 2.5206, 2.7113], + device='cuda:1'), covar=tensor([0.0888, 0.3166, 0.0440, 0.0987, 0.1647, 0.1390, 0.2485, 0.2707], + device='cuda:1'), in_proj_covar=tensor([0.0341, 0.0371, 0.0329, 0.0266, 0.0363, 0.0349, 0.0351, 0.0319], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 16:46:59,029 INFO [train.py:892] (1/4) Epoch 23, batch 1350, loss[loss=0.181, simple_loss=0.2575, pruned_loss=0.05229, over 19750.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2527, pruned_loss=0.05197, over 3945807.59 frames. ], batch size: 226, lr: 6.80e-03, grad_scale: 16.0 +2023-03-28 16:47:46,797 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3996, 4.8069, 4.9804, 4.7562, 5.2869, 3.2581, 4.2853, 2.6975], + device='cuda:1'), covar=tensor([0.0162, 0.0195, 0.0138, 0.0187, 0.0133, 0.0892, 0.0817, 0.1414], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0138, 0.0110, 0.0129, 0.0114, 0.0130, 0.0140, 0.0123], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 16:48:57,827 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-03-28 16:48:58,512 INFO [train.py:892] (1/4) Epoch 23, batch 1400, loss[loss=0.244, simple_loss=0.3484, pruned_loss=0.06986, over 17993.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2536, pruned_loss=0.05252, over 3944934.96 frames. ], batch size: 633, lr: 6.80e-03, grad_scale: 16.0 +2023-03-28 16:49:18,202 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.596e+02 4.046e+02 4.840e+02 5.531e+02 1.167e+03, threshold=9.681e+02, percent-clipped=2.0 +2023-03-28 16:50:56,065 INFO [train.py:892] (1/4) Epoch 23, batch 1450, loss[loss=0.1654, simple_loss=0.2373, pruned_loss=0.04679, over 19861.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2536, pruned_loss=0.05239, over 3946448.17 frames. 
], batch size: 104, lr: 6.79e-03, grad_scale: 16.0 +2023-03-28 16:50:59,092 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42262.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:51:01,425 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9009, 2.8955, 1.8134, 3.3693, 3.0973, 3.3661, 3.3751, 2.7104], + device='cuda:1'), covar=tensor([0.0565, 0.0641, 0.1534, 0.0719, 0.0638, 0.0406, 0.0649, 0.0764], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0135, 0.0139, 0.0141, 0.0125, 0.0123, 0.0137, 0.0139], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 16:52:09,568 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8001, 1.8074, 1.8920, 1.7462, 1.7061, 1.9116, 1.7730, 1.8614], + device='cuda:1'), covar=tensor([0.0334, 0.0281, 0.0287, 0.0285, 0.0423, 0.0278, 0.0408, 0.0307], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0068, 0.0071, 0.0065, 0.0077, 0.0073, 0.0089, 0.0064], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-28 16:52:39,464 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42304.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 16:52:55,227 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42310.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 16:52:56,192 INFO [train.py:892] (1/4) Epoch 23, batch 1500, loss[loss=0.1534, simple_loss=0.2323, pruned_loss=0.03718, over 19812.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2528, pruned_loss=0.05179, over 3946883.05 frames. ], batch size: 72, lr: 6.79e-03, grad_scale: 16.0 +2023-03-28 16:53:12,439 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 3.834e+02 4.491e+02 5.475e+02 9.229e+02, threshold=8.983e+02, percent-clipped=0.0 +2023-03-28 16:53:30,495 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7038, 2.7545, 4.8030, 3.9126, 4.5883, 4.6574, 4.5340, 4.4436], + device='cuda:1'), covar=tensor([0.0646, 0.1184, 0.0134, 0.1269, 0.0140, 0.0264, 0.0220, 0.0211], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0100, 0.0082, 0.0150, 0.0080, 0.0093, 0.0086, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 16:53:39,771 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0013, 2.4936, 2.9846, 3.1270, 3.6827, 4.1809, 4.1076, 4.1093], + device='cuda:1'), covar=tensor([0.0923, 0.1843, 0.1391, 0.0680, 0.0388, 0.0236, 0.0356, 0.0368], + device='cuda:1'), in_proj_covar=tensor([0.0154, 0.0169, 0.0173, 0.0144, 0.0127, 0.0122, 0.0116, 0.0110], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 16:54:46,316 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42358.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:54:51,549 INFO [train.py:892] (1/4) Epoch 23, batch 1550, loss[loss=0.1537, simple_loss=0.2286, pruned_loss=0.03939, over 19895.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2532, pruned_loss=0.05202, over 3946139.17 frames. ], batch size: 87, lr: 6.79e-03, grad_scale: 16.0 +2023-03-28 16:55:01,182 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. 
limit=2.0 +2023-03-28 16:55:38,169 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42379.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:56:55,960 INFO [train.py:892] (1/4) Epoch 23, batch 1600, loss[loss=0.1751, simple_loss=0.2449, pruned_loss=0.05266, over 19809.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2542, pruned_loss=0.05226, over 3944842.87 frames. ], batch size: 231, lr: 6.78e-03, grad_scale: 16.0 +2023-03-28 16:57:13,628 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.225e+02 3.903e+02 4.499e+02 5.665e+02 1.044e+03, threshold=8.998e+02, percent-clipped=1.0 +2023-03-28 16:58:07,307 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42440.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:58:53,580 INFO [train.py:892] (1/4) Epoch 23, batch 1650, loss[loss=0.1625, simple_loss=0.2406, pruned_loss=0.04223, over 19762.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2548, pruned_loss=0.05272, over 3943723.62 frames. ], batch size: 70, lr: 6.78e-03, grad_scale: 16.0 +2023-03-28 17:00:28,645 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.50 vs. limit=2.0 +2023-03-28 17:00:34,471 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-28 17:00:37,929 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42506.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:00:47,775 INFO [train.py:892] (1/4) Epoch 23, batch 1700, loss[loss=0.1717, simple_loss=0.2462, pruned_loss=0.04855, over 19668.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2539, pruned_loss=0.05228, over 3945048.51 frames. ], batch size: 64, lr: 6.77e-03, grad_scale: 16.0 +2023-03-28 17:01:08,840 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-03-28 17:01:09,689 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 4.052e+02 4.477e+02 5.328e+02 1.019e+03, threshold=8.953e+02, percent-clipped=3.0 +2023-03-28 17:01:54,467 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-28 17:02:34,851 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4909, 3.6755, 3.8657, 4.5936, 2.9945, 3.3668, 3.4622, 2.8816], + device='cuda:1'), covar=tensor([0.0524, 0.2289, 0.0956, 0.0434, 0.2194, 0.1145, 0.1012, 0.1742], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0333, 0.0243, 0.0188, 0.0244, 0.0201, 0.0213, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 17:02:39,533 INFO [train.py:892] (1/4) Epoch 23, batch 1750, loss[loss=0.1532, simple_loss=0.2277, pruned_loss=0.0394, over 19793.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2537, pruned_loss=0.05262, over 3945303.25 frames. 
], batch size: 154, lr: 6.77e-03, grad_scale: 16.0 +2023-03-28 17:02:40,392 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6469, 2.8085, 2.9719, 2.7263, 2.5679, 2.8022, 2.6456, 3.0232], + device='cuda:1'), covar=tensor([0.0282, 0.0303, 0.0212, 0.0271, 0.0391, 0.0329, 0.0379, 0.0272], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0068, 0.0071, 0.0064, 0.0077, 0.0072, 0.0088, 0.0063], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-28 17:02:43,450 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42562.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:02:52,616 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42567.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:04:07,883 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42604.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 17:04:10,559 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-28 17:04:19,993 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42610.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:04:21,417 INFO [train.py:892] (1/4) Epoch 23, batch 1800, loss[loss=0.2658, simple_loss=0.3383, pruned_loss=0.09667, over 19447.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2538, pruned_loss=0.05266, over 3946611.95 frames. ], batch size: 396, lr: 6.77e-03, grad_scale: 16.0 +2023-03-28 17:04:37,544 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.634e+02 3.841e+02 4.565e+02 5.704e+02 1.447e+03, threshold=9.129e+02, percent-clipped=3.0 +2023-03-28 17:05:16,692 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9703, 2.4839, 3.0879, 3.1432, 3.7175, 4.1761, 4.0182, 4.0615], + device='cuda:1'), covar=tensor([0.0923, 0.1779, 0.1321, 0.0718, 0.0424, 0.0244, 0.0340, 0.0458], + device='cuda:1'), in_proj_covar=tensor([0.0157, 0.0172, 0.0175, 0.0147, 0.0130, 0.0125, 0.0119, 0.0112], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 17:05:32,049 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42649.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:05:37,688 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42652.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 17:05:54,212 INFO [train.py:892] (1/4) Epoch 23, batch 1850, loss[loss=0.1792, simple_loss=0.266, pruned_loss=0.04616, over 19822.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2549, pruned_loss=0.05209, over 3946666.54 frames. ], batch size: 57, lr: 6.76e-03, grad_scale: 16.0 +2023-03-28 17:06:55,134 INFO [train.py:892] (1/4) Epoch 24, batch 0, loss[loss=0.1824, simple_loss=0.2528, pruned_loss=0.05605, over 19799.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2528, pruned_loss=0.05605, over 19799.00 frames. ], batch size: 231, lr: 6.62e-03, grad_scale: 16.0 +2023-03-28 17:06:55,134 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 17:07:24,172 INFO [train.py:926] (1/4) Epoch 24, validation: loss=0.1738, simple_loss=0.2478, pruned_loss=0.0499, over 2883724.00 frames. 
+2023-03-28 17:07:24,174 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 17:07:25,280 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5403, 4.4905, 5.0138, 4.5716, 4.1131, 4.8096, 4.6676, 5.1556], + device='cuda:1'), covar=tensor([0.0977, 0.0379, 0.0371, 0.0366, 0.0817, 0.0493, 0.0456, 0.0307], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0215, 0.0214, 0.0224, 0.0201, 0.0229, 0.0224, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:09:11,917 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42710.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:09:24,522 INFO [train.py:892] (1/4) Epoch 24, batch 50, loss[loss=0.1688, simple_loss=0.2486, pruned_loss=0.04447, over 19885.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2428, pruned_loss=0.04779, over 891590.66 frames. ], batch size: 47, lr: 6.61e-03, grad_scale: 16.0 +2023-03-28 17:09:30,668 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.706e+02 3.984e+02 4.809e+02 5.646e+02 9.126e+02, threshold=9.617e+02, percent-clipped=0.0 +2023-03-28 17:10:09,742 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42735.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:10:16,380 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9799, 2.8529, 1.6547, 3.4556, 3.1394, 3.4619, 3.5303, 2.8348], + device='cuda:1'), covar=tensor([0.0626, 0.0720, 0.1967, 0.0668, 0.0646, 0.0496, 0.0616, 0.0770], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0135, 0.0140, 0.0142, 0.0125, 0.0124, 0.0138, 0.0139], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:11:03,418 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0839, 4.6995, 4.7384, 4.4963, 4.9894, 3.2476, 4.1163, 2.4185], + device='cuda:1'), covar=tensor([0.0153, 0.0187, 0.0126, 0.0165, 0.0118, 0.0877, 0.0797, 0.1525], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0140, 0.0111, 0.0129, 0.0115, 0.0131, 0.0141, 0.0123], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:11:17,028 INFO [train.py:892] (1/4) Epoch 24, batch 100, loss[loss=0.1444, simple_loss=0.2174, pruned_loss=0.03567, over 19764.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2454, pruned_loss=0.04861, over 1569583.23 frames. ], batch size: 113, lr: 6.61e-03, grad_scale: 16.0 +2023-03-28 17:11:39,299 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.62 vs. limit=5.0 +2023-03-28 17:12:45,613 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1742, 4.2318, 2.4501, 4.5149, 4.6455, 1.8687, 3.8846, 3.4704], + device='cuda:1'), covar=tensor([0.0686, 0.0900, 0.2815, 0.0802, 0.0586, 0.2931, 0.1008, 0.0818], + device='cuda:1'), in_proj_covar=tensor([0.0224, 0.0248, 0.0225, 0.0258, 0.0236, 0.0199, 0.0233, 0.0187], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 17:13:12,217 INFO [train.py:892] (1/4) Epoch 24, batch 150, loss[loss=0.1834, simple_loss=0.2466, pruned_loss=0.06009, over 19770.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2501, pruned_loss=0.0504, over 2094161.83 frames. 
], batch size: 241, lr: 6.60e-03, grad_scale: 16.0 +2023-03-28 17:13:19,967 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.303e+02 3.674e+02 4.384e+02 5.229e+02 7.320e+02, threshold=8.767e+02, percent-clipped=0.0 +2023-03-28 17:14:17,389 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0414, 2.3086, 2.0671, 1.5268, 2.1122, 2.2526, 2.1434, 2.2067], + device='cuda:1'), covar=tensor([0.0344, 0.0249, 0.0301, 0.0533, 0.0373, 0.0258, 0.0252, 0.0243], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0083, 0.0089, 0.0093, 0.0096, 0.0074, 0.0073, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 17:14:33,847 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3660, 3.4391, 2.2831, 4.2363, 3.8087, 4.1255, 4.2242, 3.1170], + device='cuda:1'), covar=tensor([0.0644, 0.0621, 0.1457, 0.0502, 0.0508, 0.0350, 0.0470, 0.0824], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0136, 0.0140, 0.0142, 0.0125, 0.0124, 0.0138, 0.0140], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:14:41,090 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8935, 4.8006, 5.3166, 4.8278, 4.2954, 5.1189, 4.9605, 5.5117], + device='cuda:1'), covar=tensor([0.0956, 0.0370, 0.0382, 0.0383, 0.0751, 0.0396, 0.0409, 0.0289], + device='cuda:1'), in_proj_covar=tensor([0.0274, 0.0215, 0.0214, 0.0224, 0.0202, 0.0228, 0.0223, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:14:53,578 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42859.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:15:01,894 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42862.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:15:02,636 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-28 17:15:09,283 INFO [train.py:892] (1/4) Epoch 24, batch 200, loss[loss=0.1434, simple_loss=0.2162, pruned_loss=0.03529, over 19847.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2511, pruned_loss=0.0508, over 2506913.74 frames. ], batch size: 143, lr: 6.60e-03, grad_scale: 16.0 +2023-03-28 17:15:39,942 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42879.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:16:16,865 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2953, 4.3798, 2.5115, 4.6537, 4.7801, 2.0693, 4.0150, 3.5410], + device='cuda:1'), covar=tensor([0.0631, 0.0798, 0.2903, 0.0663, 0.0604, 0.2828, 0.0994, 0.0817], + device='cuda:1'), in_proj_covar=tensor([0.0224, 0.0248, 0.0225, 0.0258, 0.0236, 0.0199, 0.0233, 0.0187], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 17:17:07,532 INFO [train.py:892] (1/4) Epoch 24, batch 250, loss[loss=0.1566, simple_loss=0.2296, pruned_loss=0.04175, over 19841.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2502, pruned_loss=0.05123, over 2827422.92 frames. 
], batch size: 109, lr: 6.60e-03, grad_scale: 16.0 +2023-03-28 17:17:15,144 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.878e+02 4.086e+02 4.768e+02 5.713e+02 1.218e+03, threshold=9.536e+02, percent-clipped=1.0 +2023-03-28 17:17:18,126 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42920.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:18:03,199 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7287, 2.6859, 2.9537, 2.6747, 3.0514, 3.0319, 3.5594, 3.9088], + device='cuda:1'), covar=tensor([0.0628, 0.1706, 0.1551, 0.2149, 0.1629, 0.1467, 0.0681, 0.0579], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0235, 0.0259, 0.0248, 0.0289, 0.0250, 0.0219, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 17:18:08,680 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42940.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:19:11,322 INFO [train.py:892] (1/4) Epoch 24, batch 300, loss[loss=0.236, simple_loss=0.2966, pruned_loss=0.08767, over 19711.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2515, pruned_loss=0.0517, over 3074808.56 frames. ], batch size: 315, lr: 6.59e-03, grad_scale: 16.0 +2023-03-28 17:20:47,294 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43005.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:21:09,365 INFO [train.py:892] (1/4) Epoch 24, batch 350, loss[loss=0.1616, simple_loss=0.2363, pruned_loss=0.04348, over 19823.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2518, pruned_loss=0.05172, over 3269533.31 frames. ], batch size: 127, lr: 6.59e-03, grad_scale: 16.0 +2023-03-28 17:21:15,714 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 4.222e+02 4.669e+02 5.474e+02 9.136e+02, threshold=9.338e+02, percent-clipped=0.0 +2023-03-28 17:21:58,421 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43035.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:23:09,456 INFO [train.py:892] (1/4) Epoch 24, batch 400, loss[loss=0.2056, simple_loss=0.2843, pruned_loss=0.06342, over 19705.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2523, pruned_loss=0.05211, over 3418016.98 frames. ], batch size: 325, lr: 6.59e-03, grad_scale: 16.0 +2023-03-28 17:23:28,543 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2465, 2.9822, 5.0606, 4.1837, 4.6344, 5.0060, 4.7956, 4.7330], + device='cuda:1'), covar=tensor([0.0338, 0.0805, 0.0080, 0.0941, 0.0118, 0.0151, 0.0121, 0.0108], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0100, 0.0083, 0.0150, 0.0080, 0.0094, 0.0087, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 17:23:55,625 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43083.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:24:27,035 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-28 17:25:13,689 INFO [train.py:892] (1/4) Epoch 24, batch 450, loss[loss=0.1601, simple_loss=0.2372, pruned_loss=0.04149, over 19772.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2544, pruned_loss=0.05277, over 3534789.52 frames. 
], batch size: 113, lr: 6.58e-03, grad_scale: 16.0 +2023-03-28 17:25:20,843 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.554e+02 4.148e+02 4.850e+02 5.814e+02 7.870e+02, threshold=9.701e+02, percent-clipped=0.0 +2023-03-28 17:25:21,882 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7884, 4.0952, 4.1490, 4.9795, 3.1724, 3.4890, 3.1955, 3.0566], + device='cuda:1'), covar=tensor([0.0459, 0.2041, 0.0897, 0.0300, 0.2068, 0.1042, 0.1126, 0.1537], + device='cuda:1'), in_proj_covar=tensor([0.0231, 0.0329, 0.0239, 0.0188, 0.0240, 0.0196, 0.0208, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 17:25:28,130 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1371, 5.4347, 5.4669, 5.3670, 5.0530, 5.4174, 4.8206, 4.9349], + device='cuda:1'), covar=tensor([0.0419, 0.0421, 0.0470, 0.0376, 0.0595, 0.0545, 0.0713, 0.0882], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0261, 0.0278, 0.0238, 0.0243, 0.0232, 0.0249, 0.0296], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:27:01,485 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43162.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:27:08,741 INFO [train.py:892] (1/4) Epoch 24, batch 500, loss[loss=0.1776, simple_loss=0.248, pruned_loss=0.05362, over 19846.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2538, pruned_loss=0.05275, over 3626864.47 frames. ], batch size: 60, lr: 6.58e-03, grad_scale: 16.0 +2023-03-28 17:28:57,159 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43210.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:29:09,442 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43215.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:29:10,709 INFO [train.py:892] (1/4) Epoch 24, batch 550, loss[loss=0.1505, simple_loss=0.2216, pruned_loss=0.03968, over 19701.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2541, pruned_loss=0.05306, over 3699803.60 frames. ], batch size: 56, lr: 6.57e-03, grad_scale: 16.0 +2023-03-28 17:29:18,525 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.622e+02 3.772e+02 4.624e+02 5.545e+02 8.960e+02, threshold=9.249e+02, percent-clipped=0.0 +2023-03-28 17:29:59,356 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43235.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:30:14,366 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.74 vs. limit=5.0 +2023-03-28 17:31:16,334 INFO [train.py:892] (1/4) Epoch 24, batch 600, loss[loss=0.1991, simple_loss=0.2672, pruned_loss=0.06554, over 19696.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2531, pruned_loss=0.05283, over 3755733.08 frames. ], batch size: 325, lr: 6.57e-03, grad_scale: 16.0 +2023-03-28 17:32:50,182 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43305.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:33:14,154 INFO [train.py:892] (1/4) Epoch 24, batch 650, loss[loss=0.1381, simple_loss=0.2093, pruned_loss=0.03345, over 19653.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2522, pruned_loss=0.05191, over 3797926.72 frames. 
], batch size: 47, lr: 6.57e-03, grad_scale: 16.0 +2023-03-28 17:33:20,464 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.750e+02 3.990e+02 4.608e+02 6.131e+02 1.046e+03, threshold=9.216e+02, percent-clipped=2.0 +2023-03-28 17:34:34,769 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43353.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:35:00,965 INFO [train.py:892] (1/4) Epoch 24, batch 700, loss[loss=0.1462, simple_loss=0.2294, pruned_loss=0.03147, over 19706.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2528, pruned_loss=0.05205, over 3830259.69 frames. ], batch size: 85, lr: 6.56e-03, grad_scale: 16.0 +2023-03-28 17:37:05,719 INFO [train.py:892] (1/4) Epoch 24, batch 750, loss[loss=0.209, simple_loss=0.2817, pruned_loss=0.06817, over 19701.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2515, pruned_loss=0.05134, over 3857370.86 frames. ], batch size: 74, lr: 6.56e-03, grad_scale: 16.0 +2023-03-28 17:37:13,198 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.432e+02 3.941e+02 4.702e+02 5.595e+02 1.048e+03, threshold=9.403e+02, percent-clipped=2.0 +2023-03-28 17:37:46,004 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1727, 3.4228, 3.6305, 4.1528, 2.9474, 3.3221, 2.6590, 2.6248], + device='cuda:1'), covar=tensor([0.0476, 0.2107, 0.0954, 0.0390, 0.1887, 0.0860, 0.1265, 0.1678], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0330, 0.0242, 0.0189, 0.0242, 0.0199, 0.0211, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 17:39:06,751 INFO [train.py:892] (1/4) Epoch 24, batch 800, loss[loss=0.1592, simple_loss=0.2354, pruned_loss=0.04149, over 19759.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2503, pruned_loss=0.05081, over 3877445.07 frames. ], batch size: 100, lr: 6.56e-03, grad_scale: 16.0 +2023-03-28 17:40:25,518 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-28 17:41:03,947 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43514.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:41:06,005 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43515.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:41:06,982 INFO [train.py:892] (1/4) Epoch 24, batch 850, loss[loss=0.1775, simple_loss=0.2491, pruned_loss=0.05294, over 19739.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2512, pruned_loss=0.05103, over 3892529.18 frames. ], batch size: 179, lr: 6.55e-03, grad_scale: 16.0 +2023-03-28 17:41:14,148 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.358e+02 3.878e+02 4.715e+02 5.531e+02 7.871e+02, threshold=9.429e+02, percent-clipped=0.0 +2023-03-28 17:41:52,044 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43535.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:43:00,649 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43563.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:43:06,605 INFO [train.py:892] (1/4) Epoch 24, batch 900, loss[loss=0.2143, simple_loss=0.2816, pruned_loss=0.07345, over 19698.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.251, pruned_loss=0.05059, over 3905055.57 frames. 
], batch size: 310, lr: 6.55e-03, grad_scale: 16.0 +2023-03-28 17:43:27,511 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43575.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:43:45,969 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43583.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:45:04,110 INFO [train.py:892] (1/4) Epoch 24, batch 950, loss[loss=0.1699, simple_loss=0.2394, pruned_loss=0.05023, over 19826.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2527, pruned_loss=0.05148, over 3912456.49 frames. ], batch size: 231, lr: 6.54e-03, grad_scale: 16.0 +2023-03-28 17:45:11,471 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.960e+02 4.203e+02 4.948e+02 5.601e+02 1.021e+03, threshold=9.897e+02, percent-clipped=1.0 +2023-03-28 17:47:04,267 INFO [train.py:892] (1/4) Epoch 24, batch 1000, loss[loss=0.1444, simple_loss=0.2148, pruned_loss=0.03704, over 19812.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2522, pruned_loss=0.05141, over 3921243.05 frames. ], batch size: 96, lr: 6.54e-03, grad_scale: 16.0 +2023-03-28 17:49:06,807 INFO [train.py:892] (1/4) Epoch 24, batch 1050, loss[loss=0.1881, simple_loss=0.2652, pruned_loss=0.05552, over 19775.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2511, pruned_loss=0.05113, over 3928542.41 frames. ], batch size: 233, lr: 6.54e-03, grad_scale: 32.0 +2023-03-28 17:49:14,075 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.754e+02 4.065e+02 4.703e+02 5.528e+02 1.039e+03, threshold=9.406e+02, percent-clipped=2.0 +2023-03-28 17:49:15,812 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-28 17:50:30,072 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2500, 5.5998, 5.7962, 5.5611, 5.5031, 5.3188, 5.4155, 5.2545], + device='cuda:1'), covar=tensor([0.1488, 0.1396, 0.0869, 0.1190, 0.0693, 0.0736, 0.2059, 0.1977], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0304, 0.0348, 0.0276, 0.0258, 0.0256, 0.0330, 0.0359], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:51:07,618 INFO [train.py:892] (1/4) Epoch 24, batch 1100, loss[loss=0.1667, simple_loss=0.2458, pruned_loss=0.04376, over 19534.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2511, pruned_loss=0.05058, over 3932183.15 frames. ], batch size: 46, lr: 6.53e-03, grad_scale: 16.0 +2023-03-28 17:51:11,645 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. 
limit=2.0 +2023-03-28 17:51:45,992 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2643, 3.8335, 4.0222, 4.2121, 3.9461, 4.1667, 4.3214, 4.4968], + device='cuda:1'), covar=tensor([0.0630, 0.0450, 0.0490, 0.0345, 0.0701, 0.0541, 0.0429, 0.0305], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0172, 0.0194, 0.0167, 0.0167, 0.0149, 0.0145, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 17:51:56,892 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4252, 4.7051, 4.7483, 4.6467, 4.4218, 4.6934, 4.1972, 4.2610], + device='cuda:1'), covar=tensor([0.0489, 0.0494, 0.0522, 0.0464, 0.0632, 0.0590, 0.0731, 0.1003], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0263, 0.0281, 0.0243, 0.0245, 0.0235, 0.0252, 0.0298], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:51:56,974 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43787.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:53:09,084 INFO [train.py:892] (1/4) Epoch 24, batch 1150, loss[loss=0.1677, simple_loss=0.2522, pruned_loss=0.04156, over 19659.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2505, pruned_loss=0.05083, over 3936279.72 frames. ], batch size: 57, lr: 6.53e-03, grad_scale: 16.0 +2023-03-28 17:53:10,875 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9679, 3.2353, 2.8791, 2.4223, 2.8659, 3.1929, 3.0547, 3.1693], + device='cuda:1'), covar=tensor([0.0268, 0.0257, 0.0249, 0.0537, 0.0337, 0.0239, 0.0217, 0.0208], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0087, 0.0093, 0.0096, 0.0100, 0.0077, 0.0076, 0.0077], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 17:53:17,088 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9097, 2.3001, 2.9263, 3.1389, 3.5967, 3.9872, 3.9958, 3.9290], + device='cuda:1'), covar=tensor([0.0961, 0.1922, 0.1352, 0.0737, 0.0421, 0.0243, 0.0301, 0.0426], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0169, 0.0173, 0.0148, 0.0128, 0.0124, 0.0118, 0.0111], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 17:53:19,346 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.514e+02 3.854e+02 4.753e+02 5.981e+02 1.175e+03, threshold=9.505e+02, percent-clipped=4.0 +2023-03-28 17:53:22,173 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8582, 3.7248, 3.7314, 3.5091, 3.8623, 2.8712, 3.2219, 1.8571], + device='cuda:1'), covar=tensor([0.0215, 0.0237, 0.0162, 0.0196, 0.0152, 0.1019, 0.0677, 0.1671], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0141, 0.0111, 0.0130, 0.0115, 0.0131, 0.0141, 0.0125], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:53:24,290 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6159, 3.6433, 2.2942, 3.8157, 3.9301, 1.8132, 3.1571, 3.0658], + device='cuda:1'), covar=tensor([0.0766, 0.0999, 0.2826, 0.0864, 0.0739, 0.2892, 0.1269, 0.0900], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0251, 0.0228, 0.0262, 0.0241, 0.0202, 0.0235, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') 
+2023-03-28 17:54:27,378 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43848.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:54:50,061 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-28 17:55:06,398 INFO [train.py:892] (1/4) Epoch 24, batch 1200, loss[loss=0.1656, simple_loss=0.242, pruned_loss=0.04461, over 19836.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2512, pruned_loss=0.05096, over 3939881.99 frames. ], batch size: 171, lr: 6.53e-03, grad_scale: 16.0 +2023-03-28 17:55:15,706 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43870.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:55:24,616 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9737, 2.8143, 3.0918, 2.7927, 3.3043, 3.2442, 3.8801, 4.1763], + device='cuda:1'), covar=tensor([0.0625, 0.1692, 0.1613, 0.2130, 0.1605, 0.1380, 0.0607, 0.0643], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0236, 0.0259, 0.0248, 0.0288, 0.0250, 0.0219, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 17:55:53,688 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0364, 4.3409, 4.7011, 4.2081, 4.0612, 4.5837, 4.3895, 4.8178], + device='cuda:1'), covar=tensor([0.1258, 0.0396, 0.0513, 0.0464, 0.0983, 0.0570, 0.0544, 0.0451], + device='cuda:1'), in_proj_covar=tensor([0.0270, 0.0212, 0.0211, 0.0223, 0.0200, 0.0226, 0.0220, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:56:08,843 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3367, 4.5801, 4.5975, 4.4867, 4.2858, 4.5287, 4.0791, 4.1297], + device='cuda:1'), covar=tensor([0.0476, 0.0498, 0.0516, 0.0463, 0.0590, 0.0601, 0.0717, 0.1002], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0262, 0.0280, 0.0243, 0.0244, 0.0234, 0.0251, 0.0296], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:56:47,489 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43909.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:56:51,844 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7323, 4.0734, 4.3396, 4.7983, 3.1183, 3.3590, 2.8781, 2.9132], + device='cuda:1'), covar=tensor([0.0474, 0.1625, 0.0700, 0.0328, 0.1906, 0.1073, 0.1214, 0.1582], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0329, 0.0242, 0.0190, 0.0241, 0.0199, 0.0210, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 17:56:58,253 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3503, 3.6349, 3.8724, 4.3982, 2.9400, 3.2534, 2.8185, 2.7394], + device='cuda:1'), covar=tensor([0.0487, 0.1965, 0.0879, 0.0332, 0.2002, 0.0931, 0.1206, 0.1656], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0329, 0.0242, 0.0190, 0.0241, 0.0199, 0.0210, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 17:57:04,595 INFO [train.py:892] (1/4) Epoch 24, batch 1250, loss[loss=0.1807, simple_loss=0.2412, pruned_loss=0.06013, over 19751.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2501, pruned_loss=0.05066, over 3942564.60 frames. 
], batch size: 129, lr: 6.52e-03, grad_scale: 8.0 +2023-03-28 17:57:16,406 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.338e+02 3.868e+02 4.714e+02 5.719e+02 9.399e+02, threshold=9.429e+02, percent-clipped=0.0 +2023-03-28 17:58:32,709 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0368, 2.5836, 3.2966, 3.3348, 3.7719, 4.2955, 4.1776, 4.2300], + device='cuda:1'), covar=tensor([0.0901, 0.1600, 0.1051, 0.0590, 0.0383, 0.0226, 0.0342, 0.0349], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0168, 0.0172, 0.0147, 0.0128, 0.0124, 0.0117, 0.0111], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 17:58:32,752 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3811, 3.5203, 2.1490, 4.3617, 3.7638, 4.1844, 4.2905, 3.2708], + device='cuda:1'), covar=tensor([0.0630, 0.0648, 0.1491, 0.0501, 0.0613, 0.0386, 0.0479, 0.0758], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0138, 0.0140, 0.0143, 0.0127, 0.0127, 0.0140, 0.0140], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 17:58:46,008 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43959.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:59:01,800 INFO [train.py:892] (1/4) Epoch 24, batch 1300, loss[loss=0.1732, simple_loss=0.2554, pruned_loss=0.04553, over 19672.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2507, pruned_loss=0.05103, over 3944051.60 frames. ], batch size: 73, lr: 6.52e-03, grad_scale: 8.0 +2023-03-28 17:59:13,396 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43970.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:00:59,591 INFO [train.py:892] (1/4) Epoch 24, batch 1350, loss[loss=0.1613, simple_loss=0.2264, pruned_loss=0.04804, over 19858.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2505, pruned_loss=0.05101, over 3946020.50 frames. ], batch size: 78, lr: 6.52e-03, grad_scale: 8.0 +2023-03-28 18:01:06,798 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7199, 4.8427, 2.8892, 5.0341, 5.2827, 2.3236, 4.4772, 3.6907], + device='cuda:1'), covar=tensor([0.0525, 0.0584, 0.2503, 0.0658, 0.0394, 0.2543, 0.0813, 0.0763], + device='cuda:1'), in_proj_covar=tensor([0.0224, 0.0247, 0.0225, 0.0259, 0.0237, 0.0199, 0.0232, 0.0187], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 18:01:08,826 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44020.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:01:09,756 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.867e+02 4.545e+02 5.331e+02 8.729e+02, threshold=9.091e+02, percent-clipped=0.0 +2023-03-28 18:02:45,824 INFO [train.py:892] (1/4) Epoch 24, batch 1400, loss[loss=0.1901, simple_loss=0.2675, pruned_loss=0.05638, over 19794.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2504, pruned_loss=0.05054, over 3946144.45 frames. ], batch size: 45, lr: 6.51e-03, grad_scale: 8.0 +2023-03-28 18:04:35,905 INFO [train.py:892] (1/4) Epoch 24, batch 1450, loss[loss=0.1579, simple_loss=0.2263, pruned_loss=0.04477, over 19856.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2492, pruned_loss=0.04956, over 3948158.73 frames. 
], batch size: 165, lr: 6.51e-03, grad_scale: 8.0 +2023-03-28 18:04:46,679 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.585e+02 3.832e+02 4.632e+02 5.399e+02 1.168e+03, threshold=9.265e+02, percent-clipped=4.0 +2023-03-28 18:05:40,661 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44143.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:06:31,961 INFO [train.py:892] (1/4) Epoch 24, batch 1500, loss[loss=0.2617, simple_loss=0.3263, pruned_loss=0.09861, over 19446.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2493, pruned_loss=0.04995, over 3949202.89 frames. ], batch size: 396, lr: 6.50e-03, grad_scale: 8.0 +2023-03-28 18:06:40,963 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44170.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:06:49,224 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0809, 4.1243, 2.5559, 4.3795, 4.5525, 2.0478, 3.7777, 3.3599], + device='cuda:1'), covar=tensor([0.0661, 0.0923, 0.2557, 0.0771, 0.0547, 0.2719, 0.1029, 0.0844], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0250, 0.0227, 0.0262, 0.0239, 0.0200, 0.0234, 0.0188], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 18:07:36,377 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7574, 3.1561, 3.6038, 3.3042, 3.9541, 3.9418, 4.4876, 5.0347], + device='cuda:1'), covar=tensor([0.0484, 0.1628, 0.1428, 0.2022, 0.1567, 0.1260, 0.0598, 0.0442], + device='cuda:1'), in_proj_covar=tensor([0.0245, 0.0238, 0.0261, 0.0250, 0.0289, 0.0252, 0.0223, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:07:54,194 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0406, 5.3197, 5.3242, 5.2696, 4.9485, 5.3206, 4.7915, 4.8392], + device='cuda:1'), covar=tensor([0.0398, 0.0430, 0.0510, 0.0382, 0.0548, 0.0492, 0.0673, 0.0869], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0260, 0.0280, 0.0241, 0.0245, 0.0235, 0.0251, 0.0297], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:08:30,473 INFO [train.py:892] (1/4) Epoch 24, batch 1550, loss[loss=0.1633, simple_loss=0.2547, pruned_loss=0.03598, over 19710.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2495, pruned_loss=0.04992, over 3949664.41 frames. ], batch size: 54, lr: 6.50e-03, grad_scale: 8.0 +2023-03-28 18:08:35,838 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44218.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:08:41,960 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 4.103e+02 5.047e+02 5.818e+02 1.108e+03, threshold=1.009e+03, percent-clipped=1.0 +2023-03-28 18:10:28,896 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44265.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:10:30,465 INFO [train.py:892] (1/4) Epoch 24, batch 1600, loss[loss=0.1678, simple_loss=0.2377, pruned_loss=0.04897, over 19816.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2504, pruned_loss=0.05022, over 3950590.04 frames. ], batch size: 181, lr: 6.50e-03, grad_scale: 8.0 +2023-03-28 18:10:47,998 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. 
limit=2.0 +2023-03-28 18:11:32,137 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5820, 2.6891, 2.7936, 2.2515, 2.8589, 2.4802, 2.7155, 2.8228], + device='cuda:1'), covar=tensor([0.0544, 0.0470, 0.0452, 0.0845, 0.0409, 0.0466, 0.0451, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0080, 0.0078, 0.0107, 0.0074, 0.0076, 0.0073, 0.0066], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 18:12:24,896 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44315.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:12:26,299 INFO [train.py:892] (1/4) Epoch 24, batch 1650, loss[loss=0.1547, simple_loss=0.2341, pruned_loss=0.03769, over 19815.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2501, pruned_loss=0.0502, over 3951166.93 frames. ], batch size: 98, lr: 6.49e-03, grad_scale: 8.0 +2023-03-28 18:12:36,797 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.188e+02 4.041e+02 4.652e+02 5.558e+02 9.682e+02, threshold=9.304e+02, percent-clipped=0.0 +2023-03-28 18:13:44,307 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6775, 2.2369, 3.4721, 2.9730, 3.5379, 3.5505, 3.3394, 3.3933], + device='cuda:1'), covar=tensor([0.0616, 0.0983, 0.0118, 0.0524, 0.0134, 0.0223, 0.0194, 0.0179], + device='cuda:1'), in_proj_covar=tensor([0.0095, 0.0100, 0.0082, 0.0150, 0.0080, 0.0094, 0.0087, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 18:14:21,546 INFO [train.py:892] (1/4) Epoch 24, batch 1700, loss[loss=0.2036, simple_loss=0.2681, pruned_loss=0.06959, over 19744.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2492, pruned_loss=0.05014, over 3951451.22 frames. ], batch size: 253, lr: 6.49e-03, grad_scale: 8.0 +2023-03-28 18:16:13,231 INFO [train.py:892] (1/4) Epoch 24, batch 1750, loss[loss=0.1569, simple_loss=0.2313, pruned_loss=0.04121, over 19389.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2504, pruned_loss=0.05082, over 3949603.33 frames. 
], batch size: 40, lr: 6.49e-03, grad_scale: 8.0 +2023-03-28 18:16:22,201 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.732e+02 4.086e+02 4.781e+02 5.963e+02 1.155e+03, threshold=9.562e+02, percent-clipped=1.0 +2023-03-28 18:16:42,451 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8556, 2.9231, 4.2914, 3.2785, 3.5717, 3.3733, 2.3162, 2.5676], + device='cuda:1'), covar=tensor([0.1007, 0.3081, 0.0501, 0.1028, 0.1674, 0.1393, 0.2591, 0.2646], + device='cuda:1'), in_proj_covar=tensor([0.0345, 0.0374, 0.0335, 0.0270, 0.0365, 0.0356, 0.0355, 0.0324], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 18:17:12,518 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44443.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:17:22,463 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8743, 3.1868, 2.7651, 2.3509, 2.7551, 3.1179, 2.9943, 3.0295], + device='cuda:1'), covar=tensor([0.0296, 0.0289, 0.0288, 0.0517, 0.0355, 0.0238, 0.0211, 0.0246], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0087, 0.0092, 0.0095, 0.0098, 0.0077, 0.0075, 0.0077], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 18:17:56,863 INFO [train.py:892] (1/4) Epoch 24, batch 1800, loss[loss=0.1924, simple_loss=0.258, pruned_loss=0.06342, over 19698.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2503, pruned_loss=0.05067, over 3949828.33 frames. ], batch size: 315, lr: 6.48e-03, grad_scale: 8.0 +2023-03-28 18:18:42,772 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44491.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:19:13,699 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1088, 3.4131, 2.7823, 2.4816, 2.8808, 3.3962, 3.1509, 3.2238], + device='cuda:1'), covar=tensor([0.0285, 0.0209, 0.0280, 0.0471, 0.0327, 0.0209, 0.0226, 0.0188], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0087, 0.0093, 0.0096, 0.0098, 0.0077, 0.0076, 0.0077], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 18:19:30,403 INFO [train.py:892] (1/4) Epoch 24, batch 1850, loss[loss=0.1833, simple_loss=0.2716, pruned_loss=0.04752, over 19813.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2544, pruned_loss=0.05163, over 3947406.99 frames. ], batch size: 57, lr: 6.48e-03, grad_scale: 8.0 +2023-03-28 18:20:24,351 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 4.242e+02 4.967e+02 6.010e+02 1.010e+03, threshold=9.934e+02, percent-clipped=1.0 +2023-03-28 18:20:24,389 INFO [train.py:892] (1/4) Epoch 25, batch 0, loss[loss=0.1603, simple_loss=0.2297, pruned_loss=0.04543, over 19765.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2297, pruned_loss=0.04543, over 19765.00 frames. 
], batch size: 152, lr: 6.35e-03, grad_scale: 8.0 +2023-03-28 18:20:24,389 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 18:20:47,763 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5553, 4.0186, 3.8772, 3.9080, 3.9415, 3.9075, 3.8267, 3.5843], + device='cuda:1'), covar=tensor([0.2184, 0.1246, 0.1593, 0.1311, 0.1237, 0.0907, 0.1747, 0.2412], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0310, 0.0354, 0.0283, 0.0264, 0.0261, 0.0340, 0.0370], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:20:53,332 INFO [train.py:926] (1/4) Epoch 25, validation: loss=0.1751, simple_loss=0.2485, pruned_loss=0.05079, over 2883724.00 frames. +2023-03-28 18:20:53,333 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 18:21:07,842 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44527.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:21:57,579 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44550.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:22:06,351 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.09 vs. limit=5.0 +2023-03-28 18:22:31,265 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44565.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:22:45,085 INFO [train.py:892] (1/4) Epoch 25, batch 50, loss[loss=0.1559, simple_loss=0.2311, pruned_loss=0.04033, over 19828.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2458, pruned_loss=0.0499, over 891254.51 frames. ], batch size: 57, lr: 6.34e-03, grad_scale: 8.0 +2023-03-28 18:23:23,632 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44588.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:23:56,839 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1520, 5.3611, 5.6032, 5.4081, 5.4082, 5.0726, 5.2716, 5.1862], + device='cuda:1'), covar=tensor([0.1349, 0.1357, 0.0837, 0.1065, 0.0687, 0.0860, 0.1836, 0.1873], + device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0311, 0.0356, 0.0284, 0.0266, 0.0263, 0.0340, 0.0371], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:24:18,610 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44611.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:24:24,385 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44613.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:24:29,386 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44615.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:24:45,077 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.512e+02 3.985e+02 4.536e+02 5.355e+02 8.886e+02, threshold=9.072e+02, percent-clipped=0.0 +2023-03-28 18:24:45,149 INFO [train.py:892] (1/4) Epoch 25, batch 100, loss[loss=0.1582, simple_loss=0.2309, pruned_loss=0.04277, over 19647.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2439, pruned_loss=0.04805, over 1570468.50 frames. ], batch size: 69, lr: 6.34e-03, grad_scale: 8.0 +2023-03-28 18:26:23,568 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.85 vs. 
limit=5.0 +2023-03-28 18:26:30,257 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44663.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:26:47,984 INFO [train.py:892] (1/4) Epoch 25, batch 150, loss[loss=0.2147, simple_loss=0.2839, pruned_loss=0.07277, over 19753.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2456, pruned_loss=0.04852, over 2096198.45 frames. ], batch size: 321, lr: 6.33e-03, grad_scale: 8.0 +2023-03-28 18:26:51,076 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0519, 3.9751, 3.9516, 3.6617, 4.1367, 2.8695, 3.3291, 1.7684], + device='cuda:1'), covar=tensor([0.0402, 0.0324, 0.0306, 0.0329, 0.0311, 0.1187, 0.1101, 0.2167], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0144, 0.0112, 0.0132, 0.0116, 0.0132, 0.0142, 0.0125], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:28:52,026 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 4.007e+02 4.679e+02 6.121e+02 1.422e+03, threshold=9.359e+02, percent-clipped=5.0 +2023-03-28 18:28:52,052 INFO [train.py:892] (1/4) Epoch 25, batch 200, loss[loss=0.1721, simple_loss=0.2528, pruned_loss=0.04571, over 19946.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2471, pruned_loss=0.04835, over 2507567.80 frames. ], batch size: 52, lr: 6.33e-03, grad_scale: 8.0 +2023-03-28 18:29:12,886 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-28 18:29:36,187 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8451, 3.0385, 3.0697, 3.0863, 2.8449, 3.0666, 2.8869, 3.0904], + device='cuda:1'), covar=tensor([0.0325, 0.0409, 0.0279, 0.0242, 0.0361, 0.0278, 0.0332, 0.0360], + device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0071, 0.0073, 0.0068, 0.0081, 0.0074, 0.0091, 0.0066], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 18:30:53,948 INFO [train.py:892] (1/4) Epoch 25, batch 250, loss[loss=0.1806, simple_loss=0.253, pruned_loss=0.05411, over 19735.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2488, pruned_loss=0.04895, over 2824812.05 frames. 
], batch size: 134, lr: 6.33e-03, grad_scale: 8.0 +2023-03-28 18:31:09,241 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5644, 4.2787, 4.3334, 4.5593, 4.1715, 4.6479, 4.7005, 4.8494], + device='cuda:1'), covar=tensor([0.0659, 0.0352, 0.0438, 0.0334, 0.0754, 0.0434, 0.0379, 0.0275], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0170, 0.0193, 0.0168, 0.0168, 0.0149, 0.0146, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 18:31:41,043 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6650, 2.2360, 2.3540, 2.8534, 3.2348, 3.3227, 3.2457, 3.4284], + device='cuda:1'), covar=tensor([0.0937, 0.1515, 0.1340, 0.0707, 0.0432, 0.0364, 0.0451, 0.0348], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0167, 0.0172, 0.0146, 0.0128, 0.0125, 0.0118, 0.0110], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 18:32:57,521 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.592e+02 3.844e+02 4.514e+02 5.418e+02 9.178e+02, threshold=9.028e+02, percent-clipped=0.0 +2023-03-28 18:32:57,546 INFO [train.py:892] (1/4) Epoch 25, batch 300, loss[loss=0.1897, simple_loss=0.2607, pruned_loss=0.05937, over 19821.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2491, pruned_loss=0.04938, over 3074193.14 frames. ], batch size: 128, lr: 6.32e-03, grad_scale: 8.0 +2023-03-28 18:33:47,874 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2049, 4.8378, 4.8837, 5.2249, 4.8404, 5.4412, 5.3070, 5.4996], + device='cuda:1'), covar=tensor([0.0645, 0.0402, 0.0417, 0.0349, 0.0605, 0.0384, 0.0405, 0.0295], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0171, 0.0194, 0.0169, 0.0169, 0.0150, 0.0146, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 18:35:01,678 INFO [train.py:892] (1/4) Epoch 25, batch 350, loss[loss=0.1644, simple_loss=0.2318, pruned_loss=0.04845, over 19840.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2491, pruned_loss=0.0497, over 3268288.68 frames. ], batch size: 160, lr: 6.32e-03, grad_scale: 8.0 +2023-03-28 18:35:28,946 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44883.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:36:26,453 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:36:54,318 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7445, 4.8605, 5.1927, 4.9846, 5.0135, 4.6548, 4.8723, 4.7253], + device='cuda:1'), covar=tensor([0.1558, 0.1368, 0.0816, 0.1142, 0.0785, 0.0804, 0.1829, 0.1971], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0309, 0.0354, 0.0285, 0.0264, 0.0262, 0.0339, 0.0370], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:36:57,342 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-28 18:37:01,654 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.886e+02 4.144e+02 4.807e+02 5.961e+02 1.159e+03, threshold=9.615e+02, percent-clipped=3.0 +2023-03-28 18:37:01,684 INFO [train.py:892] (1/4) Epoch 25, batch 400, loss[loss=0.1839, simple_loss=0.2747, pruned_loss=0.04656, over 19875.00 frames. 
], tot_loss[loss=0.1752, simple_loss=0.2502, pruned_loss=0.05015, over 3419476.85 frames. ], batch size: 53, lr: 6.32e-03, grad_scale: 8.0 +2023-03-28 18:38:49,493 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2918, 2.3233, 2.4676, 2.4100, 2.3234, 2.4629, 2.2960, 2.4912], + device='cuda:1'), covar=tensor([0.0357, 0.0323, 0.0313, 0.0270, 0.0376, 0.0308, 0.0407, 0.0275], + device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0071, 0.0073, 0.0067, 0.0081, 0.0074, 0.0092, 0.0066], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 18:39:03,916 INFO [train.py:892] (1/4) Epoch 25, batch 450, loss[loss=0.1901, simple_loss=0.2617, pruned_loss=0.05925, over 19685.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2498, pruned_loss=0.0502, over 3536993.32 frames. ], batch size: 75, lr: 6.31e-03, grad_scale: 8.0 +2023-03-28 18:40:59,620 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.643e+02 4.015e+02 4.807e+02 5.728e+02 9.598e+02, threshold=9.613e+02, percent-clipped=0.0 +2023-03-28 18:40:59,651 INFO [train.py:892] (1/4) Epoch 25, batch 500, loss[loss=0.1819, simple_loss=0.2568, pruned_loss=0.05347, over 19900.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2498, pruned_loss=0.05017, over 3628190.37 frames. ], batch size: 71, lr: 6.31e-03, grad_scale: 8.0 +2023-03-28 18:41:11,546 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8965, 4.5423, 4.6194, 4.3625, 4.8108, 3.2693, 3.8974, 2.3059], + device='cuda:1'), covar=tensor([0.0172, 0.0201, 0.0131, 0.0183, 0.0136, 0.0880, 0.0836, 0.1531], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0144, 0.0111, 0.0132, 0.0117, 0.0132, 0.0142, 0.0125], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:42:20,421 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45056.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:42:52,473 INFO [train.py:892] (1/4) Epoch 25, batch 550, loss[loss=0.1673, simple_loss=0.2444, pruned_loss=0.0451, over 19845.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2504, pruned_loss=0.05061, over 3699637.73 frames. 
], batch size: 109, lr: 6.31e-03, grad_scale: 8.0 +2023-03-28 18:43:06,726 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3073, 3.1738, 3.5970, 2.8566, 3.6773, 3.0275, 3.1747, 3.5503], + device='cuda:1'), covar=tensor([0.0667, 0.0429, 0.0532, 0.0746, 0.0304, 0.0430, 0.0659, 0.0371], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0079, 0.0078, 0.0106, 0.0074, 0.0075, 0.0073, 0.0066], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 18:43:21,231 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8392, 5.1584, 5.1842, 5.0994, 4.8187, 5.1562, 4.6178, 4.6538], + device='cuda:1'), covar=tensor([0.0514, 0.0476, 0.0545, 0.0444, 0.0624, 0.0573, 0.0708, 0.1112], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0262, 0.0282, 0.0243, 0.0246, 0.0236, 0.0252, 0.0298], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:43:21,376 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45081.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:44:44,878 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45117.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 18:44:51,760 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.460e+02 3.947e+02 4.547e+02 5.526e+02 8.636e+02, threshold=9.094e+02, percent-clipped=0.0 +2023-03-28 18:44:51,789 INFO [train.py:892] (1/4) Epoch 25, batch 600, loss[loss=0.2504, simple_loss=0.3269, pruned_loss=0.08698, over 19268.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2508, pruned_loss=0.05093, over 3755924.81 frames. ], batch size: 483, lr: 6.30e-03, grad_scale: 8.0 +2023-03-28 18:45:23,784 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4194, 4.1695, 4.2641, 3.9744, 4.3821, 3.1136, 3.7085, 2.0912], + device='cuda:1'), covar=tensor([0.0171, 0.0209, 0.0120, 0.0180, 0.0132, 0.0961, 0.0697, 0.1452], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0143, 0.0111, 0.0131, 0.0117, 0.0131, 0.0140, 0.0125], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:45:40,588 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2176, 3.1055, 3.4582, 2.5256, 3.5438, 3.0199, 3.0616, 3.4753], + device='cuda:1'), covar=tensor([0.0615, 0.0447, 0.0547, 0.0841, 0.0313, 0.0402, 0.0508, 0.0305], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0079, 0.0078, 0.0106, 0.0074, 0.0075, 0.0073, 0.0065], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 18:45:45,413 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45142.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:46:55,707 INFO [train.py:892] (1/4) Epoch 25, batch 650, loss[loss=0.154, simple_loss=0.2277, pruned_loss=0.04014, over 19830.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2502, pruned_loss=0.05041, over 3797065.36 frames. 
], batch size: 166, lr: 6.30e-03, grad_scale: 8.0 +2023-03-28 18:47:25,401 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45183.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:48:24,908 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45206.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:48:58,096 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.622e+02 4.071e+02 4.790e+02 5.596e+02 1.238e+03, threshold=9.579e+02, percent-clipped=2.0 +2023-03-28 18:48:58,125 INFO [train.py:892] (1/4) Epoch 25, batch 700, loss[loss=0.1562, simple_loss=0.2246, pruned_loss=0.04395, over 19770.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2501, pruned_loss=0.0503, over 3830237.20 frames. ], batch size: 130, lr: 6.30e-03, grad_scale: 8.0 +2023-03-28 18:49:21,634 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45231.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:50:14,563 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45254.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:50:54,524 INFO [train.py:892] (1/4) Epoch 25, batch 750, loss[loss=0.1838, simple_loss=0.2536, pruned_loss=0.05699, over 19814.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2501, pruned_loss=0.05025, over 3856933.79 frames. ], batch size: 181, lr: 6.29e-03, grad_scale: 8.0 +2023-03-28 18:51:13,695 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7470, 2.8594, 4.0957, 3.2272, 3.4684, 3.2760, 2.3470, 2.4505], + device='cuda:1'), covar=tensor([0.1017, 0.2876, 0.0553, 0.0923, 0.1577, 0.1404, 0.2411, 0.2605], + device='cuda:1'), in_proj_covar=tensor([0.0345, 0.0376, 0.0337, 0.0271, 0.0366, 0.0358, 0.0356, 0.0326], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 18:52:49,367 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.772e+02 4.659e+02 5.559e+02 8.961e+02, threshold=9.318e+02, percent-clipped=0.0 +2023-03-28 18:52:49,395 INFO [train.py:892] (1/4) Epoch 25, batch 800, loss[loss=0.1603, simple_loss=0.2454, pruned_loss=0.03758, over 19762.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2498, pruned_loss=0.04972, over 3877655.30 frames. ], batch size: 49, lr: 6.29e-03, grad_scale: 8.0 +2023-03-28 18:53:53,902 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45348.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 18:54:43,385 INFO [train.py:892] (1/4) Epoch 25, batch 850, loss[loss=0.1668, simple_loss=0.2448, pruned_loss=0.04441, over 19771.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2503, pruned_loss=0.04971, over 3894819.50 frames. 
], batch size: 226, lr: 6.29e-03, grad_scale: 8.0 +2023-03-28 18:55:17,011 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0271, 5.2225, 5.4599, 5.2623, 5.2929, 4.9834, 5.1795, 5.0099], + device='cuda:1'), covar=tensor([0.1271, 0.1514, 0.0809, 0.1151, 0.0651, 0.0827, 0.1702, 0.1853], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0312, 0.0355, 0.0287, 0.0263, 0.0263, 0.0339, 0.0372], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:56:15,250 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45409.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 18:56:20,853 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45412.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 18:56:43,413 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.654e+02 4.187e+02 5.025e+02 5.805e+02 1.375e+03, threshold=1.005e+03, percent-clipped=1.0 +2023-03-28 18:56:43,470 INFO [train.py:892] (1/4) Epoch 25, batch 900, loss[loss=0.153, simple_loss=0.2283, pruned_loss=0.03892, over 19764.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2507, pruned_loss=0.04975, over 3904133.23 frames. ], batch size: 122, lr: 6.28e-03, grad_scale: 8.0 +2023-03-28 18:57:24,662 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45437.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:58:45,896 INFO [train.py:892] (1/4) Epoch 25, batch 950, loss[loss=0.1576, simple_loss=0.2411, pruned_loss=0.03701, over 19893.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2516, pruned_loss=0.04985, over 3914659.16 frames. ], batch size: 91, lr: 6.28e-03, grad_scale: 8.0 +2023-03-28 18:59:32,023 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2290, 4.8006, 4.9019, 4.6411, 5.1246, 3.2743, 4.1692, 2.6679], + device='cuda:1'), covar=tensor([0.0147, 0.0187, 0.0115, 0.0172, 0.0128, 0.0888, 0.0829, 0.1381], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0143, 0.0111, 0.0131, 0.0117, 0.0132, 0.0142, 0.0125], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 18:59:52,183 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7073, 4.9804, 5.0335, 4.9117, 4.6750, 4.9965, 4.4856, 4.4807], + device='cuda:1'), covar=tensor([0.0474, 0.0474, 0.0481, 0.0439, 0.0621, 0.0489, 0.0678, 0.1043], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0266, 0.0284, 0.0245, 0.0248, 0.0237, 0.0255, 0.0300], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:00:40,934 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.498e+02 3.760e+02 4.893e+02 5.936e+02 1.409e+03, threshold=9.787e+02, percent-clipped=1.0 +2023-03-28 19:00:40,967 INFO [train.py:892] (1/4) Epoch 25, batch 1000, loss[loss=0.157, simple_loss=0.2255, pruned_loss=0.04428, over 19803.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2527, pruned_loss=0.05086, over 3921618.57 frames. ], batch size: 149, lr: 6.28e-03, grad_scale: 8.0 +2023-03-28 19:01:39,671 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-28 19:02:41,449 INFO [train.py:892] (1/4) Epoch 25, batch 1050, loss[loss=0.171, simple_loss=0.2485, pruned_loss=0.04677, over 19737.00 frames. 
], tot_loss[loss=0.176, simple_loss=0.2512, pruned_loss=0.05046, over 3928647.98 frames. ], batch size: 99, lr: 6.27e-03, grad_scale: 8.0 +2023-03-28 19:03:54,934 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5298, 2.6027, 2.6816, 2.1315, 2.7879, 2.3025, 2.6517, 2.7006], + device='cuda:1'), covar=tensor([0.0496, 0.0480, 0.0465, 0.0864, 0.0361, 0.0489, 0.0450, 0.0359], + device='cuda:1'), in_proj_covar=tensor([0.0071, 0.0079, 0.0077, 0.0105, 0.0073, 0.0075, 0.0072, 0.0066], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 19:03:57,041 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2040, 3.1213, 1.8846, 3.7932, 3.4680, 3.7565, 3.7913, 3.0076], + device='cuda:1'), covar=tensor([0.0600, 0.0639, 0.1613, 0.0569, 0.0557, 0.0435, 0.0586, 0.0754], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0137, 0.0140, 0.0145, 0.0127, 0.0127, 0.0141, 0.0140], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:04:14,664 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4304, 4.4100, 4.7817, 4.5790, 4.6722, 4.2170, 4.5431, 4.3533], + device='cuda:1'), covar=tensor([0.1471, 0.1579, 0.0925, 0.1335, 0.0899, 0.1070, 0.1996, 0.2189], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0311, 0.0354, 0.0287, 0.0264, 0.0265, 0.0338, 0.0371], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:04:40,789 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 3.919e+02 4.482e+02 5.377e+02 1.345e+03, threshold=8.964e+02, percent-clipped=5.0 +2023-03-28 19:04:40,819 INFO [train.py:892] (1/4) Epoch 25, batch 1100, loss[loss=0.1339, simple_loss=0.2131, pruned_loss=0.02739, over 19822.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2518, pruned_loss=0.05056, over 3931142.49 frames. ], batch size: 103, lr: 6.27e-03, grad_scale: 8.0 +2023-03-28 19:05:25,723 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4926, 4.5330, 4.8253, 4.6604, 4.7630, 4.2532, 4.5674, 4.3942], + device='cuda:1'), covar=tensor([0.1291, 0.1438, 0.0882, 0.1162, 0.0742, 0.0974, 0.1806, 0.1907], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0313, 0.0355, 0.0288, 0.0265, 0.0266, 0.0340, 0.0371], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:06:36,328 INFO [train.py:892] (1/4) Epoch 25, batch 1150, loss[loss=0.1681, simple_loss=0.2553, pruned_loss=0.04041, over 19879.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2512, pruned_loss=0.04998, over 3935086.37 frames. 
], batch size: 52, lr: 6.27e-03, grad_scale: 8.0 +2023-03-28 19:07:40,568 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3718, 4.1433, 4.2134, 3.9778, 4.3662, 3.0698, 3.6808, 2.0816], + device='cuda:1'), covar=tensor([0.0194, 0.0213, 0.0138, 0.0185, 0.0129, 0.0962, 0.0712, 0.1546], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0143, 0.0110, 0.0130, 0.0117, 0.0131, 0.0141, 0.0124], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:07:52,547 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45702.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:07:58,070 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45704.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 19:07:59,929 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0022, 5.2889, 5.3393, 5.2324, 4.9281, 5.3429, 4.7746, 4.8000], + device='cuda:1'), covar=tensor([0.0429, 0.0454, 0.0462, 0.0397, 0.0557, 0.0482, 0.0708, 0.0941], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0265, 0.0283, 0.0247, 0.0249, 0.0236, 0.0253, 0.0299], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:08:19,185 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45712.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 19:08:39,163 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+02 4.389e+02 4.900e+02 5.957e+02 9.190e+02, threshold=9.801e+02, percent-clipped=1.0 +2023-03-28 19:08:39,201 INFO [train.py:892] (1/4) Epoch 25, batch 1200, loss[loss=0.1556, simple_loss=0.2326, pruned_loss=0.03925, over 19812.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2505, pruned_loss=0.04997, over 3939601.18 frames. ], batch size: 82, lr: 6.26e-03, grad_scale: 8.0 +2023-03-28 19:09:16,145 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45737.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:10:08,267 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45760.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:10:13,979 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45763.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:10:31,171 INFO [train.py:892] (1/4) Epoch 25, batch 1250, loss[loss=0.1869, simple_loss=0.258, pruned_loss=0.05788, over 19857.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2488, pruned_loss=0.04905, over 3942463.14 frames. 
], batch size: 197, lr: 6.26e-03, grad_scale: 8.0 +2023-03-28 19:10:38,098 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45774.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:11:03,085 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45785.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:11:55,328 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9406, 2.9746, 4.4061, 3.2791, 3.6079, 3.4326, 2.4109, 2.5525], + device='cuda:1'), covar=tensor([0.0951, 0.2840, 0.0485, 0.1022, 0.1673, 0.1347, 0.2428, 0.2642], + device='cuda:1'), in_proj_covar=tensor([0.0346, 0.0378, 0.0339, 0.0274, 0.0368, 0.0358, 0.0357, 0.0329], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 19:12:24,935 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.740e+02 4.060e+02 4.817e+02 5.850e+02 1.056e+03, threshold=9.634e+02, percent-clipped=3.0 +2023-03-28 19:12:24,990 INFO [train.py:892] (1/4) Epoch 25, batch 1300, loss[loss=0.1488, simple_loss=0.2165, pruned_loss=0.04056, over 19837.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.248, pruned_loss=0.04886, over 3944166.66 frames. ], batch size: 143, lr: 6.26e-03, grad_scale: 8.0 +2023-03-28 19:13:01,711 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45835.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:14:00,849 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45859.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:14:29,531 INFO [train.py:892] (1/4) Epoch 25, batch 1350, loss[loss=0.1682, simple_loss=0.2369, pruned_loss=0.04975, over 19805.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2501, pruned_loss=0.04995, over 3944361.07 frames. ], batch size: 181, lr: 6.25e-03, grad_scale: 8.0 +2023-03-28 19:16:11,682 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9119, 3.2146, 3.3990, 3.8587, 2.7254, 3.1574, 2.4964, 2.4542], + device='cuda:1'), covar=tensor([0.0552, 0.1906, 0.0948, 0.0434, 0.1895, 0.0876, 0.1404, 0.1670], + device='cuda:1'), in_proj_covar=tensor([0.0237, 0.0333, 0.0243, 0.0194, 0.0243, 0.0203, 0.0213, 0.0214], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 19:16:24,352 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45920.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:16:25,388 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.758e+02 4.142e+02 4.668e+02 5.534e+02 8.223e+02, threshold=9.336e+02, percent-clipped=0.0 +2023-03-28 19:16:25,417 INFO [train.py:892] (1/4) Epoch 25, batch 1400, loss[loss=0.1781, simple_loss=0.2491, pruned_loss=0.0536, over 19872.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2483, pruned_loss=0.0494, over 3947020.15 frames. 
], batch size: 138, lr: 6.25e-03, grad_scale: 16.0 +2023-03-28 19:17:21,955 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4247, 2.1777, 3.3277, 2.7983, 3.3933, 3.4339, 3.1382, 3.2797], + device='cuda:1'), covar=tensor([0.0765, 0.1045, 0.0135, 0.0496, 0.0148, 0.0225, 0.0232, 0.0191], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0102, 0.0084, 0.0152, 0.0081, 0.0095, 0.0090, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:18:21,742 INFO [train.py:892] (1/4) Epoch 25, batch 1450, loss[loss=0.1736, simple_loss=0.2519, pruned_loss=0.04766, over 19667.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2506, pruned_loss=0.04999, over 3946852.99 frames. ], batch size: 58, lr: 6.25e-03, grad_scale: 16.0 +2023-03-28 19:18:38,264 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8868, 2.3469, 2.7534, 3.1396, 3.5418, 3.8324, 3.7579, 3.8031], + device='cuda:1'), covar=tensor([0.0963, 0.1795, 0.1464, 0.0751, 0.0436, 0.0309, 0.0413, 0.0443], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0170, 0.0176, 0.0149, 0.0129, 0.0125, 0.0117, 0.0113], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 19:18:55,418 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9851, 2.2636, 2.8593, 3.2743, 3.6524, 3.9662, 3.9294, 3.9397], + device='cuda:1'), covar=tensor([0.0934, 0.2090, 0.1452, 0.0693, 0.0420, 0.0286, 0.0364, 0.0370], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0170, 0.0176, 0.0149, 0.0129, 0.0125, 0.0117, 0.0113], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 19:19:53,542 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46004.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 19:20:30,239 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.575e+02 3.735e+02 4.746e+02 5.527e+02 8.310e+02, threshold=9.492e+02, percent-clipped=0.0 +2023-03-28 19:20:30,271 INFO [train.py:892] (1/4) Epoch 25, batch 1500, loss[loss=0.1713, simple_loss=0.2478, pruned_loss=0.04745, over 19634.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2521, pruned_loss=0.05046, over 3943058.58 frames. ], batch size: 72, lr: 6.24e-03, grad_scale: 16.0 +2023-03-28 19:21:43,816 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46052.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 19:21:57,570 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46058.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:22:23,750 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4218, 4.5790, 2.7721, 4.8175, 5.0161, 2.0719, 4.1367, 3.5067], + device='cuda:1'), covar=tensor([0.0642, 0.0660, 0.2719, 0.0625, 0.0500, 0.2785, 0.1073, 0.0956], + device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0250, 0.0227, 0.0264, 0.0243, 0.0201, 0.0235, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 19:22:29,076 INFO [train.py:892] (1/4) Epoch 25, batch 1550, loss[loss=0.1985, simple_loss=0.2778, pruned_loss=0.05962, over 19888.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2513, pruned_loss=0.0497, over 3944957.07 frames. 
], batch size: 62, lr: 6.24e-03, grad_scale: 16.0 +2023-03-28 19:24:26,940 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.641e+02 4.209e+02 5.084e+02 6.041e+02 9.841e+02, threshold=1.017e+03, percent-clipped=2.0 +2023-03-28 19:24:26,967 INFO [train.py:892] (1/4) Epoch 25, batch 1600, loss[loss=0.172, simple_loss=0.2496, pruned_loss=0.04718, over 19728.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2515, pruned_loss=0.04985, over 3946513.28 frames. ], batch size: 61, lr: 6.24e-03, grad_scale: 16.0 +2023-03-28 19:24:49,232 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46130.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:26:20,500 INFO [train.py:892] (1/4) Epoch 25, batch 1650, loss[loss=0.1653, simple_loss=0.2548, pruned_loss=0.03785, over 19929.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2504, pruned_loss=0.04897, over 3947347.68 frames. ], batch size: 49, lr: 6.23e-03, grad_scale: 16.0 +2023-03-28 19:27:37,501 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0986, 3.2551, 5.0710, 4.2395, 4.7156, 4.9117, 4.8163, 4.5203], + device='cuda:1'), covar=tensor([0.0377, 0.0746, 0.0079, 0.0920, 0.0118, 0.0201, 0.0142, 0.0141], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0102, 0.0084, 0.0153, 0.0081, 0.0095, 0.0090, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:28:09,028 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46215.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:28:20,989 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.501e+02 4.063e+02 5.043e+02 6.372e+02 1.431e+03, threshold=1.009e+03, percent-clipped=3.0 +2023-03-28 19:28:21,023 INFO [train.py:892] (1/4) Epoch 25, batch 1700, loss[loss=0.1465, simple_loss=0.2196, pruned_loss=0.03671, over 19735.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2496, pruned_loss=0.04871, over 3948798.74 frames. ], batch size: 118, lr: 6.23e-03, grad_scale: 16.0 +2023-03-28 19:30:18,594 INFO [train.py:892] (1/4) Epoch 25, batch 1750, loss[loss=0.158, simple_loss=0.2348, pruned_loss=0.04059, over 19802.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2494, pruned_loss=0.04897, over 3948471.48 frames. ], batch size: 74, lr: 6.23e-03, grad_scale: 16.0 +2023-03-28 19:30:29,027 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46276.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:32:03,809 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.718e+02 4.184e+02 4.852e+02 5.831e+02 2.262e+03, threshold=9.705e+02, percent-clipped=1.0 +2023-03-28 19:32:03,853 INFO [train.py:892] (1/4) Epoch 25, batch 1800, loss[loss=0.1441, simple_loss=0.2116, pruned_loss=0.03834, over 19814.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2499, pruned_loss=0.04935, over 3947607.56 frames. ], batch size: 148, lr: 6.22e-03, grad_scale: 16.0 +2023-03-28 19:32:32,109 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46337.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:33:14,989 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46358.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:33:38,583 INFO [train.py:892] (1/4) Epoch 25, batch 1850, loss[loss=0.1865, simple_loss=0.2701, pruned_loss=0.05147, over 19684.00 frames. 
], tot_loss[loss=0.1742, simple_loss=0.2504, pruned_loss=0.04903, over 3947861.53 frames. ], batch size: 55, lr: 6.22e-03, grad_scale: 16.0 +2023-03-28 19:34:37,788 INFO [train.py:892] (1/4) Epoch 26, batch 0, loss[loss=0.2547, simple_loss=0.3213, pruned_loss=0.09401, over 19465.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3213, pruned_loss=0.09401, over 19465.00 frames. ], batch size: 396, lr: 6.10e-03, grad_scale: 16.0 +2023-03-28 19:34:37,788 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 19:34:56,956 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3707, 3.2529, 3.7193, 3.0592, 3.9960, 3.1936, 3.3099, 3.9028], + device='cuda:1'), covar=tensor([0.0879, 0.0420, 0.0640, 0.0702, 0.0290, 0.0427, 0.0538, 0.0298], + device='cuda:1'), in_proj_covar=tensor([0.0071, 0.0079, 0.0077, 0.0105, 0.0073, 0.0076, 0.0073, 0.0066], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 19:35:05,743 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0478, 3.1232, 3.1160, 3.0636, 2.9155, 3.0741, 2.9628, 3.2661], + device='cuda:1'), covar=tensor([0.0253, 0.0317, 0.0315, 0.0286, 0.0353, 0.0223, 0.0303, 0.0279], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0073, 0.0075, 0.0070, 0.0083, 0.0076, 0.0094, 0.0067], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 19:35:16,909 INFO [train.py:926] (1/4) Epoch 26, validation: loss=0.176, simple_loss=0.2485, pruned_loss=0.05179, over 2883724.00 frames. +2023-03-28 19:35:16,911 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 19:35:36,322 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7684, 3.6857, 3.6834, 3.4446, 3.7832, 2.8632, 3.1322, 1.8690], + device='cuda:1'), covar=tensor([0.0228, 0.0223, 0.0154, 0.0201, 0.0144, 0.0976, 0.0625, 0.1557], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0144, 0.0111, 0.0131, 0.0118, 0.0132, 0.0141, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:36:01,570 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9003, 2.5669, 3.0645, 2.7311, 3.1862, 3.1229, 3.8399, 4.1118], + device='cuda:1'), covar=tensor([0.0652, 0.1924, 0.1585, 0.2116, 0.1703, 0.1580, 0.0568, 0.0541], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0237, 0.0260, 0.0249, 0.0289, 0.0250, 0.0225, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:36:29,097 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46406.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:37:07,543 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.563e+02 3.816e+02 4.430e+02 5.059e+02 8.683e+02, threshold=8.861e+02, percent-clipped=0.0 +2023-03-28 19:37:18,516 INFO [train.py:892] (1/4) Epoch 26, batch 50, loss[loss=0.1416, simple_loss=0.2135, pruned_loss=0.03484, over 19869.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2455, pruned_loss=0.049, over 891669.19 frames. 
], batch size: 92, lr: 6.09e-03, grad_scale: 16.0 +2023-03-28 19:37:28,833 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46430.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:39:07,517 INFO [train.py:892] (1/4) Epoch 26, batch 100, loss[loss=0.1823, simple_loss=0.2492, pruned_loss=0.05763, over 19848.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2455, pruned_loss=0.04718, over 1569937.30 frames. ], batch size: 190, lr: 6.09e-03, grad_scale: 16.0 +2023-03-28 19:39:12,053 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46478.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:40:31,572 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46515.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:40:42,634 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 4.177e+02 4.877e+02 5.600e+02 1.186e+03, threshold=9.755e+02, percent-clipped=5.0 +2023-03-28 19:40:54,309 INFO [train.py:892] (1/4) Epoch 26, batch 150, loss[loss=0.1532, simple_loss=0.2332, pruned_loss=0.03662, over 19809.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2479, pruned_loss=0.04896, over 2097246.52 frames. ], batch size: 82, lr: 6.09e-03, grad_scale: 16.0 +2023-03-28 19:42:14,629 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46563.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:42:42,366 INFO [train.py:892] (1/4) Epoch 26, batch 200, loss[loss=0.1486, simple_loss=0.2299, pruned_loss=0.03367, over 19877.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.248, pruned_loss=0.04831, over 2507829.45 frames. ], batch size: 84, lr: 6.08e-03, grad_scale: 16.0 +2023-03-28 19:43:25,386 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46595.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:43:42,643 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46602.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:44:24,556 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.062e+02 3.750e+02 4.526e+02 5.438e+02 1.075e+03, threshold=9.053e+02, percent-clipped=3.0 +2023-03-28 19:44:31,776 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0690, 3.6820, 3.8357, 4.0464, 3.7542, 4.0989, 4.0808, 4.2685], + device='cuda:1'), covar=tensor([0.0827, 0.0606, 0.0742, 0.0544, 0.0930, 0.0698, 0.0757, 0.0531], + device='cuda:1'), in_proj_covar=tensor([0.0149, 0.0174, 0.0196, 0.0170, 0.0169, 0.0152, 0.0147, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 19:44:34,759 INFO [train.py:892] (1/4) Epoch 26, batch 250, loss[loss=0.1628, simple_loss=0.2326, pruned_loss=0.04656, over 19815.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2481, pruned_loss=0.04827, over 2827690.16 frames. 
], batch size: 202, lr: 6.08e-03, grad_scale: 16.0 +2023-03-28 19:44:49,644 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46632.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:44:53,945 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0246, 3.8666, 3.8722, 3.6355, 3.9881, 2.9173, 3.3449, 1.8383], + device='cuda:1'), covar=tensor([0.0192, 0.0245, 0.0150, 0.0198, 0.0164, 0.1036, 0.0659, 0.1663], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0142, 0.0111, 0.0130, 0.0117, 0.0132, 0.0140, 0.0125], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:45:47,143 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46656.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 19:46:01,425 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46663.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:46:01,578 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6878, 3.2919, 3.3784, 3.0928, 3.8533, 3.9081, 4.5243, 5.1160], + device='cuda:1'), covar=tensor([0.0464, 0.1474, 0.1495, 0.2176, 0.1443, 0.1236, 0.0537, 0.0370], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0237, 0.0261, 0.0251, 0.0290, 0.0251, 0.0225, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:46:16,968 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6431, 4.5656, 2.8664, 4.8599, 5.1084, 2.2819, 4.3302, 3.7729], + device='cuda:1'), covar=tensor([0.0506, 0.0781, 0.2377, 0.0737, 0.0473, 0.2729, 0.0827, 0.0737], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0249, 0.0226, 0.0265, 0.0244, 0.0201, 0.0236, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 19:46:31,148 INFO [train.py:892] (1/4) Epoch 26, batch 300, loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03762, over 19836.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2475, pruned_loss=0.0479, over 3076186.02 frames. ], batch size: 184, lr: 6.08e-03, grad_scale: 16.0 +2023-03-28 19:46:58,387 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-28 19:48:12,959 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.886e+02 3.778e+02 4.525e+02 5.535e+02 8.157e+02, threshold=9.049e+02, percent-clipped=0.0 +2023-03-28 19:48:23,050 INFO [train.py:892] (1/4) Epoch 26, batch 350, loss[loss=0.1726, simple_loss=0.2465, pruned_loss=0.04934, over 19756.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.249, pruned_loss=0.04815, over 3268650.62 frames. ], batch size: 205, lr: 6.07e-03, grad_scale: 16.0 +2023-03-28 19:50:21,593 INFO [train.py:892] (1/4) Epoch 26, batch 400, loss[loss=0.1635, simple_loss=0.2451, pruned_loss=0.041, over 19660.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2487, pruned_loss=0.04849, over 3418937.34 frames. 
], batch size: 67, lr: 6.07e-03, grad_scale: 16.0 +2023-03-28 19:50:45,588 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.9232, 6.2364, 6.2246, 6.0777, 5.9595, 6.2246, 5.4457, 5.5627], + device='cuda:1'), covar=tensor([0.0365, 0.0340, 0.0433, 0.0380, 0.0518, 0.0475, 0.0687, 0.0950], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0267, 0.0288, 0.0247, 0.0252, 0.0239, 0.0257, 0.0303], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 19:51:41,992 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46810.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:52:09,157 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.488e+02 3.851e+02 4.931e+02 6.030e+02 1.390e+03, threshold=9.862e+02, percent-clipped=3.0 +2023-03-28 19:52:19,777 INFO [train.py:892] (1/4) Epoch 26, batch 450, loss[loss=0.3295, simple_loss=0.3826, pruned_loss=0.1382, over 19163.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2476, pruned_loss=0.0483, over 3537128.41 frames. ], batch size: 452, lr: 6.07e-03, grad_scale: 16.0 +2023-03-28 19:54:06,131 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46871.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:54:16,476 INFO [train.py:892] (1/4) Epoch 26, batch 500, loss[loss=0.1681, simple_loss=0.2362, pruned_loss=0.05006, over 19830.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2495, pruned_loss=0.04915, over 3626286.63 frames. ], batch size: 177, lr: 6.06e-03, grad_scale: 16.0 +2023-03-28 19:56:03,165 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.854e+02 4.348e+02 5.094e+02 6.034e+02 1.006e+03, threshold=1.019e+03, percent-clipped=1.0 +2023-03-28 19:56:14,631 INFO [train.py:892] (1/4) Epoch 26, batch 550, loss[loss=0.178, simple_loss=0.2528, pruned_loss=0.05154, over 19823.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2496, pruned_loss=0.04912, over 3698424.94 frames. ], batch size: 93, lr: 6.06e-03, grad_scale: 16.0 +2023-03-28 19:56:27,401 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46932.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:57:13,235 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46951.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 19:57:28,710 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46958.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:58:07,215 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-03-28 19:58:10,315 INFO [train.py:892] (1/4) Epoch 26, batch 600, loss[loss=0.1939, simple_loss=0.2655, pruned_loss=0.0612, over 19771.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.249, pruned_loss=0.04882, over 3754966.19 frames. ], batch size: 178, lr: 6.06e-03, grad_scale: 16.0 +2023-03-28 19:58:20,951 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46980.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:59:54,802 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 3.904e+02 4.581e+02 5.783e+02 1.259e+03, threshold=9.163e+02, percent-clipped=1.0 +2023-03-28 20:00:06,870 INFO [train.py:892] (1/4) Epoch 26, batch 650, loss[loss=0.1773, simple_loss=0.2514, pruned_loss=0.05163, over 19793.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2487, pruned_loss=0.04854, over 3796792.13 frames. 
], batch size: 162, lr: 6.05e-03, grad_scale: 16.0 +2023-03-28 20:00:42,934 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3039, 4.1411, 4.1365, 3.9014, 4.2832, 2.9354, 3.6301, 2.0587], + device='cuda:1'), covar=tensor([0.0195, 0.0193, 0.0127, 0.0176, 0.0134, 0.1004, 0.0593, 0.1536], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0142, 0.0111, 0.0131, 0.0117, 0.0133, 0.0139, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 20:01:54,632 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4649, 5.6481, 5.6485, 5.6715, 5.4215, 5.6666, 5.0056, 4.7912], + device='cuda:1'), covar=tensor([0.1029, 0.0942, 0.0929, 0.0682, 0.1087, 0.1059, 0.1296, 0.2370], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0270, 0.0290, 0.0250, 0.0254, 0.0240, 0.0259, 0.0306], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 20:02:09,492 INFO [train.py:892] (1/4) Epoch 26, batch 700, loss[loss=0.1468, simple_loss=0.2258, pruned_loss=0.03389, over 19762.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2466, pruned_loss=0.04753, over 3831816.33 frames. ], batch size: 113, lr: 6.05e-03, grad_scale: 16.0 +2023-03-28 20:03:05,568 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0235, 2.0841, 2.4700, 2.8416, 1.8791, 2.5401, 1.9927, 1.9417], + device='cuda:1'), covar=tensor([0.0696, 0.1206, 0.1275, 0.0662, 0.2425, 0.0982, 0.1474, 0.1745], + device='cuda:1'), in_proj_covar=tensor([0.0237, 0.0336, 0.0247, 0.0197, 0.0244, 0.0204, 0.0213, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 20:03:10,763 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6988, 3.7216, 2.2621, 3.9045, 4.0034, 1.9275, 3.3293, 3.2099], + device='cuda:1'), covar=tensor([0.0776, 0.1011, 0.2982, 0.0868, 0.0637, 0.2767, 0.1120, 0.0835], + device='cuda:1'), in_proj_covar=tensor([0.0231, 0.0253, 0.0229, 0.0268, 0.0248, 0.0203, 0.0238, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 20:03:57,429 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.011e+02 3.881e+02 4.556e+02 5.406e+02 9.543e+02, threshold=9.112e+02, percent-clipped=1.0 +2023-03-28 20:04:09,168 INFO [train.py:892] (1/4) Epoch 26, batch 750, loss[loss=0.1525, simple_loss=0.2285, pruned_loss=0.03821, over 19796.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2455, pruned_loss=0.04738, over 3858408.21 frames. ], batch size: 105, lr: 6.05e-03, grad_scale: 16.0 +2023-03-28 20:05:49,734 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47166.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:06:15,294 INFO [train.py:892] (1/4) Epoch 26, batch 800, loss[loss=0.1846, simple_loss=0.2744, pruned_loss=0.04738, over 19522.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2468, pruned_loss=0.04756, over 3876374.78 frames. 
], batch size: 54, lr: 6.04e-03, grad_scale: 16.0 +2023-03-28 20:06:18,971 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2605, 2.3688, 2.4750, 2.3245, 2.3551, 2.3888, 2.4884, 2.5252], + device='cuda:1'), covar=tensor([0.0408, 0.0274, 0.0348, 0.0264, 0.0428, 0.0321, 0.0362, 0.0284], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0073, 0.0076, 0.0069, 0.0084, 0.0076, 0.0094, 0.0067], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 20:06:30,836 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4184, 2.0581, 2.1977, 2.7680, 3.1266, 3.1833, 3.1551, 3.2691], + device='cuda:1'), covar=tensor([0.1032, 0.1694, 0.1462, 0.0734, 0.0472, 0.0360, 0.0378, 0.0363], + device='cuda:1'), in_proj_covar=tensor([0.0157, 0.0168, 0.0176, 0.0149, 0.0131, 0.0126, 0.0118, 0.0113], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 20:08:13,347 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.013e+02 4.034e+02 4.451e+02 5.296e+02 1.110e+03, threshold=8.902e+02, percent-clipped=2.0 +2023-03-28 20:08:25,341 INFO [train.py:892] (1/4) Epoch 26, batch 850, loss[loss=0.1873, simple_loss=0.2609, pruned_loss=0.05686, over 19889.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2472, pruned_loss=0.04807, over 3893832.74 frames. ], batch size: 71, lr: 6.04e-03, grad_scale: 16.0 +2023-03-28 20:09:05,061 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47241.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:09:28,551 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47251.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:09:48,014 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47258.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:10:29,659 INFO [train.py:892] (1/4) Epoch 26, batch 900, loss[loss=0.1744, simple_loss=0.2486, pruned_loss=0.05007, over 19761.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2476, pruned_loss=0.04826, over 3907045.30 frames. ], batch size: 182, lr: 6.04e-03, grad_scale: 16.0 +2023-03-28 20:10:47,091 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9530, 2.7993, 4.5975, 4.0632, 4.5019, 4.5968, 4.4485, 4.2975], + device='cuda:1'), covar=tensor([0.0398, 0.0931, 0.0109, 0.0745, 0.0126, 0.0212, 0.0162, 0.0160], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0102, 0.0085, 0.0152, 0.0082, 0.0096, 0.0089, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 20:11:30,980 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47299.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:11:38,939 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47302.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:11:47,727 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47306.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:12:26,909 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.526e+02 3.904e+02 4.611e+02 5.515e+02 1.040e+03, threshold=9.223e+02, percent-clipped=2.0 +2023-03-28 20:12:38,758 INFO [train.py:892] (1/4) Epoch 26, batch 950, loss[loss=0.1618, simple_loss=0.2362, pruned_loss=0.04368, over 19824.00 frames. 
], tot_loss[loss=0.1719, simple_loss=0.2478, pruned_loss=0.048, over 3917174.86 frames. ], batch size: 184, lr: 6.03e-03, grad_scale: 16.0 +2023-03-28 20:12:43,542 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2639, 2.3723, 2.5148, 2.3438, 2.3579, 2.4018, 2.4588, 2.5517], + device='cuda:1'), covar=tensor([0.0304, 0.0329, 0.0309, 0.0311, 0.0400, 0.0346, 0.0449, 0.0329], + device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0074, 0.0076, 0.0070, 0.0085, 0.0077, 0.0096, 0.0068], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 20:13:25,815 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3852, 2.7014, 4.9024, 4.1093, 4.6677, 4.8330, 4.7570, 4.4804], + device='cuda:1'), covar=tensor([0.0339, 0.1021, 0.0097, 0.1004, 0.0128, 0.0205, 0.0152, 0.0164], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0102, 0.0085, 0.0153, 0.0082, 0.0097, 0.0089, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 20:14:46,439 INFO [train.py:892] (1/4) Epoch 26, batch 1000, loss[loss=0.1458, simple_loss=0.2188, pruned_loss=0.03636, over 19839.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2483, pruned_loss=0.0484, over 3923300.74 frames. ], batch size: 109, lr: 6.03e-03, grad_scale: 16.0 +2023-03-28 20:14:55,264 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47379.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:16:43,932 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.712e+02 3.612e+02 4.402e+02 5.356e+02 9.611e+02, threshold=8.803e+02, percent-clipped=1.0 +2023-03-28 20:16:55,288 INFO [train.py:892] (1/4) Epoch 26, batch 1050, loss[loss=0.1598, simple_loss=0.2365, pruned_loss=0.04159, over 19796.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2483, pruned_loss=0.04827, over 3929671.69 frames. ], batch size: 224, lr: 6.03e-03, grad_scale: 16.0 +2023-03-28 20:17:30,610 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47440.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:17:54,835 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-28 20:18:33,950 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47466.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:18:57,898 INFO [train.py:892] (1/4) Epoch 26, batch 1100, loss[loss=0.1853, simple_loss=0.2569, pruned_loss=0.05686, over 19840.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2481, pruned_loss=0.0481, over 3932680.79 frames. 
], batch size: 58, lr: 6.03e-03, grad_scale: 16.0 +2023-03-28 20:20:11,499 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1652, 3.1409, 1.8328, 3.7735, 3.4431, 3.6824, 3.8021, 2.9428], + device='cuda:1'), covar=tensor([0.0631, 0.0647, 0.1756, 0.0642, 0.0592, 0.0443, 0.0533, 0.0819], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0140, 0.0141, 0.0146, 0.0130, 0.0128, 0.0142, 0.0142], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 20:20:37,062 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47514.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:20:53,328 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.834e+02 3.692e+02 4.407e+02 5.502e+02 8.056e+02, threshold=8.815e+02, percent-clipped=0.0 +2023-03-28 20:21:05,825 INFO [train.py:892] (1/4) Epoch 26, batch 1150, loss[loss=0.1655, simple_loss=0.2348, pruned_loss=0.04805, over 19799.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2473, pruned_loss=0.0483, over 3936288.69 frames. ], batch size: 126, lr: 6.02e-03, grad_scale: 16.0 +2023-03-28 20:23:12,079 INFO [train.py:892] (1/4) Epoch 26, batch 1200, loss[loss=0.1766, simple_loss=0.2557, pruned_loss=0.04878, over 19749.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2498, pruned_loss=0.04942, over 3938368.37 frames. ], batch size: 100, lr: 6.02e-03, grad_scale: 16.0 +2023-03-28 20:23:34,164 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1791, 2.9497, 3.3100, 2.6579, 3.3702, 2.7821, 3.0678, 3.2758], + device='cuda:1'), covar=tensor([0.0525, 0.0474, 0.0549, 0.0750, 0.0339, 0.0474, 0.0475, 0.0328], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0080, 0.0077, 0.0105, 0.0074, 0.0076, 0.0074, 0.0067], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 20:24:05,393 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47597.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:25:05,939 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.537e+02 4.111e+02 4.755e+02 5.836e+02 8.359e+02, threshold=9.510e+02, percent-clipped=0.0 +2023-03-28 20:25:19,218 INFO [train.py:892] (1/4) Epoch 26, batch 1250, loss[loss=0.1733, simple_loss=0.2516, pruned_loss=0.04751, over 19883.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2497, pruned_loss=0.0494, over 3939886.74 frames. ], batch size: 87, lr: 6.02e-03, grad_scale: 16.0 +2023-03-28 20:27:24,958 INFO [train.py:892] (1/4) Epoch 26, batch 1300, loss[loss=0.1722, simple_loss=0.2491, pruned_loss=0.04772, over 19610.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2498, pruned_loss=0.04943, over 3941418.20 frames. ], batch size: 51, lr: 6.01e-03, grad_scale: 16.0 +2023-03-28 20:27:44,231 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
limit=2.0 +2023-03-28 20:28:22,018 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0830, 4.7234, 4.7562, 5.0786, 4.7029, 5.2628, 5.2533, 5.4011], + device='cuda:1'), covar=tensor([0.0634, 0.0391, 0.0471, 0.0337, 0.0657, 0.0399, 0.0405, 0.0315], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0171, 0.0194, 0.0167, 0.0166, 0.0150, 0.0145, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 20:28:39,784 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0732, 4.0165, 4.4153, 4.0917, 4.3497, 3.8418, 4.1578, 3.8587], + device='cuda:1'), covar=tensor([0.1474, 0.1825, 0.1027, 0.1548, 0.1098, 0.1262, 0.2008, 0.2529], + device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0319, 0.0362, 0.0290, 0.0267, 0.0269, 0.0345, 0.0381], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 20:29:19,684 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.334e+02 3.779e+02 4.339e+02 5.421e+02 8.530e+02, threshold=8.679e+02, percent-clipped=0.0 +2023-03-28 20:29:34,067 INFO [train.py:892] (1/4) Epoch 26, batch 1350, loss[loss=0.1513, simple_loss=0.2252, pruned_loss=0.0387, over 19730.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2482, pruned_loss=0.04847, over 3944278.71 frames. ], batch size: 134, lr: 6.01e-03, grad_scale: 16.0 +2023-03-28 20:29:58,932 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47735.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:30:06,670 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47738.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:31:42,344 INFO [train.py:892] (1/4) Epoch 26, batch 1400, loss[loss=0.2021, simple_loss=0.2876, pruned_loss=0.05827, over 19913.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2478, pruned_loss=0.04829, over 3944795.07 frames. ], batch size: 53, lr: 6.01e-03, grad_scale: 16.0 +2023-03-28 20:32:03,054 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3393, 2.6231, 4.2098, 3.7606, 4.0416, 4.2007, 3.9898, 3.8532], + device='cuda:1'), covar=tensor([0.0448, 0.0855, 0.0103, 0.0558, 0.0144, 0.0212, 0.0170, 0.0186], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0102, 0.0085, 0.0151, 0.0081, 0.0096, 0.0088, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 20:32:13,936 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47788.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:32:43,838 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47799.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:33:05,666 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47809.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 20:33:29,897 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 4.160e+02 4.762e+02 6.051e+02 1.589e+03, threshold=9.525e+02, percent-clipped=3.0 +2023-03-28 20:33:39,299 INFO [train.py:892] (1/4) Epoch 26, batch 1450, loss[loss=0.1653, simple_loss=0.252, pruned_loss=0.03937, over 19726.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2491, pruned_loss=0.04891, over 3945031.54 frames. 
], batch size: 52, lr: 6.00e-03, grad_scale: 16.0 +2023-03-28 20:34:39,473 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47849.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:35:15,635 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-28 20:35:30,050 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47870.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 20:35:46,892 INFO [train.py:892] (1/4) Epoch 26, batch 1500, loss[loss=0.1653, simple_loss=0.2449, pruned_loss=0.04281, over 19836.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.248, pruned_loss=0.04828, over 3947316.93 frames. ], batch size: 171, lr: 6.00e-03, grad_scale: 16.0 +2023-03-28 20:36:42,309 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47897.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:36:44,923 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4894, 2.8094, 2.5588, 1.9535, 2.5686, 2.7149, 2.6748, 2.6883], + device='cuda:1'), covar=tensor([0.0374, 0.0277, 0.0272, 0.0638, 0.0359, 0.0305, 0.0346, 0.0267], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0090, 0.0094, 0.0098, 0.0101, 0.0080, 0.0080, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 20:37:43,344 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.594e+02 4.057e+02 4.781e+02 5.680e+02 8.130e+02, threshold=9.563e+02, percent-clipped=0.0 +2023-03-28 20:37:54,800 INFO [train.py:892] (1/4) Epoch 26, batch 1550, loss[loss=0.1647, simple_loss=0.2447, pruned_loss=0.04232, over 19630.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2476, pruned_loss=0.04844, over 3948643.51 frames. ], batch size: 52, lr: 6.00e-03, grad_scale: 32.0 +2023-03-28 20:37:59,239 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1898, 3.0033, 3.3811, 2.6565, 3.4134, 2.8246, 3.0573, 3.3325], + device='cuda:1'), covar=tensor([0.0595, 0.0505, 0.0553, 0.0780, 0.0348, 0.0433, 0.0570, 0.0330], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0082, 0.0079, 0.0108, 0.0076, 0.0078, 0.0075, 0.0068], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 20:38:45,540 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47945.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:39:59,969 INFO [train.py:892] (1/4) Epoch 26, batch 1600, loss[loss=0.1581, simple_loss=0.2302, pruned_loss=0.04302, over 19781.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2462, pruned_loss=0.04755, over 3950091.26 frames. 
], batch size: 131, lr: 5.99e-03, grad_scale: 32.0 +2023-03-28 20:40:57,479 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9605, 3.6270, 3.7094, 3.9295, 3.6626, 3.8214, 4.0367, 4.1874], + device='cuda:1'), covar=tensor([0.0640, 0.0485, 0.0558, 0.0376, 0.0764, 0.0628, 0.0461, 0.0336], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0172, 0.0196, 0.0169, 0.0168, 0.0152, 0.0147, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 20:41:53,771 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.602e+02 3.888e+02 4.549e+02 5.335e+02 7.774e+02, threshold=9.097e+02, percent-clipped=0.0 +2023-03-28 20:42:08,500 INFO [train.py:892] (1/4) Epoch 26, batch 1650, loss[loss=0.1705, simple_loss=0.2455, pruned_loss=0.04771, over 19839.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2459, pruned_loss=0.04718, over 3950106.34 frames. ], batch size: 43, lr: 5.99e-03, grad_scale: 32.0 +2023-03-28 20:42:31,698 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48035.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:44:05,504 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48072.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:44:15,505 INFO [train.py:892] (1/4) Epoch 26, batch 1700, loss[loss=0.1645, simple_loss=0.2478, pruned_loss=0.04058, over 19864.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.247, pruned_loss=0.04719, over 3948829.64 frames. ], batch size: 99, lr: 5.99e-03, grad_scale: 32.0 +2023-03-28 20:44:34,507 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48083.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:45:03,331 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48094.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:46:06,349 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 3.839e+02 4.571e+02 5.458e+02 1.264e+03, threshold=9.143e+02, percent-clipped=4.0 +2023-03-28 20:46:18,202 INFO [train.py:892] (1/4) Epoch 26, batch 1750, loss[loss=0.1616, simple_loss=0.2365, pruned_loss=0.04339, over 19794.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2478, pruned_loss=0.04763, over 3948492.65 frames. ], batch size: 236, lr: 5.98e-03, grad_scale: 32.0 +2023-03-28 20:46:35,025 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48133.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:46:58,822 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48144.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:47:09,943 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1240, 3.0480, 3.2844, 2.5668, 3.3568, 2.7810, 3.1488, 3.1697], + device='cuda:1'), covar=tensor([0.0531, 0.0466, 0.0503, 0.0772, 0.0371, 0.0461, 0.0432, 0.0396], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0081, 0.0079, 0.0107, 0.0076, 0.0077, 0.0075, 0.0068], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 20:47:42,230 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48165.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 20:48:01,805 INFO [train.py:892] (1/4) Epoch 26, batch 1800, loss[loss=0.1777, simple_loss=0.2559, pruned_loss=0.04972, over 19818.00 frames. 
], tot_loss[loss=0.1732, simple_loss=0.2488, pruned_loss=0.04882, over 3948271.03 frames. ], batch size: 72, lr: 5.98e-03, grad_scale: 16.0 +2023-03-28 20:49:11,088 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48212.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 20:49:30,284 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.294e+02 3.982e+02 4.834e+02 5.879e+02 1.454e+03, threshold=9.667e+02, percent-clipped=6.0 +2023-03-28 20:49:38,215 INFO [train.py:892] (1/4) Epoch 26, batch 1850, loss[loss=0.1878, simple_loss=0.2755, pruned_loss=0.05008, over 19848.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.25, pruned_loss=0.04848, over 3948650.81 frames. ], batch size: 58, lr: 5.98e-03, grad_scale: 16.0 +2023-03-28 20:50:46,163 INFO [train.py:892] (1/4) Epoch 27, batch 0, loss[loss=0.1754, simple_loss=0.2492, pruned_loss=0.05077, over 19877.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2492, pruned_loss=0.05077, over 19877.00 frames. ], batch size: 139, lr: 5.86e-03, grad_scale: 16.0 +2023-03-28 20:50:46,164 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 20:51:18,766 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8583, 3.8877, 4.1089, 3.8603, 3.7500, 3.9675, 3.7449, 4.1290], + device='cuda:1'), covar=tensor([0.0668, 0.0313, 0.0341, 0.0352, 0.0714, 0.0500, 0.0533, 0.0328], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0220, 0.0221, 0.0232, 0.0205, 0.0233, 0.0230, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 20:51:20,762 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7725, 1.5796, 1.7738, 1.6870, 1.7071, 1.7216, 1.7269, 1.7561], + device='cuda:1'), covar=tensor([0.0339, 0.0343, 0.0330, 0.0319, 0.0441, 0.0343, 0.0452, 0.0392], + device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0074, 0.0077, 0.0070, 0.0086, 0.0078, 0.0095, 0.0069], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 20:51:25,318 INFO [train.py:926] (1/4) Epoch 27, validation: loss=0.1767, simple_loss=0.2485, pruned_loss=0.05248, over 2883724.00 frames. +2023-03-28 20:51:25,319 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 20:51:34,223 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0017, 3.6949, 3.8596, 4.0238, 3.7707, 3.9962, 4.1614, 4.3075], + device='cuda:1'), covar=tensor([0.0700, 0.0465, 0.0537, 0.0395, 0.0739, 0.0622, 0.0465, 0.0334], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0174, 0.0197, 0.0172, 0.0170, 0.0155, 0.0147, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 20:52:16,913 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-28 20:53:19,823 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48273.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 20:53:39,075 INFO [train.py:892] (1/4) Epoch 27, batch 50, loss[loss=0.1523, simple_loss=0.2354, pruned_loss=0.03459, over 19639.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2439, pruned_loss=0.04772, over 890928.67 frames. 
], batch size: 79, lr: 5.86e-03, grad_scale: 16.0 +2023-03-28 20:54:11,117 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1844, 4.4307, 4.4429, 4.3376, 4.1589, 4.4262, 3.9595, 3.9874], + device='cuda:1'), covar=tensor([0.0496, 0.0528, 0.0530, 0.0470, 0.0663, 0.0542, 0.0747, 0.1017], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0273, 0.0288, 0.0250, 0.0255, 0.0240, 0.0259, 0.0305], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 20:55:21,758 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.295e+02 3.761e+02 4.503e+02 5.543e+02 9.672e+02, threshold=9.006e+02, percent-clipped=1.0 +2023-03-28 20:55:27,419 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48324.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:55:44,787 INFO [train.py:892] (1/4) Epoch 27, batch 100, loss[loss=0.15, simple_loss=0.2268, pruned_loss=0.03655, over 19713.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2432, pruned_loss=0.04636, over 1567930.33 frames. ], batch size: 78, lr: 5.86e-03, grad_scale: 16.0 +2023-03-28 20:55:55,559 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4384, 3.4700, 2.1239, 4.2708, 3.7465, 4.2253, 4.2411, 3.2736], + device='cuda:1'), covar=tensor([0.0631, 0.0611, 0.1789, 0.0493, 0.0646, 0.0355, 0.0527, 0.0792], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0139, 0.0140, 0.0146, 0.0130, 0.0129, 0.0142, 0.0142], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 20:57:46,642 INFO [train.py:892] (1/4) Epoch 27, batch 150, loss[loss=0.1874, simple_loss=0.2627, pruned_loss=0.05605, over 19755.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2422, pruned_loss=0.04519, over 2097338.03 frames. ], batch size: 276, lr: 5.86e-03, grad_scale: 16.0 +2023-03-28 20:57:56,801 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48385.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:58:23,322 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48394.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:58:30,120 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48397.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:59:26,430 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.124e+02 3.642e+02 4.290e+02 5.743e+02 1.049e+03, threshold=8.581e+02, percent-clipped=1.0 +2023-03-28 20:59:30,198 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48423.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:59:40,796 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48428.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:59:49,321 INFO [train.py:892] (1/4) Epoch 27, batch 200, loss[loss=0.2187, simple_loss=0.328, pruned_loss=0.05468, over 18724.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2441, pruned_loss=0.04535, over 2507406.69 frames. 
], batch size: 564, lr: 5.85e-03, grad_scale: 16.0 +2023-03-28 21:00:15,444 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48442.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:00:20,006 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48444.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:00:53,457 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48458.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:01:08,556 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48465.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 21:01:09,046 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-28 21:01:49,457 INFO [train.py:892] (1/4) Epoch 27, batch 250, loss[loss=0.167, simple_loss=0.2435, pruned_loss=0.0452, over 19788.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2437, pruned_loss=0.04502, over 2827615.72 frames. ], batch size: 162, lr: 5.85e-03, grad_scale: 16.0 +2023-03-28 21:01:58,546 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48484.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:02:17,787 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48492.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:03:11,017 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48513.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 21:03:17,592 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1236, 4.2247, 2.4849, 4.4520, 4.6592, 1.9766, 3.9379, 3.3570], + device='cuda:1'), covar=tensor([0.0718, 0.0775, 0.2740, 0.0916, 0.0548, 0.2906, 0.1024, 0.0896], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0250, 0.0224, 0.0265, 0.0245, 0.0199, 0.0235, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 21:03:32,744 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 3.668e+02 4.339e+02 5.007e+02 7.630e+02, threshold=8.678e+02, percent-clipped=0.0 +2023-03-28 21:03:53,549 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3833, 3.0836, 3.4197, 2.9802, 3.5538, 3.5783, 4.1742, 4.6477], + device='cuda:1'), covar=tensor([0.0492, 0.1504, 0.1285, 0.2031, 0.1474, 0.1217, 0.0591, 0.0426], + device='cuda:1'), in_proj_covar=tensor([0.0245, 0.0235, 0.0259, 0.0249, 0.0289, 0.0251, 0.0224, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 21:03:56,497 INFO [train.py:892] (1/4) Epoch 27, batch 300, loss[loss=0.1457, simple_loss=0.2224, pruned_loss=0.03452, over 19865.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2444, pruned_loss=0.04556, over 3076783.78 frames. 
], batch size: 89, lr: 5.85e-03, grad_scale: 16.0 +2023-03-28 21:04:47,296 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2142, 3.0481, 3.4263, 2.6391, 3.3868, 2.7561, 3.0776, 3.3627], + device='cuda:1'), covar=tensor([0.0520, 0.0445, 0.0422, 0.0723, 0.0354, 0.0437, 0.0499, 0.0297], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0081, 0.0078, 0.0106, 0.0076, 0.0078, 0.0075, 0.0068], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 21:05:21,959 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48568.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 21:05:49,785 INFO [train.py:892] (1/4) Epoch 27, batch 350, loss[loss=0.1639, simple_loss=0.2398, pruned_loss=0.04403, over 19687.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2448, pruned_loss=0.04561, over 3270282.83 frames. ], batch size: 75, lr: 5.84e-03, grad_scale: 16.0 +2023-03-28 21:07:35,040 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.631e+02 3.784e+02 4.446e+02 5.402e+02 1.015e+03, threshold=8.893e+02, percent-clipped=1.0 +2023-03-28 21:07:58,167 INFO [train.py:892] (1/4) Epoch 27, batch 400, loss[loss=0.1442, simple_loss=0.2268, pruned_loss=0.03081, over 19880.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2436, pruned_loss=0.04546, over 3422462.51 frames. ], batch size: 97, lr: 5.84e-03, grad_scale: 16.0 +2023-03-28 21:10:02,660 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48680.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:10:04,301 INFO [train.py:892] (1/4) Epoch 27, batch 450, loss[loss=0.1729, simple_loss=0.2646, pruned_loss=0.04058, over 19712.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2447, pruned_loss=0.0457, over 3541165.91 frames. ], batch size: 54, lr: 5.84e-03, grad_scale: 16.0 +2023-03-28 21:10:31,904 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8827, 3.2023, 2.6601, 2.2769, 2.7431, 3.0295, 3.0266, 3.0868], + device='cuda:1'), covar=tensor([0.0281, 0.0299, 0.0298, 0.0555, 0.0337, 0.0267, 0.0209, 0.0196], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0091, 0.0095, 0.0099, 0.0102, 0.0081, 0.0081, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 21:12:00,775 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1422, 3.9392, 3.9812, 3.7784, 4.1156, 3.0103, 3.4829, 2.0204], + device='cuda:1'), covar=tensor([0.0200, 0.0228, 0.0140, 0.0187, 0.0153, 0.0963, 0.0669, 0.1518], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0142, 0.0111, 0.0132, 0.0118, 0.0133, 0.0140, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 21:12:01,748 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.486e+02 3.534e+02 4.421e+02 5.220e+02 9.459e+02, threshold=8.842e+02, percent-clipped=1.0 +2023-03-28 21:12:21,506 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48728.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:12:28,192 INFO [train.py:892] (1/4) Epoch 27, batch 500, loss[loss=0.156, simple_loss=0.2405, pruned_loss=0.03577, over 19582.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2441, pruned_loss=0.04521, over 3632624.32 frames. 
], batch size: 53, lr: 5.83e-03, grad_scale: 16.0 +2023-03-28 21:13:22,884 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48753.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:14:22,703 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48776.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:14:30,811 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48779.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:14:34,588 INFO [train.py:892] (1/4) Epoch 27, batch 550, loss[loss=0.2023, simple_loss=0.2802, pruned_loss=0.0622, over 19704.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2454, pruned_loss=0.04629, over 3703327.78 frames. ], batch size: 315, lr: 5.83e-03, grad_scale: 16.0 +2023-03-28 21:14:36,022 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2921, 3.3710, 2.1119, 3.4702, 3.5412, 1.7060, 2.9471, 2.7770], + device='cuda:1'), covar=tensor([0.0814, 0.0894, 0.2699, 0.0887, 0.0637, 0.2639, 0.1123, 0.0903], + device='cuda:1'), in_proj_covar=tensor([0.0229, 0.0251, 0.0226, 0.0265, 0.0245, 0.0199, 0.0236, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 21:15:14,824 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-28 21:16:20,417 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48821.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:16:22,706 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.382e+02 4.009e+02 4.912e+02 5.862e+02 1.286e+03, threshold=9.824e+02, percent-clipped=2.0 +2023-03-28 21:16:44,226 INFO [train.py:892] (1/4) Epoch 27, batch 600, loss[loss=0.143, simple_loss=0.2173, pruned_loss=0.03431, over 19843.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2451, pruned_loss=0.04647, over 3758514.85 frames. ], batch size: 109, lr: 5.83e-03, grad_scale: 16.0 +2023-03-28 21:16:50,666 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-03-28 21:17:02,457 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48838.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:18:12,883 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48868.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 21:18:21,730 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.71 vs. limit=5.0 +2023-03-28 21:18:42,198 INFO [train.py:892] (1/4) Epoch 27, batch 650, loss[loss=0.2193, simple_loss=0.319, pruned_loss=0.05985, over 18736.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2449, pruned_loss=0.04625, over 3801000.49 frames. 
], batch size: 564, lr: 5.83e-03, grad_scale: 16.0 +2023-03-28 21:18:46,925 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48882.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:19:27,485 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48899.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:20:07,011 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48916.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 21:20:19,930 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 3.831e+02 4.540e+02 6.098e+02 1.137e+03, threshold=9.081e+02, percent-clipped=2.0 +2023-03-28 21:20:43,004 INFO [train.py:892] (1/4) Epoch 27, batch 700, loss[loss=0.1583, simple_loss=0.2411, pruned_loss=0.03779, over 19704.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2464, pruned_loss=0.0469, over 3833432.20 frames. ], batch size: 101, lr: 5.82e-03, grad_scale: 16.0 +2023-03-28 21:22:48,223 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48980.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:22:49,429 INFO [train.py:892] (1/4) Epoch 27, batch 750, loss[loss=0.1778, simple_loss=0.2504, pruned_loss=0.0526, over 19769.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2456, pruned_loss=0.04687, over 3859572.94 frames. ], batch size: 213, lr: 5.82e-03, grad_scale: 16.0 +2023-03-28 21:24:18,351 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4099, 3.7628, 3.8704, 4.6055, 2.9776, 3.4164, 2.7993, 2.7167], + device='cuda:1'), covar=tensor([0.0595, 0.1955, 0.1000, 0.0354, 0.2152, 0.1046, 0.1360, 0.1752], + device='cuda:1'), in_proj_covar=tensor([0.0237, 0.0333, 0.0245, 0.0196, 0.0245, 0.0204, 0.0214, 0.0215], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 21:24:30,756 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 3.742e+02 4.626e+02 5.475e+02 1.269e+03, threshold=9.252e+02, percent-clipped=2.0 +2023-03-28 21:24:49,088 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49028.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:24:56,538 INFO [train.py:892] (1/4) Epoch 27, batch 800, loss[loss=0.1526, simple_loss=0.2331, pruned_loss=0.03605, over 19853.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2448, pruned_loss=0.04664, over 3880727.91 frames. ], batch size: 85, lr: 5.82e-03, grad_scale: 16.0 +2023-03-28 21:25:52,150 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49053.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:26:28,178 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8214, 3.7458, 2.2653, 4.0134, 4.1284, 1.9027, 3.4325, 3.2383], + device='cuda:1'), covar=tensor([0.0698, 0.1065, 0.2786, 0.0818, 0.0597, 0.2787, 0.1151, 0.0850], + device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0251, 0.0226, 0.0265, 0.0245, 0.0199, 0.0235, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 21:26:58,633 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49079.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:27:02,213 INFO [train.py:892] (1/4) Epoch 27, batch 850, loss[loss=0.1821, simple_loss=0.2529, pruned_loss=0.05563, over 19781.00 frames. 
], tot_loss[loss=0.1687, simple_loss=0.2444, pruned_loss=0.04653, over 3895821.69 frames. ], batch size: 213, lr: 5.81e-03, grad_scale: 16.0 +2023-03-28 21:27:51,049 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49101.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:28:00,047 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5490, 4.8060, 4.8665, 4.7515, 4.5249, 4.8313, 4.3452, 4.3799], + device='cuda:1'), covar=tensor([0.0454, 0.0460, 0.0475, 0.0437, 0.0580, 0.0533, 0.0682, 0.0936], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0272, 0.0284, 0.0248, 0.0254, 0.0238, 0.0256, 0.0302], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 21:28:42,759 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.493e+02 3.766e+02 4.379e+02 5.390e+02 1.134e+03, threshold=8.757e+02, percent-clipped=1.0 +2023-03-28 21:28:56,259 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49127.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:29:04,483 INFO [train.py:892] (1/4) Epoch 27, batch 900, loss[loss=0.2083, simple_loss=0.2715, pruned_loss=0.0725, over 19811.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2435, pruned_loss=0.04627, over 3909054.34 frames. ], batch size: 72, lr: 5.81e-03, grad_scale: 16.0 +2023-03-28 21:29:15,820 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.67 vs. limit=5.0 +2023-03-28 21:30:29,882 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6349, 4.9690, 5.0454, 4.9853, 4.6115, 5.0211, 4.5722, 4.6202], + device='cuda:1'), covar=tensor([0.0517, 0.0498, 0.0479, 0.0436, 0.0663, 0.0508, 0.0701, 0.0969], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0273, 0.0284, 0.0248, 0.0255, 0.0240, 0.0257, 0.0303], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 21:31:02,164 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49177.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:31:10,457 INFO [train.py:892] (1/4) Epoch 27, batch 950, loss[loss=0.2388, simple_loss=0.3113, pruned_loss=0.08316, over 19596.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2435, pruned_loss=0.04621, over 3919343.44 frames. ], batch size: 367, lr: 5.81e-03, grad_scale: 16.0 +2023-03-28 21:31:41,687 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49194.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:32:49,459 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.549e+02 3.706e+02 4.158e+02 5.021e+02 8.010e+02, threshold=8.316e+02, percent-clipped=0.0 +2023-03-28 21:33:13,475 INFO [train.py:892] (1/4) Epoch 27, batch 1000, loss[loss=0.1589, simple_loss=0.2311, pruned_loss=0.0434, over 19766.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2423, pruned_loss=0.04562, over 3927383.11 frames. 
], batch size: 125, lr: 5.81e-03, grad_scale: 16.0 +2023-03-28 21:33:25,807 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49236.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:33:37,829 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8803, 2.5484, 2.9010, 3.2033, 3.6326, 3.9205, 3.8141, 3.7881], + device='cuda:1'), covar=tensor([0.0992, 0.1658, 0.1313, 0.0691, 0.0419, 0.0282, 0.0396, 0.0529], + device='cuda:1'), in_proj_covar=tensor([0.0157, 0.0170, 0.0177, 0.0149, 0.0131, 0.0128, 0.0120, 0.0114], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 21:33:46,993 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8747, 3.7785, 3.7436, 3.5153, 3.8902, 2.8921, 3.2114, 1.8416], + device='cuda:1'), covar=tensor([0.0272, 0.0248, 0.0175, 0.0225, 0.0175, 0.1067, 0.0781, 0.1810], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0142, 0.0111, 0.0131, 0.0117, 0.0132, 0.0139, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 21:35:10,547 INFO [train.py:892] (1/4) Epoch 27, batch 1050, loss[loss=0.156, simple_loss=0.2275, pruned_loss=0.04225, over 19765.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2423, pruned_loss=0.04559, over 3932455.85 frames. ], batch size: 125, lr: 5.80e-03, grad_scale: 16.0 +2023-03-28 21:35:54,759 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49297.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:36:54,139 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.170e+02 3.715e+02 4.308e+02 4.982e+02 8.833e+02, threshold=8.615e+02, percent-clipped=1.0 +2023-03-28 21:37:20,648 INFO [train.py:892] (1/4) Epoch 27, batch 1100, loss[loss=0.2282, simple_loss=0.3039, pruned_loss=0.0763, over 19608.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2435, pruned_loss=0.04584, over 3935319.23 frames. ], batch size: 359, lr: 5.80e-03, grad_scale: 16.0 +2023-03-28 21:39:03,723 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([6.0060, 6.2968, 6.2825, 6.1741, 5.9582, 6.2460, 5.6085, 5.6003], + device='cuda:1'), covar=tensor([0.0348, 0.0381, 0.0450, 0.0383, 0.0560, 0.0512, 0.0645, 0.0970], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0269, 0.0283, 0.0247, 0.0253, 0.0236, 0.0255, 0.0300], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 21:39:23,319 INFO [train.py:892] (1/4) Epoch 27, batch 1150, loss[loss=0.1779, simple_loss=0.2492, pruned_loss=0.05336, over 19795.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2457, pruned_loss=0.04673, over 3936513.29 frames. ], batch size: 185, lr: 5.80e-03, grad_scale: 16.0 +2023-03-28 21:40:41,061 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. 
limit=2.0 +2023-03-28 21:40:42,191 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6145, 4.5044, 5.0250, 4.5238, 4.0979, 4.8024, 4.6664, 5.1561], + device='cuda:1'), covar=tensor([0.0902, 0.0379, 0.0392, 0.0384, 0.0878, 0.0486, 0.0442, 0.0318], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0219, 0.0218, 0.0230, 0.0205, 0.0233, 0.0227, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 21:41:04,947 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.904e+02 3.848e+02 4.568e+02 5.710e+02 9.674e+02, threshold=9.135e+02, percent-clipped=2.0 +2023-03-28 21:41:06,114 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49422.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:41:28,119 INFO [train.py:892] (1/4) Epoch 27, batch 1200, loss[loss=0.148, simple_loss=0.2247, pruned_loss=0.03566, over 19723.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2469, pruned_loss=0.04732, over 3939083.31 frames. ], batch size: 104, lr: 5.79e-03, grad_scale: 16.0 +2023-03-28 21:41:31,348 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49432.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 21:42:01,957 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3830, 2.7857, 2.5031, 1.9100, 2.4684, 2.6783, 2.6233, 2.6740], + device='cuda:1'), covar=tensor([0.0374, 0.0292, 0.0283, 0.0586, 0.0417, 0.0302, 0.0299, 0.0243], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0091, 0.0095, 0.0099, 0.0101, 0.0081, 0.0080, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 21:43:28,854 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49477.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:43:35,933 INFO [train.py:892] (1/4) Epoch 27, batch 1250, loss[loss=0.1455, simple_loss=0.2238, pruned_loss=0.03356, over 19783.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2468, pruned_loss=0.04743, over 3941854.33 frames. 
], batch size: 52, lr: 5.79e-03, grad_scale: 16.0 +2023-03-28 21:43:37,100 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2774, 3.9106, 4.0885, 4.2498, 3.9480, 4.2353, 4.3841, 4.5423], + device='cuda:1'), covar=tensor([0.0619, 0.0439, 0.0485, 0.0387, 0.0710, 0.0506, 0.0411, 0.0303], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0172, 0.0194, 0.0169, 0.0168, 0.0153, 0.0145, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-28 21:43:41,722 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49483.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:44:06,509 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49493.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 21:44:08,843 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49494.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:45:21,342 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.439e+02 3.880e+02 4.551e+02 5.249e+02 9.846e+02, threshold=9.103e+02, percent-clipped=1.0 +2023-03-28 21:45:29,390 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49525.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:45:44,632 INFO [train.py:892] (1/4) Epoch 27, batch 1300, loss[loss=0.1651, simple_loss=0.2493, pruned_loss=0.04046, over 19574.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2471, pruned_loss=0.04717, over 3944177.50 frames. ], batch size: 60, lr: 5.79e-03, grad_scale: 16.0 +2023-03-28 21:46:14,128 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49542.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:47:50,728 INFO [train.py:892] (1/4) Epoch 27, batch 1350, loss[loss=0.1527, simple_loss=0.2334, pruned_loss=0.03594, over 19793.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2464, pruned_loss=0.04696, over 3945612.69 frames. ], batch size: 79, lr: 5.78e-03, grad_scale: 16.0 +2023-03-28 21:48:20,149 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49592.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:49:30,808 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.075e+02 4.881e+02 5.911e+02 1.218e+03, threshold=9.761e+02, percent-clipped=5.0 +2023-03-28 21:49:51,504 INFO [train.py:892] (1/4) Epoch 27, batch 1400, loss[loss=0.2323, simple_loss=0.3047, pruned_loss=0.07994, over 19626.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2455, pruned_loss=0.04678, over 3944975.30 frames. ], batch size: 367, lr: 5.78e-03, grad_scale: 16.0 +2023-03-28 21:51:36,506 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-28 21:51:48,452 INFO [train.py:892] (1/4) Epoch 27, batch 1450, loss[loss=0.1661, simple_loss=0.2459, pruned_loss=0.04317, over 19786.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2448, pruned_loss=0.04611, over 3947018.46 frames. 
], batch size: 191, lr: 5.78e-03, grad_scale: 16.0 +2023-03-28 21:52:47,270 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5834, 3.5069, 3.8828, 3.5299, 3.2950, 3.7619, 3.6375, 3.9052], + device='cuda:1'), covar=tensor([0.0845, 0.0389, 0.0355, 0.0421, 0.1407, 0.0563, 0.0474, 0.0393], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0220, 0.0219, 0.0232, 0.0207, 0.0235, 0.0229, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 21:52:58,884 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3490, 3.3605, 4.7873, 3.7078, 3.9837, 3.7453, 2.6060, 2.9035], + device='cuda:1'), covar=tensor([0.0783, 0.2705, 0.0402, 0.0891, 0.1379, 0.1315, 0.2257, 0.2286], + device='cuda:1'), in_proj_covar=tensor([0.0348, 0.0381, 0.0341, 0.0277, 0.0369, 0.0364, 0.0363, 0.0334], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 21:53:31,600 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.941e+02 4.673e+02 5.511e+02 7.393e+02, threshold=9.346e+02, percent-clipped=0.0 +2023-03-28 21:53:54,214 INFO [train.py:892] (1/4) Epoch 27, batch 1500, loss[loss=0.1833, simple_loss=0.2559, pruned_loss=0.05534, over 19795.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2442, pruned_loss=0.04626, over 3948417.33 frames. ], batch size: 40, lr: 5.78e-03, grad_scale: 16.0 +2023-03-28 21:55:20,658 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1010, 3.3478, 2.8458, 2.5114, 2.9878, 3.1988, 3.2942, 3.2289], + device='cuda:1'), covar=tensor([0.0254, 0.0265, 0.0272, 0.0448, 0.0322, 0.0297, 0.0180, 0.0212], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0090, 0.0095, 0.0098, 0.0101, 0.0081, 0.0080, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 21:55:22,616 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49766.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:55:51,279 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49778.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:55:57,486 INFO [train.py:892] (1/4) Epoch 27, batch 1550, loss[loss=0.1655, simple_loss=0.2456, pruned_loss=0.04269, over 19836.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.244, pruned_loss=0.04585, over 3950200.07 frames. 
], batch size: 43, lr: 5.77e-03, grad_scale: 16.0 +2023-03-28 21:55:58,838 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3968, 4.1670, 4.2136, 4.0315, 4.3952, 3.1258, 3.7128, 2.3777], + device='cuda:1'), covar=tensor([0.0205, 0.0230, 0.0147, 0.0184, 0.0139, 0.0917, 0.0694, 0.1306], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0143, 0.0112, 0.0132, 0.0117, 0.0133, 0.0139, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 21:56:16,186 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49788.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 21:57:39,877 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.487e+02 3.954e+02 4.680e+02 6.114e+02 1.039e+03, threshold=9.360e+02, percent-clipped=2.0 +2023-03-28 21:57:53,482 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49827.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:58:00,469 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8550, 1.7754, 1.9875, 1.9261, 1.8315, 1.9738, 1.8927, 1.9966], + device='cuda:1'), covar=tensor([0.0337, 0.0368, 0.0316, 0.0296, 0.0506, 0.0303, 0.0448, 0.0294], + device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0075, 0.0078, 0.0071, 0.0085, 0.0079, 0.0096, 0.0069], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 21:58:01,507 INFO [train.py:892] (1/4) Epoch 27, batch 1600, loss[loss=0.1699, simple_loss=0.2508, pruned_loss=0.04451, over 19772.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2445, pruned_loss=0.04603, over 3950093.54 frames. ], batch size: 273, lr: 5.77e-03, grad_scale: 16.0 +2023-03-28 21:58:23,317 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-28 22:00:08,569 INFO [train.py:892] (1/4) Epoch 27, batch 1650, loss[loss=0.1593, simple_loss=0.2354, pruned_loss=0.04158, over 19844.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2444, pruned_loss=0.04598, over 3950527.95 frames. 
], batch size: 190, lr: 5.77e-03, grad_scale: 16.0 +2023-03-28 22:00:15,144 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4770, 2.6496, 3.7438, 3.0349, 3.1732, 3.0616, 2.2543, 2.3692], + device='cuda:1'), covar=tensor([0.1018, 0.2792, 0.0639, 0.1005, 0.1737, 0.1422, 0.2393, 0.2639], + device='cuda:1'), in_proj_covar=tensor([0.0348, 0.0382, 0.0342, 0.0279, 0.0369, 0.0365, 0.0364, 0.0334], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 22:00:36,763 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49892.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:01:35,033 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3136, 4.2599, 4.6742, 4.4605, 4.5979, 4.1715, 4.3819, 4.1966], + device='cuda:1'), covar=tensor([0.1462, 0.1629, 0.0939, 0.1291, 0.0902, 0.0958, 0.1957, 0.2093], + device='cuda:1'), in_proj_covar=tensor([0.0293, 0.0324, 0.0366, 0.0293, 0.0273, 0.0271, 0.0354, 0.0389], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 22:01:55,267 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.537e+02 3.614e+02 4.321e+02 5.513e+02 1.291e+03, threshold=8.641e+02, percent-clipped=1.0 +2023-03-28 22:02:20,724 INFO [train.py:892] (1/4) Epoch 27, batch 1700, loss[loss=0.1769, simple_loss=0.2624, pruned_loss=0.04571, over 19844.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2445, pruned_loss=0.04544, over 3949606.11 frames. ], batch size: 56, lr: 5.76e-03, grad_scale: 16.0 +2023-03-28 22:02:44,480 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49940.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:04:24,956 INFO [train.py:892] (1/4) Epoch 27, batch 1750, loss[loss=0.1789, simple_loss=0.263, pruned_loss=0.04738, over 19711.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2436, pruned_loss=0.0451, over 3950427.68 frames. ], batch size: 54, lr: 5.76e-03, grad_scale: 16.0 +2023-03-28 22:05:54,958 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.637e+02 3.767e+02 4.699e+02 5.313e+02 7.903e+02, threshold=9.399e+02, percent-clipped=0.0 +2023-03-28 22:06:12,948 INFO [train.py:892] (1/4) Epoch 27, batch 1800, loss[loss=0.2107, simple_loss=0.2891, pruned_loss=0.06618, over 19657.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2443, pruned_loss=0.04552, over 3949369.18 frames. ], batch size: 330, lr: 5.76e-03, grad_scale: 16.0 +2023-03-28 22:07:44,961 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0341, 3.3348, 2.8478, 2.4275, 2.8839, 3.2116, 3.1228, 3.1943], + device='cuda:1'), covar=tensor([0.0279, 0.0271, 0.0291, 0.0521, 0.0365, 0.0271, 0.0287, 0.0221], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0091, 0.0094, 0.0098, 0.0100, 0.0081, 0.0080, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 22:07:55,323 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50078.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:08:00,322 INFO [train.py:892] (1/4) Epoch 27, batch 1850, loss[loss=0.1844, simple_loss=0.2683, pruned_loss=0.05024, over 19821.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2455, pruned_loss=0.04576, over 3949618.63 frames. 
], batch size: 57, lr: 5.76e-03, grad_scale: 16.0 +2023-03-28 22:09:11,364 INFO [train.py:892] (1/4) Epoch 28, batch 0, loss[loss=0.15, simple_loss=0.2229, pruned_loss=0.03852, over 19882.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2229, pruned_loss=0.03852, over 19882.00 frames. ], batch size: 110, lr: 5.65e-03, grad_scale: 16.0 +2023-03-28 22:09:11,364 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 22:09:45,363 INFO [train.py:926] (1/4) Epoch 28, validation: loss=0.1765, simple_loss=0.2481, pruned_loss=0.05251, over 2883724.00 frames. +2023-03-28 22:09:45,365 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 22:09:51,458 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50088.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:10:30,968 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9090, 2.9191, 4.3881, 3.3131, 3.7333, 3.3629, 2.4583, 2.5519], + device='cuda:1'), covar=tensor([0.1040, 0.3156, 0.0578, 0.1018, 0.1532, 0.1493, 0.2547, 0.2789], + device='cuda:1'), in_proj_covar=tensor([0.0347, 0.0380, 0.0341, 0.0277, 0.0366, 0.0364, 0.0363, 0.0333], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 22:11:20,018 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.385e+02 4.071e+02 4.813e+02 5.962e+02 1.168e+03, threshold=9.627e+02, percent-clipped=3.0 +2023-03-28 22:11:21,122 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50122.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:11:32,564 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50126.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:11:37,051 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50128.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:11:58,862 INFO [train.py:892] (1/4) Epoch 28, batch 50, loss[loss=0.1721, simple_loss=0.244, pruned_loss=0.05006, over 19804.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2413, pruned_loss=0.04659, over 891597.95 frames. ], batch size: 229, lr: 5.65e-03, grad_scale: 16.0 +2023-03-28 22:11:59,949 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50136.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:14:00,301 INFO [train.py:892] (1/4) Epoch 28, batch 100, loss[loss=0.1718, simple_loss=0.2406, pruned_loss=0.05147, over 19848.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2422, pruned_loss=0.04565, over 1569732.24 frames. 
], batch size: 137, lr: 5.64e-03, grad_scale: 32.0 +2023-03-28 22:14:01,387 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9454, 3.2274, 2.8017, 2.4109, 2.9343, 3.1788, 3.2084, 3.2189], + device='cuda:1'), covar=tensor([0.0302, 0.0392, 0.0309, 0.0571, 0.0346, 0.0319, 0.0252, 0.0241], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0092, 0.0096, 0.0100, 0.0102, 0.0083, 0.0081, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 22:14:08,054 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50189.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:14:18,360 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.8919, 5.9125, 5.8615, 5.9829, 5.7761, 5.9751, 5.3407, 4.9596], + device='cuda:1'), covar=tensor([0.0677, 0.0890, 0.1025, 0.0647, 0.0936, 0.0966, 0.1206, 0.2384], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0273, 0.0289, 0.0251, 0.0256, 0.0239, 0.0258, 0.0305], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 22:14:24,049 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50197.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:14:26,071 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.9466, 2.1417, 2.1397, 1.9046, 2.2227, 1.9302, 2.1561, 2.1930], + device='cuda:1'), covar=tensor([0.0487, 0.0515, 0.0459, 0.0907, 0.0426, 0.0524, 0.0500, 0.0377], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0082, 0.0081, 0.0108, 0.0076, 0.0079, 0.0076, 0.0069], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 22:15:23,508 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.658e+02 4.112e+02 4.923e+02 6.330e+02 1.508e+03, threshold=9.845e+02, percent-clipped=2.0 +2023-03-28 22:15:55,018 INFO [train.py:892] (1/4) Epoch 28, batch 150, loss[loss=0.1477, simple_loss=0.2315, pruned_loss=0.03196, over 19755.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.244, pruned_loss=0.04557, over 2096815.43 frames. ], batch size: 100, lr: 5.64e-03, grad_scale: 32.0 +2023-03-28 22:16:49,949 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50258.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:17:57,292 INFO [train.py:892] (1/4) Epoch 28, batch 200, loss[loss=0.1613, simple_loss=0.2435, pruned_loss=0.03953, over 19770.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2446, pruned_loss=0.04539, over 2508479.20 frames. 
], batch size: 198, lr: 5.64e-03, grad_scale: 32.0 +2023-03-28 22:18:22,339 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8853, 3.2849, 3.3630, 3.8338, 2.7053, 3.1851, 2.4553, 2.5122], + device='cuda:1'), covar=tensor([0.0547, 0.1871, 0.0912, 0.0416, 0.1976, 0.0816, 0.1474, 0.1688], + device='cuda:1'), in_proj_covar=tensor([0.0239, 0.0332, 0.0245, 0.0197, 0.0247, 0.0204, 0.0214, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 22:19:26,401 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.059e+02 4.750e+02 5.729e+02 1.085e+03, threshold=9.500e+02, percent-clipped=1.0 +2023-03-28 22:19:44,254 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1901, 2.9185, 3.2651, 2.8396, 3.5086, 3.4454, 4.0672, 4.5407], + device='cuda:1'), covar=tensor([0.0571, 0.1763, 0.1502, 0.2249, 0.1637, 0.1381, 0.0586, 0.0506], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0240, 0.0265, 0.0255, 0.0295, 0.0255, 0.0231, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 22:20:01,040 INFO [train.py:892] (1/4) Epoch 28, batch 250, loss[loss=0.1823, simple_loss=0.2652, pruned_loss=0.04972, over 19804.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2446, pruned_loss=0.04529, over 2829235.73 frames. ], batch size: 68, lr: 5.64e-03, grad_scale: 16.0 +2023-03-28 22:21:44,512 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3611, 2.4596, 4.2148, 3.7303, 4.1458, 4.1815, 4.0899, 3.9406], + device='cuda:1'), covar=tensor([0.0508, 0.0991, 0.0109, 0.0633, 0.0134, 0.0236, 0.0160, 0.0170], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0102, 0.0085, 0.0152, 0.0082, 0.0096, 0.0088, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 22:22:07,575 INFO [train.py:892] (1/4) Epoch 28, batch 300, loss[loss=0.1823, simple_loss=0.245, pruned_loss=0.05977, over 19787.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2439, pruned_loss=0.04522, over 3077129.06 frames. ], batch size: 154, lr: 5.63e-03, grad_scale: 16.0 +2023-03-28 22:23:39,884 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50422.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:23:40,968 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.722e+02 4.064e+02 4.778e+02 6.064e+02 1.211e+03, threshold=9.555e+02, percent-clipped=3.0 +2023-03-28 22:24:15,527 INFO [train.py:892] (1/4) Epoch 28, batch 350, loss[loss=0.1567, simple_loss=0.2282, pruned_loss=0.0426, over 19829.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2435, pruned_loss=0.04509, over 3270350.29 frames. ], batch size: 184, lr: 5.63e-03, grad_scale: 16.0 +2023-03-28 22:25:39,668 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50470.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:26:16,898 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50484.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:26:20,365 INFO [train.py:892] (1/4) Epoch 28, batch 400, loss[loss=0.1664, simple_loss=0.2519, pruned_loss=0.04049, over 19782.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2445, pruned_loss=0.04577, over 3421454.26 frames. 
], batch size: 91, lr: 5.63e-03, grad_scale: 16.0 +2023-03-28 22:27:29,072 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4119, 3.6710, 2.1583, 3.8077, 3.8828, 1.7671, 3.0044, 2.7816], + device='cuda:1'), covar=tensor([0.0882, 0.0763, 0.2820, 0.0704, 0.0514, 0.2808, 0.1386, 0.1015], + device='cuda:1'), in_proj_covar=tensor([0.0231, 0.0255, 0.0230, 0.0269, 0.0250, 0.0204, 0.0240, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 22:27:38,250 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3381, 4.0908, 4.1513, 3.8998, 4.3067, 3.0841, 3.6158, 2.0645], + device='cuda:1'), covar=tensor([0.0198, 0.0252, 0.0149, 0.0195, 0.0151, 0.0942, 0.0663, 0.1517], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0143, 0.0112, 0.0132, 0.0117, 0.0133, 0.0139, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 22:27:49,850 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.616e+02 4.023e+02 4.684e+02 5.835e+02 1.086e+03, threshold=9.368e+02, percent-clipped=2.0 +2023-03-28 22:28:11,271 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8874, 1.8625, 1.9830, 1.9529, 1.9064, 1.9854, 1.8815, 1.9901], + device='cuda:1'), covar=tensor([0.0342, 0.0303, 0.0306, 0.0283, 0.0418, 0.0295, 0.0448, 0.0312], + device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0075, 0.0078, 0.0071, 0.0085, 0.0078, 0.0096, 0.0068], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 22:28:16,117 INFO [train.py:892] (1/4) Epoch 28, batch 450, loss[loss=0.1633, simple_loss=0.236, pruned_loss=0.04529, over 19770.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.246, pruned_loss=0.04627, over 3538105.37 frames. ], batch size: 154, lr: 5.63e-03, grad_scale: 16.0 +2023-03-28 22:28:42,856 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50546.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:28:45,278 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6966, 2.6654, 2.9359, 2.5691, 3.0669, 3.0272, 3.5506, 3.8765], + device='cuda:1'), covar=tensor([0.0687, 0.1721, 0.1620, 0.2234, 0.1903, 0.1539, 0.0674, 0.0701], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0242, 0.0267, 0.0256, 0.0297, 0.0257, 0.0232, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 22:29:01,631 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50553.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 22:29:57,213 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-28 22:30:25,750 INFO [train.py:892] (1/4) Epoch 28, batch 500, loss[loss=0.1679, simple_loss=0.2466, pruned_loss=0.04456, over 19775.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.247, pruned_loss=0.04698, over 3628550.82 frames. ], batch size: 213, lr: 5.62e-03, grad_scale: 16.0 +2023-03-28 22:31:18,897 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50607.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:31:40,097 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.02 vs. 
limit=5.0 +2023-03-28 22:31:59,940 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.484e+02 3.842e+02 4.586e+02 5.688e+02 1.000e+03, threshold=9.173e+02, percent-clipped=1.0 +2023-03-28 22:32:20,787 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7590, 2.7572, 4.6182, 3.9974, 4.4108, 4.5527, 4.4534, 4.2188], + device='cuda:1'), covar=tensor([0.0425, 0.0910, 0.0093, 0.0779, 0.0125, 0.0195, 0.0148, 0.0150], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0102, 0.0086, 0.0152, 0.0083, 0.0096, 0.0088, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 22:32:30,996 INFO [train.py:892] (1/4) Epoch 28, batch 550, loss[loss=0.2036, simple_loss=0.2808, pruned_loss=0.06321, over 19618.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2474, pruned_loss=0.04714, over 3699075.83 frames. ], batch size: 367, lr: 5.62e-03, grad_scale: 16.0 +2023-03-28 22:34:36,434 INFO [train.py:892] (1/4) Epoch 28, batch 600, loss[loss=0.1649, simple_loss=0.2373, pruned_loss=0.04626, over 19831.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2467, pruned_loss=0.04671, over 3755092.87 frames. ], batch size: 177, lr: 5.62e-03, grad_scale: 16.0 +2023-03-28 22:34:53,162 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9815, 2.4056, 3.8472, 3.4085, 3.8100, 3.8814, 3.7122, 3.5786], + device='cuda:1'), covar=tensor([0.0567, 0.0942, 0.0114, 0.0535, 0.0158, 0.0235, 0.0182, 0.0204], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0102, 0.0086, 0.0153, 0.0083, 0.0096, 0.0089, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 22:36:08,594 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.507e+02 3.618e+02 4.238e+02 5.077e+02 1.172e+03, threshold=8.476e+02, percent-clipped=2.0 +2023-03-28 22:36:42,023 INFO [train.py:892] (1/4) Epoch 28, batch 650, loss[loss=0.1735, simple_loss=0.257, pruned_loss=0.04501, over 19854.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2462, pruned_loss=0.04619, over 3798075.34 frames. ], batch size: 104, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:38:38,705 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50782.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:38:43,232 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50784.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:38:46,807 INFO [train.py:892] (1/4) Epoch 28, batch 700, loss[loss=0.1608, simple_loss=0.2413, pruned_loss=0.04017, over 19889.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2459, pruned_loss=0.04616, over 3832511.99 frames. ], batch size: 71, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:39:45,281 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50808.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:40:20,275 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.436e+02 3.963e+02 4.473e+02 5.641e+02 1.140e+03, threshold=8.946e+02, percent-clipped=3.0 +2023-03-28 22:40:43,847 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50832.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:40:51,164 INFO [train.py:892] (1/4) Epoch 28, batch 750, loss[loss=0.1611, simple_loss=0.2434, pruned_loss=0.03939, over 19718.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.245, pruned_loss=0.04583, over 3858842.30 frames. 
], batch size: 81, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:41:08,963 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50843.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:41:33,259 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50853.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 22:42:13,361 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50869.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 22:42:55,347 INFO [train.py:892] (1/4) Epoch 28, batch 800, loss[loss=0.1643, simple_loss=0.249, pruned_loss=0.03984, over 19838.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2459, pruned_loss=0.04598, over 3877494.72 frames. ], batch size: 81, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:43:31,257 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50901.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:43:33,391 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50902.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:44:24,106 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.705e+02 4.018e+02 4.822e+02 5.652e+02 1.134e+03, threshold=9.644e+02, percent-clipped=5.0 +2023-03-28 22:44:40,977 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-28 22:44:54,727 INFO [train.py:892] (1/4) Epoch 28, batch 850, loss[loss=0.1585, simple_loss=0.2398, pruned_loss=0.03856, over 19745.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2465, pruned_loss=0.04623, over 3893360.43 frames. ], batch size: 276, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:45:20,988 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50946.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:46:58,604 INFO [train.py:892] (1/4) Epoch 28, batch 900, loss[loss=0.203, simple_loss=0.2799, pruned_loss=0.06304, over 19638.00 frames. ], tot_loss[loss=0.17, simple_loss=0.247, pruned_loss=0.04651, over 3903431.68 frames. ], batch size: 351, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:47:51,341 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51007.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:48:31,181 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 3.452e+02 4.322e+02 5.014e+02 9.389e+02, threshold=8.643e+02, percent-clipped=0.0 +2023-03-28 22:49:04,848 INFO [train.py:892] (1/4) Epoch 28, batch 950, loss[loss=0.1578, simple_loss=0.238, pruned_loss=0.03887, over 19810.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2455, pruned_loss=0.04598, over 3915313.11 frames. ], batch size: 181, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:49:38,131 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0 +2023-03-28 22:50:10,968 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51062.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:51:04,967 INFO [train.py:892] (1/4) Epoch 28, batch 1000, loss[loss=0.1588, simple_loss=0.2328, pruned_loss=0.0424, over 19805.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2457, pruned_loss=0.04625, over 3923689.01 frames. 
], batch size: 132, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:52:34,543 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 3.916e+02 4.760e+02 5.853e+02 1.174e+03, threshold=9.520e+02, percent-clipped=7.0 +2023-03-28 22:52:35,911 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51123.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:52:54,329 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6466, 3.2761, 3.5986, 3.1784, 3.9055, 3.8913, 4.5079, 4.9545], + device='cuda:1'), covar=tensor([0.0516, 0.1693, 0.1463, 0.2100, 0.1668, 0.1343, 0.0600, 0.0546], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0239, 0.0263, 0.0252, 0.0294, 0.0252, 0.0228, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 22:53:06,009 INFO [train.py:892] (1/4) Epoch 28, batch 1050, loss[loss=0.1383, simple_loss=0.2199, pruned_loss=0.02833, over 19882.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2462, pruned_loss=0.04644, over 3929540.08 frames. ], batch size: 88, lr: 5.59e-03, grad_scale: 16.0 +2023-03-28 22:53:11,915 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51138.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:54:08,775 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51164.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 22:54:25,393 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51172.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:55:00,724 INFO [train.py:892] (1/4) Epoch 28, batch 1100, loss[loss=0.1534, simple_loss=0.2289, pruned_loss=0.03897, over 19729.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.246, pruned_loss=0.04619, over 3933931.49 frames. 
], batch size: 80, lr: 5.59e-03, grad_scale: 16.0 +2023-03-28 22:55:40,156 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51202.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:55:47,852 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4958, 3.6721, 2.2077, 4.3288, 3.7154, 4.3223, 4.3286, 3.3120], + device='cuda:1'), covar=tensor([0.0578, 0.0559, 0.1631, 0.0600, 0.0632, 0.0400, 0.0585, 0.0764], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0141, 0.0141, 0.0148, 0.0130, 0.0130, 0.0144, 0.0142], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 22:55:56,296 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4599, 5.7475, 6.0036, 5.8605, 5.6703, 5.6051, 5.6219, 5.6012], + device='cuda:1'), covar=tensor([0.1345, 0.1358, 0.0760, 0.1013, 0.0630, 0.0786, 0.1860, 0.1873], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0325, 0.0366, 0.0293, 0.0272, 0.0274, 0.0353, 0.0389], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-28 22:56:33,436 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2909, 3.0046, 3.3456, 2.5365, 3.6043, 2.8538, 3.2223, 3.3632], + device='cuda:1'), covar=tensor([0.0594, 0.0571, 0.0559, 0.0802, 0.0283, 0.0540, 0.0434, 0.0344], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0082, 0.0081, 0.0107, 0.0076, 0.0079, 0.0076, 0.0069], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 22:56:34,344 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.745e+02 3.704e+02 4.632e+02 5.921e+02 1.286e+03, threshold=9.265e+02, percent-clipped=1.0 +2023-03-28 22:56:57,457 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51233.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:57:02,647 INFO [train.py:892] (1/4) Epoch 28, batch 1150, loss[loss=0.1956, simple_loss=0.2646, pruned_loss=0.0633, over 19741.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2454, pruned_loss=0.04623, over 3938710.91 frames. ], batch size: 291, lr: 5.59e-03, grad_scale: 16.0 +2023-03-28 22:57:43,006 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51250.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:59:16,865 INFO [train.py:892] (1/4) Epoch 28, batch 1200, loss[loss=0.1629, simple_loss=0.2391, pruned_loss=0.04339, over 19904.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2455, pruned_loss=0.04568, over 3940628.84 frames. ], batch size: 116, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 22:59:56,099 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51302.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:00:45,071 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.551e+02 3.659e+02 4.255e+02 5.414e+02 8.940e+02, threshold=8.510e+02, percent-clipped=0.0 +2023-03-28 23:01:16,027 INFO [train.py:892] (1/4) Epoch 28, batch 1250, loss[loss=0.1673, simple_loss=0.2336, pruned_loss=0.05052, over 19741.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.245, pruned_loss=0.04575, over 3941753.67 frames. 
], batch size: 140, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 23:03:06,404 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51382.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:03:13,713 INFO [train.py:892] (1/4) Epoch 28, batch 1300, loss[loss=0.1673, simple_loss=0.2385, pruned_loss=0.048, over 19800.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2458, pruned_loss=0.04611, over 3943018.91 frames. ], batch size: 211, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 23:03:31,709 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51392.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:04:20,075 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-28 23:04:35,504 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51418.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:04:45,692 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.672e+02 4.378e+02 5.169e+02 6.239e+02 1.127e+03, threshold=1.034e+03, percent-clipped=3.0 +2023-03-28 23:05:20,064 INFO [train.py:892] (1/4) Epoch 28, batch 1350, loss[loss=0.1505, simple_loss=0.2235, pruned_loss=0.03873, over 19733.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2458, pruned_loss=0.04646, over 3944563.92 frames. ], batch size: 77, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 23:05:25,358 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51438.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:05:34,833 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9428, 3.1556, 2.7643, 2.3865, 2.7260, 3.0233, 3.0087, 3.1265], + device='cuda:1'), covar=tensor([0.0292, 0.0374, 0.0307, 0.0500, 0.0372, 0.0294, 0.0251, 0.0230], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0093, 0.0096, 0.0099, 0.0102, 0.0082, 0.0082, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 23:05:36,739 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3144, 2.5269, 3.9663, 3.5751, 3.9160, 4.0292, 3.8764, 3.7415], + device='cuda:1'), covar=tensor([0.0476, 0.0955, 0.0116, 0.0646, 0.0146, 0.0242, 0.0181, 0.0184], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0103, 0.0087, 0.0154, 0.0084, 0.0098, 0.0090, 0.0087], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 23:05:36,806 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51443.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:05:59,712 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51453.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:06:23,801 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8356, 3.9142, 2.3642, 4.1670, 4.3232, 1.9812, 3.4648, 3.2116], + device='cuda:1'), covar=tensor([0.0734, 0.0808, 0.2823, 0.0705, 0.0450, 0.2721, 0.1170, 0.0903], + device='cuda:1'), in_proj_covar=tensor([0.0231, 0.0254, 0.0229, 0.0270, 0.0250, 0.0203, 0.0239, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 23:06:25,835 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51464.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 23:07:22,887 INFO [train.py:892] (1/4) Epoch 28, batch 1400, loss[loss=0.1545, 
simple_loss=0.2268, pruned_loss=0.04115, over 19635.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2458, pruned_loss=0.04678, over 3945828.89 frames. ], batch size: 72, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:07:23,985 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51486.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:07:44,124 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51495.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:08:23,173 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51512.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 23:08:48,582 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.195e+02 3.889e+02 4.620e+02 5.536e+02 9.901e+02, threshold=9.240e+02, percent-clipped=0.0 +2023-03-28 23:09:01,317 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51528.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 23:09:20,635 INFO [train.py:892] (1/4) Epoch 28, batch 1450, loss[loss=0.1729, simple_loss=0.2541, pruned_loss=0.04585, over 19563.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2463, pruned_loss=0.04668, over 3947181.36 frames. ], batch size: 60, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:10:13,927 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51556.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:10:43,733 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6029, 3.6245, 2.4247, 4.3232, 3.8977, 4.2176, 4.3142, 3.4066], + device='cuda:1'), covar=tensor([0.0561, 0.0585, 0.1359, 0.0587, 0.0583, 0.0448, 0.0584, 0.0686], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0142, 0.0141, 0.0149, 0.0131, 0.0132, 0.0145, 0.0143], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 23:11:27,578 INFO [train.py:892] (1/4) Epoch 28, batch 1500, loss[loss=0.1496, simple_loss=0.2196, pruned_loss=0.03978, over 19773.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2468, pruned_loss=0.04682, over 3946221.49 frames. 
], batch size: 169, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:11:51,975 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9362, 3.1334, 2.8347, 2.3478, 2.8349, 3.1739, 3.0298, 2.9922], + device='cuda:1'), covar=tensor([0.0283, 0.0369, 0.0276, 0.0531, 0.0348, 0.0261, 0.0246, 0.0281], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0093, 0.0097, 0.0100, 0.0102, 0.0083, 0.0082, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 23:11:54,119 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5017, 3.6624, 2.3654, 4.3586, 3.8616, 4.2790, 4.3883, 3.3254], + device='cuda:1'), covar=tensor([0.0633, 0.0544, 0.1586, 0.0591, 0.0614, 0.0446, 0.0459, 0.0777], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0142, 0.0141, 0.0150, 0.0131, 0.0132, 0.0146, 0.0143], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 23:12:07,144 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51602.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:13:00,838 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.657e+02 3.781e+02 4.719e+02 5.439e+02 9.463e+02, threshold=9.439e+02, percent-clipped=1.0 +2023-03-28 23:13:02,670 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-28 23:13:30,377 INFO [train.py:892] (1/4) Epoch 28, batch 1550, loss[loss=0.1683, simple_loss=0.2399, pruned_loss=0.04829, over 19844.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2455, pruned_loss=0.04608, over 3947357.31 frames. ], batch size: 190, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:14:07,641 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51650.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:15:35,022 INFO [train.py:892] (1/4) Epoch 28, batch 1600, loss[loss=0.1678, simple_loss=0.241, pruned_loss=0.04727, over 19793.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2456, pruned_loss=0.04563, over 3946610.96 frames. ], batch size: 73, lr: 5.56e-03, grad_scale: 16.0 +2023-03-28 23:16:12,107 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7739, 2.7552, 1.6924, 3.2532, 2.9387, 3.1445, 3.2534, 2.6056], + device='cuda:1'), covar=tensor([0.0709, 0.0740, 0.1856, 0.0674, 0.0788, 0.0552, 0.0694, 0.0909], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0143, 0.0142, 0.0150, 0.0132, 0.0132, 0.0146, 0.0144], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 23:16:54,150 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51718.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:17:03,722 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.853e+02 3.747e+02 4.417e+02 5.191e+02 1.106e+03, threshold=8.834e+02, percent-clipped=1.0 +2023-03-28 23:17:34,806 INFO [train.py:892] (1/4) Epoch 28, batch 1650, loss[loss=0.1539, simple_loss=0.2389, pruned_loss=0.03446, over 19793.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2448, pruned_loss=0.04508, over 3947003.38 frames. 
], batch size: 51, lr: 5.56e-03, grad_scale: 16.0 +2023-03-28 23:17:40,908 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51738.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:18:00,911 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51747.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:18:03,582 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51748.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:18:50,854 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51766.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:19:37,703 INFO [train.py:892] (1/4) Epoch 28, batch 1700, loss[loss=0.1822, simple_loss=0.2587, pruned_loss=0.05287, over 19882.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2463, pruned_loss=0.04588, over 3947114.53 frames. ], batch size: 92, lr: 5.56e-03, grad_scale: 16.0 +2023-03-28 23:20:23,226 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-03-28 23:20:24,780 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8032, 2.3314, 2.6799, 3.0371, 3.4958, 3.5960, 3.6267, 3.6213], + device='cuda:1'), covar=tensor([0.0951, 0.1569, 0.1222, 0.0662, 0.0401, 0.0285, 0.0336, 0.0409], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0171, 0.0178, 0.0150, 0.0135, 0.0130, 0.0122, 0.0115], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 23:20:33,830 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9857, 2.4408, 2.8648, 3.2713, 3.6882, 4.0731, 4.0301, 3.9293], + device='cuda:1'), covar=tensor([0.0890, 0.1612, 0.1210, 0.0595, 0.0359, 0.0238, 0.0288, 0.0424], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0171, 0.0178, 0.0150, 0.0134, 0.0130, 0.0121, 0.0114], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 23:20:36,188 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51808.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:21:06,661 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 3.915e+02 4.350e+02 5.709e+02 1.199e+03, threshold=8.700e+02, percent-clipped=1.0 +2023-03-28 23:21:17,088 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51828.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 23:21:33,055 INFO [train.py:892] (1/4) Epoch 28, batch 1750, loss[loss=0.1581, simple_loss=0.2394, pruned_loss=0.03842, over 19835.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2456, pruned_loss=0.0453, over 3948004.63 frames. ], batch size: 75, lr: 5.55e-03, grad_scale: 16.0 +2023-03-28 23:22:05,878 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51851.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:22:53,508 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51876.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 23:23:13,464 INFO [train.py:892] (1/4) Epoch 28, batch 1800, loss[loss=0.1514, simple_loss=0.2243, pruned_loss=0.03928, over 19864.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2462, pruned_loss=0.04529, over 3946258.97 frames. 
], batch size: 136, lr: 5.55e-03, grad_scale: 16.0 +2023-03-28 23:24:26,510 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.458e+02 3.828e+02 4.368e+02 5.479e+02 1.522e+03, threshold=8.737e+02, percent-clipped=1.0 +2023-03-28 23:24:33,102 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3150, 4.8381, 4.8843, 4.6446, 5.2029, 3.2470, 4.0997, 2.6903], + device='cuda:1'), covar=tensor([0.0195, 0.0190, 0.0161, 0.0214, 0.0149, 0.0964, 0.1059, 0.1570], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0144, 0.0113, 0.0134, 0.0119, 0.0134, 0.0142, 0.0128], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 23:24:52,033 INFO [train.py:892] (1/4) Epoch 28, batch 1850, loss[loss=0.1658, simple_loss=0.2508, pruned_loss=0.0404, over 19825.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2468, pruned_loss=0.04474, over 3946794.91 frames. ], batch size: 57, lr: 5.55e-03, grad_scale: 16.0 +2023-03-28 23:25:59,252 INFO [train.py:892] (1/4) Epoch 29, batch 0, loss[loss=0.1566, simple_loss=0.2329, pruned_loss=0.04022, over 19835.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2329, pruned_loss=0.04022, over 19835.00 frames. ], batch size: 184, lr: 5.45e-03, grad_scale: 16.0 +2023-03-28 23:25:59,253 INFO [train.py:917] (1/4) Computing validation loss +2023-03-28 23:26:37,598 INFO [train.py:926] (1/4) Epoch 29, validation: loss=0.1782, simple_loss=0.2489, pruned_loss=0.05378, over 2883724.00 frames. +2023-03-28 23:26:37,607 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-28 23:28:41,212 INFO [train.py:892] (1/4) Epoch 29, batch 50, loss[loss=0.2355, simple_loss=0.3094, pruned_loss=0.08076, over 19472.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2442, pruned_loss=0.04467, over 889003.97 frames. ], batch size: 396, lr: 5.45e-03, grad_scale: 16.0 +2023-03-28 23:29:49,408 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.56 vs. limit=5.0 +2023-03-28 23:30:05,630 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.398e+02 3.680e+02 4.499e+02 5.217e+02 8.656e+02, threshold=8.998e+02, percent-clipped=0.0 +2023-03-28 23:30:16,640 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9311, 2.4228, 3.0646, 3.0825, 3.7324, 4.2313, 4.1278, 4.1154], + device='cuda:1'), covar=tensor([0.1001, 0.1864, 0.1321, 0.0772, 0.0396, 0.0231, 0.0307, 0.0414], + device='cuda:1'), in_proj_covar=tensor([0.0159, 0.0171, 0.0178, 0.0151, 0.0135, 0.0130, 0.0122, 0.0115], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-28 23:30:44,613 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52038.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:30:49,118 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52040.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:30:50,312 INFO [train.py:892] (1/4) Epoch 29, batch 100, loss[loss=0.1594, simple_loss=0.2352, pruned_loss=0.04178, over 19798.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2433, pruned_loss=0.04434, over 1567551.07 frames. 
], batch size: 224, lr: 5.45e-03, grad_scale: 16.0 +2023-03-28 23:31:10,346 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52048.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:32:05,826 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6096, 2.7822, 4.4687, 3.9478, 4.2801, 4.4582, 4.3435, 4.1378], + device='cuda:1'), covar=tensor([0.0490, 0.0892, 0.0108, 0.0711, 0.0149, 0.0200, 0.0162, 0.0175], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0102, 0.0086, 0.0153, 0.0084, 0.0097, 0.0090, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 23:32:43,559 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52086.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:32:54,815 INFO [train.py:892] (1/4) Epoch 29, batch 150, loss[loss=0.1632, simple_loss=0.2442, pruned_loss=0.04104, over 19886.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.247, pruned_loss=0.0469, over 2093952.43 frames. ], batch size: 97, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:33:10,141 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52096.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:33:21,737 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52101.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:33:27,850 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52103.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:34:15,958 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.333e+02 4.011e+02 4.815e+02 5.907e+02 9.716e+02, threshold=9.631e+02, percent-clipped=1.0 +2023-03-28 23:34:59,440 INFO [train.py:892] (1/4) Epoch 29, batch 200, loss[loss=0.1694, simple_loss=0.2525, pruned_loss=0.04311, over 19656.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2441, pruned_loss=0.04473, over 2505892.93 frames. ], batch size: 66, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:35:24,714 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52151.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:35:25,347 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.67 vs. limit=5.0 +2023-03-28 23:36:59,111 INFO [train.py:892] (1/4) Epoch 29, batch 250, loss[loss=0.1657, simple_loss=0.2549, pruned_loss=0.03827, over 19775.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2431, pruned_loss=0.04424, over 2825664.62 frames. ], batch size: 53, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:37:05,221 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52193.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:37:17,891 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52199.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:38:17,970 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.564e+02 3.825e+02 4.477e+02 5.151e+02 8.477e+02, threshold=8.954e+02, percent-clipped=0.0 +2023-03-28 23:39:01,936 INFO [train.py:892] (1/4) Epoch 29, batch 300, loss[loss=0.1635, simple_loss=0.2387, pruned_loss=0.04413, over 19851.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2433, pruned_loss=0.04465, over 3076287.38 frames. 
], batch size: 190, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:39:35,726 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52254.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:40:39,224 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-28 23:41:06,881 INFO [train.py:892] (1/4) Epoch 29, batch 350, loss[loss=0.1771, simple_loss=0.2539, pruned_loss=0.0502, over 19713.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2441, pruned_loss=0.04479, over 3269102.77 frames. ], batch size: 283, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:42:02,695 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52312.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:42:30,778 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.445e+02 3.965e+02 4.571e+02 5.589e+02 1.011e+03, threshold=9.142e+02, percent-clipped=1.0 +2023-03-28 23:43:07,790 INFO [train.py:892] (1/4) Epoch 29, batch 400, loss[loss=0.167, simple_loss=0.2412, pruned_loss=0.04639, over 19782.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2443, pruned_loss=0.04503, over 3420111.39 frames. ], batch size: 191, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:44:27,303 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52373.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:45:13,601 INFO [train.py:892] (1/4) Epoch 29, batch 450, loss[loss=0.2309, simple_loss=0.3047, pruned_loss=0.0786, over 19617.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2457, pruned_loss=0.04542, over 3538246.48 frames. ], batch size: 351, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:45:28,357 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52396.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:45:46,518 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52403.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:46:29,424 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9916, 4.6030, 4.6809, 4.4154, 4.8786, 3.1540, 3.9850, 2.6565], + device='cuda:1'), covar=tensor([0.0140, 0.0192, 0.0126, 0.0190, 0.0136, 0.0926, 0.0821, 0.1346], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0144, 0.0113, 0.0133, 0.0119, 0.0134, 0.0142, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 23:46:38,530 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.695e+02 3.819e+02 4.668e+02 5.658e+02 1.148e+03, threshold=9.335e+02, percent-clipped=4.0 +2023-03-28 23:46:42,076 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. 
limit=2.0 +2023-03-28 23:47:02,821 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9687, 2.5007, 3.9112, 3.4625, 3.8259, 3.8931, 3.7016, 3.6652], + device='cuda:1'), covar=tensor([0.0591, 0.0981, 0.0108, 0.0541, 0.0142, 0.0233, 0.0187, 0.0184], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0102, 0.0087, 0.0154, 0.0084, 0.0098, 0.0090, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 23:47:22,615 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9674, 3.9766, 2.3857, 4.2178, 4.3609, 1.9373, 3.5776, 3.3637], + device='cuda:1'), covar=tensor([0.0712, 0.0882, 0.2842, 0.0839, 0.0615, 0.2716, 0.1041, 0.0825], + device='cuda:1'), in_proj_covar=tensor([0.0232, 0.0256, 0.0229, 0.0272, 0.0252, 0.0203, 0.0238, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 23:47:23,521 INFO [train.py:892] (1/4) Epoch 29, batch 500, loss[loss=0.1438, simple_loss=0.2166, pruned_loss=0.03548, over 19897.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2462, pruned_loss=0.04574, over 3629564.18 frames. ], batch size: 113, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:47:50,900 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52451.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:49:23,599 INFO [train.py:892] (1/4) Epoch 29, batch 550, loss[loss=0.1582, simple_loss=0.2357, pruned_loss=0.04038, over 19895.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2463, pruned_loss=0.04617, over 3700393.05 frames. ], batch size: 87, lr: 5.42e-03, grad_scale: 8.0 +2023-03-28 23:50:29,925 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8823, 3.8595, 2.3179, 4.1123, 4.2641, 1.8704, 3.4542, 3.2084], + device='cuda:1'), covar=tensor([0.0664, 0.0918, 0.2912, 0.0758, 0.0542, 0.3013, 0.1163, 0.0890], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0255, 0.0230, 0.0273, 0.0252, 0.0204, 0.0238, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-28 23:50:40,690 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7254, 4.5537, 5.1452, 4.6067, 4.2462, 4.8697, 4.7770, 5.2762], + device='cuda:1'), covar=tensor([0.0868, 0.0378, 0.0330, 0.0379, 0.0805, 0.0457, 0.0422, 0.0306], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0221, 0.0220, 0.0233, 0.0206, 0.0238, 0.0230, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-28 23:50:45,582 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.600e+02 4.376e+02 5.079e+02 6.009e+02 1.236e+03, threshold=1.016e+03, percent-clipped=4.0 +2023-03-28 23:51:25,999 INFO [train.py:892] (1/4) Epoch 29, batch 600, loss[loss=0.1342, simple_loss=0.2174, pruned_loss=0.02551, over 19901.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2457, pruned_loss=0.04598, over 3757602.14 frames. ], batch size: 116, lr: 5.42e-03, grad_scale: 8.0 +2023-03-28 23:51:36,650 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. 
limit=2.0 +2023-03-28 23:51:48,603 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52549.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:52:05,041 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52555.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 23:53:25,790 INFO [train.py:892] (1/4) Epoch 29, batch 650, loss[loss=0.1751, simple_loss=0.2462, pruned_loss=0.05195, over 19830.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2454, pruned_loss=0.04566, over 3800626.79 frames. ], batch size: 121, lr: 5.42e-03, grad_scale: 8.0 +2023-03-28 23:54:27,306 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52616.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 23:54:45,470 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.294e+02 3.664e+02 4.679e+02 5.983e+02 1.020e+03, threshold=9.358e+02, percent-clipped=1.0 +2023-03-28 23:55:31,437 INFO [train.py:892] (1/4) Epoch 29, batch 700, loss[loss=0.1446, simple_loss=0.2198, pruned_loss=0.03467, over 19808.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.244, pruned_loss=0.04475, over 3834616.51 frames. ], batch size: 148, lr: 5.41e-03, grad_scale: 8.0 +2023-03-28 23:55:56,540 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.72 vs. limit=5.0 +2023-03-28 23:56:37,544 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-28 23:56:40,086 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52668.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:57:32,156 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.64 vs. limit=5.0 +2023-03-28 23:57:34,772 INFO [train.py:892] (1/4) Epoch 29, batch 750, loss[loss=0.1566, simple_loss=0.2274, pruned_loss=0.04291, over 19822.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2428, pruned_loss=0.04419, over 3861259.13 frames. ], batch size: 187, lr: 5.41e-03, grad_scale: 8.0 +2023-03-28 23:57:46,832 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52696.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:58:51,488 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.283e+02 3.891e+02 4.308e+02 5.320e+02 9.738e+02, threshold=8.617e+02, percent-clipped=1.0 +2023-03-28 23:59:16,870 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-28 23:59:30,959 INFO [train.py:892] (1/4) Epoch 29, batch 800, loss[loss=0.1871, simple_loss=0.2673, pruned_loss=0.0534, over 19658.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2421, pruned_loss=0.04393, over 3881954.25 frames. ], batch size: 299, lr: 5.41e-03, grad_scale: 8.0 +2023-03-28 23:59:38,695 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52744.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:00:01,487 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52752.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:01:37,842 INFO [train.py:892] (1/4) Epoch 29, batch 850, loss[loss=0.2375, simple_loss=0.3105, pruned_loss=0.08227, over 19436.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2429, pruned_loss=0.0442, over 3897787.86 frames. 
], batch size: 412, lr: 5.41e-03, grad_scale: 8.0 +2023-03-29 00:01:46,634 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0005, 2.5015, 2.9201, 3.2997, 3.8141, 4.1180, 3.9795, 4.0307], + device='cuda:1'), covar=tensor([0.0871, 0.1625, 0.1296, 0.0595, 0.0346, 0.0214, 0.0376, 0.0400], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0168, 0.0175, 0.0148, 0.0131, 0.0128, 0.0120, 0.0112], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:02:06,043 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-29 00:02:27,739 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-29 00:02:31,952 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4447, 4.3405, 4.7613, 4.3222, 3.9988, 4.5555, 4.4330, 4.8579], + device='cuda:1'), covar=tensor([0.0759, 0.0344, 0.0335, 0.0405, 0.0963, 0.0492, 0.0479, 0.0335], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0220, 0.0220, 0.0232, 0.0206, 0.0239, 0.0230, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:02:32,081 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52813.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:02:58,927 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.946e+02 3.447e+02 4.095e+02 4.667e+02 7.447e+02, threshold=8.190e+02, percent-clipped=0.0 +2023-03-29 00:03:38,963 INFO [train.py:892] (1/4) Epoch 29, batch 900, loss[loss=0.1996, simple_loss=0.3096, pruned_loss=0.04482, over 18719.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2413, pruned_loss=0.04339, over 3909668.56 frames. ], batch size: 564, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:03:58,510 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52849.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:04:06,953 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0255, 4.2220, 4.2567, 4.1202, 4.0384, 4.1992, 3.7623, 3.7919], + device='cuda:1'), covar=tensor([0.0566, 0.0538, 0.0556, 0.0506, 0.0682, 0.0581, 0.0721, 0.1104], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0280, 0.0292, 0.0254, 0.0258, 0.0244, 0.0263, 0.0307], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:05:35,933 INFO [train.py:892] (1/4) Epoch 29, batch 950, loss[loss=0.1672, simple_loss=0.243, pruned_loss=0.0457, over 19733.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2417, pruned_loss=0.04344, over 3918289.86 frames. 
], batch size: 47, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:05:52,717 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52897.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:06:28,171 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52911.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 00:06:58,679 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.525e+02 3.679e+02 4.624e+02 5.272e+02 1.115e+03, threshold=9.248e+02, percent-clipped=1.0 +2023-03-29 00:07:29,082 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.5404, 5.8547, 6.0707, 5.8334, 5.8011, 5.6571, 5.7401, 5.6030], + device='cuda:1'), covar=tensor([0.1327, 0.1192, 0.0760, 0.1107, 0.0593, 0.0689, 0.1725, 0.1829], + device='cuda:1'), in_proj_covar=tensor([0.0291, 0.0326, 0.0367, 0.0295, 0.0273, 0.0275, 0.0355, 0.0388], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:07:41,659 INFO [train.py:892] (1/4) Epoch 29, batch 1000, loss[loss=0.1328, simple_loss=0.2104, pruned_loss=0.02764, over 19810.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2421, pruned_loss=0.04363, over 3924165.89 frames. ], batch size: 96, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:07:42,975 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.98 vs. limit=5.0 +2023-03-29 00:08:46,571 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52968.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:09:39,928 INFO [train.py:892] (1/4) Epoch 29, batch 1050, loss[loss=0.1591, simple_loss=0.2363, pruned_loss=0.0409, over 19655.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2428, pruned_loss=0.04401, over 3929626.45 frames. ], batch size: 43, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:10:17,078 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3232, 2.9408, 3.3170, 2.9522, 3.4567, 3.4486, 4.1171, 4.5900], + device='cuda:1'), covar=tensor([0.0531, 0.1766, 0.1484, 0.2220, 0.1727, 0.1586, 0.0649, 0.0543], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0240, 0.0267, 0.0254, 0.0296, 0.0256, 0.0231, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:10:42,613 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53016.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:10:42,755 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9541, 3.8332, 4.2474, 3.8869, 3.5884, 4.0838, 3.9672, 4.3135], + device='cuda:1'), covar=tensor([0.0767, 0.0375, 0.0355, 0.0393, 0.1180, 0.0569, 0.0457, 0.0350], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0219, 0.0218, 0.0230, 0.0204, 0.0236, 0.0228, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:10:59,361 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.507e+02 3.902e+02 4.763e+02 5.659e+02 9.296e+02, threshold=9.526e+02, percent-clipped=1.0 +2023-03-29 00:11:39,433 INFO [train.py:892] (1/4) Epoch 29, batch 1100, loss[loss=0.1666, simple_loss=0.2394, pruned_loss=0.04683, over 19803.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2428, pruned_loss=0.0441, over 3934568.45 frames. 
], batch size: 117, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:12:18,057 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53056.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:12:20,331 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0711, 2.8875, 3.1723, 2.9010, 3.4126, 3.3287, 3.9262, 4.3424], + device='cuda:1'), covar=tensor([0.0677, 0.1783, 0.1704, 0.2048, 0.1557, 0.1490, 0.0677, 0.0636], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0239, 0.0266, 0.0254, 0.0295, 0.0255, 0.0230, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:13:40,656 INFO [train.py:892] (1/4) Epoch 29, batch 1150, loss[loss=0.1505, simple_loss=0.2347, pruned_loss=0.03312, over 19876.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2423, pruned_loss=0.04409, over 3938496.17 frames. ], batch size: 95, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:14:25,720 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53108.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:14:44,073 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.16 vs. limit=5.0 +2023-03-29 00:14:48,417 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53117.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:15:03,923 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.770e+02 4.130e+02 4.907e+02 5.867e+02 8.841e+02, threshold=9.813e+02, percent-clipped=0.0 +2023-03-29 00:15:41,714 INFO [train.py:892] (1/4) Epoch 29, batch 1200, loss[loss=0.15, simple_loss=0.2341, pruned_loss=0.03296, over 19802.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2423, pruned_loss=0.04391, over 3941086.90 frames. ], batch size: 40, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:15:56,094 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7834, 3.2229, 3.1999, 3.8028, 2.6329, 3.2803, 2.4026, 2.3403], + device='cuda:1'), covar=tensor([0.0574, 0.1622, 0.1061, 0.0405, 0.2068, 0.0815, 0.1475, 0.1726], + device='cuda:1'), in_proj_covar=tensor([0.0240, 0.0330, 0.0244, 0.0199, 0.0245, 0.0205, 0.0214, 0.0214], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:15:57,861 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4615, 4.1277, 4.2275, 4.4184, 4.2128, 4.5291, 4.5475, 4.7813], + device='cuda:1'), covar=tensor([0.0619, 0.0439, 0.0514, 0.0383, 0.0722, 0.0466, 0.0440, 0.0284], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0174, 0.0201, 0.0174, 0.0171, 0.0157, 0.0149, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 00:17:43,324 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-29 00:17:48,615 INFO [train.py:892] (1/4) Epoch 29, batch 1250, loss[loss=0.152, simple_loss=0.2197, pruned_loss=0.04217, over 19837.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2423, pruned_loss=0.04369, over 3942065.57 frames. ], batch size: 177, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:18:19,768 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-03-29 00:18:38,658 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53211.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 00:18:44,084 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0849, 2.8152, 3.1547, 2.8095, 3.3597, 3.3337, 3.9152, 4.3502], + device='cuda:1'), covar=tensor([0.0624, 0.1772, 0.1629, 0.2100, 0.1569, 0.1331, 0.0636, 0.0503], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0237, 0.0263, 0.0251, 0.0292, 0.0253, 0.0228, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:19:09,958 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.614e+02 3.724e+02 4.127e+02 5.069e+02 9.146e+02, threshold=8.253e+02, percent-clipped=0.0 +2023-03-29 00:19:26,616 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8699, 3.1443, 2.6281, 2.2559, 2.7650, 3.0099, 3.0288, 3.0543], + device='cuda:1'), covar=tensor([0.0283, 0.0282, 0.0330, 0.0556, 0.0350, 0.0283, 0.0236, 0.0241], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0096, 0.0098, 0.0101, 0.0104, 0.0085, 0.0084, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:19:52,128 INFO [train.py:892] (1/4) Epoch 29, batch 1300, loss[loss=0.1521, simple_loss=0.2322, pruned_loss=0.036, over 19752.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.242, pruned_loss=0.0435, over 3943790.65 frames. ], batch size: 100, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:19:53,206 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53241.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:20:39,676 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53259.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 00:21:58,559 INFO [train.py:892] (1/4) Epoch 29, batch 1350, loss[loss=0.2158, simple_loss=0.2897, pruned_loss=0.0709, over 19631.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2424, pruned_loss=0.0437, over 3945811.09 frames. ], batch size: 367, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:22:13,999 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9539, 2.5587, 4.0221, 3.6438, 3.9037, 4.0006, 3.8929, 3.7990], + device='cuda:1'), covar=tensor([0.0671, 0.0963, 0.0140, 0.0674, 0.0188, 0.0229, 0.0189, 0.0187], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0103, 0.0088, 0.0155, 0.0085, 0.0098, 0.0090, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:22:26,066 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53302.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:22:30,784 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53304.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:22:59,237 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53315.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:23:19,509 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 3.937e+02 4.567e+02 5.405e+02 8.872e+02, threshold=9.134e+02, percent-clipped=4.0 +2023-03-29 00:24:04,171 INFO [train.py:892] (1/4) Epoch 29, batch 1400, loss[loss=0.1512, simple_loss=0.2313, pruned_loss=0.03559, over 19654.00 frames. 
], tot_loss[loss=0.1656, simple_loss=0.2433, pruned_loss=0.04395, over 3946414.25 frames. ], batch size: 66, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:24:49,455 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3574, 2.3407, 2.5040, 2.3954, 2.4769, 2.4873, 2.4079, 2.4165], + device='cuda:1'), covar=tensor([0.0357, 0.0351, 0.0323, 0.0363, 0.0424, 0.0349, 0.0445, 0.0471], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0077, 0.0080, 0.0074, 0.0087, 0.0080, 0.0097, 0.0070], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:24:49,839 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 00:25:04,810 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53365.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:25:09,268 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53367.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:25:14,722 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6288, 2.9958, 2.6327, 2.1593, 2.6831, 2.8900, 2.9004, 2.9576], + device='cuda:1'), covar=tensor([0.0389, 0.0363, 0.0325, 0.0595, 0.0398, 0.0325, 0.0285, 0.0243], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0096, 0.0099, 0.0102, 0.0105, 0.0086, 0.0085, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:25:31,035 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53376.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:26:04,830 INFO [train.py:892] (1/4) Epoch 29, batch 1450, loss[loss=0.1778, simple_loss=0.2614, pruned_loss=0.04707, over 19886.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2441, pruned_loss=0.04387, over 3946339.00 frames. ], batch size: 84, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:26:26,999 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.65 vs. limit=5.0 +2023-03-29 00:26:48,417 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53408.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:26:59,134 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53412.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:27:27,175 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.651e+02 3.715e+02 4.371e+02 5.322e+02 9.816e+02, threshold=8.743e+02, percent-clipped=2.0 +2023-03-29 00:27:38,863 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53428.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 00:27:56,285 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2736, 2.8841, 3.3552, 3.0200, 3.5660, 3.5305, 4.0999, 4.5327], + device='cuda:1'), covar=tensor([0.0560, 0.1792, 0.1565, 0.2039, 0.1722, 0.1473, 0.0641, 0.0564], + device='cuda:1'), in_proj_covar=tensor([0.0248, 0.0238, 0.0265, 0.0251, 0.0293, 0.0253, 0.0230, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:28:09,504 INFO [train.py:892] (1/4) Epoch 29, batch 1500, loss[loss=0.1531, simple_loss=0.2371, pruned_loss=0.03452, over 19954.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2434, pruned_loss=0.04365, over 3948466.30 frames. 
], batch size: 53, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:28:22,308 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-03-29 00:28:26,697 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0776, 4.7435, 4.8018, 5.1154, 4.7196, 5.3407, 5.1906, 5.4368], + device='cuda:1'), covar=tensor([0.0620, 0.0393, 0.0434, 0.0324, 0.0650, 0.0403, 0.0408, 0.0309], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0174, 0.0200, 0.0174, 0.0171, 0.0156, 0.0150, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 00:28:49,111 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53456.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:30:14,161 INFO [train.py:892] (1/4) Epoch 29, batch 1550, loss[loss=0.1608, simple_loss=0.2476, pruned_loss=0.03703, over 19682.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2427, pruned_loss=0.04337, over 3949185.97 frames. ], batch size: 52, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:31:28,870 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.518e+02 3.925e+02 4.450e+02 5.196e+02 1.158e+03, threshold=8.900e+02, percent-clipped=2.0 +2023-03-29 00:32:15,042 INFO [train.py:892] (1/4) Epoch 29, batch 1600, loss[loss=0.2035, simple_loss=0.2835, pruned_loss=0.06177, over 19676.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2433, pruned_loss=0.04348, over 3950646.86 frames. ], batch size: 325, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:34:05,316 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7390, 3.2809, 3.6156, 3.2954, 4.0050, 4.1114, 4.4665, 5.0868], + device='cuda:1'), covar=tensor([0.0461, 0.1566, 0.1400, 0.1990, 0.1550, 0.1091, 0.0577, 0.0424], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0239, 0.0267, 0.0254, 0.0296, 0.0255, 0.0231, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:34:19,708 INFO [train.py:892] (1/4) Epoch 29, batch 1650, loss[loss=0.1718, simple_loss=0.2445, pruned_loss=0.04953, over 19826.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2443, pruned_loss=0.04385, over 3948156.70 frames. ], batch size: 184, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:34:25,903 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7238, 3.7308, 2.2559, 4.0236, 4.1362, 1.9111, 3.3295, 3.2141], + device='cuda:1'), covar=tensor([0.0711, 0.0889, 0.2698, 0.0746, 0.0569, 0.2794, 0.1174, 0.0845], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0255, 0.0230, 0.0273, 0.0253, 0.0204, 0.0239, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 00:34:36,501 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53597.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:35:41,049 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.655e+02 3.870e+02 4.535e+02 5.487e+02 1.255e+03, threshold=9.070e+02, percent-clipped=2.0 +2023-03-29 00:36:23,490 INFO [train.py:892] (1/4) Epoch 29, batch 1700, loss[loss=0.1333, simple_loss=0.2121, pruned_loss=0.02721, over 19729.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2443, pruned_loss=0.04378, over 3948614.25 frames. 
], batch size: 47, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:37:12,307 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53660.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:37:40,356 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53671.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:38:02,331 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-29 00:38:16,868 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-29 00:38:27,185 INFO [train.py:892] (1/4) Epoch 29, batch 1750, loss[loss=0.2016, simple_loss=0.302, pruned_loss=0.05065, over 18918.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2456, pruned_loss=0.04449, over 3946773.05 frames. ], batch size: 514, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:39:14,836 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53712.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:39:37,571 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53723.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 00:39:39,000 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.549e+02 3.643e+02 4.593e+02 5.639e+02 1.111e+03, threshold=9.186e+02, percent-clipped=1.0 +2023-03-29 00:40:13,010 INFO [train.py:892] (1/4) Epoch 29, batch 1800, loss[loss=0.1505, simple_loss=0.2335, pruned_loss=0.03377, over 19755.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2443, pruned_loss=0.04381, over 3948581.67 frames. ], batch size: 88, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:40:53,938 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53760.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:41:55,179 INFO [train.py:892] (1/4) Epoch 29, batch 1850, loss[loss=0.1752, simple_loss=0.2604, pruned_loss=0.04501, over 19674.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2451, pruned_loss=0.04376, over 3947936.21 frames. ], batch size: 55, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:42:59,929 INFO [train.py:892] (1/4) Epoch 30, batch 0, loss[loss=0.176, simple_loss=0.2515, pruned_loss=0.05028, over 19763.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2515, pruned_loss=0.05028, over 19763.00 frames. 
], batch size: 241, lr: 5.27e-03, grad_scale: 8.0 +2023-03-29 00:42:59,930 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 00:43:13,894 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0544, 2.9205, 4.5449, 3.3769, 3.7620, 3.3951, 2.4537, 2.5787], + device='cuda:1'), covar=tensor([0.1078, 0.3689, 0.0559, 0.1095, 0.1845, 0.1711, 0.2881, 0.2883], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0388, 0.0348, 0.0285, 0.0373, 0.0374, 0.0371, 0.0340], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:43:35,115 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2069, 2.8176, 2.9062, 3.0856, 3.0633, 2.8163, 4.2224, 4.5272], + device='cuda:1'), covar=tensor([0.1241, 0.1635, 0.1564, 0.2146, 0.2071, 0.2044, 0.0575, 0.0347], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0238, 0.0266, 0.0252, 0.0295, 0.0256, 0.0232, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:43:35,880 INFO [train.py:926] (1/4) Epoch 30, validation: loss=0.1794, simple_loss=0.2489, pruned_loss=0.05491, over 2883724.00 frames. +2023-03-29 00:43:35,882 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-29 00:43:50,344 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7491, 3.8077, 2.3498, 4.0032, 4.1944, 1.8932, 3.3811, 3.2127], + device='cuda:1'), covar=tensor([0.0752, 0.0896, 0.2695, 0.0836, 0.0605, 0.2848, 0.1208, 0.0934], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0255, 0.0231, 0.0274, 0.0253, 0.0204, 0.0240, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 00:44:13,539 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0747, 4.7294, 4.7893, 5.1014, 4.8613, 5.3487, 5.1826, 5.4584], + device='cuda:1'), covar=tensor([0.0698, 0.0336, 0.0431, 0.0291, 0.0499, 0.0289, 0.0392, 0.0268], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0175, 0.0202, 0.0174, 0.0172, 0.0157, 0.0150, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 00:44:26,915 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-03-29 00:44:48,493 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.508e+02 3.762e+02 4.456e+02 5.146e+02 7.887e+02, threshold=8.911e+02, percent-clipped=0.0 +2023-03-29 00:45:19,601 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8910, 2.3997, 2.7660, 3.1375, 3.6335, 3.9361, 3.8115, 3.8064], + device='cuda:1'), covar=tensor([0.0946, 0.1656, 0.1368, 0.0691, 0.0374, 0.0236, 0.0344, 0.0404], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0170, 0.0178, 0.0151, 0.0134, 0.0131, 0.0122, 0.0114], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:45:28,926 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7924, 3.8775, 2.3536, 4.0809, 4.2571, 1.8923, 3.5216, 3.2693], + device='cuda:1'), covar=tensor([0.0754, 0.0864, 0.2873, 0.0713, 0.0505, 0.2833, 0.1020, 0.0865], + device='cuda:1'), in_proj_covar=tensor([0.0232, 0.0254, 0.0230, 0.0274, 0.0252, 0.0204, 0.0239, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 00:45:46,655 INFO [train.py:892] (1/4) Epoch 30, batch 50, loss[loss=0.1625, simple_loss=0.2512, pruned_loss=0.03687, over 19676.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2358, pruned_loss=0.04107, over 891361.77 frames. ], batch size: 52, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:47:42,739 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5809, 2.7912, 2.6022, 2.0381, 2.5501, 2.7865, 2.7543, 2.7480], + device='cuda:1'), covar=tensor([0.0331, 0.0349, 0.0291, 0.0543, 0.0379, 0.0294, 0.0268, 0.0274], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0096, 0.0099, 0.0101, 0.0105, 0.0085, 0.0085, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:47:46,059 INFO [train.py:892] (1/4) Epoch 30, batch 100, loss[loss=0.1664, simple_loss=0.2408, pruned_loss=0.04604, over 19738.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2405, pruned_loss=0.04221, over 1567140.51 frames. 
], batch size: 179, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:47:49,950 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53897.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:48:56,426 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.692e+02 3.728e+02 4.256e+02 5.465e+02 1.327e+03, threshold=8.513e+02, percent-clipped=4.0 +2023-03-29 00:49:36,173 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4350, 2.5237, 2.6015, 2.6529, 2.5967, 2.6204, 2.6036, 2.7711], + device='cuda:1'), covar=tensor([0.0352, 0.0349, 0.0348, 0.0276, 0.0399, 0.0370, 0.0445, 0.0346], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0079, 0.0082, 0.0075, 0.0089, 0.0081, 0.0098, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:49:46,002 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3019, 3.3710, 2.1424, 3.4560, 3.5675, 1.7538, 2.9544, 2.8612], + device='cuda:1'), covar=tensor([0.0902, 0.1016, 0.2787, 0.0935, 0.0746, 0.2766, 0.1259, 0.0964], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0257, 0.0232, 0.0276, 0.0254, 0.0205, 0.0241, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 00:49:50,727 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53945.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:49:51,902 INFO [train.py:892] (1/4) Epoch 30, batch 150, loss[loss=0.1558, simple_loss=0.2431, pruned_loss=0.03427, over 19711.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2417, pruned_loss=0.0425, over 2094158.91 frames. ], batch size: 81, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:50:27,128 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53960.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:50:54,557 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53971.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:51:57,117 INFO [train.py:892] (1/4) Epoch 30, batch 200, loss[loss=0.1531, simple_loss=0.2211, pruned_loss=0.04257, over 19764.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2423, pruned_loss=0.04222, over 2505253.55 frames. 
], batch size: 155, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:52:32,067 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54008.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:52:59,044 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54019.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:53:09,698 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=54023.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 00:53:11,542 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.594e+02 3.930e+02 4.416e+02 5.090e+02 1.215e+03, threshold=8.832e+02, percent-clipped=2.0 +2023-03-29 00:53:26,098 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6923, 4.4127, 4.4664, 4.1647, 4.6686, 3.2304, 3.8263, 2.3773], + device='cuda:1'), covar=tensor([0.0188, 0.0228, 0.0148, 0.0202, 0.0147, 0.0901, 0.0846, 0.1506], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0146, 0.0114, 0.0134, 0.0120, 0.0135, 0.0144, 0.0128], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 00:54:01,470 INFO [train.py:892] (1/4) Epoch 30, batch 250, loss[loss=0.1487, simple_loss=0.2187, pruned_loss=0.03935, over 19836.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2427, pruned_loss=0.04252, over 2823712.31 frames. ], batch size: 184, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 00:54:14,151 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0671, 4.1026, 2.4603, 4.3536, 4.5632, 2.0547, 3.8196, 3.3958], + device='cuda:1'), covar=tensor([0.0742, 0.0937, 0.2798, 0.0898, 0.0567, 0.2733, 0.0986, 0.0912], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0258, 0.0233, 0.0277, 0.0255, 0.0206, 0.0241, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 00:55:00,566 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2337, 2.3427, 2.4027, 2.4198, 2.3193, 2.4515, 2.3106, 2.5498], + device='cuda:1'), covar=tensor([0.0385, 0.0309, 0.0304, 0.0267, 0.0419, 0.0320, 0.0421, 0.0286], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0078, 0.0082, 0.0075, 0.0089, 0.0081, 0.0098, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:55:04,745 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54071.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:56:07,066 INFO [train.py:892] (1/4) Epoch 30, batch 300, loss[loss=0.1546, simple_loss=0.2261, pruned_loss=0.04159, over 19877.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2433, pruned_loss=0.04247, over 3070984.66 frames. ], batch size: 139, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 00:56:08,385 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1175, 3.3643, 2.9263, 2.4928, 2.9974, 3.2242, 3.2683, 3.3579], + device='cuda:1'), covar=tensor([0.0308, 0.0275, 0.0298, 0.0495, 0.0321, 0.0302, 0.0198, 0.0210], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0096, 0.0098, 0.0101, 0.0104, 0.0085, 0.0085, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:56:28,652 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-03-29 00:57:14,836 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.600e+02 3.558e+02 4.130e+02 5.342e+02 1.010e+03, threshold=8.261e+02, percent-clipped=1.0 +2023-03-29 00:58:16,962 INFO [train.py:892] (1/4) Epoch 30, batch 350, loss[loss=0.1579, simple_loss=0.2361, pruned_loss=0.03988, over 19881.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2425, pruned_loss=0.04249, over 3266454.88 frames. ], batch size: 84, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 00:58:53,258 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9382, 2.9032, 3.0744, 2.4624, 3.1651, 2.6851, 3.0670, 3.1097], + device='cuda:1'), covar=tensor([0.0550, 0.0437, 0.0459, 0.0758, 0.0369, 0.0466, 0.0379, 0.0330], + device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0084, 0.0082, 0.0109, 0.0078, 0.0080, 0.0078, 0.0070], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 00:59:07,432 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9094, 4.5701, 4.5965, 4.3224, 4.8500, 3.1728, 3.9175, 2.4460], + device='cuda:1'), covar=tensor([0.0207, 0.0218, 0.0179, 0.0212, 0.0185, 0.1006, 0.0859, 0.1579], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0145, 0.0114, 0.0134, 0.0119, 0.0134, 0.0143, 0.0128], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 01:00:25,808 INFO [train.py:892] (1/4) Epoch 30, batch 400, loss[loss=0.1393, simple_loss=0.2179, pruned_loss=0.03037, over 19853.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2411, pruned_loss=0.04191, over 3419095.55 frames. ], batch size: 118, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 01:00:35,753 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-29 01:00:41,407 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1699, 4.1026, 4.4760, 4.1241, 3.7735, 4.3210, 4.1340, 4.5446], + device='cuda:1'), covar=tensor([0.0800, 0.0375, 0.0369, 0.0407, 0.1094, 0.0556, 0.0535, 0.0355], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0221, 0.0223, 0.0234, 0.0208, 0.0241, 0.0230, 0.0214], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 01:01:25,462 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-29 01:01:38,160 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.477e+02 3.958e+02 4.510e+02 5.368e+02 1.015e+03, threshold=9.019e+02, percent-clipped=3.0 +2023-03-29 01:02:36,047 INFO [train.py:892] (1/4) Epoch 30, batch 450, loss[loss=0.1398, simple_loss=0.2203, pruned_loss=0.02966, over 19676.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2413, pruned_loss=0.04223, over 3535962.55 frames. ], batch size: 59, lr: 5.24e-03, grad_scale: 8.0 +2023-03-29 01:02:51,993 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.32 vs. 
limit=5.0 +2023-03-29 01:04:20,875 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7262, 2.9013, 2.9052, 2.9034, 2.7735, 2.7988, 2.7248, 2.9994], + device='cuda:1'), covar=tensor([0.0316, 0.0330, 0.0289, 0.0298, 0.0378, 0.0344, 0.0365, 0.0361], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0078, 0.0082, 0.0076, 0.0089, 0.0081, 0.0098, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 01:04:23,992 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-29 01:04:31,200 INFO [train.py:892] (1/4) Epoch 30, batch 500, loss[loss=0.1456, simple_loss=0.2141, pruned_loss=0.0385, over 19859.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2409, pruned_loss=0.04235, over 3628379.34 frames. ], batch size: 165, lr: 5.24e-03, grad_scale: 16.0 +2023-03-29 01:05:41,307 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.522e+02 3.919e+02 4.339e+02 5.578e+02 1.318e+03, threshold=8.679e+02, percent-clipped=2.0 +2023-03-29 01:06:34,642 INFO [train.py:892] (1/4) Epoch 30, batch 550, loss[loss=0.1705, simple_loss=0.2599, pruned_loss=0.04052, over 19707.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2415, pruned_loss=0.04283, over 3699938.89 frames. ], batch size: 61, lr: 5.24e-03, grad_scale: 16.0 +2023-03-29 01:06:40,712 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-03-29 01:08:41,100 INFO [train.py:892] (1/4) Epoch 30, batch 600, loss[loss=0.1603, simple_loss=0.2376, pruned_loss=0.04148, over 19772.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2415, pruned_loss=0.04295, over 3755173.10 frames. ], batch size: 113, lr: 5.24e-03, grad_scale: 16.0 +2023-03-29 01:09:49,426 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.185e+02 3.804e+02 4.477e+02 5.601e+02 8.237e+02, threshold=8.953e+02, percent-clipped=0.0 +2023-03-29 01:10:01,885 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8044, 2.9431, 4.8635, 4.1616, 4.4196, 4.7600, 4.6415, 4.4348], + device='cuda:1'), covar=tensor([0.0537, 0.0965, 0.0102, 0.0933, 0.0163, 0.0199, 0.0154, 0.0155], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0103, 0.0087, 0.0154, 0.0085, 0.0098, 0.0090, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 01:10:46,704 INFO [train.py:892] (1/4) Epoch 30, batch 650, loss[loss=0.25, simple_loss=0.3206, pruned_loss=0.08965, over 19461.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2413, pruned_loss=0.04315, over 3799455.84 frames. ], batch size: 396, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:12:48,876 INFO [train.py:892] (1/4) Epoch 30, batch 700, loss[loss=0.1436, simple_loss=0.2216, pruned_loss=0.0328, over 19776.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2416, pruned_loss=0.04295, over 3832385.10 frames. 
], batch size: 116, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:13:54,052 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1567, 4.0403, 4.4509, 4.0799, 3.7910, 4.3231, 4.1293, 4.5329], + device='cuda:1'), covar=tensor([0.0802, 0.0378, 0.0367, 0.0417, 0.1104, 0.0526, 0.0480, 0.0342], + device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0222, 0.0223, 0.0234, 0.0208, 0.0241, 0.0230, 0.0214], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 01:13:59,664 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.972e+02 4.007e+02 4.588e+02 5.401e+02 1.198e+03, threshold=9.175e+02, percent-clipped=2.0 +2023-03-29 01:14:32,939 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0851, 2.4835, 3.2364, 2.7468, 2.8814, 2.8300, 2.1159, 2.2416], + device='cuda:1'), covar=tensor([0.1164, 0.2347, 0.0762, 0.1029, 0.1632, 0.1294, 0.2344, 0.2368], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0388, 0.0348, 0.0284, 0.0372, 0.0372, 0.0371, 0.0340], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 01:14:53,555 INFO [train.py:892] (1/4) Epoch 30, batch 750, loss[loss=0.1522, simple_loss=0.2222, pruned_loss=0.04108, over 19766.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.243, pruned_loss=0.04357, over 3857945.11 frames. ], batch size: 155, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:17:01,356 INFO [train.py:892] (1/4) Epoch 30, batch 800, loss[loss=0.1495, simple_loss=0.2348, pruned_loss=0.03211, over 19596.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2429, pruned_loss=0.04324, over 3877282.62 frames. ], batch size: 42, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:18:07,896 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.698e+02 3.768e+02 4.525e+02 5.531e+02 9.208e+02, threshold=9.049e+02, percent-clipped=1.0 +2023-03-29 01:19:02,289 INFO [train.py:892] (1/4) Epoch 30, batch 850, loss[loss=0.1425, simple_loss=0.2212, pruned_loss=0.03193, over 19897.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2425, pruned_loss=0.04291, over 3893015.80 frames. ], batch size: 116, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:19:48,423 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54664.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:20:19,801 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9743, 2.7225, 2.9785, 3.2052, 3.7163, 4.1243, 4.0251, 4.0066], + device='cuda:1'), covar=tensor([0.0981, 0.1525, 0.1354, 0.0681, 0.0420, 0.0283, 0.0348, 0.0408], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0169, 0.0176, 0.0150, 0.0134, 0.0130, 0.0121, 0.0114], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 01:21:04,087 INFO [train.py:892] (1/4) Epoch 30, batch 900, loss[loss=0.1474, simple_loss=0.2281, pruned_loss=0.03329, over 19902.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2417, pruned_loss=0.04253, over 3906945.23 frames. 
], batch size: 91, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:21:23,714 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2597, 3.2504, 3.5468, 2.5846, 3.6832, 3.0040, 3.2769, 3.4318], + device='cuda:1'), covar=tensor([0.0694, 0.0418, 0.0582, 0.0862, 0.0315, 0.0432, 0.0467, 0.0410], + device='cuda:1'), in_proj_covar=tensor([0.0078, 0.0085, 0.0083, 0.0111, 0.0079, 0.0082, 0.0080, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 01:22:16,861 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.427e+02 3.830e+02 4.433e+02 5.548e+02 1.086e+03, threshold=8.866e+02, percent-clipped=1.0 +2023-03-29 01:22:22,758 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54725.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 01:23:12,086 INFO [train.py:892] (1/4) Epoch 30, batch 950, loss[loss=0.1742, simple_loss=0.2431, pruned_loss=0.05264, over 19795.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2431, pruned_loss=0.04328, over 3916259.38 frames. ], batch size: 185, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:24:22,531 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8097, 2.8479, 3.0826, 2.4046, 3.1413, 2.6332, 2.9491, 3.0019], + device='cuda:1'), covar=tensor([0.0590, 0.0468, 0.0494, 0.0808, 0.0366, 0.0452, 0.0502, 0.0371], + device='cuda:1'), in_proj_covar=tensor([0.0078, 0.0085, 0.0083, 0.0111, 0.0079, 0.0082, 0.0080, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 01:25:12,152 INFO [train.py:892] (1/4) Epoch 30, batch 1000, loss[loss=0.1501, simple_loss=0.2339, pruned_loss=0.03317, over 19626.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2422, pruned_loss=0.0431, over 3923910.42 frames. ], batch size: 52, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:26:22,832 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 3.838e+02 4.641e+02 5.585e+02 1.320e+03, threshold=9.281e+02, percent-clipped=3.0 +2023-03-29 01:27:18,844 INFO [train.py:892] (1/4) Epoch 30, batch 1050, loss[loss=0.1514, simple_loss=0.2226, pruned_loss=0.04013, over 19479.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2426, pruned_loss=0.04344, over 3930456.89 frames. ], batch size: 43, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:27:59,656 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54862.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:29:25,282 INFO [train.py:892] (1/4) Epoch 30, batch 1100, loss[loss=0.1614, simple_loss=0.2475, pruned_loss=0.03767, over 19814.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2424, pruned_loss=0.04332, over 3935394.74 frames. ], batch size: 50, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:30:37,458 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54923.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 01:30:38,513 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.418e+02 3.807e+02 4.386e+02 5.197e+02 8.292e+02, threshold=8.772e+02, percent-clipped=0.0 +2023-03-29 01:31:31,545 INFO [train.py:892] (1/4) Epoch 30, batch 1150, loss[loss=0.1989, simple_loss=0.2831, pruned_loss=0.05737, over 19628.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2426, pruned_loss=0.04318, over 3937880.00 frames. 
], batch size: 367, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:31:43,122 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3575, 2.5871, 3.5990, 2.9796, 3.1006, 2.9797, 2.1845, 2.3234], + device='cuda:1'), covar=tensor([0.1143, 0.2895, 0.0635, 0.1022, 0.1692, 0.1511, 0.2505, 0.2735], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0387, 0.0348, 0.0285, 0.0372, 0.0374, 0.0372, 0.0340], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 01:32:31,294 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2844, 3.3977, 2.9734, 2.6495, 2.9962, 3.3595, 3.3249, 3.4023], + device='cuda:1'), covar=tensor([0.0241, 0.0344, 0.0284, 0.0459, 0.0325, 0.0301, 0.0310, 0.0211], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0096, 0.0098, 0.0101, 0.0104, 0.0086, 0.0085, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 01:33:13,202 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8886, 3.7271, 3.7496, 3.9587, 3.8709, 4.1212, 3.9258, 4.0179], + device='cuda:1'), covar=tensor([0.0946, 0.0640, 0.0698, 0.0558, 0.0738, 0.0593, 0.0681, 0.0754], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0175, 0.0202, 0.0174, 0.0172, 0.0157, 0.0149, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 01:33:31,561 INFO [train.py:892] (1/4) Epoch 30, batch 1200, loss[loss=0.1399, simple_loss=0.2215, pruned_loss=0.02916, over 19710.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2429, pruned_loss=0.04356, over 3940810.64 frames. ], batch size: 78, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:34:33,657 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55020.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:34:43,811 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.409e+02 3.740e+02 4.299e+02 5.341e+02 1.989e+03, threshold=8.597e+02, percent-clipped=2.0 +2023-03-29 01:35:37,509 INFO [train.py:892] (1/4) Epoch 30, batch 1250, loss[loss=0.1572, simple_loss=0.2343, pruned_loss=0.04002, over 19745.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2437, pruned_loss=0.04407, over 3941606.95 frames. ], batch size: 84, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:35:42,375 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1023, 3.9726, 4.4062, 4.0728, 3.7828, 4.2889, 4.0904, 4.4992], + device='cuda:1'), covar=tensor([0.0846, 0.0430, 0.0385, 0.0394, 0.1004, 0.0511, 0.0489, 0.0359], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0225, 0.0224, 0.0235, 0.0210, 0.0243, 0.0232, 0.0217], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 01:37:26,002 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3541, 2.9891, 3.4005, 2.9192, 3.6160, 3.5492, 4.2563, 4.6834], + device='cuda:1'), covar=tensor([0.0525, 0.1649, 0.1457, 0.2103, 0.1437, 0.1324, 0.0545, 0.0452], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0238, 0.0266, 0.0252, 0.0294, 0.0257, 0.0231, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 01:37:48,067 INFO [train.py:892] (1/4) Epoch 30, batch 1300, loss[loss=0.1612, simple_loss=0.2345, pruned_loss=0.0439, over 19830.00 frames. 
], tot_loss[loss=0.1651, simple_loss=0.2426, pruned_loss=0.04377, over 3944774.34 frames. ], batch size: 166, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:38:55,105 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.487e+02 3.571e+02 4.407e+02 5.506e+02 1.177e+03, threshold=8.814e+02, percent-clipped=1.0 +2023-03-29 01:39:32,365 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-03-29 01:39:52,140 INFO [train.py:892] (1/4) Epoch 30, batch 1350, loss[loss=0.1563, simple_loss=0.2388, pruned_loss=0.03693, over 19845.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2421, pruned_loss=0.04352, over 3945145.22 frames. ], batch size: 112, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:39:58,524 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55148.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:39:58,602 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9680, 3.2773, 3.4567, 3.9334, 2.8346, 3.2008, 2.4576, 2.4595], + device='cuda:1'), covar=tensor([0.0525, 0.2038, 0.0929, 0.0401, 0.1840, 0.0852, 0.1375, 0.1720], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0331, 0.0247, 0.0201, 0.0247, 0.0208, 0.0216, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 01:41:51,237 INFO [train.py:892] (1/4) Epoch 30, batch 1400, loss[loss=0.1488, simple_loss=0.2311, pruned_loss=0.03326, over 19799.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2409, pruned_loss=0.04293, over 3947987.01 frames. ], batch size: 107, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:42:28,638 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55209.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:42:34,894 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55211.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:42:52,101 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55218.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 01:43:04,802 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.260e+02 3.753e+02 4.356e+02 5.536e+02 1.082e+03, threshold=8.713e+02, percent-clipped=2.0 +2023-03-29 01:44:02,231 INFO [train.py:892] (1/4) Epoch 30, batch 1450, loss[loss=0.1559, simple_loss=0.248, pruned_loss=0.0319, over 19666.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2415, pruned_loss=0.04307, over 3949277.21 frames. ], batch size: 50, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:45:06,732 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:46:06,685 INFO [train.py:892] (1/4) Epoch 30, batch 1500, loss[loss=0.171, simple_loss=0.2475, pruned_loss=0.04727, over 19645.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2415, pruned_loss=0.04266, over 3947617.30 frames. 
], batch size: 72, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:47:08,136 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55320.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:47:16,424 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.567e+02 3.622e+02 4.364e+02 5.637e+02 1.011e+03, threshold=8.727e+02, percent-clipped=2.0 +2023-03-29 01:48:05,804 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4946, 4.6533, 2.6917, 4.9377, 5.0816, 2.1743, 4.3616, 3.6459], + device='cuda:1'), covar=tensor([0.0634, 0.0650, 0.2600, 0.0636, 0.0412, 0.2807, 0.0820, 0.0862], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0255, 0.0230, 0.0275, 0.0254, 0.0204, 0.0239, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 01:48:09,071 INFO [train.py:892] (1/4) Epoch 30, batch 1550, loss[loss=0.1653, simple_loss=0.2469, pruned_loss=0.04191, over 19752.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2426, pruned_loss=0.04313, over 3947513.64 frames. ], batch size: 205, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:49:02,384 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55368.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 01:50:11,457 INFO [train.py:892] (1/4) Epoch 30, batch 1600, loss[loss=0.1638, simple_loss=0.2336, pruned_loss=0.04703, over 19812.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2427, pruned_loss=0.04281, over 3945677.67 frames. ], batch size: 167, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:51:20,584 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.706e+02 3.762e+02 4.518e+02 5.538e+02 9.560e+02, threshold=9.036e+02, percent-clipped=2.0 +2023-03-29 01:52:18,794 INFO [train.py:892] (1/4) Epoch 30, batch 1650, loss[loss=0.2048, simple_loss=0.2799, pruned_loss=0.06483, over 19662.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2415, pruned_loss=0.04213, over 3944916.41 frames. ], batch size: 299, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:54:24,825 INFO [train.py:892] (1/4) Epoch 30, batch 1700, loss[loss=0.2089, simple_loss=0.2754, pruned_loss=0.07122, over 19790.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2425, pruned_loss=0.04264, over 3945847.37 frames. ], batch size: 263, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 01:54:44,226 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:55:19,274 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55518.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 01:55:22,745 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-29 01:55:32,324 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.784e+02 3.668e+02 4.195e+02 5.159e+02 8.554e+02, threshold=8.391e+02, percent-clipped=0.0 +2023-03-29 01:56:20,117 INFO [train.py:892] (1/4) Epoch 30, batch 1750, loss[loss=0.1378, simple_loss=0.2103, pruned_loss=0.03267, over 19774.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2428, pruned_loss=0.04319, over 3944763.51 frames. 
], batch size: 182, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 01:57:01,871 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55566.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:57:04,579 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55567.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:58:06,235 INFO [train.py:892] (1/4) Epoch 30, batch 1800, loss[loss=0.2174, simple_loss=0.2875, pruned_loss=0.07365, over 19754.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2431, pruned_loss=0.04382, over 3946287.14 frames. ], batch size: 276, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 01:59:02,413 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 4.015e+02 4.867e+02 6.116e+02 1.314e+03, threshold=9.734e+02, percent-clipped=8.0 +2023-03-29 01:59:44,976 INFO [train.py:892] (1/4) Epoch 30, batch 1850, loss[loss=0.1637, simple_loss=0.2507, pruned_loss=0.0384, over 19855.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.244, pruned_loss=0.04308, over 3946402.65 frames. ], batch size: 58, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 02:00:52,120 INFO [train.py:892] (1/4) Epoch 31, batch 0, loss[loss=0.1396, simple_loss=0.2186, pruned_loss=0.03027, over 19859.00 frames. ], tot_loss[loss=0.1396, simple_loss=0.2186, pruned_loss=0.03027, over 19859.00 frames. ], batch size: 106, lr: 5.09e-03, grad_scale: 16.0 +2023-03-29 02:00:52,120 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 02:01:29,196 INFO [train.py:926] (1/4) Epoch 31, validation: loss=0.1803, simple_loss=0.2493, pruned_loss=0.05567, over 2883724.00 frames. +2023-03-29 02:01:29,198 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-29 02:02:51,593 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4172, 4.0988, 4.2383, 4.5083, 4.1331, 4.5436, 4.5801, 4.7627], + device='cuda:1'), covar=tensor([0.0685, 0.0459, 0.0534, 0.0374, 0.0746, 0.0530, 0.0420, 0.0349], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0176, 0.0201, 0.0174, 0.0172, 0.0157, 0.0148, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 02:03:38,850 INFO [train.py:892] (1/4) Epoch 31, batch 50, loss[loss=0.145, simple_loss=0.2247, pruned_loss=0.03269, over 19768.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2302, pruned_loss=0.03831, over 891778.44 frames. ], batch size: 198, lr: 5.09e-03, grad_scale: 16.0 +2023-03-29 02:04:42,030 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.130e+02 3.666e+02 4.158e+02 5.108e+02 9.085e+02, threshold=8.317e+02, percent-clipped=0.0 +2023-03-29 02:05:06,878 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0076, 2.8474, 5.0720, 4.3107, 4.6475, 4.9534, 4.7862, 4.6506], + device='cuda:1'), covar=tensor([0.0480, 0.0963, 0.0098, 0.0904, 0.0163, 0.0190, 0.0166, 0.0133], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0104, 0.0089, 0.0155, 0.0086, 0.0098, 0.0092, 0.0087], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 02:05:38,334 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55748.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:05:43,125 INFO [train.py:892] (1/4) Epoch 31, batch 100, loss[loss=0.1815, simple_loss=0.2613, pruned_loss=0.05089, over 19896.00 frames. 
], tot_loss[loss=0.1605, simple_loss=0.239, pruned_loss=0.04105, over 1569554.91 frames. ], batch size: 62, lr: 5.09e-03, grad_scale: 16.0 +2023-03-29 02:06:40,804 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6630, 2.8348, 4.0586, 3.2332, 3.3373, 3.2438, 2.3634, 2.5282], + device='cuda:1'), covar=tensor([0.1085, 0.2793, 0.0577, 0.1103, 0.1723, 0.1427, 0.2489, 0.2669], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0388, 0.0347, 0.0285, 0.0372, 0.0375, 0.0373, 0.0340], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 02:06:48,282 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7587, 3.5278, 3.6149, 3.7814, 3.5552, 3.7805, 3.8728, 4.0571], + device='cuda:1'), covar=tensor([0.0725, 0.0493, 0.0589, 0.0458, 0.0790, 0.0576, 0.0478, 0.0373], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0176, 0.0202, 0.0175, 0.0173, 0.0158, 0.0149, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 02:07:54,519 INFO [train.py:892] (1/4) Epoch 31, batch 150, loss[loss=0.1541, simple_loss=0.237, pruned_loss=0.03563, over 19709.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2409, pruned_loss=0.04261, over 2097309.26 frames. ], batch size: 81, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:08:04,239 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55804.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:08:17,858 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55809.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:08:43,550 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1875, 4.7698, 4.8268, 4.5673, 5.1253, 3.3613, 4.1013, 2.8860], + device='cuda:1'), covar=tensor([0.0154, 0.0186, 0.0144, 0.0185, 0.0133, 0.0840, 0.0857, 0.1285], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0146, 0.0113, 0.0134, 0.0119, 0.0135, 0.0143, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 02:08:48,899 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-03-29 02:08:54,011 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.568e+02 3.733e+02 4.638e+02 5.672e+02 1.192e+03, threshold=9.276e+02, percent-clipped=1.0 +2023-03-29 02:10:02,893 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:10:09,638 INFO [train.py:892] (1/4) Epoch 31, batch 200, loss[loss=0.1676, simple_loss=0.2459, pruned_loss=0.04462, over 19712.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2409, pruned_loss=0.04212, over 2507528.54 frames. ], batch size: 78, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:10:13,104 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55852.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:10:52,086 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-29 02:10:53,744 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55867.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:12:23,378 INFO [train.py:892] (1/4) Epoch 31, batch 250, loss[loss=0.1649, simple_loss=0.2374, pruned_loss=0.04621, over 19849.00 frames. 
], tot_loss[loss=0.1623, simple_loss=0.2406, pruned_loss=0.042, over 2827160.06 frames. ], batch size: 208, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:12:44,192 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55909.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:12:59,716 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55915.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:13:23,181 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 3.622e+02 4.377e+02 5.144e+02 8.817e+02, threshold=8.755e+02, percent-clipped=0.0 +2023-03-29 02:14:32,145 INFO [train.py:892] (1/4) Epoch 31, batch 300, loss[loss=0.142, simple_loss=0.2249, pruned_loss=0.02958, over 19812.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2407, pruned_loss=0.04166, over 3076100.50 frames. ], batch size: 117, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:15:51,830 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0946, 2.1421, 2.2230, 2.2130, 2.1657, 2.1880, 2.1517, 2.2389], + device='cuda:1'), covar=tensor([0.0371, 0.0364, 0.0319, 0.0312, 0.0421, 0.0352, 0.0463, 0.0326], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0079, 0.0082, 0.0076, 0.0089, 0.0082, 0.0099, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 02:16:45,189 INFO [train.py:892] (1/4) Epoch 31, batch 350, loss[loss=0.1409, simple_loss=0.2138, pruned_loss=0.03401, over 19755.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2414, pruned_loss=0.04211, over 3270954.29 frames. ], batch size: 188, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:17:41,471 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.448e+02 3.652e+02 4.141e+02 5.135e+02 1.534e+03, threshold=8.281e+02, percent-clipped=2.0 +2023-03-29 02:18:47,915 INFO [train.py:892] (1/4) Epoch 31, batch 400, loss[loss=0.1612, simple_loss=0.2468, pruned_loss=0.03784, over 19608.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.242, pruned_loss=0.04172, over 3418426.15 frames. ], batch size: 48, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:20:15,435 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.09 vs. limit=5.0 +2023-03-29 02:20:53,766 INFO [train.py:892] (1/4) Epoch 31, batch 450, loss[loss=0.2211, simple_loss=0.3025, pruned_loss=0.06985, over 19635.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2423, pruned_loss=0.04187, over 3536952.12 frames. ], batch size: 351, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:21:03,440 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56104.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:21:56,009 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.311e+02 3.637e+02 4.331e+02 5.341e+02 9.053e+02, threshold=8.662e+02, percent-clipped=1.0 +2023-03-29 02:22:47,349 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56145.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:23:00,063 INFO [train.py:892] (1/4) Epoch 31, batch 500, loss[loss=0.1643, simple_loss=0.2412, pruned_loss=0.04366, over 19808.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2432, pruned_loss=0.04271, over 3628786.08 frames. 
], batch size: 96, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:23:04,030 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3672, 3.4596, 2.1480, 3.5288, 3.6366, 1.7619, 3.0473, 2.8598], + device='cuda:1'), covar=tensor([0.0837, 0.0895, 0.2688, 0.0928, 0.0708, 0.2547, 0.1148, 0.0941], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0257, 0.0230, 0.0275, 0.0254, 0.0205, 0.0241, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 02:24:21,769 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.42 vs. limit=5.0 +2023-03-29 02:25:07,739 INFO [train.py:892] (1/4) Epoch 31, batch 550, loss[loss=0.174, simple_loss=0.2586, pruned_loss=0.04477, over 19782.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2437, pruned_loss=0.04316, over 3699962.49 frames. ], batch size: 236, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:25:15,894 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56204.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:25:20,977 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56206.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:26:05,177 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.698e+02 3.907e+02 4.783e+02 5.520e+02 2.240e+03, threshold=9.567e+02, percent-clipped=4.0 +2023-03-29 02:27:18,045 INFO [train.py:892] (1/4) Epoch 31, batch 600, loss[loss=0.1463, simple_loss=0.2253, pruned_loss=0.03368, over 19819.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2434, pruned_loss=0.04313, over 3755488.19 frames. ], batch size: 50, lr: 5.06e-03, grad_scale: 16.0 +2023-03-29 02:29:17,271 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4786, 2.6745, 3.8085, 3.0108, 3.2010, 3.0727, 2.2801, 2.4504], + device='cuda:1'), covar=tensor([0.1170, 0.3221, 0.0709, 0.1113, 0.1775, 0.1590, 0.2584, 0.2655], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0387, 0.0348, 0.0286, 0.0373, 0.0376, 0.0372, 0.0341], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 02:29:20,424 INFO [train.py:892] (1/4) Epoch 31, batch 650, loss[loss=0.1917, simple_loss=0.27, pruned_loss=0.0567, over 19840.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2437, pruned_loss=0.04363, over 3798390.03 frames. ], batch size: 177, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:30:21,256 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.457e+02 3.714e+02 4.450e+02 5.010e+02 8.599e+02, threshold=8.900e+02, percent-clipped=0.0 +2023-03-29 02:31:30,909 INFO [train.py:892] (1/4) Epoch 31, batch 700, loss[loss=0.1532, simple_loss=0.2336, pruned_loss=0.0364, over 19418.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2442, pruned_loss=0.04368, over 3832279.44 frames. 
], batch size: 40, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:31:39,000 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2675, 3.6122, 3.2574, 2.6470, 3.1384, 3.5304, 3.4317, 3.5349], + device='cuda:1'), covar=tensor([0.0268, 0.0294, 0.0245, 0.0519, 0.0327, 0.0248, 0.0216, 0.0166], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0096, 0.0099, 0.0101, 0.0105, 0.0087, 0.0086, 0.0087], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 02:33:34,625 INFO [train.py:892] (1/4) Epoch 31, batch 750, loss[loss=0.1685, simple_loss=0.242, pruned_loss=0.04752, over 19746.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2417, pruned_loss=0.04292, over 3859457.11 frames. ], batch size: 221, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:33:43,719 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56404.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:34:36,544 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.075e+02 3.994e+02 4.748e+02 5.714e+02 1.014e+03, threshold=9.496e+02, percent-clipped=1.0 +2023-03-29 02:35:40,562 INFO [train.py:892] (1/4) Epoch 31, batch 800, loss[loss=0.2112, simple_loss=0.281, pruned_loss=0.07075, over 19704.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2423, pruned_loss=0.0433, over 3879789.57 frames. ], batch size: 325, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:35:44,489 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56452.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:36:14,537 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5860, 3.9565, 4.1466, 4.6797, 3.0795, 3.4994, 2.8168, 2.8521], + device='cuda:1'), covar=tensor([0.0490, 0.1613, 0.0712, 0.0347, 0.1883, 0.0953, 0.1282, 0.1574], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0329, 0.0246, 0.0202, 0.0246, 0.0208, 0.0215, 0.0215], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 02:37:49,401 INFO [train.py:892] (1/4) Epoch 31, batch 850, loss[loss=0.1347, simple_loss=0.2168, pruned_loss=0.02632, over 19747.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2423, pruned_loss=0.04336, over 3894357.27 frames. ], batch size: 102, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:37:50,601 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56501.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:37:58,460 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:38:41,494 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56523.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 02:38:42,537 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.680e+02 3.782e+02 4.394e+02 5.442e+02 8.928e+02, threshold=8.788e+02, percent-clipped=0.0 +2023-03-29 02:39:38,324 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-29 02:39:52,901 INFO [train.py:892] (1/4) Epoch 31, batch 900, loss[loss=0.1644, simple_loss=0.2425, pruned_loss=0.04316, over 19790.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2426, pruned_loss=0.04312, over 3906423.21 frames. 
], batch size: 241, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:39:56,937 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56552.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:40:49,951 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4595, 5.7681, 5.9322, 5.7205, 5.6603, 5.5954, 5.6676, 5.4475], + device='cuda:1'), covar=tensor([0.1308, 0.1121, 0.0830, 0.1114, 0.0648, 0.0873, 0.1707, 0.1947], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0330, 0.0368, 0.0298, 0.0274, 0.0281, 0.0358, 0.0391], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 02:41:04,103 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0280, 4.2184, 4.2447, 4.1414, 4.0026, 4.1915, 3.7502, 3.8176], + device='cuda:1'), covar=tensor([0.0527, 0.0554, 0.0514, 0.0477, 0.0693, 0.0567, 0.0705, 0.0983], + device='cuda:1'), in_proj_covar=tensor([0.0269, 0.0286, 0.0294, 0.0258, 0.0264, 0.0248, 0.0267, 0.0314], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 02:41:18,992 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56584.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 02:42:00,172 INFO [train.py:892] (1/4) Epoch 31, batch 950, loss[loss=0.1742, simple_loss=0.2543, pruned_loss=0.04701, over 19771.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2424, pruned_loss=0.04275, over 3916912.37 frames. ], batch size: 247, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:42:56,315 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-29 02:43:01,365 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.678e+02 3.807e+02 4.389e+02 5.044e+02 8.722e+02, threshold=8.778e+02, percent-clipped=0.0 +2023-03-29 02:44:14,752 INFO [train.py:892] (1/4) Epoch 31, batch 1000, loss[loss=0.1501, simple_loss=0.2309, pruned_loss=0.03462, over 19749.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2414, pruned_loss=0.04185, over 3924280.60 frames. ], batch size: 110, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:46:18,028 INFO [train.py:892] (1/4) Epoch 31, batch 1050, loss[loss=0.1715, simple_loss=0.2573, pruned_loss=0.04284, over 19838.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2428, pruned_loss=0.04231, over 3928663.37 frames. ], batch size: 75, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:46:19,347 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1871, 3.3361, 2.0826, 3.8597, 3.5393, 3.8177, 3.9086, 3.0193], + device='cuda:1'), covar=tensor([0.0658, 0.0639, 0.1433, 0.0593, 0.0615, 0.0458, 0.0545, 0.0828], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0142, 0.0141, 0.0149, 0.0130, 0.0133, 0.0144, 0.0142], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 02:47:17,644 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.497e+02 3.728e+02 4.373e+02 5.189e+02 1.039e+03, threshold=8.746e+02, percent-clipped=2.0 +2023-03-29 02:48:25,986 INFO [train.py:892] (1/4) Epoch 31, batch 1100, loss[loss=0.1574, simple_loss=0.2296, pruned_loss=0.04259, over 19853.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2436, pruned_loss=0.04274, over 3932159.62 frames. 
], batch size: 104, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:50:26,115 INFO [train.py:892] (1/4) Epoch 31, batch 1150, loss[loss=0.1857, simple_loss=0.2715, pruned_loss=0.05, over 19562.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2424, pruned_loss=0.04222, over 3936793.51 frames. ], batch size: 60, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:50:27,146 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56801.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:50:32,131 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56803.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:51:22,677 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.381e+02 3.820e+02 4.662e+02 5.416e+02 9.191e+02, threshold=9.324e+02, percent-clipped=1.0 +2023-03-29 02:52:23,911 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9952, 3.3475, 2.8482, 2.4537, 2.9028, 3.2105, 3.2034, 3.2358], + device='cuda:1'), covar=tensor([0.0304, 0.0272, 0.0299, 0.0504, 0.0340, 0.0374, 0.0224, 0.0212], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0097, 0.0100, 0.0102, 0.0106, 0.0088, 0.0087, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 02:52:27,709 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56849.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:52:31,103 INFO [train.py:892] (1/4) Epoch 31, batch 1200, loss[loss=0.1581, simple_loss=0.2353, pruned_loss=0.04048, over 19826.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2419, pruned_loss=0.04227, over 3940477.52 frames. ], batch size: 166, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:53:06,337 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56864.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:53:45,256 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56879.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 02:54:12,123 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3162, 4.1733, 4.6171, 4.2126, 3.8384, 4.4369, 4.2826, 4.7334], + device='cuda:1'), covar=tensor([0.0846, 0.0372, 0.0404, 0.0450, 0.1068, 0.0612, 0.0525, 0.0356], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0223, 0.0223, 0.0236, 0.0208, 0.0245, 0.0233, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 02:54:12,234 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0504, 2.9502, 1.9748, 3.6105, 3.3108, 3.5033, 3.5663, 2.8100], + device='cuda:1'), covar=tensor([0.0737, 0.0794, 0.1588, 0.0645, 0.0690, 0.0596, 0.0693, 0.0850], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0143, 0.0143, 0.0151, 0.0132, 0.0134, 0.0146, 0.0144], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 02:54:42,016 INFO [train.py:892] (1/4) Epoch 31, batch 1250, loss[loss=0.2049, simple_loss=0.2832, pruned_loss=0.06333, over 19631.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2423, pruned_loss=0.04266, over 3942378.42 frames. 
], batch size: 351, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:54:54,383 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4331, 4.9197, 5.0362, 4.7832, 5.3676, 3.4275, 4.2587, 2.5547], + device='cuda:1'), covar=tensor([0.0160, 0.0206, 0.0147, 0.0190, 0.0121, 0.0887, 0.0963, 0.1555], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0148, 0.0114, 0.0135, 0.0120, 0.0136, 0.0144, 0.0128], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 02:55:03,270 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56908.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:55:17,023 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-29 02:55:43,171 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 3.644e+02 4.260e+02 5.333e+02 1.198e+03, threshold=8.519e+02, percent-clipped=0.0 +2023-03-29 02:56:45,856 INFO [train.py:892] (1/4) Epoch 31, batch 1300, loss[loss=0.1563, simple_loss=0.2371, pruned_loss=0.03776, over 19686.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.243, pruned_loss=0.04317, over 3943821.95 frames. ], batch size: 82, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 02:57:09,324 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3690, 2.4705, 2.5463, 2.5087, 2.4660, 2.4751, 2.4773, 2.5764], + device='cuda:1'), covar=tensor([0.0413, 0.0364, 0.0342, 0.0315, 0.0459, 0.0413, 0.0416, 0.0382], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0081, 0.0084, 0.0078, 0.0091, 0.0084, 0.0101, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 02:57:31,471 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56969.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:58:08,028 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8173, 3.9620, 2.3395, 4.1108, 4.2507, 1.9305, 3.4663, 3.3031], + device='cuda:1'), covar=tensor([0.0778, 0.0852, 0.2805, 0.0878, 0.0690, 0.2922, 0.1113, 0.0912], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0257, 0.0230, 0.0276, 0.0254, 0.0204, 0.0240, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 02:58:27,092 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56993.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:58:31,594 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56995.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:58:45,990 INFO [train.py:892] (1/4) Epoch 31, batch 1350, loss[loss=0.1613, simple_loss=0.2341, pruned_loss=0.04424, over 19780.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2434, pruned_loss=0.04302, over 3944168.89 frames. 
], batch size: 154, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 02:59:37,596 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.342e+02 3.868e+02 4.711e+02 5.636e+02 9.457e+02, threshold=9.422e+02, percent-clipped=6.0 +2023-03-29 03:00:14,780 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4492, 3.5829, 2.1517, 3.7161, 3.8304, 1.7881, 3.1560, 2.9350], + device='cuda:1'), covar=tensor([0.0876, 0.0945, 0.2860, 0.0860, 0.0680, 0.2836, 0.1255, 0.1006], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0259, 0.0232, 0.0278, 0.0257, 0.0206, 0.0241, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 03:00:41,606 INFO [train.py:892] (1/4) Epoch 31, batch 1400, loss[loss=0.1552, simple_loss=0.2263, pruned_loss=0.04203, over 19786.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2425, pruned_loss=0.04272, over 3946556.72 frames. ], batch size: 168, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 03:00:50,414 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57054.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:00:54,584 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57056.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:02:21,182 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8760, 2.7307, 4.8431, 4.0171, 4.5817, 4.7752, 4.5156, 4.4482], + device='cuda:1'), covar=tensor([0.0506, 0.1048, 0.0096, 0.1036, 0.0143, 0.0205, 0.0181, 0.0155], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0104, 0.0089, 0.0154, 0.0085, 0.0098, 0.0091, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:02:44,894 INFO [train.py:892] (1/4) Epoch 31, batch 1450, loss[loss=0.1693, simple_loss=0.2515, pruned_loss=0.04353, over 19780.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2426, pruned_loss=0.04273, over 3948442.90 frames. ], batch size: 69, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 03:03:41,325 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.443e+02 3.544e+02 4.145e+02 5.196e+02 9.297e+02, threshold=8.291e+02, percent-clipped=0.0 +2023-03-29 03:03:51,269 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6037, 4.7789, 2.8823, 5.0790, 5.2670, 2.2908, 4.4814, 3.7270], + device='cuda:1'), covar=tensor([0.0592, 0.0595, 0.2436, 0.0550, 0.0344, 0.2628, 0.0824, 0.0861], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0258, 0.0231, 0.0276, 0.0255, 0.0205, 0.0241, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 03:04:03,922 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6709, 2.5789, 4.6182, 4.0574, 4.2945, 4.5476, 4.3550, 4.2318], + device='cuda:1'), covar=tensor([0.0476, 0.0991, 0.0098, 0.0800, 0.0153, 0.0234, 0.0197, 0.0170], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0104, 0.0089, 0.0153, 0.0085, 0.0098, 0.0090, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:04:44,663 INFO [train.py:892] (1/4) Epoch 31, batch 1500, loss[loss=0.1752, simple_loss=0.2468, pruned_loss=0.05182, over 19800.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2415, pruned_loss=0.04236, over 3948200.44 frames. 
], batch size: 200, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:05:07,863 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57159.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:05:50,998 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.46 vs. limit=5.0 +2023-03-29 03:05:55,153 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57179.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 03:06:12,902 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57186.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:06:22,159 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.65 vs. limit=2.0 +2023-03-29 03:06:41,353 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57197.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:06:49,031 INFO [train.py:892] (1/4) Epoch 31, batch 1550, loss[loss=0.1677, simple_loss=0.2515, pruned_loss=0.0419, over 19793.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2426, pruned_loss=0.04284, over 3946476.00 frames. ], batch size: 173, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:07:43,727 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.504e+02 3.735e+02 4.518e+02 5.755e+02 1.077e+03, threshold=9.035e+02, percent-clipped=5.0 +2023-03-29 03:07:52,659 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57227.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 03:08:30,514 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57244.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:08:38,658 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57247.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:08:46,355 INFO [train.py:892] (1/4) Epoch 31, batch 1600, loss[loss=0.1564, simple_loss=0.2301, pruned_loss=0.0414, over 19822.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2428, pruned_loss=0.04267, over 3945267.96 frames. ], batch size: 187, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:08:53,916 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.5745, 5.9781, 6.1404, 5.9433, 5.7645, 5.6243, 5.8933, 5.6923], + device='cuda:1'), covar=tensor([0.1294, 0.1002, 0.0792, 0.1064, 0.0583, 0.0812, 0.1576, 0.1804], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0336, 0.0373, 0.0301, 0.0278, 0.0286, 0.0362, 0.0392], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 03:09:03,961 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57258.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 03:09:20,685 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57264.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:10:45,136 INFO [train.py:892] (1/4) Epoch 31, batch 1650, loss[loss=0.1438, simple_loss=0.2121, pruned_loss=0.03778, over 19838.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2422, pruned_loss=0.04218, over 3946698.33 frames. 
], batch size: 143, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:10:58,451 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57305.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:11:38,987 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.517e+02 3.692e+02 4.492e+02 5.232e+02 9.843e+02, threshold=8.984e+02, percent-clipped=1.0 +2023-03-29 03:12:37,105 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57349.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:12:40,547 INFO [train.py:892] (1/4) Epoch 31, batch 1700, loss[loss=0.1818, simple_loss=0.2576, pruned_loss=0.05302, over 19753.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2434, pruned_loss=0.04301, over 3945665.21 frames. ], batch size: 205, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:12:41,491 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57351.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:14:26,334 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57398.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:14:31,698 INFO [train.py:892] (1/4) Epoch 31, batch 1750, loss[loss=0.1689, simple_loss=0.2454, pruned_loss=0.04617, over 19816.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2433, pruned_loss=0.04281, over 3944826.44 frames. ], batch size: 202, lr: 5.01e-03, grad_scale: 32.0 +2023-03-29 03:15:05,711 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6136, 4.6864, 2.7788, 4.9036, 5.1512, 2.2467, 4.2741, 3.8074], + device='cuda:1'), covar=tensor([0.0553, 0.0735, 0.2522, 0.0614, 0.0422, 0.2678, 0.0957, 0.0824], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0257, 0.0230, 0.0276, 0.0254, 0.0203, 0.0239, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 03:15:20,066 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.125e+02 3.770e+02 4.540e+02 5.122e+02 1.411e+03, threshold=9.080e+02, percent-clipped=2.0 +2023-03-29 03:16:11,441 INFO [train.py:892] (1/4) Epoch 31, batch 1800, loss[loss=0.1951, simple_loss=0.2846, pruned_loss=0.05275, over 19591.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2425, pruned_loss=0.04242, over 3945181.56 frames. 
], batch size: 53, lr: 5.01e-03, grad_scale: 16.0 +2023-03-29 03:16:27,885 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:16:28,022 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:16:33,414 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57462.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:17:02,552 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1912, 3.5069, 3.1906, 2.6016, 3.1407, 3.4769, 3.4106, 3.4453], + device='cuda:1'), covar=tensor([0.0266, 0.0289, 0.0256, 0.0545, 0.0310, 0.0286, 0.0220, 0.0217], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0097, 0.0100, 0.0103, 0.0106, 0.0087, 0.0087, 0.0087], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 03:17:33,901 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57495.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:17:42,485 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-29 03:17:44,838 INFO [train.py:892] (1/4) Epoch 31, batch 1850, loss[loss=0.1975, simple_loss=0.2938, pruned_loss=0.05065, over 19837.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2439, pruned_loss=0.04241, over 3946523.53 frames. ], batch size: 57, lr: 5.01e-03, grad_scale: 16.0 +2023-03-29 03:18:57,397 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4535, 4.9490, 5.0298, 4.7791, 5.3325, 3.4529, 4.2374, 2.8348], + device='cuda:1'), covar=tensor([0.0133, 0.0177, 0.0121, 0.0172, 0.0117, 0.0834, 0.0898, 0.1289], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0147, 0.0114, 0.0135, 0.0119, 0.0136, 0.0143, 0.0128], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:18:58,456 INFO [train.py:892] (1/4) Epoch 32, batch 0, loss[loss=0.1574, simple_loss=0.2426, pruned_loss=0.03609, over 19953.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2426, pruned_loss=0.03609, over 19953.00 frames. ], batch size: 53, lr: 4.93e-03, grad_scale: 16.0 +2023-03-29 03:18:58,456 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 03:19:34,346 INFO [train.py:926] (1/4) Epoch 32, validation: loss=0.1821, simple_loss=0.2499, pruned_loss=0.05717, over 2883724.00 frames. 
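A regularity in the optim.py:368 entries above: the five grad-norm values read naturally as min/25%/50%/75%/max over a window of recent gradient norms, and in each entry the reported threshold is 2.0 times the middle value, matching the printed Clipping_scale=2.0 (threshold = Clipping_scale * median). Below is a quick self-contained check against three entries from this excerpt; treating the five numbers as min/quartiles/max is an inference from the values themselves, not taken from optim.py.

    # Hypothesis check: threshold == Clipping_scale * median for the
    # "grad-norm quartiles a b c d e, threshold=t" entries in this log.
    # Reading the five values as min/25%/50%/75%/max is an assumption.
    rows = [
        # values copied verbatim from optim.py:368 entries above
        (2.567e+02, 3.622e+02, 4.364e+02, 5.637e+02, 1.011e+03, 8.727e+02),
        (2.706e+02, 3.762e+02, 4.518e+02, 5.538e+02, 9.560e+02, 9.036e+02),
        (2.784e+02, 3.668e+02, 4.195e+02, 5.159e+02, 8.554e+02, 8.391e+02),
    ]
    clipping_scale = 2.0
    for *quartiles, threshold in rows:
        median = quartiles[2]
        # agrees to rounding error (values are logged to 4 significant digits)
        assert abs(threshold - clipping_scale * median) <= 0.001 * threshold
        print(f"2 * {median:.1f} = {clipping_scale * median:.1f} ~ threshold {threshold:.1f}")
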
+2023-03-29 03:19:34,349 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-29 03:19:37,625 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57507.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:20:17,636 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57523.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:20:21,054 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.254e+02 3.434e+02 4.255e+02 5.001e+02 8.278e+02, threshold=8.509e+02, percent-clipped=0.0 +2023-03-29 03:21:01,599 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57542.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:21:28,460 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57553.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 03:21:33,948 INFO [train.py:892] (1/4) Epoch 32, batch 50, loss[loss=0.1283, simple_loss=0.2043, pruned_loss=0.02612, over 19574.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2339, pruned_loss=0.03885, over 892682.67 frames. ], batch size: 42, lr: 4.93e-03, grad_scale: 16.0 +2023-03-29 03:21:35,083 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57556.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:21:53,724 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57564.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:21:58,224 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1683, 3.0718, 2.0097, 3.6656, 3.4232, 3.6654, 3.6936, 3.0103], + device='cuda:1'), covar=tensor([0.0618, 0.0682, 0.1795, 0.0596, 0.0584, 0.0451, 0.0652, 0.0768], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0144, 0.0143, 0.0153, 0.0133, 0.0135, 0.0147, 0.0145], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:23:18,871 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57600.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:23:31,215 INFO [train.py:892] (1/4) Epoch 32, batch 100, loss[loss=0.1555, simple_loss=0.2384, pruned_loss=0.03624, over 19876.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2334, pruned_loss=0.03824, over 1571276.99 frames. ], batch size: 95, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:23:47,402 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57612.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:24:17,754 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.599e+02 3.672e+02 4.484e+02 5.551e+02 1.135e+03, threshold=8.969e+02, percent-clipped=1.0 +2023-03-29 03:25:12,276 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57649.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:25:16,275 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57651.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:25:27,034 INFO [train.py:892] (1/4) Epoch 32, batch 150, loss[loss=0.1658, simple_loss=0.2494, pruned_loss=0.04106, over 19849.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2359, pruned_loss=0.03928, over 2099276.04 frames. ], batch size: 78, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:25:34,483 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. 
limit=2.0 +2023-03-29 03:25:41,417 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7360, 4.4234, 4.5183, 4.2727, 4.7266, 3.2391, 3.9368, 2.4606], + device='cuda:1'), covar=tensor([0.0166, 0.0195, 0.0131, 0.0160, 0.0120, 0.0845, 0.0664, 0.1384], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0148, 0.0114, 0.0136, 0.0120, 0.0136, 0.0144, 0.0128], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:26:32,151 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-03-29 03:27:07,513 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57697.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:27:11,775 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57699.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:27:27,803 INFO [train.py:892] (1/4) Epoch 32, batch 200, loss[loss=0.1561, simple_loss=0.233, pruned_loss=0.03964, over 19761.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2378, pruned_loss=0.03993, over 2510537.81 frames. ], batch size: 217, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:28:10,858 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.596e+02 3.546e+02 4.264e+02 5.379e+02 8.670e+02, threshold=8.529e+02, percent-clipped=0.0 +2023-03-29 03:28:21,963 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0429, 4.7235, 4.8252, 5.0769, 4.7775, 5.2697, 5.2129, 5.4153], + device='cuda:1'), covar=tensor([0.0641, 0.0466, 0.0523, 0.0389, 0.0722, 0.0409, 0.0459, 0.0302], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0176, 0.0201, 0.0174, 0.0174, 0.0158, 0.0150, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 03:28:30,932 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57733.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:29:16,450 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57754.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:29:19,484 INFO [train.py:892] (1/4) Epoch 32, batch 250, loss[loss=0.1568, simple_loss=0.2384, pruned_loss=0.03763, over 19860.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2384, pruned_loss=0.04022, over 2829171.54 frames. ], batch size: 58, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:30:49,981 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57794.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:31:19,680 INFO [train.py:892] (1/4) Epoch 32, batch 300, loss[loss=0.242, simple_loss=0.3433, pruned_loss=0.07039, over 18745.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2408, pruned_loss=0.04123, over 3076665.71 frames. 
], batch size: 564, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:31:50,690 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57818.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:32:05,156 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.255e+02 3.451e+02 4.259e+02 5.251e+02 1.158e+03, threshold=8.517e+02, percent-clipped=3.0 +2023-03-29 03:32:48,063 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57842.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:33:08,958 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57851.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:33:13,424 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57853.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 03:33:19,338 INFO [train.py:892] (1/4) Epoch 32, batch 350, loss[loss=0.1406, simple_loss=0.2203, pruned_loss=0.03047, over 19661.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2418, pruned_loss=0.04179, over 3269567.45 frames. ], batch size: 43, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:34:27,228 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9586, 4.0252, 2.3949, 4.2000, 4.3926, 1.9604, 3.6233, 3.3640], + device='cuda:1'), covar=tensor([0.0717, 0.0827, 0.2896, 0.0904, 0.0569, 0.2782, 0.1132, 0.0861], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0257, 0.0230, 0.0275, 0.0254, 0.0203, 0.0240, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 03:34:39,374 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57890.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:35:01,024 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57900.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:35:02,891 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57901.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:35:14,150 INFO [train.py:892] (1/4) Epoch 32, batch 400, loss[loss=0.1469, simple_loss=0.229, pruned_loss=0.0324, over 19894.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2415, pruned_loss=0.04122, over 3419184.46 frames. ], batch size: 94, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:35:59,241 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.499e+02 3.927e+02 4.614e+02 5.637e+02 8.834e+02, threshold=9.228e+02, percent-clipped=1.0 +2023-03-29 03:36:53,728 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57948.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:37:11,494 INFO [train.py:892] (1/4) Epoch 32, batch 450, loss[loss=0.1503, simple_loss=0.2297, pruned_loss=0.03544, over 19874.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.241, pruned_loss=0.04138, over 3538335.71 frames. ], batch size: 52, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:37:17,552 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.69 vs. limit=5.0 +2023-03-29 03:39:11,305 INFO [train.py:892] (1/4) Epoch 32, batch 500, loss[loss=0.14, simple_loss=0.2191, pruned_loss=0.03052, over 19828.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.24, pruned_loss=0.04117, over 3629746.14 frames. 
], batch size: 127, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:39:55,700 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.669e+02 3.901e+02 4.425e+02 5.266e+02 9.447e+02, threshold=8.850e+02, percent-clipped=1.0 +2023-03-29 03:40:09,159 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.5753, 4.9969, 5.2078, 4.9733, 5.4978, 3.4739, 4.3429, 2.9167], + device='cuda:1'), covar=tensor([0.0145, 0.0206, 0.0125, 0.0168, 0.0117, 0.0831, 0.0912, 0.1345], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0147, 0.0113, 0.0134, 0.0119, 0.0135, 0.0142, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:41:02,868 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58054.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:41:07,801 INFO [train.py:892] (1/4) Epoch 32, batch 550, loss[loss=0.1634, simple_loss=0.2466, pruned_loss=0.04013, over 19889.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.24, pruned_loss=0.04109, over 3702248.47 frames. ], batch size: 92, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:42:24,157 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58089.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:42:56,943 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58102.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:43:04,382 INFO [train.py:892] (1/4) Epoch 32, batch 600, loss[loss=0.1641, simple_loss=0.2336, pruned_loss=0.04734, over 19780.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2399, pruned_loss=0.04127, over 3757569.23 frames. ], batch size: 215, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:43:30,628 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-03-29 03:43:34,293 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58118.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:43:48,668 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.610e+02 3.618e+02 4.223e+02 5.011e+02 6.745e+02, threshold=8.447e+02, percent-clipped=0.0 +2023-03-29 03:44:15,970 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5179, 3.4024, 3.7231, 2.9373, 3.9646, 3.1996, 3.3771, 3.6894], + device='cuda:1'), covar=tensor([0.0681, 0.0415, 0.0492, 0.0739, 0.0365, 0.0457, 0.0481, 0.0347], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0085, 0.0084, 0.0110, 0.0079, 0.0082, 0.0080, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 03:44:27,813 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58141.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:44:33,744 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58144.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:44:49,557 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58151.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:44:49,585 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2729, 1.7594, 1.8032, 2.4663, 2.7094, 2.7796, 2.6103, 2.7535], + device='cuda:1'), covar=tensor([0.1050, 0.1766, 0.1786, 0.0825, 0.0583, 0.0444, 0.0548, 0.0530], + device='cuda:1'), in_proj_covar=tensor([0.0160, 0.0169, 0.0179, 0.0152, 0.0138, 0.0133, 0.0125, 0.0116], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 03:45:00,221 INFO [train.py:892] (1/4) Epoch 32, batch 650, loss[loss=0.1533, simple_loss=0.232, pruned_loss=0.03732, over 19740.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2392, pruned_loss=0.04103, over 3799397.26 frames. 
], batch size: 134, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:45:19,290 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6825, 2.9844, 3.1636, 3.5635, 2.5927, 3.1234, 2.4484, 2.3802], + device='cuda:1'), covar=tensor([0.0624, 0.1861, 0.1179, 0.0503, 0.2149, 0.0849, 0.1488, 0.1736], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0331, 0.0250, 0.0205, 0.0249, 0.0210, 0.0220, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 03:45:21,013 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58166.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:45:21,248 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8020, 1.7728, 1.8431, 1.8737, 1.7722, 1.8483, 1.7325, 1.8989], + device='cuda:1'), covar=tensor([0.0363, 0.0339, 0.0364, 0.0323, 0.0467, 0.0359, 0.0503, 0.0295], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0081, 0.0083, 0.0078, 0.0090, 0.0083, 0.0100, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 03:46:36,786 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58199.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:46:43,590 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58202.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:46:51,901 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58205.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:46:52,974 INFO [train.py:892] (1/4) Epoch 32, batch 700, loss[loss=0.1719, simple_loss=0.2435, pruned_loss=0.05017, over 19803.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2417, pruned_loss=0.04175, over 3829036.73 frames. ], batch size: 148, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:47:15,665 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1350, 5.4334, 5.4703, 5.3771, 5.0796, 5.4391, 4.8793, 4.9609], + device='cuda:1'), covar=tensor([0.0448, 0.0441, 0.0495, 0.0402, 0.0590, 0.0495, 0.0691, 0.0922], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0285, 0.0299, 0.0260, 0.0263, 0.0249, 0.0267, 0.0311], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:47:39,108 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.613e+02 3.735e+02 4.349e+02 5.105e+02 9.277e+02, threshold=8.697e+02, percent-clipped=1.0 +2023-03-29 03:47:47,368 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-03-29 03:48:03,248 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1787, 2.1120, 2.2031, 2.2092, 2.2190, 2.2861, 2.1926, 2.2920], + device='cuda:1'), covar=tensor([0.0391, 0.0378, 0.0359, 0.0351, 0.0409, 0.0344, 0.0446, 0.0345], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0080, 0.0083, 0.0078, 0.0090, 0.0083, 0.0100, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 03:48:54,096 INFO [train.py:892] (1/4) Epoch 32, batch 750, loss[loss=0.1831, simple_loss=0.2609, pruned_loss=0.05266, over 19704.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2422, pruned_loss=0.04204, over 3855069.53 frames. 
], batch size: 325, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:49:47,256 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3927, 1.9643, 2.1011, 2.6505, 2.9900, 3.0530, 2.8800, 3.0071], + device='cuda:1'), covar=tensor([0.1118, 0.1666, 0.1525, 0.0806, 0.0536, 0.0391, 0.0554, 0.0522], + device='cuda:1'), in_proj_covar=tensor([0.0160, 0.0169, 0.0178, 0.0151, 0.0138, 0.0133, 0.0125, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 03:49:52,825 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6591, 2.8190, 4.5240, 3.8751, 4.1662, 4.5186, 4.3628, 4.1659], + device='cuda:1'), covar=tensor([0.0452, 0.0895, 0.0099, 0.0796, 0.0176, 0.0193, 0.0157, 0.0178], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0103, 0.0088, 0.0153, 0.0086, 0.0098, 0.0090, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:50:47,178 INFO [train.py:892] (1/4) Epoch 32, batch 800, loss[loss=0.1349, simple_loss=0.213, pruned_loss=0.02842, over 19758.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2432, pruned_loss=0.04212, over 3873194.18 frames. ], batch size: 100, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:51:10,044 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8284, 3.5521, 3.6650, 3.8568, 3.6026, 3.8013, 3.9064, 4.1080], + device='cuda:1'), covar=tensor([0.0737, 0.0449, 0.0568, 0.0420, 0.0805, 0.0631, 0.0488, 0.0326], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0177, 0.0202, 0.0176, 0.0174, 0.0159, 0.0150, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 03:51:31,606 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.700e+02 3.818e+02 4.452e+02 5.202e+02 1.002e+03, threshold=8.904e+02, percent-clipped=2.0 +2023-03-29 03:51:58,926 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1091, 2.4495, 3.3467, 2.7785, 2.8572, 2.8185, 2.0555, 2.2182], + device='cuda:1'), covar=tensor([0.1149, 0.2643, 0.0685, 0.1094, 0.1810, 0.1492, 0.2656, 0.2607], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0385, 0.0347, 0.0285, 0.0370, 0.0374, 0.0371, 0.0343], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:52:43,142 INFO [train.py:892] (1/4) Epoch 32, batch 850, loss[loss=0.1482, simple_loss=0.2327, pruned_loss=0.03189, over 19792.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2431, pruned_loss=0.04207, over 3889882.66 frames. ], batch size: 105, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:53:31,805 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7993, 3.6934, 4.0609, 3.7650, 3.4830, 3.9663, 3.8589, 4.1207], + device='cuda:1'), covar=tensor([0.0813, 0.0390, 0.0361, 0.0379, 0.1253, 0.0555, 0.0448, 0.0347], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0223, 0.0222, 0.0234, 0.0208, 0.0245, 0.0232, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:53:59,992 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58389.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:54:39,305 INFO [train.py:892] (1/4) Epoch 32, batch 900, loss[loss=0.1609, simple_loss=0.2317, pruned_loss=0.04499, over 19837.00 frames. 
], tot_loss[loss=0.1621, simple_loss=0.241, pruned_loss=0.04157, over 3904937.60 frames. ], batch size: 239, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:55:21,924 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 3.700e+02 4.222e+02 4.872e+02 9.847e+02, threshold=8.445e+02, percent-clipped=1.0 +2023-03-29 03:55:23,434 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.07 vs. limit=2.0 +2023-03-29 03:55:51,441 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58437.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:56:34,990 INFO [train.py:892] (1/4) Epoch 32, batch 950, loss[loss=0.2596, simple_loss=0.3318, pruned_loss=0.0937, over 19417.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2417, pruned_loss=0.042, over 3915411.65 frames. ], batch size: 431, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:56:44,601 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8535, 2.7664, 1.8055, 3.2586, 3.0222, 3.1579, 3.2693, 2.6512], + device='cuda:1'), covar=tensor([0.0671, 0.0780, 0.1716, 0.0676, 0.0666, 0.0525, 0.0649, 0.0858], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0145, 0.0144, 0.0153, 0.0135, 0.0136, 0.0149, 0.0146], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 03:58:05,421 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58497.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:58:11,649 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58500.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:58:24,172 INFO [train.py:892] (1/4) Epoch 32, batch 1000, loss[loss=0.1413, simple_loss=0.2266, pruned_loss=0.02795, over 19741.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2407, pruned_loss=0.04152, over 3923338.38 frames. ], batch size: 92, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:59:09,108 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.374e+02 3.686e+02 4.298e+02 5.253e+02 9.198e+02, threshold=8.596e+02, percent-clipped=1.0 +2023-03-29 04:00:20,500 INFO [train.py:892] (1/4) Epoch 32, batch 1050, loss[loss=0.1588, simple_loss=0.2352, pruned_loss=0.04118, over 19872.00 frames. ], tot_loss[loss=0.162, simple_loss=0.241, pruned_loss=0.04148, over 3929588.43 frames. ], batch size: 138, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:00:28,002 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0045, 2.2848, 2.1236, 1.5118, 2.1633, 2.2755, 2.1414, 2.1817], + device='cuda:1'), covar=tensor([0.0448, 0.0334, 0.0342, 0.0598, 0.0408, 0.0303, 0.0350, 0.0321], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0098, 0.0100, 0.0103, 0.0106, 0.0088, 0.0088, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 04:00:43,844 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-03-29 04:02:18,301 INFO [train.py:892] (1/4) Epoch 32, batch 1100, loss[loss=0.1589, simple_loss=0.2403, pruned_loss=0.03871, over 19687.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2419, pruned_loss=0.04188, over 3934278.12 frames. 
], batch size: 82, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:02:58,443 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1307, 3.2162, 2.0069, 3.2654, 3.3733, 1.6878, 2.8256, 2.5930], + device='cuda:1'), covar=tensor([0.0867, 0.0881, 0.2727, 0.0874, 0.0724, 0.2477, 0.1139, 0.1054], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0258, 0.0231, 0.0277, 0.0257, 0.0205, 0.0241, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 04:03:01,447 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 3.808e+02 4.430e+02 5.351e+02 1.082e+03, threshold=8.860e+02, percent-clipped=2.0 +2023-03-29 04:03:13,262 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8186, 2.8126, 1.7979, 3.2284, 3.0083, 3.1188, 3.2025, 2.6588], + device='cuda:1'), covar=tensor([0.0685, 0.0697, 0.1660, 0.0580, 0.0632, 0.0523, 0.0708, 0.0882], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0145, 0.0144, 0.0153, 0.0135, 0.0137, 0.0149, 0.0146], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 04:04:08,056 INFO [train.py:892] (1/4) Epoch 32, batch 1150, loss[loss=0.152, simple_loss=0.2324, pruned_loss=0.03573, over 19784.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2421, pruned_loss=0.04263, over 3938040.53 frames. ], batch size: 163, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:05:05,284 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58681.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:05:35,072 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2959, 2.5571, 3.5385, 2.8437, 3.0139, 2.9213, 2.2162, 2.3458], + device='cuda:1'), covar=tensor([0.1241, 0.3333, 0.0773, 0.1236, 0.1901, 0.1631, 0.2705, 0.2761], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0389, 0.0350, 0.0287, 0.0375, 0.0377, 0.0374, 0.0344], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 04:06:02,814 INFO [train.py:892] (1/4) Epoch 32, batch 1200, loss[loss=0.1545, simple_loss=0.2282, pruned_loss=0.04037, over 19838.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2441, pruned_loss=0.04354, over 3940319.94 frames. ], batch size: 43, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:06:46,116 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 3.575e+02 4.310e+02 5.528e+02 1.202e+03, threshold=8.619e+02, percent-clipped=2.0 +2023-03-29 04:06:47,035 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4042, 4.0933, 4.2043, 4.4211, 4.0964, 4.4700, 4.5046, 4.7097], + device='cuda:1'), covar=tensor([0.0696, 0.0474, 0.0535, 0.0387, 0.0693, 0.0543, 0.0473, 0.0283], + device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0179, 0.0203, 0.0178, 0.0176, 0.0161, 0.0152, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 04:07:29,300 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58742.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:07:57,924 INFO [train.py:892] (1/4) Epoch 32, batch 1250, loss[loss=0.1488, simple_loss=0.2296, pruned_loss=0.03401, over 19745.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2421, pruned_loss=0.04249, over 3942261.80 frames. 
], batch size: 139, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:08:45,734 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58775.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:09:34,859 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58797.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:09:41,893 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58800.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:09:55,327 INFO [train.py:892] (1/4) Epoch 32, batch 1300, loss[loss=0.1555, simple_loss=0.2452, pruned_loss=0.03288, over 19805.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2404, pruned_loss=0.04157, over 3944977.04 frames. ], batch size: 47, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:10:37,441 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.259e+02 3.414e+02 4.354e+02 4.908e+02 8.205e+02, threshold=8.708e+02, percent-clipped=0.0 +2023-03-29 04:10:59,795 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58836.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:11:07,390 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5517, 4.4035, 4.8888, 4.4610, 4.1062, 4.6917, 4.5590, 5.0167], + device='cuda:1'), covar=tensor([0.0771, 0.0385, 0.0342, 0.0373, 0.0931, 0.0561, 0.0470, 0.0304], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0225, 0.0225, 0.0237, 0.0210, 0.0249, 0.0235, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 04:11:18,892 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.02 vs. limit=5.0 +2023-03-29 04:11:20,207 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58845.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:11:28,497 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:11:34,804 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.48 vs. limit=5.0 +2023-03-29 04:11:46,382 INFO [train.py:892] (1/4) Epoch 32, batch 1350, loss[loss=0.1585, simple_loss=0.2328, pruned_loss=0.04215, over 19730.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2413, pruned_loss=0.04192, over 3946721.26 frames. ], batch size: 134, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:13:32,074 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8102, 2.7651, 4.7690, 4.1474, 4.4991, 4.6198, 4.5868, 4.3671], + device='cuda:1'), covar=tensor([0.0453, 0.0949, 0.0097, 0.0841, 0.0142, 0.0216, 0.0154, 0.0164], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0104, 0.0090, 0.0155, 0.0087, 0.0099, 0.0091, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 04:13:39,852 INFO [train.py:892] (1/4) Epoch 32, batch 1400, loss[loss=0.1477, simple_loss=0.2319, pruned_loss=0.03176, over 19691.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2413, pruned_loss=0.04152, over 3946351.56 frames. 
], batch size: 59, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:13:40,765 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58906.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:14:27,792 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.427e+02 3.533e+02 4.416e+02 5.664e+02 1.235e+03, threshold=8.833e+02, percent-clipped=5.0 +2023-03-29 04:14:59,031 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9313, 2.8898, 3.0267, 2.4970, 3.0419, 2.6868, 3.0442, 2.9589], + device='cuda:1'), covar=tensor([0.0660, 0.0499, 0.0514, 0.0792, 0.0449, 0.0487, 0.0444, 0.0378], + device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0087, 0.0085, 0.0112, 0.0080, 0.0083, 0.0081, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 04:15:40,829 INFO [train.py:892] (1/4) Epoch 32, batch 1450, loss[loss=0.1665, simple_loss=0.2516, pruned_loss=0.04067, over 19841.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2413, pruned_loss=0.04157, over 3947557.04 frames. ], batch size: 43, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:16:06,692 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58967.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:16:59,204 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4885, 1.9833, 2.2790, 2.7140, 3.0931, 3.1516, 3.0692, 3.1080], + device='cuda:1'), covar=tensor([0.1099, 0.1808, 0.1536, 0.0849, 0.0557, 0.0446, 0.0472, 0.0525], + device='cuda:1'), in_proj_covar=tensor([0.0162, 0.0169, 0.0178, 0.0153, 0.0137, 0.0134, 0.0126, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 04:17:07,097 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5758, 3.4335, 5.1081, 3.7552, 4.1027, 3.8906, 2.7423, 2.9751], + device='cuda:1'), covar=tensor([0.0851, 0.3209, 0.0406, 0.1152, 0.1702, 0.1355, 0.2619, 0.2544], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0390, 0.0351, 0.0288, 0.0376, 0.0377, 0.0374, 0.0346], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 04:17:32,727 INFO [train.py:892] (1/4) Epoch 32, batch 1500, loss[loss=0.1402, simple_loss=0.2269, pruned_loss=0.02675, over 19860.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2406, pruned_loss=0.04117, over 3948704.70 frames. ], batch size: 112, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:18:18,935 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.783e+02 3.777e+02 4.436e+02 5.379e+02 1.000e+03, threshold=8.872e+02, percent-clipped=1.0 +2023-03-29 04:18:48,038 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59037.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:19:27,085 INFO [train.py:892] (1/4) Epoch 32, batch 1550, loss[loss=0.1618, simple_loss=0.2444, pruned_loss=0.03964, over 19764.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2402, pruned_loss=0.04083, over 3948996.96 frames. 
], batch size: 244, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:20:42,513 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5980, 3.6797, 2.2692, 3.7787, 3.9522, 1.8025, 3.3001, 3.0132], + device='cuda:1'), covar=tensor([0.0754, 0.0817, 0.2662, 0.0845, 0.0637, 0.2667, 0.1128, 0.0897], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0259, 0.0231, 0.0278, 0.0257, 0.0205, 0.0242, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 04:21:25,980 INFO [train.py:892] (1/4) Epoch 32, batch 1600, loss[loss=0.1799, simple_loss=0.2591, pruned_loss=0.05028, over 19675.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2398, pruned_loss=0.04063, over 3950014.32 frames. ], batch size: 73, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:22:08,642 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.456e+02 3.560e+02 4.342e+02 5.121e+02 7.778e+02, threshold=8.685e+02, percent-clipped=0.0 +2023-03-29 04:22:23,503 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59131.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:23:16,333 INFO [train.py:892] (1/4) Epoch 32, batch 1650, loss[loss=0.1463, simple_loss=0.2265, pruned_loss=0.03301, over 19751.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2389, pruned_loss=0.04001, over 3949534.55 frames. ], batch size: 89, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:23:33,836 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59164.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:25:14,212 INFO [train.py:892] (1/4) Epoch 32, batch 1700, loss[loss=0.1697, simple_loss=0.2535, pruned_loss=0.04301, over 19573.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.24, pruned_loss=0.0409, over 3949762.97 frames. ], batch size: 53, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:26:00,342 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.757e+02 3.824e+02 4.316e+02 5.380e+02 8.214e+02, threshold=8.632e+02, percent-clipped=0.0 +2023-03-29 04:26:01,375 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59225.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:27:06,697 INFO [train.py:892] (1/4) Epoch 32, batch 1750, loss[loss=0.2208, simple_loss=0.296, pruned_loss=0.07278, over 19597.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2399, pruned_loss=0.04117, over 3950157.35 frames. ], batch size: 376, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:27:18,855 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59262.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:28:23,820 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-29 04:28:46,843 INFO [train.py:892] (1/4) Epoch 32, batch 1800, loss[loss=0.1599, simple_loss=0.2353, pruned_loss=0.04229, over 19681.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.24, pruned_loss=0.04077, over 3948981.13 frames. ], batch size: 45, lr: 4.85e-03, grad_scale: 16.0 +2023-03-29 04:29:03,132 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-29 04:29:10,486 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.14 vs. limit=5.0 +2023-03-29 04:29:16,874 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.71 vs. 
limit=5.0 +2023-03-29 04:29:23,521 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.122e+02 3.692e+02 4.509e+02 5.287e+02 9.435e+02, threshold=9.017e+02, percent-clipped=2.0 +2023-03-29 04:29:38,748 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.63 vs. limit=5.0 +2023-03-29 04:29:45,407 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59337.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:29:58,667 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-29 04:30:19,878 INFO [train.py:892] (1/4) Epoch 32, batch 1850, loss[loss=0.1622, simple_loss=0.2501, pruned_loss=0.03716, over 19839.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2414, pruned_loss=0.041, over 3948726.31 frames. ], batch size: 57, lr: 4.85e-03, grad_scale: 16.0 +2023-03-29 04:31:27,304 INFO [train.py:892] (1/4) Epoch 33, batch 0, loss[loss=0.1588, simple_loss=0.2307, pruned_loss=0.04347, over 19740.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2307, pruned_loss=0.04347, over 19740.00 frames. ], batch size: 134, lr: 4.78e-03, grad_scale: 16.0 +2023-03-29 04:31:27,305 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 04:32:02,114 INFO [train.py:926] (1/4) Epoch 33, validation: loss=0.1828, simple_loss=0.2501, pruned_loss=0.05775, over 2883724.00 frames. +2023-03-29 04:32:02,115 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-29 04:32:58,842 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59385.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:34:00,944 INFO [train.py:892] (1/4) Epoch 33, batch 50, loss[loss=0.1399, simple_loss=0.2057, pruned_loss=0.03704, over 19865.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2363, pruned_loss=0.03985, over 891553.95 frames. ], batch size: 158, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:34:31,973 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.011e+02 3.734e+02 4.113e+02 4.881e+02 1.279e+03, threshold=8.226e+02, percent-clipped=1.0 +2023-03-29 04:34:45,594 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59431.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:35:56,489 INFO [train.py:892] (1/4) Epoch 33, batch 100, loss[loss=0.1541, simple_loss=0.2345, pruned_loss=0.03689, over 19793.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2378, pruned_loss=0.0402, over 1569648.93 frames. 
], batch size: 195, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:36:10,234 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8440, 3.5216, 3.7303, 2.7542, 4.2285, 3.3739, 3.6167, 3.9970], + device='cuda:1'), covar=tensor([0.0596, 0.0460, 0.0687, 0.0861, 0.0278, 0.0401, 0.0453, 0.0274], + device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0088, 0.0086, 0.0114, 0.0081, 0.0084, 0.0082, 0.0075], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 04:36:36,983 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59479.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:37:08,410 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6075, 2.9156, 3.0940, 3.4497, 2.4528, 3.1038, 2.2749, 2.3078], + device='cuda:1'), covar=tensor([0.0567, 0.1497, 0.1013, 0.0535, 0.2117, 0.0801, 0.1397, 0.1629], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0331, 0.0251, 0.0206, 0.0250, 0.0210, 0.0220, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 04:37:31,942 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59502.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:37:43,346 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6424, 3.6595, 2.2564, 3.8674, 3.9763, 1.8905, 3.2369, 3.1384], + device='cuda:1'), covar=tensor([0.0866, 0.0936, 0.3042, 0.0901, 0.0764, 0.2907, 0.1253, 0.0953], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0259, 0.0232, 0.0278, 0.0257, 0.0204, 0.0242, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 04:37:51,785 INFO [train.py:892] (1/4) Epoch 33, batch 150, loss[loss=0.1523, simple_loss=0.2277, pruned_loss=0.03848, over 19860.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2381, pruned_loss=0.04095, over 2097969.82 frames. ], batch size: 197, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:38:12,532 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59520.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:38:22,511 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.374e+02 3.870e+02 4.435e+02 5.412e+02 1.132e+03, threshold=8.870e+02, percent-clipped=1.0 +2023-03-29 04:39:44,895 INFO [train.py:892] (1/4) Epoch 33, batch 200, loss[loss=0.145, simple_loss=0.22, pruned_loss=0.03505, over 19822.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2372, pruned_loss=0.03998, over 2507659.61 frames. ], batch size: 187, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:39:47,957 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59562.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:39:50,030 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59563.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:40:16,519 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.87 vs. limit=5.0 +2023-03-29 04:41:35,581 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59610.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:41:36,995 INFO [train.py:892] (1/4) Epoch 33, batch 250, loss[loss=0.1582, simple_loss=0.2424, pruned_loss=0.03702, over 19566.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2385, pruned_loss=0.04079, over 2826721.39 frames. 
], batch size: 42, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:42:08,470 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.374e+02 3.603e+02 4.122e+02 4.867e+02 8.945e+02, threshold=8.243e+02, percent-clipped=1.0 +2023-03-29 04:42:26,504 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9395, 3.3084, 3.3348, 3.8527, 2.7912, 3.2504, 2.5427, 2.5261], + device='cuda:1'), covar=tensor([0.0557, 0.1608, 0.0996, 0.0432, 0.1895, 0.0873, 0.1373, 0.1601], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0332, 0.0252, 0.0206, 0.0250, 0.0211, 0.0221, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 04:43:31,417 INFO [train.py:892] (1/4) Epoch 33, batch 300, loss[loss=0.1705, simple_loss=0.2479, pruned_loss=0.04654, over 19765.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2388, pruned_loss=0.04038, over 3074060.98 frames. ], batch size: 70, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:45:17,702 INFO [train.py:892] (1/4) Epoch 33, batch 350, loss[loss=0.1547, simple_loss=0.2392, pruned_loss=0.03512, over 19701.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2385, pruned_loss=0.03977, over 3266621.23 frames. ], batch size: 315, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:45:29,673 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6450, 3.9050, 4.0631, 4.7899, 3.2558, 3.4620, 2.9791, 2.9055], + device='cuda:1'), covar=tensor([0.0444, 0.1924, 0.0881, 0.0331, 0.1879, 0.1067, 0.1291, 0.1537], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0326, 0.0248, 0.0202, 0.0246, 0.0208, 0.0217, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 04:45:51,030 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.193e+02 3.458e+02 4.083e+02 4.831e+02 8.519e+02, threshold=8.167e+02, percent-clipped=1.0 +2023-03-29 04:46:03,824 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3517, 2.6004, 3.7817, 3.0094, 3.1714, 2.9568, 2.2203, 2.2980], + device='cuda:1'), covar=tensor([0.1250, 0.2870, 0.0656, 0.1103, 0.1820, 0.1695, 0.2692, 0.2855], + device='cuda:1'), in_proj_covar=tensor([0.0351, 0.0388, 0.0349, 0.0286, 0.0373, 0.0376, 0.0372, 0.0343], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 04:46:25,468 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0 +2023-03-29 04:47:13,240 INFO [train.py:892] (1/4) Epoch 33, batch 400, loss[loss=0.1505, simple_loss=0.2341, pruned_loss=0.0335, over 19780.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.238, pruned_loss=0.03978, over 3418480.79 frames. ], batch size: 66, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:49:08,536 INFO [train.py:892] (1/4) Epoch 33, batch 450, loss[loss=0.1489, simple_loss=0.2179, pruned_loss=0.03992, over 19824.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2378, pruned_loss=0.0398, over 3538038.37 frames. 
], batch size: 184, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:49:14,412 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6145, 3.7212, 2.2227, 3.8299, 3.9856, 1.8206, 3.2748, 3.0094], + device='cuda:1'), covar=tensor([0.0774, 0.0770, 0.2838, 0.0869, 0.0592, 0.2666, 0.1167, 0.0944], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0260, 0.0232, 0.0278, 0.0257, 0.0204, 0.0242, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 04:49:28,885 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59820.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:49:39,971 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.419e+02 3.507e+02 4.283e+02 5.159e+02 1.029e+03, threshold=8.565e+02, percent-clipped=3.0 +2023-03-29 04:50:33,016 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:50:50,748 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-29 04:50:53,618 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59858.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:50:59,530 INFO [train.py:892] (1/4) Epoch 33, batch 500, loss[loss=0.1638, simple_loss=0.248, pruned_loss=0.03974, over 19816.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2379, pruned_loss=0.0401, over 3629846.09 frames. ], batch size: 57, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:51:19,548 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59868.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:52:06,984 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59889.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:52:46,633 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4190, 3.0122, 3.3355, 3.0300, 3.6652, 3.5762, 4.2408, 4.6441], + device='cuda:1'), covar=tensor([0.0521, 0.1628, 0.1459, 0.2092, 0.1484, 0.1414, 0.0596, 0.0442], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0242, 0.0270, 0.0255, 0.0299, 0.0259, 0.0234, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 04:52:52,711 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0591, 3.9030, 4.3271, 3.9839, 3.7294, 4.2043, 4.0208, 4.4372], + device='cuda:1'), covar=tensor([0.0841, 0.0409, 0.0390, 0.0420, 0.1085, 0.0572, 0.0491, 0.0355], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0224, 0.0225, 0.0237, 0.0211, 0.0249, 0.0234, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 04:52:52,804 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59909.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:52:54,811 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3660, 2.4086, 2.5134, 2.4769, 2.4604, 2.5722, 2.3603, 2.5945], + device='cuda:1'), covar=tensor([0.0383, 0.0364, 0.0340, 0.0309, 0.0465, 0.0307, 0.0485, 0.0324], + device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0081, 0.0084, 0.0079, 0.0091, 0.0084, 0.0101, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 04:52:55,892 INFO [train.py:892] (1/4) Epoch 
33, batch 550, loss[loss=0.1546, simple_loss=0.2296, pruned_loss=0.03975, over 19875.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2377, pruned_loss=0.03996, over 3701146.53 frames. ], batch size: 125, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:53:28,208 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.466e+02 3.637e+02 4.346e+02 5.121e+02 1.028e+03, threshold=8.693e+02, percent-clipped=1.0 +2023-03-29 04:54:25,918 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59950.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:54:50,037 INFO [train.py:892] (1/4) Epoch 33, batch 600, loss[loss=0.1338, simple_loss=0.2114, pruned_loss=0.02812, over 19393.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2379, pruned_loss=0.03974, over 3756703.97 frames. ], batch size: 40, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:55:05,676 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5222, 4.2640, 4.2847, 4.0143, 4.4752, 3.0579, 3.7328, 2.2810], + device='cuda:1'), covar=tensor([0.0168, 0.0223, 0.0155, 0.0214, 0.0143, 0.1032, 0.0721, 0.1540], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0146, 0.0114, 0.0134, 0.0119, 0.0134, 0.0142, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 04:55:33,998 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3765, 3.0405, 3.3966, 2.9779, 3.5942, 3.5088, 4.1810, 4.5689], + device='cuda:1'), covar=tensor([0.0537, 0.1616, 0.1511, 0.2194, 0.1655, 0.1511, 0.0598, 0.0520], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0242, 0.0270, 0.0255, 0.0299, 0.0260, 0.0235, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 04:56:47,914 INFO [train.py:892] (1/4) Epoch 33, batch 650, loss[loss=0.1521, simple_loss=0.2363, pruned_loss=0.03397, over 19788.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2369, pruned_loss=0.03926, over 3799625.71 frames. ], batch size: 174, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:56:55,121 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6506, 3.8713, 2.3615, 4.1115, 4.1882, 1.9299, 3.1544, 3.0859], + device='cuda:1'), covar=tensor([0.0852, 0.0858, 0.2803, 0.0658, 0.0548, 0.3031, 0.1550, 0.1000], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0261, 0.0233, 0.0278, 0.0258, 0.0205, 0.0243, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 04:57:20,321 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.343e+02 3.751e+02 4.539e+02 5.312e+02 8.817e+02, threshold=9.077e+02, percent-clipped=1.0 +2023-03-29 04:57:55,054 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60040.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:58:22,765 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60052.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:58:38,902 INFO [train.py:892] (1/4) Epoch 33, batch 700, loss[loss=0.1447, simple_loss=0.2197, pruned_loss=0.03484, over 19737.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2388, pruned_loss=0.03987, over 3831719.29 frames. 
], batch size: 134, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:58:48,278 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60065.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:59:18,321 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60078.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:00:08,303 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60101.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 05:00:28,986 INFO [train.py:892] (1/4) Epoch 33, batch 750, loss[loss=0.1314, simple_loss=0.2054, pruned_loss=0.02868, over 19866.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2374, pruned_loss=0.03958, over 3859042.32 frames. ], batch size: 154, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 05:00:33,731 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60113.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 05:00:44,704 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3459, 3.5222, 2.2616, 4.1335, 3.7332, 4.0206, 4.1010, 3.2288], + device='cuda:1'), covar=tensor([0.0627, 0.0580, 0.1369, 0.0512, 0.0633, 0.0448, 0.0614, 0.0750], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0145, 0.0144, 0.0154, 0.0135, 0.0138, 0.0149, 0.0147], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:00:59,004 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6079, 2.7405, 4.0779, 3.2299, 3.3783, 3.1709, 2.3782, 2.4411], + device='cuda:1'), covar=tensor([0.1188, 0.3273, 0.0614, 0.1110, 0.1827, 0.1626, 0.2741, 0.2955], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0391, 0.0352, 0.0288, 0.0376, 0.0379, 0.0375, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:00:59,947 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.667e+02 4.352e+02 5.271e+02 8.551e+02, threshold=8.703e+02, percent-clipped=0.0 +2023-03-29 05:01:03,187 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60126.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:01:21,329 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.87 vs. limit=5.0 +2023-03-29 05:01:34,622 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60139.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:02:17,816 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60158.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:02:23,088 INFO [train.py:892] (1/4) Epoch 33, batch 800, loss[loss=0.1413, simple_loss=0.2203, pruned_loss=0.03118, over 19667.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2387, pruned_loss=0.04036, over 3879317.82 frames. 
], batch size: 43, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:03:47,858 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60198.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:04:03,565 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60204.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:04:08,048 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60206.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:04:17,807 INFO [train.py:892] (1/4) Epoch 33, batch 850, loss[loss=0.1601, simple_loss=0.2458, pruned_loss=0.03716, over 19747.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2395, pruned_loss=0.04049, over 3895764.81 frames. ], batch size: 44, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:04:48,778 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.588e+02 3.634e+02 4.278e+02 5.134e+02 8.109e+02, threshold=8.556e+02, percent-clipped=0.0 +2023-03-29 05:05:37,061 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60245.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:06:08,066 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60259.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:06:11,095 INFO [train.py:892] (1/4) Epoch 33, batch 900, loss[loss=0.1533, simple_loss=0.2396, pruned_loss=0.03345, over 19806.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2399, pruned_loss=0.04081, over 3907862.62 frames. ], batch size: 98, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:06:51,251 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3390, 4.4141, 2.5619, 4.6505, 4.9172, 2.0849, 4.0087, 3.4892], + device='cuda:1'), covar=tensor([0.0636, 0.0750, 0.2815, 0.0690, 0.0424, 0.2927, 0.1038, 0.0887], + device='cuda:1'), in_proj_covar=tensor([0.0237, 0.0262, 0.0234, 0.0279, 0.0260, 0.0207, 0.0244, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 05:06:57,400 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8205, 2.2068, 2.7450, 3.0837, 3.5780, 3.8889, 3.7182, 3.7785], + device='cuda:1'), covar=tensor([0.1114, 0.1898, 0.1426, 0.0767, 0.0451, 0.0314, 0.0457, 0.0418], + device='cuda:1'), in_proj_covar=tensor([0.0162, 0.0171, 0.0179, 0.0153, 0.0137, 0.0135, 0.0126, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 05:08:06,813 INFO [train.py:892] (1/4) Epoch 33, batch 950, loss[loss=0.1637, simple_loss=0.2311, pruned_loss=0.04812, over 19842.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2398, pruned_loss=0.04065, over 3917677.64 frames. 
], batch size: 161, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:08:37,051 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.481e+02 3.823e+02 4.629e+02 5.710e+02 9.734e+02, threshold=9.258e+02, percent-clipped=1.0 +2023-03-29 05:08:48,610 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9534, 4.5977, 4.6433, 4.3804, 4.9065, 3.0333, 3.9450, 2.4649], + device='cuda:1'), covar=tensor([0.0187, 0.0203, 0.0163, 0.0189, 0.0145, 0.1059, 0.0811, 0.1584], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0147, 0.0115, 0.0135, 0.0120, 0.0136, 0.0142, 0.0128], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:09:56,802 INFO [train.py:892] (1/4) Epoch 33, batch 1000, loss[loss=0.1716, simple_loss=0.2516, pruned_loss=0.04576, over 19758.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2402, pruned_loss=0.04088, over 3925170.03 frames. ], batch size: 253, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:11:13,837 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60396.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 05:11:39,714 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4530, 3.5931, 2.4262, 4.2787, 3.9624, 4.2402, 4.3105, 3.3530], + device='cuda:1'), covar=tensor([0.0654, 0.0624, 0.1622, 0.0592, 0.0544, 0.0453, 0.0559, 0.0876], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0143, 0.0143, 0.0152, 0.0134, 0.0137, 0.0148, 0.0145], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:11:41,554 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60408.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 05:11:48,654 INFO [train.py:892] (1/4) Epoch 33, batch 1050, loss[loss=0.1378, simple_loss=0.2152, pruned_loss=0.03026, over 19803.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2403, pruned_loss=0.04089, over 3931627.53 frames. ], batch size: 151, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:12:11,035 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60421.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:12:21,533 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.510e+02 3.870e+02 4.484e+02 5.211e+02 8.559e+02, threshold=8.968e+02, percent-clipped=0.0 +2023-03-29 05:12:41,273 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60434.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:12:41,612 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4444, 3.3484, 5.0362, 3.8391, 3.9773, 3.7835, 2.7100, 2.9751], + device='cuda:1'), covar=tensor([0.0879, 0.2805, 0.0368, 0.0944, 0.1656, 0.1456, 0.2503, 0.2299], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0388, 0.0349, 0.0286, 0.0373, 0.0375, 0.0373, 0.0344], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:13:40,384 INFO [train.py:892] (1/4) Epoch 33, batch 1100, loss[loss=0.1511, simple_loss=0.2216, pruned_loss=0.04028, over 19766.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2403, pruned_loss=0.04096, over 3936569.05 frames. 
], batch size: 182, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:15:19,595 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5778, 3.6682, 2.4376, 4.3022, 3.9785, 4.3370, 4.3631, 3.5062], + device='cuda:1'), covar=tensor([0.0570, 0.0588, 0.1396, 0.0606, 0.0529, 0.0379, 0.0500, 0.0677], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0144, 0.0143, 0.0154, 0.0134, 0.0137, 0.0148, 0.0146], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:15:21,690 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:15:35,155 INFO [train.py:892] (1/4) Epoch 33, batch 1150, loss[loss=0.1644, simple_loss=0.243, pruned_loss=0.04287, over 19730.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.241, pruned_loss=0.04129, over 3937357.39 frames. ], batch size: 77, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:16:08,384 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 3.806e+02 4.491e+02 5.149e+02 7.976e+02, threshold=8.981e+02, percent-clipped=0.0 +2023-03-29 05:16:50,640 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-29 05:16:54,230 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60545.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:17:08,795 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60552.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:17:12,988 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60554.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:17:29,246 INFO [train.py:892] (1/4) Epoch 33, batch 1200, loss[loss=0.1668, simple_loss=0.2423, pruned_loss=0.04569, over 19838.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2424, pruned_loss=0.04194, over 3938438.45 frames. 
], batch size: 166, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:17:30,106 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9114, 3.7945, 3.7675, 3.5122, 3.9100, 2.8931, 3.2257, 1.9989], + device='cuda:1'), covar=tensor([0.0199, 0.0236, 0.0150, 0.0195, 0.0149, 0.1029, 0.0645, 0.1574], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0147, 0.0114, 0.0134, 0.0119, 0.0135, 0.0142, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:18:42,755 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60593.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:18:46,915 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1356, 2.8883, 3.2422, 2.7790, 3.4646, 3.3741, 4.0022, 4.3793], + device='cuda:1'), covar=tensor([0.0554, 0.1759, 0.1434, 0.2240, 0.1605, 0.1470, 0.0568, 0.0562], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0241, 0.0268, 0.0253, 0.0298, 0.0258, 0.0233, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:18:48,743 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6010, 3.5061, 3.8745, 3.5625, 3.3838, 3.8091, 3.6410, 3.9308], + device='cuda:1'), covar=tensor([0.0823, 0.0404, 0.0393, 0.0439, 0.1212, 0.0578, 0.0489, 0.0386], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0223, 0.0223, 0.0237, 0.0208, 0.0247, 0.0235, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:19:21,745 INFO [train.py:892] (1/4) Epoch 33, batch 1250, loss[loss=0.1413, simple_loss=0.2207, pruned_loss=0.03093, over 19786.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2422, pruned_loss=0.04161, over 3938935.34 frames. ], batch size: 83, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:19:52,021 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.888e+02 3.769e+02 4.430e+02 5.364e+02 1.022e+03, threshold=8.861e+02, percent-clipped=3.0 +2023-03-29 05:20:34,606 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4026, 2.7366, 2.4196, 1.9160, 2.5729, 2.7360, 2.6434, 2.6923], + device='cuda:1'), covar=tensor([0.0442, 0.0307, 0.0350, 0.0641, 0.0383, 0.0308, 0.0319, 0.0259], + device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0100, 0.0102, 0.0105, 0.0107, 0.0090, 0.0090, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 05:21:14,272 INFO [train.py:892] (1/4) Epoch 33, batch 1300, loss[loss=0.1441, simple_loss=0.2198, pruned_loss=0.03425, over 19808.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2407, pruned_loss=0.04085, over 3941959.05 frames. 
], batch size: 40, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:21:48,508 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.5435, 5.9744, 5.9730, 5.8855, 5.7107, 5.6199, 5.6946, 5.5860], + device='cuda:1'), covar=tensor([0.1346, 0.1277, 0.0905, 0.1157, 0.0613, 0.0778, 0.1932, 0.1989], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0337, 0.0371, 0.0301, 0.0277, 0.0285, 0.0365, 0.0393], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 05:22:34,577 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60696.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:03,438 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60708.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:08,875 INFO [train.py:892] (1/4) Epoch 33, batch 1350, loss[loss=0.1427, simple_loss=0.2193, pruned_loss=0.0331, over 19826.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2399, pruned_loss=0.04029, over 3943772.67 frames. ], batch size: 202, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:23:27,833 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60718.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:33,904 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60721.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:44,303 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.407e+02 3.398e+02 4.212e+02 5.272e+02 1.001e+03, threshold=8.423e+02, percent-clipped=0.0 +2023-03-29 05:24:01,983 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60734.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:24:24,755 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60744.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:24:40,870 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3741, 3.2203, 4.8847, 3.6159, 3.8107, 3.6928, 2.6415, 2.8183], + device='cuda:1'), covar=tensor([0.0833, 0.3072, 0.0417, 0.1059, 0.1808, 0.1496, 0.2681, 0.2659], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0388, 0.0349, 0.0287, 0.0373, 0.0375, 0.0373, 0.0344], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:24:51,741 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60756.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:25:04,416 INFO [train.py:892] (1/4) Epoch 33, batch 1400, loss[loss=0.1547, simple_loss=0.222, pruned_loss=0.04368, over 19836.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2391, pruned_loss=0.04028, over 3945728.08 frames. ], batch size: 171, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:25:21,875 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60769.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:25:44,787 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60779.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:25:50,853 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60782.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:26:54,136 INFO [train.py:892] (1/4) Epoch 33, batch 1450, loss[loss=0.1844, simple_loss=0.2702, pruned_loss=0.04927, over 19756.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2389, pruned_loss=0.04025, over 3948263.92 frames. 
], batch size: 253, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:26:54,994 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6991, 2.1914, 2.6584, 2.9507, 3.3825, 3.6054, 3.5171, 3.5058], + device='cuda:1'), covar=tensor([0.1041, 0.1754, 0.1338, 0.0760, 0.0497, 0.0316, 0.0447, 0.0529], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0170, 0.0178, 0.0152, 0.0136, 0.0134, 0.0125, 0.0116], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 05:27:25,502 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.764e+02 3.944e+02 4.679e+02 5.484e+02 1.088e+03, threshold=9.358e+02, percent-clipped=4.0 +2023-03-29 05:28:30,873 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60854.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:28:46,083 INFO [train.py:892] (1/4) Epoch 33, batch 1500, loss[loss=0.1703, simple_loss=0.2417, pruned_loss=0.04947, over 19842.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2374, pruned_loss=0.03952, over 3949981.38 frames. ], batch size: 190, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:30:19,452 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60902.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:30:40,148 INFO [train.py:892] (1/4) Epoch 33, batch 1550, loss[loss=0.1547, simple_loss=0.2489, pruned_loss=0.03021, over 19685.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2381, pruned_loss=0.03952, over 3949345.99 frames. ], batch size: 55, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:31:12,863 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.819e+02 3.799e+02 4.581e+02 5.362e+02 9.999e+02, threshold=9.162e+02, percent-clipped=1.0 +2023-03-29 05:32:02,944 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2739, 3.1835, 3.3484, 2.6418, 3.5434, 3.0325, 3.2307, 3.4461], + device='cuda:1'), covar=tensor([0.0612, 0.0454, 0.0536, 0.0800, 0.0309, 0.0429, 0.0477, 0.0290], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0087, 0.0085, 0.0111, 0.0080, 0.0083, 0.0081, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 05:32:34,462 INFO [train.py:892] (1/4) Epoch 33, batch 1600, loss[loss=0.1988, simple_loss=0.3173, pruned_loss=0.04017, over 18030.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.239, pruned_loss=0.03982, over 3946763.00 frames. ], batch size: 633, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:32:51,090 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6085, 3.4485, 3.7229, 2.8486, 3.9415, 3.2595, 3.2352, 3.7667], + device='cuda:1'), covar=tensor([0.0723, 0.0394, 0.0586, 0.0794, 0.0314, 0.0404, 0.0620, 0.0339], + device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0087, 0.0085, 0.0112, 0.0080, 0.0083, 0.0082, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 05:33:16,042 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.51 vs. limit=5.0 +2023-03-29 05:34:30,667 INFO [train.py:892] (1/4) Epoch 33, batch 1650, loss[loss=0.1486, simple_loss=0.2282, pruned_loss=0.03454, over 19818.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2389, pruned_loss=0.03995, over 3946893.95 frames. 
], batch size: 96, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:34:52,588 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7272, 3.8315, 2.2824, 3.9454, 4.1056, 1.9076, 3.3856, 3.0786], + device='cuda:1'), covar=tensor([0.0774, 0.0826, 0.2774, 0.0840, 0.0608, 0.2731, 0.1109, 0.0945], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0261, 0.0233, 0.0281, 0.0258, 0.0205, 0.0242, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 05:35:04,279 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 3.785e+02 4.446e+02 5.538e+02 8.034e+02, threshold=8.891e+02, percent-clipped=0.0 +2023-03-29 05:36:07,470 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2601, 3.6215, 3.0537, 2.6941, 3.2122, 3.5276, 3.4046, 3.4599], + device='cuda:1'), covar=tensor([0.0283, 0.0239, 0.0274, 0.0503, 0.0283, 0.0214, 0.0222, 0.0220], + device='cuda:1'), in_proj_covar=tensor([0.0106, 0.0098, 0.0100, 0.0103, 0.0106, 0.0089, 0.0090, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 05:36:15,276 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8345, 3.7537, 4.1863, 3.8055, 3.6538, 4.1247, 3.8661, 4.2625], + device='cuda:1'), covar=tensor([0.1041, 0.0473, 0.0490, 0.0529, 0.1137, 0.0619, 0.0593, 0.0431], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0224, 0.0223, 0.0238, 0.0209, 0.0248, 0.0236, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 05:36:28,671 INFO [train.py:892] (1/4) Epoch 33, batch 1700, loss[loss=0.167, simple_loss=0.2485, pruned_loss=0.04279, over 19759.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2393, pruned_loss=0.03987, over 3949005.75 frames. ], batch size: 70, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:36:29,871 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61061.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:36:37,241 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-29 05:36:53,604 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1602, 3.5043, 3.7496, 4.1687, 2.8885, 3.2401, 2.7243, 2.7538], + device='cuda:1'), covar=tensor([0.0550, 0.1811, 0.0838, 0.0416, 0.1933, 0.0942, 0.1236, 0.1460], + device='cuda:1'), in_proj_covar=tensor([0.0245, 0.0329, 0.0250, 0.0205, 0.0248, 0.0210, 0.0219, 0.0217], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 05:37:06,951 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61074.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:37:19,489 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-29 05:38:23,928 INFO [train.py:892] (1/4) Epoch 33, batch 1750, loss[loss=0.1697, simple_loss=0.2489, pruned_loss=0.04527, over 19801.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2382, pruned_loss=0.03979, over 3949915.14 frames. 
], batch size: 74, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:38:44,981 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61122.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:38:52,716 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.343e+02 3.701e+02 4.217e+02 4.986e+02 8.389e+02, threshold=8.433e+02, percent-clipped=0.0 +2023-03-29 05:39:55,569 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.38 vs. limit=5.0 +2023-03-29 05:39:59,810 INFO [train.py:892] (1/4) Epoch 33, batch 1800, loss[loss=0.19, simple_loss=0.2582, pruned_loss=0.06093, over 19765.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2384, pruned_loss=0.03994, over 3951016.60 frames. ], batch size: 253, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:41:31,935 INFO [train.py:892] (1/4) Epoch 33, batch 1850, loss[loss=0.1511, simple_loss=0.2446, pruned_loss=0.0288, over 19822.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2397, pruned_loss=0.03988, over 3950935.79 frames. ], batch size: 57, lr: 4.70e-03, grad_scale: 16.0 +2023-03-29 05:42:34,516 INFO [train.py:892] (1/4) Epoch 34, batch 0, loss[loss=0.1447, simple_loss=0.2174, pruned_loss=0.03596, over 19792.00 frames. ], tot_loss[loss=0.1447, simple_loss=0.2174, pruned_loss=0.03596, over 19792.00 frames. ], batch size: 126, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:42:34,516 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 05:42:56,208 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5798, 4.0458, 3.8831, 3.8800, 4.0894, 3.9295, 3.8618, 3.6635], + device='cuda:1'), covar=tensor([0.2137, 0.1354, 0.1625, 0.1437, 0.0852, 0.0975, 0.1983, 0.2252], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0340, 0.0372, 0.0303, 0.0279, 0.0289, 0.0368, 0.0397], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 05:43:03,324 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7871, 3.0407, 3.3518, 3.6092, 2.7965, 3.1211, 2.5221, 2.6204], + device='cuda:1'), covar=tensor([0.0527, 0.1610, 0.0884, 0.0503, 0.1847, 0.0762, 0.1353, 0.1506], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0329, 0.0251, 0.0205, 0.0249, 0.0211, 0.0220, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 05:43:07,411 INFO [train.py:926] (1/4) Epoch 34, validation: loss=0.1816, simple_loss=0.2491, pruned_loss=0.05706, over 2883724.00 frames. +2023-03-29 05:43:07,414 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22586MB +2023-03-29 05:43:30,405 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.548e+02 3.493e+02 4.214e+02 5.020e+02 1.069e+03, threshold=8.428e+02, percent-clipped=3.0 +2023-03-29 05:44:23,176 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9600, 4.6651, 4.7262, 4.9894, 4.7562, 5.2011, 5.1186, 5.3245], + device='cuda:1'), covar=tensor([0.0656, 0.0337, 0.0478, 0.0302, 0.0628, 0.0354, 0.0412, 0.0270], + device='cuda:1'), in_proj_covar=tensor([0.0154, 0.0180, 0.0204, 0.0178, 0.0177, 0.0161, 0.0153, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 05:45:06,395 INFO [train.py:892] (1/4) Epoch 34, batch 50, loss[loss=0.1417, simple_loss=0.216, pruned_loss=0.03368, over 19817.00 frames. 
+2023-03-29 05:47:01,408 INFO [train.py:892] (1/4) Epoch 34, batch 100, loss[loss=0.1539, simple_loss=0.2419, pruned_loss=0.03298, over 19755.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2358, pruned_loss=0.03951, over 1569976.44 frames. ], batch size: 89, lr: 4.63e-03, grad_scale: 16.0
+2023-03-29 05:47:24,798 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.578e+02 3.716e+02 4.447e+02 5.513e+02 1.175e+03, threshold=8.893e+02, percent-clipped=3.0
+2023-03-29 05:48:57,020 INFO [train.py:892] (1/4) Epoch 34, batch 150, loss[loss=0.1876, simple_loss=0.2676, pruned_loss=0.05379, over 19835.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2359, pruned_loss=0.03903, over 2098266.51 frames. ], batch size: 43, lr: 4.63e-03, grad_scale: 16.0
+2023-03-29 05:49:18,160 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61374.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 05:50:14,617 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7368, 4.4182, 4.5052, 4.7311, 4.4408, 4.8688, 4.8241, 5.0518],
+ device='cuda:1'), covar=tensor([0.0654, 0.0409, 0.0431, 0.0338, 0.0637, 0.0380, 0.0420, 0.0265],
+ device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0180, 0.0205, 0.0179, 0.0178, 0.0162, 0.0154, 0.0200],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-29 05:50:56,758 INFO [train.py:892] (1/4) Epoch 34, batch 200, loss[loss=0.1684, simple_loss=0.2421, pruned_loss=0.04737, over 19819.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2377, pruned_loss=0.03963, over 2507893.09 frames. ], batch size: 288, lr: 4.63e-03, grad_scale: 16.0
+2023-03-29 05:50:59,672 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61417.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 05:51:10,033 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61422.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 05:51:10,776 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-03-29 05:51:17,508 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9705, 3.4069, 3.5637, 4.0057, 2.7212, 3.2294, 2.5199, 2.6225],
+ device='cuda:1'), covar=tensor([0.0561, 0.1764, 0.0849, 0.0408, 0.1924, 0.0861, 0.1362, 0.1536],
+ device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0328, 0.0251, 0.0205, 0.0250, 0.0212, 0.0220, 0.0218],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 05:51:18,487 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.390e+02 3.871e+02 4.515e+02 5.267e+02 1.071e+03, threshold=9.030e+02, percent-clipped=3.0
+2023-03-29 05:52:53,118 INFO [train.py:892] (1/4) Epoch 34, batch 250, loss[loss=0.1596, simple_loss=0.2416, pruned_loss=0.03877, over 19575.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2385, pruned_loss=0.04003, over 2827440.98 frames. ], batch size: 53, lr: 4.62e-03, grad_scale: 16.0
+2023-03-29 05:54:06,465 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0
+2023-03-29 05:54:47,105 INFO [train.py:892] (1/4) Epoch 34, batch 300, loss[loss=0.1604, simple_loss=0.2415, pruned_loss=0.03958, over 19659.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.239, pruned_loss=0.03985, over 3076822.47 frames. ], batch size: 43, lr: 4.62e-03, grad_scale: 16.0
+2023-03-29 05:55:09,937 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 3.531e+02 4.342e+02 5.324e+02 1.066e+03, threshold=8.684e+02, percent-clipped=3.0
+2023-03-29 05:55:47,765 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0
+2023-03-29 05:56:23,647 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5499, 2.6183, 2.8597, 2.5884, 2.9982, 2.9958, 3.3934, 3.7016],
+ device='cuda:1'), covar=tensor([0.0826, 0.1696, 0.1662, 0.2167, 0.1592, 0.1508, 0.0759, 0.0734],
+ device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0241, 0.0268, 0.0255, 0.0299, 0.0257, 0.0235, 0.0258],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 05:56:44,401 INFO [train.py:892] (1/4) Epoch 34, batch 350, loss[loss=0.1776, simple_loss=0.2714, pruned_loss=0.04194, over 19923.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2382, pruned_loss=0.03947, over 3271457.11 frames. ], batch size: 51, lr: 4.62e-03, grad_scale: 16.0
+2023-03-29 05:58:21,569 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1317, 3.1257, 1.9923, 3.7058, 3.4608, 3.6722, 3.7062, 3.0241],
+ device='cuda:1'), covar=tensor([0.0632, 0.0819, 0.1713, 0.0708, 0.0623, 0.0452, 0.0679, 0.0847],
+ device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0145, 0.0144, 0.0154, 0.0135, 0.0137, 0.0150, 0.0147],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 05:58:27,821 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8379, 4.0919, 4.2618, 4.8812, 3.3704, 3.6306, 3.2330, 3.1921],
+ device='cuda:1'), covar=tensor([0.0422, 0.2034, 0.0819, 0.0359, 0.1806, 0.0987, 0.1189, 0.1420],
+ device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0328, 0.0251, 0.0206, 0.0249, 0.0212, 0.0221, 0.0218],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 05:58:37,548 INFO [train.py:892] (1/4) Epoch 34, batch 400, loss[loss=0.1351, simple_loss=0.2175, pruned_loss=0.02636, over 19692.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2372, pruned_loss=0.03913, over 3422854.74 frames. ], batch size: 74, lr: 4.62e-03, grad_scale: 16.0
+2023-03-29 05:58:58,233 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3900, 4.8658, 4.9126, 4.7006, 5.2556, 3.3112, 4.1815, 2.6900],
+ device='cuda:1'), covar=tensor([0.0139, 0.0192, 0.0149, 0.0173, 0.0139, 0.0910, 0.0917, 0.1501],
+ device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0149, 0.0116, 0.0137, 0.0121, 0.0138, 0.0145, 0.0129],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 05:59:04,287 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.292e+02 3.497e+02 4.355e+02 5.264e+02 1.050e+03, threshold=8.709e+02, percent-clipped=2.0
+2023-03-29 06:00:32,496 INFO [train.py:892] (1/4) Epoch 34, batch 450, loss[loss=0.1484, simple_loss=0.2325, pruned_loss=0.03217, over 19734.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2378, pruned_loss=0.03919, over 3538124.90 frames. ], batch size: 99, lr: 4.62e-03, grad_scale: 16.0
+2023-03-29 06:00:48,282 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7026, 2.7776, 4.2051, 3.1591, 3.4324, 3.2003, 2.3598, 2.4853],
+ device='cuda:1'), covar=tensor([0.1182, 0.3341, 0.0570, 0.1221, 0.1764, 0.1642, 0.2731, 0.3182],
+ device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0390, 0.0349, 0.0287, 0.0374, 0.0377, 0.0377, 0.0346],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 06:01:48,376 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3333, 1.8410, 1.9433, 2.5905, 2.8459, 2.8996, 2.8259, 2.8700],
+ device='cuda:1'), covar=tensor([0.1099, 0.1807, 0.1624, 0.0779, 0.0565, 0.0457, 0.0515, 0.0556],
+ device='cuda:1'), in_proj_covar=tensor([0.0162, 0.0171, 0.0180, 0.0152, 0.0137, 0.0134, 0.0125, 0.0118],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 06:02:27,772 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4616, 2.7070, 3.8202, 3.0497, 3.2270, 3.0408, 2.3186, 2.4549],
+ device='cuda:1'), covar=tensor([0.1245, 0.2874, 0.0650, 0.1096, 0.1704, 0.1554, 0.2531, 0.2558],
+ device='cuda:1'), in_proj_covar=tensor([0.0349, 0.0388, 0.0347, 0.0286, 0.0372, 0.0375, 0.0374, 0.0344],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 06:02:28,758 INFO [train.py:892] (1/4) Epoch 34, batch 500, loss[loss=0.1585, simple_loss=0.231, pruned_loss=0.04297, over 19795.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2374, pruned_loss=0.03952, over 3630073.72 frames. ], batch size: 185, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:02:32,269 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61717.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:02:51,069 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.296e+02 3.666e+02 4.435e+02 5.198e+02 9.761e+02, threshold=8.870e+02, percent-clipped=3.0
+2023-03-29 06:03:07,829 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61732.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:03:32,862 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61743.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:03:56,222 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0
+2023-03-29 06:04:06,667 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0
+2023-03-29 06:04:20,956 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:04:22,372 INFO [train.py:892] (1/4) Epoch 34, batch 550, loss[loss=0.1487, simple_loss=0.2218, pruned_loss=0.03778, over 19872.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2369, pruned_loss=0.03896, over 3700806.01 frames. ], batch size: 138, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:04:44,766 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8716, 2.0871, 1.8970, 1.3005, 1.9403, 2.0324, 1.9346, 1.9717],
+ device='cuda:1'), covar=tensor([0.0393, 0.0296, 0.0331, 0.0574, 0.0405, 0.0323, 0.0332, 0.0303],
+ device='cuda:1'), in_proj_covar=tensor([0.0106, 0.0098, 0.0100, 0.0103, 0.0106, 0.0089, 0.0089, 0.0089],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 06:05:28,112 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61793.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:05:55,280 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61804.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:06:24,060 INFO [train.py:892] (1/4) Epoch 34, batch 600, loss[loss=0.1557, simple_loss=0.2339, pruned_loss=0.03871, over 19768.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2373, pruned_loss=0.03933, over 3756144.30 frames. ], batch size: 130, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:06:46,806 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0
+2023-03-29 06:06:47,556 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.635e+02 3.514e+02 4.079e+02 4.976e+02 9.879e+02, threshold=8.158e+02, percent-clipped=2.0
+2023-03-29 06:08:22,954 INFO [train.py:892] (1/4) Epoch 34, batch 650, loss[loss=0.1468, simple_loss=0.2265, pruned_loss=0.03356, over 19831.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.237, pruned_loss=0.03927, over 3799205.86 frames. ], batch size: 101, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:10:09,704 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61914.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:10:14,467 INFO [train.py:892] (1/4) Epoch 34, batch 700, loss[loss=0.2092, simple_loss=0.2893, pruned_loss=0.06453, over 19608.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2381, pruned_loss=0.03963, over 3832789.90 frames. ], batch size: 367, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:10:38,143 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.620e+02 3.763e+02 4.447e+02 5.196e+02 1.125e+03, threshold=8.894e+02, percent-clipped=5.0
+2023-03-29 06:11:28,754 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0004, 3.1847, 3.2611, 3.2270, 2.9443, 3.1227, 2.8661, 3.2174],
+ device='cuda:1'), covar=tensor([0.0294, 0.0291, 0.0243, 0.0228, 0.0406, 0.0313, 0.0390, 0.0304],
+ device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0083, 0.0087, 0.0080, 0.0093, 0.0086, 0.0103, 0.0075],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 06:12:12,039 INFO [train.py:892] (1/4) Epoch 34, batch 750, loss[loss=0.1503, simple_loss=0.2365, pruned_loss=0.03206, over 19642.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2387, pruned_loss=0.03994, over 3858492.95 frames. ], batch size: 68, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:12:34,517 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61975.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:13:51,065 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6325, 2.7348, 4.3297, 3.1605, 3.3630, 3.1437, 2.3390, 2.4817],
+ device='cuda:1'), covar=tensor([0.1180, 0.3269, 0.0497, 0.1175, 0.2093, 0.1548, 0.2725, 0.2893],
+ device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0388, 0.0348, 0.0286, 0.0372, 0.0376, 0.0374, 0.0345],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 06:14:10,218 INFO [train.py:892] (1/4) Epoch 34, batch 800, loss[loss=0.1405, simple_loss=0.2212, pruned_loss=0.02985, over 19891.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2384, pruned_loss=0.03987, over 3878948.04 frames. ], batch size: 113, lr: 4.60e-03, grad_scale: 16.0
+2023-03-29 06:14:31,338 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 3.764e+02 4.280e+02 5.112e+02 1.282e+03, threshold=8.560e+02, percent-clipped=3.0
+2023-03-29 06:15:08,276 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0932, 3.7706, 3.9020, 4.0754, 3.8386, 4.0379, 4.1174, 4.3484],
+ device='cuda:1'), covar=tensor([0.0601, 0.0461, 0.0553, 0.0375, 0.0703, 0.0602, 0.0451, 0.0327],
+ device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0181, 0.0206, 0.0179, 0.0179, 0.0163, 0.0155, 0.0202],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-29 06:16:03,789 INFO [train.py:892] (1/4) Epoch 34, batch 850, loss[loss=0.2186, simple_loss=0.2982, pruned_loss=0.06953, over 19583.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2395, pruned_loss=0.03998, over 3893209.26 frames. ], batch size: 376, lr: 4.60e-03, grad_scale: 16.0
+2023-03-29 06:16:53,210 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62088.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:17:17,397 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62099.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:17:54,508 INFO [train.py:892] (1/4) Epoch 34, batch 900, loss[loss=0.1671, simple_loss=0.2632, pruned_loss=0.03554, over 19684.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2393, pruned_loss=0.03969, over 3905215.59 frames. ], batch size: 56, lr: 4.60e-03, grad_scale: 16.0
+2023-03-29 06:18:09,199 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62122.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:18:10,876 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4770, 4.3332, 4.7821, 4.3867, 4.0802, 4.6559, 4.4363, 4.9146],
+ device='cuda:1'), covar=tensor([0.0809, 0.0382, 0.0362, 0.0406, 0.0866, 0.0507, 0.0504, 0.0328],
+ device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0225, 0.0224, 0.0238, 0.0210, 0.0248, 0.0238, 0.0220],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 06:18:16,041 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.740e+02 3.509e+02 4.216e+02 5.041e+02 1.543e+03, threshold=8.432e+02, percent-clipped=2.0
+2023-03-29 06:18:38,778 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5000, 3.3827, 3.7416, 3.4128, 3.3037, 3.7254, 3.5367, 3.8116],
+ device='cuda:1'), covar=tensor([0.0802, 0.0396, 0.0393, 0.0472, 0.1244, 0.0571, 0.0493, 0.0385],
+ device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0225, 0.0223, 0.0237, 0.0209, 0.0247, 0.0237, 0.0219],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 06:19:45,227 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6664, 2.7997, 2.8472, 2.8273, 2.7378, 2.7643, 2.7814, 2.8781],
+ device='cuda:1'), covar=tensor([0.0332, 0.0370, 0.0349, 0.0277, 0.0431, 0.0357, 0.0382, 0.0417],
+ device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0083, 0.0086, 0.0080, 0.0093, 0.0086, 0.0102, 0.0076],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 06:19:48,584 INFO [train.py:892] (1/4) Epoch 34, batch 950, loss[loss=0.1548, simple_loss=0.24, pruned_loss=0.0348, over 19690.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2394, pruned_loss=0.03981, over 3915073.44 frames. ], batch size: 75, lr: 4.60e-03, grad_scale: 16.0
+2023-03-29 06:20:28,403 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62183.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:21:00,898 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.66 vs. limit=5.0
+2023-03-29 06:21:45,057 INFO [train.py:892] (1/4) Epoch 34, batch 1000, loss[loss=0.1441, simple_loss=0.22, pruned_loss=0.03406, over 19761.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2393, pruned_loss=0.03969, over 3922996.31 frames. ], batch size: 205, lr: 4.60e-03, grad_scale: 16.0
+2023-03-29 06:22:08,034 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.587e+02 3.779e+02 4.502e+02 5.626e+02 1.320e+03, threshold=9.004e+02, percent-clipped=5.0
+2023-03-29 06:22:58,562 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0
+2023-03-29 06:23:37,010 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4930, 2.0557, 2.3118, 2.7569, 3.0991, 3.2072, 3.0608, 3.1086],
+ device='cuda:1'), covar=tensor([0.1059, 0.1654, 0.1459, 0.0748, 0.0504, 0.0362, 0.0504, 0.0552],
+ device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0172, 0.0181, 0.0153, 0.0138, 0.0135, 0.0126, 0.0119],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 06:23:39,547 INFO [train.py:892] (1/4) Epoch 34, batch 1050, loss[loss=0.1629, simple_loss=0.2388, pruned_loss=0.04353, over 19785.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2389, pruned_loss=0.03968, over 3929473.81 frames. ], batch size: 211, lr: 4.59e-03, grad_scale: 16.0
+2023-03-29 06:23:49,448 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62270.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:23:55,844 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62273.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:25:02,529 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0
+2023-03-29 06:25:31,601 INFO [train.py:892] (1/4) Epoch 34, batch 1100, loss[loss=0.1618, simple_loss=0.248, pruned_loss=0.03775, over 19690.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2392, pruned_loss=0.03981, over 3933230.61 frames. ], batch size: 75, lr: 4.59e-03, grad_scale: 16.0
+2023-03-29 06:25:55,917 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 3.591e+02 4.112e+02 5.051e+02 8.825e+02, threshold=8.223e+02, percent-clipped=0.0
+2023-03-29 06:25:57,280 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-03-29 06:26:15,536 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62334.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:26:32,681 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-03-29 06:27:28,353 INFO [train.py:892] (1/4) Epoch 34, batch 1150, loss[loss=0.1744, simple_loss=0.2542, pruned_loss=0.0473, over 19859.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2386, pruned_loss=0.03957, over 3936844.46 frames. ], batch size: 58, lr: 4.59e-03, grad_scale: 16.0
+2023-03-29 06:28:19,811 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62388.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:28:26,341 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62391.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:28:46,307 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62399.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:29:24,308 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6785, 2.1423, 2.4582, 2.9090, 3.3371, 3.4985, 3.3314, 3.4519],
+ device='cuda:1'), covar=tensor([0.1061, 0.1773, 0.1519, 0.0767, 0.0474, 0.0343, 0.0485, 0.0442],
+ device='cuda:1'), in_proj_covar=tensor([0.0163, 0.0172, 0.0182, 0.0153, 0.0139, 0.0135, 0.0126, 0.0119],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 06:29:25,704 INFO [train.py:892] (1/4) Epoch 34, batch 1200, loss[loss=0.1613, simple_loss=0.2298, pruned_loss=0.04644, over 19750.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2382, pruned_loss=0.03917, over 3940350.12 frames. ], batch size: 139, lr: 4.59e-03, grad_scale: 16.0
+2023-03-29 06:29:49,442 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.662e+02 4.259e+02 5.186e+02 8.409e+02, threshold=8.517e+02, percent-clipped=1.0
+2023-03-29 06:30:12,705 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62436.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:30:39,293 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62447.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:30:51,239 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62452.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:31:02,811 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0
+2023-03-29 06:31:20,400 INFO [train.py:892] (1/4) Epoch 34, batch 1250, loss[loss=0.1587, simple_loss=0.2387, pruned_loss=0.03931, over 19805.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2377, pruned_loss=0.03907, over 3942859.23 frames. ], batch size: 132, lr: 4.59e-03, grad_scale: 16.0
+2023-03-29 06:31:46,049 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62478.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:33:12,373 INFO [train.py:892] (1/4) Epoch 34, batch 1300, loss[loss=0.1538, simple_loss=0.2274, pruned_loss=0.04005, over 19847.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2368, pruned_loss=0.03892, over 3944920.46 frames. ], batch size: 144, lr: 4.58e-03, grad_scale: 16.0
+2023-03-29 06:33:37,405 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.393e+02 3.741e+02 4.359e+02 5.306e+02 1.139e+03, threshold=8.717e+02, percent-clipped=4.0
+2023-03-29 06:34:21,160 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62545.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:35:08,968 INFO [train.py:892] (1/4) Epoch 34, batch 1350, loss[loss=0.1953, simple_loss=0.2779, pruned_loss=0.05631, over 19630.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2369, pruned_loss=0.03914, over 3947237.94 frames. ], batch size: 367, lr: 4.58e-03, grad_scale: 16.0
+2023-03-29 06:35:18,052 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62570.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:35:50,158 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62585.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:36:41,183 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62606.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:37:03,307 INFO [train.py:892] (1/4) Epoch 34, batch 1400, loss[loss=0.1572, simple_loss=0.2492, pruned_loss=0.03263, over 19698.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2366, pruned_loss=0.03882, over 3948117.80 frames. ], batch size: 48, lr: 4.58e-03, grad_scale: 16.0
+2023-03-29 06:37:08,445 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62618.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:37:26,794 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.572e+02 3.612e+02 4.260e+02 5.220e+02 9.254e+02, threshold=8.520e+02, percent-clipped=0.0
+2023-03-29 06:37:35,374 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62629.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:38:13,428 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62646.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:38:58,251 INFO [train.py:892] (1/4) Epoch 34, batch 1450, loss[loss=0.1601, simple_loss=0.2419, pruned_loss=0.03912, over 19624.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2368, pruned_loss=0.03885, over 3948005.01 frames. ], batch size: 52, lr: 4.58e-03, grad_scale: 16.0
+2023-03-29 06:39:00,725 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62666.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:40:41,708 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8534, 4.9037, 5.2200, 5.0102, 5.0688, 4.7595, 4.9593, 4.7968],
+ device='cuda:1'), covar=tensor([0.1431, 0.1781, 0.0850, 0.1278, 0.0763, 0.0896, 0.1770, 0.1979],
+ device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0340, 0.0375, 0.0306, 0.0280, 0.0290, 0.0370, 0.0396],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:1')
+2023-03-29 06:40:42,150 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0
+2023-03-29 06:40:56,716 INFO [train.py:892] (1/4) Epoch 34, batch 1500, loss[loss=0.2085, simple_loss=0.287, pruned_loss=0.06501, over 19655.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2371, pruned_loss=0.03905, over 3947495.20 frames. ], batch size: 343, lr: 4.58e-03, grad_scale: 32.0
+2023-03-29 06:41:19,181 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.451e+02 3.877e+02 4.374e+02 5.206e+02 9.071e+02, threshold=8.749e+02, percent-clipped=3.0
+2023-03-29 06:41:22,447 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62727.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:41:29,484 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-03-29 06:42:11,240 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62747.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:42:53,783 INFO [train.py:892] (1/4) Epoch 34, batch 1550, loss[loss=0.1688, simple_loss=0.2509, pruned_loss=0.04338, over 19734.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2378, pruned_loss=0.0393, over 3947828.59 frames. ], batch size: 99, lr: 4.58e-03, grad_scale: 32.0
+2023-03-29 06:43:11,218 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62773.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:43:22,679 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62778.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:44:43,600 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0270, 2.9207, 3.0731, 2.4358, 3.1787, 2.6928, 3.0605, 3.0505],
+ device='cuda:1'), covar=tensor([0.0664, 0.0513, 0.0671, 0.0867, 0.0410, 0.0505, 0.0552, 0.0429],
+ device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0088, 0.0085, 0.0113, 0.0082, 0.0085, 0.0082, 0.0076],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-29 06:44:53,495 INFO [train.py:892] (1/4) Epoch 34, batch 1600, loss[loss=0.1676, simple_loss=0.2429, pruned_loss=0.04613, over 19765.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2373, pruned_loss=0.03903, over 3949086.34 frames. ], batch size: 213, lr: 4.57e-03, grad_scale: 32.0
+2023-03-29 06:45:16,009 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.873e+02 3.915e+02 4.396e+02 5.347e+02 9.230e+02, threshold=8.791e+02, percent-clipped=1.0
+2023-03-29 06:45:16,914 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62826.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:45:34,110 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62834.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:45:50,306 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7049, 1.6104, 1.7609, 1.7445, 1.6225, 1.7631, 1.6050, 1.7595],
+ device='cuda:1'), covar=tensor([0.0415, 0.0429, 0.0352, 0.0356, 0.0520, 0.0357, 0.0535, 0.0362],
+ device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0084, 0.0086, 0.0080, 0.0094, 0.0086, 0.0103, 0.0076],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 06:46:09,281 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8760, 3.9724, 2.3245, 4.1296, 4.3057, 1.9435, 3.5451, 3.2417],
+ device='cuda:1'), covar=tensor([0.0727, 0.0868, 0.2840, 0.0880, 0.0650, 0.2954, 0.1137, 0.0932],
+ device='cuda:1'), in_proj_covar=tensor([0.0238, 0.0260, 0.0234, 0.0282, 0.0260, 0.0206, 0.0243, 0.0202],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-29 06:46:17,459 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1847, 2.7368, 4.5485, 3.9948, 4.3916, 4.4443, 4.3419, 4.2275],
+ device='cuda:1'), covar=tensor([0.0748, 0.1119, 0.0139, 0.0884, 0.0181, 0.0259, 0.0207, 0.0194],
+ device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0105, 0.0089, 0.0153, 0.0087, 0.0099, 0.0090, 0.0087],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 06:46:47,356 INFO [train.py:892] (1/4) Epoch 34, batch 1650, loss[loss=0.155, simple_loss=0.234, pruned_loss=0.03804, over 19789.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2371, pruned_loss=0.03884, over 3950542.83 frames. ], batch size: 73, lr: 4.57e-03, grad_scale: 32.0
+2023-03-29 06:48:09,077 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62901.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:48:42,506 INFO [train.py:892] (1/4) Epoch 34, batch 1700, loss[loss=0.1366, simple_loss=0.2091, pruned_loss=0.03202, over 19792.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2388, pruned_loss=0.0395, over 3950017.92 frames. ], batch size: 149, lr: 4.57e-03, grad_scale: 32.0
+2023-03-29 06:48:43,531 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6706, 2.5886, 2.7134, 2.2263, 2.7530, 2.3464, 2.7697, 2.6057],
+ device='cuda:1'), covar=tensor([0.0540, 0.0537, 0.0508, 0.0873, 0.0529, 0.0537, 0.0437, 0.0462],
+ device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0089, 0.0085, 0.0113, 0.0082, 0.0085, 0.0083, 0.0076],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-29 06:49:05,118 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.243e+02 3.824e+02 4.369e+02 5.372e+02 9.431e+02, threshold=8.739e+02, percent-clipped=1.0
+2023-03-29 06:49:14,364 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62929.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:49:35,070 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5464, 2.2124, 3.6911, 3.2804, 3.6935, 3.6790, 3.4630, 3.4803],
+ device='cuda:1'), covar=tensor([0.0822, 0.1198, 0.0162, 0.0558, 0.0181, 0.0293, 0.0252, 0.0232],
+ device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0104, 0.0089, 0.0153, 0.0086, 0.0098, 0.0090, 0.0086],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 06:49:41,129 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62941.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:49:53,979 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0069, 2.5078, 4.1668, 3.7357, 4.1255, 4.1621, 3.9906, 3.8789],
+ device='cuda:1'), covar=tensor([0.0594, 0.0989, 0.0120, 0.0578, 0.0135, 0.0225, 0.0174, 0.0185],
+ device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0104, 0.0089, 0.0153, 0.0086, 0.0099, 0.0090, 0.0087],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 06:50:34,875 INFO [train.py:892] (1/4) Epoch 34, batch 1750, loss[loss=0.1495, simple_loss=0.2247, pruned_loss=0.03711, over 19817.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2386, pruned_loss=0.03968, over 3951612.50 frames. ], batch size: 133, lr: 4.57e-03, grad_scale: 32.0
+2023-03-29 06:50:56,663 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62977.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:52:08,782 INFO [train.py:892] (1/4) Epoch 34, batch 1800, loss[loss=0.1514, simple_loss=0.2378, pruned_loss=0.03252, over 19656.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.24, pruned_loss=0.04014, over 3945313.20 frames. ], batch size: 58, lr: 4.57e-03, grad_scale: 32.0
+2023-03-29 06:52:20,188 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63022.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:52:26,906 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.273e+02 3.774e+02 4.619e+02 5.631e+02 1.047e+03, threshold=9.238e+02, percent-clipped=1.0
+2023-03-29 06:53:07,310 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63047.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:53:11,073 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4455, 3.4934, 2.2228, 4.2455, 3.8058, 4.1437, 4.2415, 3.3089],
+ device='cuda:1'), covar=tensor([0.0600, 0.0602, 0.1523, 0.0559, 0.0554, 0.0431, 0.0551, 0.0738],
+ device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0146, 0.0146, 0.0156, 0.0137, 0.0139, 0.0150, 0.0148],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 06:53:40,506 INFO [train.py:892] (1/4) Epoch 34, batch 1850, loss[loss=0.1611, simple_loss=0.2592, pruned_loss=0.03155, over 19682.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2413, pruned_loss=0.03985, over 3945948.68 frames. ], batch size: 55, lr: 4.56e-03, grad_scale: 32.0
+2023-03-29 06:54:44,121 INFO [train.py:892] (1/4) Epoch 35, batch 0, loss[loss=0.1567, simple_loss=0.2286, pruned_loss=0.04243, over 19876.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2286, pruned_loss=0.04243, over 19876.00 frames. ], batch size: 84, lr: 4.50e-03, grad_scale: 32.0
+2023-03-29 06:54:44,121 INFO [train.py:917] (1/4) Computing validation loss
+2023-03-29 06:55:00,077 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0206, 2.7345, 3.1762, 3.2666, 3.6577, 4.0715, 3.8411, 3.9261],
+ device='cuda:1'), covar=tensor([0.0964, 0.1479, 0.1203, 0.0718, 0.0501, 0.0253, 0.0412, 0.0385],
+ device='cuda:1'), in_proj_covar=tensor([0.0162, 0.0169, 0.0179, 0.0152, 0.0138, 0.0134, 0.0125, 0.0118],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 06:55:18,885 INFO [train.py:926] (1/4) Epoch 35, validation: loss=0.1837, simple_loss=0.2499, pruned_loss=0.05876, over 2883724.00 frames.
+2023-03-29 06:55:18,886 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB
+2023-03-29 06:56:18,679 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63095.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:57:16,283 INFO [train.py:892] (1/4) Epoch 35, batch 50, loss[loss=0.132, simple_loss=0.2146, pruned_loss=0.02467, over 19745.00 frames. ], tot_loss[loss=0.1525, simple_loss=0.2323, pruned_loss=0.03636, over 889277.94 frames. ], batch size: 44, lr: 4.50e-03, grad_scale: 32.0
+2023-03-29 06:57:28,125 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.370e+02 3.277e+02 3.899e+02 4.647e+02 1.054e+03, threshold=7.797e+02, percent-clipped=1.0
+2023-03-29 06:57:36,764 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63129.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:57:55,850 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0
+2023-03-29 06:59:14,202 INFO [train.py:892] (1/4) Epoch 35, batch 100, loss[loss=0.1311, simple_loss=0.208, pruned_loss=0.02711, over 19820.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2358, pruned_loss=0.03783, over 1567566.72 frames. ], batch size: 121, lr: 4.49e-03, grad_scale: 32.0
+2023-03-29 07:00:25,402 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63201.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:01:09,273 INFO [train.py:892] (1/4) Epoch 35, batch 150, loss[loss=0.1374, simple_loss=0.2176, pruned_loss=0.02863, over 19800.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2349, pruned_loss=0.03792, over 2096363.70 frames. ], batch size: 107, lr: 4.49e-03, grad_scale: 16.0
+2023-03-29 07:01:10,390 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63221.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:01:22,851 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.863e+02 3.708e+02 4.219e+02 5.359e+02 8.315e+02, threshold=8.439e+02, percent-clipped=1.0
+2023-03-29 07:01:57,360 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63241.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:02:14,823 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63249.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:03:02,037 INFO [train.py:892] (1/4) Epoch 35, batch 200, loss[loss=0.1626, simple_loss=0.2356, pruned_loss=0.0448, over 19787.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2381, pruned_loss=0.03885, over 2507109.29 frames. ], batch size: 120, lr: 4.49e-03, grad_scale: 16.0
+2023-03-29 07:03:28,895 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63282.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:03:45,912 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63289.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:04:09,657 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2344, 3.0123, 3.2017, 2.6090, 3.3375, 2.8059, 3.0707, 3.2181],
+ device='cuda:1'), covar=tensor([0.0515, 0.0500, 0.0626, 0.0782, 0.0399, 0.0509, 0.0519, 0.0374],
+ device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0089, 0.0086, 0.0114, 0.0082, 0.0085, 0.0083, 0.0076],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-29 07:04:56,639 INFO [train.py:892] (1/4) Epoch 35, batch 250, loss[loss=0.1224, simple_loss=0.2022, pruned_loss=0.02133, over 19750.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2378, pruned_loss=0.03852, over 2826081.55 frames. ], batch size: 95, lr: 4.49e-03, grad_scale: 16.0
+2023-03-29 07:05:00,110 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63322.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:05:09,783 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.149e+02 3.669e+02 4.457e+02 5.249e+02 9.820e+02, threshold=8.914e+02, percent-clipped=1.0
+2023-03-29 07:06:48,044 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63370.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:06:49,437 INFO [train.py:892] (1/4) Epoch 35, batch 300, loss[loss=0.1467, simple_loss=0.2322, pruned_loss=0.0306, over 19664.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2396, pruned_loss=0.03942, over 3072866.00 frames. ], batch size: 50, lr: 4.49e-03, grad_scale: 16.0
+2023-03-29 07:08:02,729 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7772, 4.0552, 4.2869, 4.8484, 3.3926, 3.5461, 3.1177, 3.2563],
+ device='cuda:1'), covar=tensor([0.0450, 0.1983, 0.0767, 0.0375, 0.1915, 0.1086, 0.1198, 0.1434],
+ device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0334, 0.0252, 0.0209, 0.0252, 0.0214, 0.0224, 0.0220],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 07:08:45,600 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63420.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:08:46,591 INFO [train.py:892] (1/4) Epoch 35, batch 350, loss[loss=0.1367, simple_loss=0.2112, pruned_loss=0.0311, over 19880.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2389, pruned_loss=0.03921, over 3267880.86 frames. ], batch size: 136, lr: 4.49e-03, grad_scale: 16.0
+2023-03-29 07:08:49,581 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63422.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:09:00,251 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.282e+02 3.589e+02 4.143e+02 4.777e+02 8.790e+02, threshold=8.287e+02, percent-clipped=0.0
+2023-03-29 07:09:08,366 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63429.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:10:44,410 INFO [train.py:892] (1/4) Epoch 35, batch 400, loss[loss=0.1681, simple_loss=0.2515, pruned_loss=0.04238, over 19765.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2392, pruned_loss=0.03908, over 3415895.09 frames. ], batch size: 226, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:10:58,993 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63477.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:11:06,212 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0
+2023-03-29 07:11:07,757 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63481.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:11:12,043 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63483.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:11:33,158 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2942, 4.5113, 4.5626, 4.4070, 4.2842, 4.5065, 4.0745, 4.0504],
+ device='cuda:1'), covar=tensor([0.0567, 0.0544, 0.0512, 0.0480, 0.0715, 0.0564, 0.0678, 0.1013],
+ device='cuda:1'), in_proj_covar=tensor([0.0278, 0.0292, 0.0305, 0.0268, 0.0274, 0.0258, 0.0274, 0.0321],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 07:12:42,334 INFO [train.py:892] (1/4) Epoch 35, batch 450, loss[loss=0.1564, simple_loss=0.2353, pruned_loss=0.03875, over 19837.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.241, pruned_loss=0.03987, over 3533305.00 frames. ], batch size: 239, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:12:56,128 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.456e+02 3.739e+02 4.362e+02 5.360e+02 8.901e+02, threshold=8.724e+02, percent-clipped=1.0
+2023-03-29 07:14:28,054 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8982, 3.2071, 2.7526, 2.3994, 2.8758, 3.1815, 3.1283, 3.1071],
+ device='cuda:1'), covar=tensor([0.0309, 0.0291, 0.0306, 0.0474, 0.0335, 0.0251, 0.0222, 0.0251],
+ device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0101, 0.0104, 0.0104, 0.0108, 0.0091, 0.0091, 0.0090],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 07:14:32,256 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63569.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:14:35,582 INFO [train.py:892] (1/4) Epoch 35, batch 500, loss[loss=0.1327, simple_loss=0.21, pruned_loss=0.02769, over 19802.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2393, pruned_loss=0.03934, over 3626363.21 frames. ], batch size: 47, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:14:52,567 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63577.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:15:10,922 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63586.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:16:32,812 INFO [train.py:892] (1/4) Epoch 35, batch 550, loss[loss=0.1487, simple_loss=0.2255, pruned_loss=0.03598, over 19814.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2388, pruned_loss=0.03917, over 3698614.77 frames. ], batch size: 117, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:16:47,196 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.269e+02 3.713e+02 4.298e+02 5.040e+02 8.851e+02, threshold=8.596e+02, percent-clipped=1.0
+2023-03-29 07:16:54,475 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63630.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:17:34,968 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63647.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:18:08,761 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0867, 3.2164, 3.2260, 3.2596, 3.1596, 2.9512, 2.9701, 3.3823],
+ device='cuda:1'), covar=tensor([0.0295, 0.0309, 0.0310, 0.0287, 0.0345, 0.0423, 0.0449, 0.0276],
+ device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0085, 0.0087, 0.0081, 0.0094, 0.0087, 0.0104, 0.0076],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 07:18:28,415 INFO [train.py:892] (1/4) Epoch 35, batch 600, loss[loss=0.2005, simple_loss=0.2831, pruned_loss=0.05894, over 19875.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.238, pruned_loss=0.03895, over 3755885.67 frames. ], batch size: 64, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:19:04,380 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7996, 3.1728, 2.7579, 2.3964, 2.8487, 3.1031, 2.9993, 3.0436],
+ device='cuda:1'), covar=tensor([0.0332, 0.0297, 0.0307, 0.0517, 0.0328, 0.0277, 0.0304, 0.0265],
+ device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0101, 0.0103, 0.0104, 0.0107, 0.0091, 0.0091, 0.0090],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 07:20:27,004 INFO [train.py:892] (1/4) Epoch 35, batch 650, loss[loss=0.1817, simple_loss=0.2568, pruned_loss=0.05323, over 19793.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2384, pruned_loss=0.03945, over 3799404.08 frames. ], batch size: 236, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:20:30,534 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6357, 2.7182, 2.8401, 2.8022, 2.6740, 2.7510, 2.6302, 2.9226],
+ device='cuda:1'), covar=tensor([0.0383, 0.0343, 0.0300, 0.0314, 0.0448, 0.0332, 0.0438, 0.0316],
+ device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0085, 0.0087, 0.0081, 0.0095, 0.0087, 0.0104, 0.0076],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 07:20:40,710 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.573e+02 3.756e+02 4.198e+02 4.970e+02 9.612e+02, threshold=8.396e+02, percent-clipped=1.0
+2023-03-29 07:21:04,123 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9077, 3.0597, 3.0523, 3.0051, 2.9062, 3.0814, 2.8072, 3.1994],
+ device='cuda:1'), covar=tensor([0.0351, 0.0311, 0.0333, 0.0292, 0.0419, 0.0328, 0.0365, 0.0324],
+ device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0085, 0.0088, 0.0082, 0.0095, 0.0087, 0.0104, 0.0076],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 07:21:05,968 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8808, 3.2110, 3.3683, 3.8250, 2.6476, 3.3077, 2.4452, 2.5431],
+ device='cuda:1'), covar=tensor([0.0575, 0.1827, 0.1024, 0.0445, 0.2016, 0.0817, 0.1405, 0.1656],
+ device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0332, 0.0250, 0.0207, 0.0249, 0.0212, 0.0223, 0.0219],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-29 07:22:16,901 INFO [train.py:892] (1/4) Epoch 35, batch 700, loss[loss=0.1293, simple_loss=0.1978, pruned_loss=0.03039, over 19859.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2377, pruned_loss=0.03911, over 3833671.18 frames. ], batch size: 142, lr: 4.47e-03, grad_scale: 16.0
+2023-03-29 07:22:29,932 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63776.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:22:34,097 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63778.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:23:50,911 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63810.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:23:53,196 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4793, 5.0140, 5.0666, 4.8006, 5.4153, 3.3591, 4.3856, 3.0318],
+ device='cuda:1'), covar=tensor([0.0146, 0.0182, 0.0145, 0.0188, 0.0126, 0.0925, 0.0821, 0.1237],
+ device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0138, 0.0122, 0.0137, 0.0144, 0.0130],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 07:24:14,485 INFO [train.py:892] (1/4) Epoch 35, batch 750, loss[loss=0.1518, simple_loss=0.2257, pruned_loss=0.03901, over 19852.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2372, pruned_loss=0.03908, over 3859552.26 frames. ], batch size: 78, lr: 4.47e-03, grad_scale: 16.0
+2023-03-29 07:24:28,552 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.314e+02 3.905e+02 4.508e+02 5.292e+02 1.021e+03, threshold=9.015e+02, percent-clipped=3.0
+2023-03-29 07:25:13,102 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63845.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:26:13,518 INFO [train.py:892] (1/4) Epoch 35, batch 800, loss[loss=0.1362, simple_loss=0.2165, pruned_loss=0.02792, over 19838.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2366, pruned_loss=0.0387, over 3880264.49 frames. ], batch size: 90, lr: 4.47e-03, grad_scale: 16.0
+2023-03-29 07:26:14,632 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63871.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:26:28,842 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63877.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:27:35,494 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-03-29 07:27:37,199 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63906.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:28:01,362 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63916.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:28:11,345 INFO [train.py:892] (1/4) Epoch 35, batch 850, loss[loss=0.149, simple_loss=0.2285, pruned_loss=0.03471, over 19739.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2369, pruned_loss=0.0388, over 3895589.10 frames. ], batch size: 77, lr: 4.47e-03, grad_scale: 16.0
+2023-03-29 07:28:21,644 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63925.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:28:21,663 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63925.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:28:25,038 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 3.650e+02 4.447e+02 5.355e+02 8.426e+02, threshold=8.894e+02, percent-clipped=0.0
+2023-03-29 07:29:02,893 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63942.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:30:06,805 INFO [train.py:892] (1/4) Epoch 35, batch 900, loss[loss=0.1528, simple_loss=0.2281, pruned_loss=0.03872, over 19781.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2361, pruned_loss=0.03825, over 3909361.33 frames. ], batch size: 131, lr: 4.47e-03, grad_scale: 16.0
+2023-03-29 07:30:11,973 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2439, 5.5767, 5.7385, 5.5250, 5.5608, 5.3855, 5.4377, 5.2535],
+ device='cuda:1'), covar=tensor([0.1737, 0.1434, 0.0901, 0.1193, 0.0773, 0.0670, 0.2058, 0.2023],
+ device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0341, 0.0379, 0.0308, 0.0282, 0.0292, 0.0370, 0.0400],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:1')
+2023-03-29 07:30:21,941 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63977.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:32:00,595 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2051, 1.8949, 3.4731, 2.9797, 3.5474, 3.4595, 3.1938, 3.3716],
+ device='cuda:1'), covar=tensor([0.1132, 0.1569, 0.0163, 0.0567, 0.0149, 0.0288, 0.0284, 0.0240],
+ device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0105, 0.0089, 0.0153, 0.0087, 0.0099, 0.0091, 0.0087],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 07:32:03,692 INFO [train.py:892] (1/4) Epoch 35, batch 950, loss[loss=0.1635, simple_loss=0.2488, pruned_loss=0.0391, over 19757.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2364, pruned_loss=0.0384, over 3918710.39 frames. ], batch size: 253, lr: 4.46e-03, grad_scale: 16.0
+2023-03-29 07:32:08,706 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64023.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:32:16,290 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.345e+02 3.650e+02 4.248e+02 4.892e+02 8.257e+02, threshold=8.496e+02, percent-clipped=0.0
+2023-03-29 07:32:27,924 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-03-29 07:33:58,704 INFO [train.py:892] (1/4) Epoch 35, batch 1000, loss[loss=0.1922, simple_loss=0.3024, pruned_loss=0.041, over 18737.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2373, pruned_loss=0.03852, over 3925514.44 frames. ], batch size: 564, lr: 4.46e-03, grad_scale: 16.0
+2023-03-29 07:34:09,995 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64076.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:34:14,008 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64078.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:34:27,930 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64084.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:35:32,948 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-03-29 07:35:51,140 INFO [train.py:892] (1/4) Epoch 35, batch 1050, loss[loss=0.1382, simple_loss=0.2208, pruned_loss=0.02783, over 19757.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2363, pruned_loss=0.03806, over 3931717.52 frames. ], batch size: 89, lr: 4.46e-03, grad_scale: 16.0
+2023-03-29 07:35:59,006 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64124.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:36:04,350 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64126.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:36:05,646 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.257e+02 3.875e+02 4.468e+02 5.208e+02 1.393e+03, threshold=8.936e+02, percent-clipped=2.0
+2023-03-29 07:36:14,884 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4688, 2.3198, 3.6519, 3.0453, 3.6126, 3.6525, 3.4398, 3.4195],
+ device='cuda:1'), covar=tensor([0.0807, 0.1053, 0.0133, 0.0488, 0.0153, 0.0263, 0.0211, 0.0203],
+ device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0105, 0.0090, 0.0153, 0.0087, 0.0100, 0.0091, 0.0087],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 07:36:54,577 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6455, 3.7434, 2.4452, 4.4523, 4.0161, 4.3746, 4.3964, 3.4824],
+ device='cuda:1'), covar=tensor([0.0576, 0.0545, 0.1435, 0.0475, 0.0639, 0.0422, 0.0661, 0.0758],
+ device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0146, 0.0146, 0.0154, 0.0137, 0.0139, 0.0151, 0.0149],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-29 07:37:37,647 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64166.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:37:51,236 INFO [train.py:892] (1/4) Epoch 35, batch 1100, loss[loss=0.1566, simple_loss=0.2423, pruned_loss=0.03543, over 19786.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2374, pruned_loss=0.03842, over 3934272.50 frames. ], batch size: 46, lr: 4.46e-03, grad_scale: 16.0
+2023-03-29 07:39:00,235 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64201.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:39:48,444 INFO [train.py:892] (1/4) Epoch 35, batch 1150, loss[loss=0.1596, simple_loss=0.2465, pruned_loss=0.03637, over 19792.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2376, pruned_loss=0.03868, over 3937449.37 frames. ], batch size: 193, lr: 4.46e-03, grad_scale: 16.0
], batch size: 193, lr: 4.46e-03, grad_scale: 16.0 +2023-03-29 07:39:57,663 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64225.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:40:00,936 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.058e+02 3.821e+02 4.310e+02 5.075e+02 9.198e+02, threshold=8.620e+02, percent-clipped=1.0 +2023-03-29 07:40:13,162 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64232.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:40:37,922 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64242.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:41:41,967 INFO [train.py:892] (1/4) Epoch 35, batch 1200, loss[loss=0.1468, simple_loss=0.2297, pruned_loss=0.03197, over 19826.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2381, pruned_loss=0.03912, over 3941259.15 frames. ], batch size: 93, lr: 4.46e-03, grad_scale: 16.0 +2023-03-29 07:41:46,071 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:41:48,214 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:42:26,843 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64290.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:42:33,591 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64293.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:42:38,346 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-29 07:43:37,216 INFO [train.py:892] (1/4) Epoch 35, batch 1250, loss[loss=0.1611, simple_loss=0.2557, pruned_loss=0.03322, over 19692.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2383, pruned_loss=0.03907, over 3942285.34 frames. ], batch size: 55, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:43:50,726 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.911e+02 3.890e+02 4.539e+02 5.516e+02 1.564e+03, threshold=9.078e+02, percent-clipped=1.0 +2023-03-29 07:44:36,245 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64345.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:45:36,293 INFO [train.py:892] (1/4) Epoch 35, batch 1300, loss[loss=0.1467, simple_loss=0.2308, pruned_loss=0.03133, over 19874.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2365, pruned_loss=0.03823, over 3945200.17 frames. ], batch size: 92, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:45:55,075 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64379.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:46:59,374 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64406.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:47:23,167 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-29 07:47:31,791 INFO [train.py:892] (1/4) Epoch 35, batch 1350, loss[loss=0.1593, simple_loss=0.2363, pruned_loss=0.04114, over 19718.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.237, pruned_loss=0.03836, over 3946213.16 frames. 
], batch size: 269, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:47:46,402 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.065e+02 3.591e+02 4.239e+02 5.222e+02 9.468e+02, threshold=8.478e+02, percent-clipped=2.0 +2023-03-29 07:48:36,699 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-03-29 07:49:14,320 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64465.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:49:16,561 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64466.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:49:25,916 INFO [train.py:892] (1/4) Epoch 35, batch 1400, loss[loss=0.1559, simple_loss=0.2326, pruned_loss=0.03954, over 19766.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.236, pruned_loss=0.03844, over 3947682.35 frames. ], batch size: 213, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:49:27,086 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5143, 3.1647, 3.4316, 3.0541, 3.7579, 3.7358, 4.2983, 4.8231], + device='cuda:1'), covar=tensor([0.0506, 0.1606, 0.1486, 0.2223, 0.1612, 0.1390, 0.0610, 0.0506], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0245, 0.0272, 0.0259, 0.0305, 0.0263, 0.0238, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 07:50:37,401 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64501.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:51:09,405 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64514.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:51:22,227 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4726, 3.2405, 3.5032, 2.8452, 3.7040, 3.0585, 3.3115, 3.5293], + device='cuda:1'), covar=tensor([0.0527, 0.0490, 0.0507, 0.0720, 0.0382, 0.0486, 0.0464, 0.0378], + device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0089, 0.0085, 0.0112, 0.0082, 0.0085, 0.0083, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 07:51:23,337 INFO [train.py:892] (1/4) Epoch 35, batch 1450, loss[loss=0.163, simple_loss=0.2455, pruned_loss=0.04029, over 19708.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2368, pruned_loss=0.03856, over 3949078.55 frames. 
], batch size: 78, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:51:35,098 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64526.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:51:36,073 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.625e+02 3.695e+02 4.182e+02 5.095e+02 9.377e+02, threshold=8.363e+02, percent-clipped=1.0 +2023-03-29 07:52:27,013 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3798, 2.6527, 2.8333, 3.2647, 2.2377, 2.9905, 2.1562, 2.1447], + device='cuda:1'), covar=tensor([0.0612, 0.1408, 0.1180, 0.0534, 0.2148, 0.0724, 0.1362, 0.1691], + device='cuda:1'), in_proj_covar=tensor([0.0247, 0.0328, 0.0251, 0.0207, 0.0249, 0.0210, 0.0221, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 07:52:31,726 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64549.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:53:21,046 INFO [train.py:892] (1/4) Epoch 35, batch 1500, loss[loss=0.1544, simple_loss=0.2255, pruned_loss=0.0416, over 19833.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2369, pruned_loss=0.0389, over 3949612.79 frames. ], batch size: 143, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:53:24,947 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64572.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:54:00,629 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64588.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:54:02,546 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64589.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:55:13,372 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64620.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:55:15,314 INFO [train.py:892] (1/4) Epoch 35, batch 1550, loss[loss=0.1876, simple_loss=0.2749, pruned_loss=0.05014, over 19641.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2372, pruned_loss=0.03876, over 3948211.45 frames. ], batch size: 343, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 07:55:27,762 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.542e+02 3.591e+02 4.055e+02 5.054e+02 8.662e+02, threshold=8.110e+02, percent-clipped=1.0 +2023-03-29 07:56:11,338 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9861, 3.8338, 4.2476, 3.8755, 3.6519, 4.1195, 3.9555, 4.2848], + device='cuda:1'), covar=tensor([0.0762, 0.0385, 0.0344, 0.0402, 0.1049, 0.0582, 0.0478, 0.0340], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0226, 0.0226, 0.0240, 0.0211, 0.0250, 0.0239, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 07:56:18,135 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64648.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:56:22,725 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64650.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:57:10,780 INFO [train.py:892] (1/4) Epoch 35, batch 1600, loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02978, over 19743.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2382, pruned_loss=0.039, over 3946037.15 frames. 
], batch size: 89, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 07:57:11,845 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4760, 2.4885, 2.6024, 2.5562, 2.4954, 2.6538, 2.5514, 2.6760], + device='cuda:1'), covar=tensor([0.0361, 0.0339, 0.0316, 0.0350, 0.0439, 0.0310, 0.0446, 0.0341], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0085, 0.0086, 0.0081, 0.0094, 0.0086, 0.0103, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 07:57:31,534 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64679.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:57:49,478 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8267, 3.9001, 2.4055, 4.0437, 4.2468, 1.9635, 3.5011, 3.2485], + device='cuda:1'), covar=tensor([0.0710, 0.0828, 0.2588, 0.0808, 0.0559, 0.2703, 0.1041, 0.0827], + device='cuda:1'), in_proj_covar=tensor([0.0239, 0.0262, 0.0234, 0.0283, 0.0261, 0.0208, 0.0243, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 07:58:22,494 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64701.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:58:26,728 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64703.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:58:38,591 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-29 07:58:40,072 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64709.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:58:51,928 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-29 07:59:08,804 INFO [train.py:892] (1/4) Epoch 35, batch 1650, loss[loss=0.2853, simple_loss=0.3508, pruned_loss=0.1099, over 19444.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2381, pruned_loss=0.03947, over 3947097.56 frames. 
], batch size: 431, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 07:59:09,680 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4302, 4.2243, 4.2590, 4.0450, 4.4104, 3.0374, 3.7934, 2.1579], + device='cuda:1'), covar=tensor([0.0190, 0.0222, 0.0150, 0.0172, 0.0134, 0.1033, 0.0606, 0.1420], + device='cuda:1'), in_proj_covar=tensor([0.0106, 0.0149, 0.0115, 0.0136, 0.0121, 0.0136, 0.0144, 0.0129], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 07:59:14,126 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5555, 4.3609, 4.3742, 4.1485, 4.5868, 3.0531, 3.8645, 2.0926], + device='cuda:1'), covar=tensor([0.0232, 0.0241, 0.0187, 0.0219, 0.0168, 0.1131, 0.0688, 0.1660], + device='cuda:1'), in_proj_covar=tensor([0.0106, 0.0149, 0.0115, 0.0136, 0.0121, 0.0136, 0.0143, 0.0129], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 07:59:21,888 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.745e+02 3.702e+02 4.321e+02 4.955e+02 1.270e+03, threshold=8.641e+02, percent-clipped=4.0 +2023-03-29 07:59:22,824 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64727.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:00:03,421 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64745.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:00:11,053 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2286, 2.2721, 1.5116, 2.4552, 2.2908, 2.3494, 2.5000, 1.9271], + device='cuda:1'), covar=tensor([0.0753, 0.0820, 0.1451, 0.0752, 0.0771, 0.0669, 0.0671, 0.1087], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0147, 0.0146, 0.0156, 0.0136, 0.0140, 0.0151, 0.0149], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:00:20,045 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64752.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:00:46,865 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64764.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:01:02,098 INFO [train.py:892] (1/4) Epoch 35, batch 1700, loss[loss=0.2169, simple_loss=0.2811, pruned_loss=0.07631, over 19696.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2389, pruned_loss=0.03997, over 3946840.30 frames. ], batch size: 295, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 08:02:25,959 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64806.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:02:39,342 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64813.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:02:55,099 INFO [train.py:892] (1/4) Epoch 35, batch 1750, loss[loss=0.1722, simple_loss=0.2582, pruned_loss=0.04308, over 19684.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2389, pruned_loss=0.04011, over 3946580.08 frames. 
], batch size: 56, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 08:02:55,754 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64821.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:03:07,045 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.308e+02 3.852e+02 4.447e+02 5.753e+02 1.014e+03, threshold=8.894e+02, percent-clipped=4.0 +2023-03-29 08:03:22,001 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7762, 4.0488, 4.2662, 4.9511, 3.3985, 3.5262, 2.8739, 2.9772], + device='cuda:1'), covar=tensor([0.0499, 0.2073, 0.0778, 0.0327, 0.1844, 0.1160, 0.1407, 0.1599], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0331, 0.0253, 0.0208, 0.0251, 0.0213, 0.0223, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:04:10,066 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8125, 2.9291, 2.9694, 2.9624, 2.8765, 2.9500, 2.7939, 2.9750], + device='cuda:1'), covar=tensor([0.0326, 0.0383, 0.0343, 0.0316, 0.0376, 0.0310, 0.0399, 0.0355], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0085, 0.0087, 0.0081, 0.0094, 0.0086, 0.0103, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:04:35,113 INFO [train.py:892] (1/4) Epoch 35, batch 1800, loss[loss=0.1543, simple_loss=0.2332, pruned_loss=0.03773, over 19781.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2388, pruned_loss=0.04018, over 3947389.51 frames. ], batch size: 191, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 08:05:06,274 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64888.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:06:07,616 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1161, 4.8286, 4.8204, 5.1517, 4.7429, 5.3866, 5.2521, 5.4705], + device='cuda:1'), covar=tensor([0.0707, 0.0422, 0.0486, 0.0341, 0.0719, 0.0391, 0.0398, 0.0319], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0182, 0.0206, 0.0183, 0.0180, 0.0164, 0.0156, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 08:06:09,185 INFO [train.py:892] (1/4) Epoch 35, batch 1850, loss[loss=0.156, simple_loss=0.2531, pruned_loss=0.02947, over 19582.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2403, pruned_loss=0.03961, over 3947861.11 frames. ], batch size: 53, lr: 4.43e-03, grad_scale: 16.0 +2023-03-29 08:07:11,823 INFO [train.py:892] (1/4) Epoch 36, batch 0, loss[loss=0.1321, simple_loss=0.216, pruned_loss=0.02411, over 19838.00 frames. ], tot_loss[loss=0.1321, simple_loss=0.216, pruned_loss=0.02411, over 19838.00 frames. ], batch size: 90, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:07:11,824 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 08:07:46,029 INFO [train.py:926] (1/4) Epoch 36, validation: loss=0.183, simple_loss=0.249, pruned_loss=0.05846, over 2883724.00 frames. 
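A note on reading the tot_loss entries in this log: throughout this stretch the reported loss equals, to the printed precision, 0.5 * simple_loss + 1.0 * pruned_loss (e.g. for Epoch 35, batch 900 above: 0.5 * 0.2361 + 0.03825 = 0.1563). The snippet below is a minimal, illustrative check of that relationship; the regex and the 0.5/1.0 weights are inferred from the numbers printed here, not taken from the training code.

import re

# One tot_loss record copied from the log above (Epoch 35, batch 900).
LINE = "tot_loss[loss=0.1563, simple_loss=0.2361, pruned_loss=0.03825, over 3909361.33 frames. ]"

m = re.search(r"loss=([\d.]+), simple_loss=([\d.]+), pruned_loss=([\d.]+)", LINE)
loss, simple, pruned = map(float, m.groups())

# Apparent composition: half weight on the simple loss, full weight on the
# pruned RNN-T loss; these weights are an inference from the logged values,
# not a quote from train.py.
reconstructed = 0.5 * simple + 1.0 * pruned
print(f"logged={loss:.4f}  reconstructed={reconstructed:.4f}")  # both print 0.1563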
+2023-03-29 08:07:46,031 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 08:07:48,051 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.083e+02 3.485e+02 4.267e+02 5.108e+02 8.561e+02, threshold=8.534e+02, percent-clipped=0.0 +2023-03-29 08:08:10,070 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:08:19,507 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8893, 2.4556, 2.7433, 3.0363, 3.5833, 3.8324, 3.7655, 3.7858], + device='cuda:1'), covar=tensor([0.0985, 0.1542, 0.1357, 0.0744, 0.0468, 0.0279, 0.0388, 0.0426], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0168, 0.0180, 0.0152, 0.0137, 0.0133, 0.0126, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:08:30,565 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.9071, 1.3123, 1.5263, 2.1600, 2.2521, 2.3634, 2.2370, 2.3490], + device='cuda:1'), covar=tensor([0.1169, 0.2122, 0.1915, 0.0888, 0.0656, 0.0475, 0.0508, 0.0529], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0168, 0.0179, 0.0152, 0.0137, 0.0133, 0.0126, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:08:32,637 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64945.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:09:41,961 INFO [train.py:892] (1/4) Epoch 36, batch 50, loss[loss=0.1396, simple_loss=0.2213, pruned_loss=0.0289, over 19788.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2323, pruned_loss=0.03558, over 890051.72 frames. ], batch size: 42, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:09:42,933 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64976.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:10:42,220 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65001.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:10:49,902 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65004.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:11:33,502 INFO [train.py:892] (1/4) Epoch 36, batch 100, loss[loss=0.1592, simple_loss=0.2333, pruned_loss=0.04258, over 19875.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2334, pruned_loss=0.03733, over 1569579.80 frames. 
], batch size: 134, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:11:35,863 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.481e+02 3.835e+02 4.535e+02 5.519e+02 9.407e+02, threshold=9.071e+02, percent-clipped=1.0 +2023-03-29 08:11:45,452 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6430, 2.1005, 2.4797, 2.9118, 3.3178, 3.5275, 3.4091, 3.3972], + device='cuda:1'), covar=tensor([0.1108, 0.1853, 0.1479, 0.0788, 0.0567, 0.0339, 0.0434, 0.0524], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0168, 0.0179, 0.0151, 0.0137, 0.0133, 0.0125, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:11:59,357 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65037.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:12:26,396 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65049.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:12:42,356 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65055.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:12:44,285 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4905, 2.8273, 2.5289, 2.0291, 2.6012, 2.7698, 2.7461, 2.7524], + device='cuda:1'), covar=tensor([0.0387, 0.0329, 0.0341, 0.0687, 0.0396, 0.0326, 0.0280, 0.0274], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0101, 0.0103, 0.0104, 0.0108, 0.0091, 0.0092, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:12:50,281 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65059.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:13:25,777 INFO [train.py:892] (1/4) Epoch 36, batch 150, loss[loss=0.1524, simple_loss=0.2281, pruned_loss=0.0383, over 19781.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2358, pruned_loss=0.03848, over 2096032.54 frames. 
], batch size: 131, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:13:49,012 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4937, 4.6482, 2.6623, 4.9679, 5.1306, 2.1645, 4.3959, 3.6336], + device='cuda:1'), covar=tensor([0.0639, 0.0554, 0.2580, 0.0612, 0.0408, 0.2695, 0.0780, 0.0835], + device='cuda:1'), in_proj_covar=tensor([0.0239, 0.0261, 0.0233, 0.0281, 0.0261, 0.0206, 0.0242, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 08:14:25,073 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65101.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:14:39,298 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65108.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:14:43,692 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5000, 2.5725, 2.6748, 2.5900, 2.5057, 2.6671, 2.4766, 2.7381], + device='cuda:1'), covar=tensor([0.0369, 0.0316, 0.0310, 0.0351, 0.0442, 0.0346, 0.0446, 0.0335], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0085, 0.0087, 0.0082, 0.0094, 0.0087, 0.0103, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:14:58,865 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:15:10,894 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65121.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:15:13,303 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2929, 4.3905, 2.5776, 4.6358, 4.8161, 2.1466, 4.1318, 3.5806], + device='cuda:1'), covar=tensor([0.0665, 0.0723, 0.2579, 0.0730, 0.0566, 0.2780, 0.0914, 0.0837], + device='cuda:1'), in_proj_covar=tensor([0.0238, 0.0260, 0.0233, 0.0280, 0.0260, 0.0206, 0.0242, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 08:15:22,304 INFO [train.py:892] (1/4) Epoch 36, batch 200, loss[loss=0.1447, simple_loss=0.2232, pruned_loss=0.03307, over 19738.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2379, pruned_loss=0.03883, over 2505308.66 frames. ], batch size: 106, lr: 4.36e-03, grad_scale: 16.0 +2023-03-29 08:15:24,691 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.519e+02 3.588e+02 4.285e+02 5.132e+02 1.208e+03, threshold=8.571e+02, percent-clipped=3.0 +2023-03-29 08:15:57,662 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3207, 2.4481, 4.4720, 3.8247, 4.2903, 4.4096, 4.1772, 4.1691], + device='cuda:1'), covar=tensor([0.0621, 0.1123, 0.0123, 0.0877, 0.0166, 0.0223, 0.0210, 0.0188], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0106, 0.0091, 0.0154, 0.0088, 0.0101, 0.0092, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:16:29,157 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-29 08:17:01,277 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65169.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:17:15,481 INFO [train.py:892] (1/4) Epoch 36, batch 250, loss[loss=0.1456, simple_loss=0.2254, pruned_loss=0.03289, over 19848.00 frames. 
], tot_loss[loss=0.1568, simple_loss=0.2366, pruned_loss=0.03851, over 2826556.82 frames. ], batch size: 104, lr: 4.36e-03, grad_scale: 16.0 +2023-03-29 08:19:08,897 INFO [train.py:892] (1/4) Epoch 36, batch 300, loss[loss=0.1474, simple_loss=0.228, pruned_loss=0.03344, over 19647.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.238, pruned_loss=0.03913, over 3075411.39 frames. ], batch size: 79, lr: 4.36e-03, grad_scale: 32.0 +2023-03-29 08:19:12,291 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.658e+02 3.728e+02 4.313e+02 5.514e+02 9.328e+02, threshold=8.626e+02, percent-clipped=1.0 +2023-03-29 08:19:53,958 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65245.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:20:47,041 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6281, 2.0530, 2.6402, 2.3067, 2.3131, 2.3659, 1.7482, 1.8811], + device='cuda:1'), covar=tensor([0.1231, 0.2277, 0.0943, 0.1104, 0.1938, 0.1521, 0.2722, 0.2523], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0393, 0.0351, 0.0289, 0.0377, 0.0384, 0.0380, 0.0353], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:21:04,026 INFO [train.py:892] (1/4) Epoch 36, batch 350, loss[loss=0.1503, simple_loss=0.226, pruned_loss=0.03734, over 19823.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2367, pruned_loss=0.03824, over 3268659.94 frames. ], batch size: 147, lr: 4.36e-03, grad_scale: 32.0 +2023-03-29 08:21:23,767 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65285.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:21:42,257 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65293.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:22:08,664 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65304.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:22:22,047 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.87 vs. limit=5.0 +2023-03-29 08:22:56,431 INFO [train.py:892] (1/4) Epoch 36, batch 400, loss[loss=0.1478, simple_loss=0.2233, pruned_loss=0.03616, over 19773.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2368, pruned_loss=0.03825, over 3418871.45 frames. ], batch size: 169, lr: 4.36e-03, grad_scale: 32.0 +2023-03-29 08:22:58,380 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.673e+02 3.890e+02 4.309e+02 5.103e+02 7.724e+02, threshold=8.618e+02, percent-clipped=0.0 +2023-03-29 08:23:09,685 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65332.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 08:23:45,284 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65346.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:23:58,029 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65352.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:24:14,582 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65359.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:24:54,091 INFO [train.py:892] (1/4) Epoch 36, batch 450, loss[loss=0.1451, simple_loss=0.2296, pruned_loss=0.03028, over 19787.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2371, pruned_loss=0.03816, over 3537335.17 frames. 
], batch size: 52, lr: 4.36e-03, grad_scale: 16.0 +2023-03-29 08:25:49,852 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65401.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:03,582 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65407.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:05,637 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65408.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:14,911 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65411.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:46,804 INFO [train.py:892] (1/4) Epoch 36, batch 500, loss[loss=0.1545, simple_loss=0.2357, pruned_loss=0.03661, over 19849.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2373, pruned_loss=0.03855, over 3628892.99 frames. ], batch size: 112, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:26:52,437 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.389e+02 3.662e+02 4.205e+02 4.670e+02 6.884e+02, threshold=8.409e+02, percent-clipped=0.0 +2023-03-29 08:27:37,907 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65449.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:27:55,789 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65456.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:28:28,702 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3168, 1.9360, 3.1742, 2.5020, 3.2341, 3.2824, 2.9712, 3.1462], + device='cuda:1'), covar=tensor([0.0951, 0.1335, 0.0163, 0.0501, 0.0175, 0.0256, 0.0297, 0.0226], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0105, 0.0091, 0.0154, 0.0088, 0.0101, 0.0092, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:28:38,065 INFO [train.py:892] (1/4) Epoch 36, batch 550, loss[loss=0.1593, simple_loss=0.2497, pruned_loss=0.0345, over 19622.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2383, pruned_loss=0.03919, over 3698045.52 frames. ], batch size: 52, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:29:51,155 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7451, 3.0163, 2.6011, 2.2993, 2.7828, 2.9579, 2.8733, 2.9486], + device='cuda:1'), covar=tensor([0.0363, 0.0317, 0.0352, 0.0574, 0.0377, 0.0340, 0.0340, 0.0268], + device='cuda:1'), in_proj_covar=tensor([0.0109, 0.0102, 0.0104, 0.0105, 0.0109, 0.0093, 0.0093, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:30:30,660 INFO [train.py:892] (1/4) Epoch 36, batch 600, loss[loss=0.1583, simple_loss=0.2373, pruned_loss=0.03959, over 19643.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2387, pruned_loss=0.03916, over 3753612.86 frames. 
], batch size: 299, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:30:34,427 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.355e+02 3.593e+02 4.441e+02 5.313e+02 1.329e+03, threshold=8.882e+02, percent-clipped=4.0 +2023-03-29 08:30:42,682 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65532.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:32:02,917 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5190, 3.4042, 3.7742, 3.4497, 3.2541, 3.6965, 3.5211, 3.8088], + device='cuda:1'), covar=tensor([0.0790, 0.0408, 0.0381, 0.0436, 0.1452, 0.0589, 0.0527, 0.0392], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0226, 0.0226, 0.0238, 0.0208, 0.0249, 0.0239, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:32:02,972 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3425, 2.0431, 3.3483, 2.7147, 3.3799, 3.4311, 3.1306, 3.3025], + device='cuda:1'), covar=tensor([0.0921, 0.1198, 0.0137, 0.0449, 0.0175, 0.0240, 0.0249, 0.0200], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0105, 0.0090, 0.0153, 0.0087, 0.0100, 0.0091, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:32:21,944 INFO [train.py:892] (1/4) Epoch 36, batch 650, loss[loss=0.1837, simple_loss=0.2588, pruned_loss=0.05429, over 19749.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2374, pruned_loss=0.03879, over 3798480.40 frames. ], batch size: 209, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:33:01,021 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65593.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 08:34:14,079 INFO [train.py:892] (1/4) Epoch 36, batch 700, loss[loss=0.1536, simple_loss=0.2335, pruned_loss=0.0368, over 19755.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2367, pruned_loss=0.0383, over 3831562.09 frames. ], batch size: 205, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:34:18,131 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.459e+02 3.698e+02 4.189e+02 4.947e+02 1.521e+03, threshold=8.379e+02, percent-clipped=3.0 +2023-03-29 08:34:30,936 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65632.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:34:44,629 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-03-29 08:34:52,625 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65641.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:36:12,646 INFO [train.py:892] (1/4) Epoch 36, batch 750, loss[loss=0.1511, simple_loss=0.2271, pruned_loss=0.03751, over 19793.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2368, pruned_loss=0.03857, over 3858975.15 frames. ], batch size: 126, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:36:22,141 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65680.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:36:22,704 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. 
limit=2.0 +2023-03-29 08:37:34,602 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65711.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:38:07,196 INFO [train.py:892] (1/4) Epoch 36, batch 800, loss[loss=0.1591, simple_loss=0.2405, pruned_loss=0.03884, over 19653.00 frames. ], tot_loss[loss=0.158, simple_loss=0.238, pruned_loss=0.03898, over 3877791.31 frames. ], batch size: 79, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:38:11,738 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 3.709e+02 4.446e+02 5.789e+02 1.138e+03, threshold=8.893e+02, percent-clipped=2.0 +2023-03-29 08:39:24,892 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65759.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:39:31,741 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8754, 2.3038, 2.7392, 3.1044, 3.5309, 3.8229, 3.7483, 3.7111], + device='cuda:1'), covar=tensor([0.1023, 0.1754, 0.1376, 0.0664, 0.0489, 0.0281, 0.0380, 0.0487], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0171, 0.0181, 0.0155, 0.0140, 0.0135, 0.0127, 0.0120], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:39:35,986 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0152, 2.9920, 3.0766, 2.5156, 3.1381, 2.6705, 2.9914, 2.9478], + device='cuda:1'), covar=tensor([0.0561, 0.0476, 0.0460, 0.0775, 0.0379, 0.0483, 0.0507, 0.0476], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0090, 0.0087, 0.0113, 0.0082, 0.0086, 0.0083, 0.0077], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 08:40:02,205 INFO [train.py:892] (1/4) Epoch 36, batch 850, loss[loss=0.1629, simple_loss=0.2376, pruned_loss=0.04412, over 19804.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2383, pruned_loss=0.03905, over 3895593.50 frames. ], batch size: 74, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:41:36,438 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-03-29 08:41:53,824 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3055, 2.3770, 2.5038, 2.3836, 2.4217, 2.4779, 2.3864, 2.5259], + device='cuda:1'), covar=tensor([0.0418, 0.0357, 0.0366, 0.0352, 0.0451, 0.0358, 0.0462, 0.0345], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0086, 0.0088, 0.0083, 0.0095, 0.0088, 0.0105, 0.0077], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:41:57,349 INFO [train.py:892] (1/4) Epoch 36, batch 900, loss[loss=0.1544, simple_loss=0.2338, pruned_loss=0.0375, over 19871.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2383, pruned_loss=0.03914, over 3907872.12 frames. ], batch size: 64, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:42:01,150 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.659e+02 3.793e+02 4.465e+02 5.730e+02 1.109e+03, threshold=8.930e+02, percent-clipped=1.0 +2023-03-29 08:42:24,339 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65838.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:43:50,243 INFO [train.py:892] (1/4) Epoch 36, batch 950, loss[loss=0.1435, simple_loss=0.214, pruned_loss=0.03651, over 19861.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2382, pruned_loss=0.0396, over 3917887.72 frames. 
], batch size: 165, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:44:17,904 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65888.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 08:44:45,508 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65899.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:45:36,034 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0184, 1.8607, 2.9102, 2.2200, 2.9812, 3.1026, 2.6932, 2.9951], + device='cuda:1'), covar=tensor([0.1247, 0.1484, 0.0160, 0.0431, 0.0173, 0.0246, 0.0290, 0.0229], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0105, 0.0091, 0.0153, 0.0088, 0.0101, 0.0092, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:45:40,257 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3894, 2.4738, 2.7343, 2.4279, 2.8551, 2.8560, 3.2786, 3.5602], + device='cuda:1'), covar=tensor([0.0765, 0.1755, 0.1740, 0.2279, 0.1675, 0.1529, 0.0778, 0.0702], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0243, 0.0271, 0.0257, 0.0303, 0.0261, 0.0236, 0.0262], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:45:43,716 INFO [train.py:892] (1/4) Epoch 36, batch 1000, loss[loss=0.1534, simple_loss=0.2313, pruned_loss=0.03771, over 19882.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2377, pruned_loss=0.03948, over 3925261.38 frames. ], batch size: 95, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:45:47,945 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.017e+02 3.590e+02 4.101e+02 4.810e+02 1.037e+03, threshold=8.201e+02, percent-clipped=3.0 +2023-03-29 08:46:20,446 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65941.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:46:39,095 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.8099, 6.0709, 6.1267, 6.0237, 5.8584, 6.1342, 5.4703, 5.4944], + device='cuda:1'), covar=tensor([0.0457, 0.0505, 0.0514, 0.0451, 0.0543, 0.0485, 0.0712, 0.1055], + device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0299, 0.0309, 0.0271, 0.0279, 0.0262, 0.0278, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:47:38,764 INFO [train.py:892] (1/4) Epoch 36, batch 1050, loss[loss=0.1861, simple_loss=0.2649, pruned_loss=0.05365, over 19705.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2379, pruned_loss=0.0393, over 3929861.23 frames. 
], batch size: 305, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:48:10,608 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65989.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:48:56,351 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2819, 3.1388, 3.4347, 2.5858, 3.4451, 2.9558, 3.2290, 3.3502], + device='cuda:1'), covar=tensor([0.0637, 0.0503, 0.0404, 0.0808, 0.0346, 0.0479, 0.0470, 0.0368], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0091, 0.0087, 0.0114, 0.0083, 0.0086, 0.0084, 0.0078], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 08:49:06,135 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5780, 4.3698, 4.3682, 4.1420, 4.5903, 3.0048, 3.8538, 2.0706], + device='cuda:1'), covar=tensor([0.0218, 0.0231, 0.0164, 0.0209, 0.0169, 0.1095, 0.0727, 0.1685], + device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0149, 0.0115, 0.0137, 0.0121, 0.0137, 0.0145, 0.0129], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:49:35,982 INFO [train.py:892] (1/4) Epoch 36, batch 1100, loss[loss=0.1842, simple_loss=0.2948, pruned_loss=0.03679, over 18756.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2381, pruned_loss=0.03927, over 3932381.19 frames. ], batch size: 564, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:49:39,607 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.898e+02 3.611e+02 4.375e+02 5.188e+02 7.346e+02, threshold=8.750e+02, percent-clipped=0.0 +2023-03-29 08:51:23,877 INFO [train.py:892] (1/4) Epoch 36, batch 1150, loss[loss=0.1468, simple_loss=0.2259, pruned_loss=0.03383, over 19735.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2384, pruned_loss=0.03976, over 3936863.57 frames. ], batch size: 80, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:51:33,001 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.43 vs. limit=5.0 +2023-03-29 08:52:16,548 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3528, 3.2098, 3.3891, 2.5943, 3.5021, 2.9869, 3.2556, 3.4695], + device='cuda:1'), covar=tensor([0.0674, 0.0483, 0.0559, 0.0858, 0.0402, 0.0501, 0.0481, 0.0410], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0091, 0.0087, 0.0114, 0.0083, 0.0086, 0.0084, 0.0078], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 08:53:15,606 INFO [train.py:892] (1/4) Epoch 36, batch 1200, loss[loss=0.1361, simple_loss=0.2168, pruned_loss=0.02765, over 19846.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2377, pruned_loss=0.03957, over 3941079.28 frames. ], batch size: 118, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:53:19,998 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.230e+02 3.760e+02 4.454e+02 5.058e+02 1.051e+03, threshold=8.908e+02, percent-clipped=2.0 +2023-03-29 08:53:58,140 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66144.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:53:58,479 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.78 vs. 
limit=5.0 +2023-03-29 08:54:33,330 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66160.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:55:09,243 INFO [train.py:892] (1/4) Epoch 36, batch 1250, loss[loss=0.1482, simple_loss=0.2349, pruned_loss=0.03077, over 19809.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2365, pruned_loss=0.03903, over 3944848.62 frames. ], batch size: 98, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:55:38,284 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66188.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 08:55:51,142 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66194.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:56:16,266 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66205.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 08:56:52,713 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66221.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:57:03,443 INFO [train.py:892] (1/4) Epoch 36, batch 1300, loss[loss=0.1559, simple_loss=0.2342, pruned_loss=0.0388, over 19815.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2365, pruned_loss=0.03864, over 3947057.20 frames. ], batch size: 167, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:57:07,573 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 3.837e+02 4.506e+02 5.422e+02 1.103e+03, threshold=9.011e+02, percent-clipped=2.0 +2023-03-29 08:57:25,564 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66236.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:57:29,754 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9006, 3.7859, 3.7566, 3.5118, 3.8883, 2.7887, 3.2565, 1.7879], + device='cuda:1'), covar=tensor([0.0216, 0.0248, 0.0164, 0.0223, 0.0161, 0.1188, 0.0653, 0.1829], + device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0137, 0.0121, 0.0137, 0.0145, 0.0130], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:57:42,002 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6063, 3.8967, 4.1251, 4.6360, 3.1214, 3.4924, 2.8332, 2.8318], + device='cuda:1'), covar=tensor([0.0444, 0.1766, 0.0798, 0.0373, 0.1969, 0.1052, 0.1364, 0.1653], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0330, 0.0252, 0.0207, 0.0251, 0.0214, 0.0224, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 08:58:16,475 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66258.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:58:30,425 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5885, 4.4457, 4.9077, 4.4670, 4.0904, 4.7022, 4.5402, 5.0281], + device='cuda:1'), covar=tensor([0.0765, 0.0354, 0.0385, 0.0391, 0.0926, 0.0561, 0.0480, 0.0299], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0225, 0.0224, 0.0236, 0.0206, 0.0247, 0.0237, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 08:58:57,028 INFO [train.py:892] (1/4) Epoch 36, batch 1350, loss[loss=0.1886, simple_loss=0.2698, pruned_loss=0.05372, over 19653.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2376, pruned_loss=0.03885, over 3946428.23 frames. 
], batch size: 343, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 09:00:26,695 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7357, 3.2321, 3.6206, 3.1176, 3.8905, 3.9377, 4.5233, 5.0089], + device='cuda:1'), covar=tensor([0.0460, 0.1629, 0.1407, 0.2107, 0.1523, 0.1283, 0.0533, 0.0432], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0244, 0.0272, 0.0258, 0.0304, 0.0261, 0.0237, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:00:37,161 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66319.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:00:49,670 INFO [train.py:892] (1/4) Epoch 36, batch 1400, loss[loss=0.1432, simple_loss=0.2176, pruned_loss=0.03441, over 19818.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2373, pruned_loss=0.03905, over 3947536.84 frames. ], batch size: 147, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:00:54,073 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.446e+02 3.517e+02 4.188e+02 4.908e+02 8.356e+02, threshold=8.377e+02, percent-clipped=0.0 +2023-03-29 09:02:06,143 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7645, 2.7608, 1.8170, 3.1636, 2.9228, 3.0676, 3.1882, 2.5871], + device='cuda:1'), covar=tensor([0.0722, 0.0771, 0.1618, 0.0790, 0.0680, 0.0598, 0.0633, 0.0904], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0148, 0.0145, 0.0157, 0.0137, 0.0140, 0.0152, 0.0149], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:02:43,607 INFO [train.py:892] (1/4) Epoch 36, batch 1450, loss[loss=0.1528, simple_loss=0.238, pruned_loss=0.03381, over 19759.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2381, pruned_loss=0.03926, over 3948151.24 frames. ], batch size: 217, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:04:27,567 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 09:04:34,690 INFO [train.py:892] (1/4) Epoch 36, batch 1500, loss[loss=0.1632, simple_loss=0.2419, pruned_loss=0.0422, over 19621.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2377, pruned_loss=0.03923, over 3946125.21 frames. ], batch size: 65, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:04:40,368 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.359e+02 3.684e+02 4.435e+02 5.309e+02 1.081e+03, threshold=8.870e+02, percent-clipped=2.0 +2023-03-29 09:04:55,509 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9015, 3.2844, 3.3369, 3.8103, 2.6220, 3.3172, 2.4462, 2.4285], + device='cuda:1'), covar=tensor([0.0527, 0.1833, 0.1025, 0.0470, 0.2108, 0.0853, 0.1485, 0.1660], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0332, 0.0253, 0.0208, 0.0253, 0.0214, 0.0224, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 09:06:32,053 INFO [train.py:892] (1/4) Epoch 36, batch 1550, loss[loss=0.1484, simple_loss=0.2338, pruned_loss=0.0315, over 19778.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2375, pruned_loss=0.03882, over 3947262.24 frames. 
], batch size: 53, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:07:11,627 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66494.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:07:25,587 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66500.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:08:01,220 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66516.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:08:11,935 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66521.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:08:20,342 INFO [train.py:892] (1/4) Epoch 36, batch 1600, loss[loss=0.1594, simple_loss=0.2305, pruned_loss=0.04415, over 19762.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2374, pruned_loss=0.03845, over 3947606.14 frames. ], batch size: 129, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:08:24,086 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.296e+02 3.591e+02 4.311e+02 5.202e+02 1.053e+03, threshold=8.622e+02, percent-clipped=1.0 +2023-03-29 09:08:35,676 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2426, 3.2629, 2.1234, 3.8156, 3.5178, 3.7587, 3.7926, 3.0422], + device='cuda:1'), covar=tensor([0.0680, 0.0688, 0.1552, 0.0684, 0.0624, 0.0496, 0.0726, 0.0800], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0149, 0.0146, 0.0158, 0.0138, 0.0141, 0.0152, 0.0150], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:08:49,454 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8698, 1.7130, 1.9105, 1.9336, 1.8281, 1.8933, 1.7793, 1.8913], + device='cuda:1'), covar=tensor([0.0396, 0.0413, 0.0352, 0.0344, 0.0485, 0.0354, 0.0522, 0.0370], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0087, 0.0090, 0.0083, 0.0096, 0.0089, 0.0107, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 09:08:57,599 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66542.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:10:14,429 INFO [train.py:892] (1/4) Epoch 36, batch 1650, loss[loss=0.1561, simple_loss=0.2227, pruned_loss=0.04471, over 19808.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2371, pruned_loss=0.03868, over 3948731.89 frames. ], batch size: 132, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:10:20,416 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.24 vs. limit=5.0 +2023-03-29 09:10:29,306 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66582.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 09:11:42,684 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66614.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:12:09,543 INFO [train.py:892] (1/4) Epoch 36, batch 1700, loss[loss=0.1638, simple_loss=0.2381, pruned_loss=0.0448, over 19770.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2372, pruned_loss=0.03844, over 3947619.75 frames. 
], batch size: 241, lr: 4.31e-03, grad_scale: 16.0 +2023-03-29 09:12:13,640 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.609e+02 4.438e+02 5.504e+02 8.717e+02, threshold=8.877e+02, percent-clipped=1.0 +2023-03-29 09:13:56,458 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2271, 3.5953, 3.7524, 4.2426, 2.9458, 3.3872, 2.6075, 2.6398], + device='cuda:1'), covar=tensor([0.0506, 0.1728, 0.0875, 0.0398, 0.1915, 0.0972, 0.1373, 0.1611], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0331, 0.0252, 0.0208, 0.0252, 0.0214, 0.0224, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 09:13:57,345 INFO [train.py:892] (1/4) Epoch 36, batch 1750, loss[loss=0.1347, simple_loss=0.2152, pruned_loss=0.02715, over 19704.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2382, pruned_loss=0.03897, over 3948150.66 frames. ], batch size: 101, lr: 4.31e-03, grad_scale: 16.0 +2023-03-29 09:15:32,756 INFO [train.py:892] (1/4) Epoch 36, batch 1800, loss[loss=0.1445, simple_loss=0.2211, pruned_loss=0.03402, over 19862.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2372, pruned_loss=0.03873, over 3949902.27 frames. ], batch size: 85, lr: 4.31e-03, grad_scale: 16.0 +2023-03-29 09:15:36,378 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.260e+02 3.634e+02 4.363e+02 5.062e+02 8.323e+02, threshold=8.726e+02, percent-clipped=0.0 +2023-03-29 09:15:42,652 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66731.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:16:21,786 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-29 09:16:55,802 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4614, 4.4585, 2.6733, 4.7083, 4.9708, 2.2118, 4.1184, 3.5822], + device='cuda:1'), covar=tensor([0.0584, 0.0784, 0.2585, 0.0703, 0.0440, 0.2677, 0.1033, 0.0881], + device='cuda:1'), in_proj_covar=tensor([0.0240, 0.0263, 0.0235, 0.0282, 0.0262, 0.0207, 0.0245, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 09:17:03,404 INFO [train.py:892] (1/4) Epoch 36, batch 1850, loss[loss=0.1552, simple_loss=0.2346, pruned_loss=0.03787, over 19832.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2378, pruned_loss=0.0383, over 3949208.92 frames. ], batch size: 57, lr: 4.31e-03, grad_scale: 16.0 +2023-03-29 09:18:08,604 INFO [train.py:892] (1/4) Epoch 37, batch 0, loss[loss=0.1602, simple_loss=0.2452, pruned_loss=0.03754, over 19732.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2452, pruned_loss=0.03754, over 19732.00 frames. ], batch size: 269, lr: 4.25e-03, grad_scale: 16.0 +2023-03-29 09:18:08,604 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 09:18:41,824 INFO [train.py:926] (1/4) Epoch 37, validation: loss=0.1834, simple_loss=0.2492, pruned_loss=0.05881, over 2883724.00 frames. 
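The grad-norm quartiles entries from optim.py follow a similarly checkable pattern: in every such entry in this section the printed threshold is Clipping_scale (2.0) times the middle quartile, i.e. twice the median gradient norm (for example 2 * 4.447e+02 = 8.894e+02 in the first clipping entry of this stretch), and percent-clipped is the percentage of recent batches whose gradient norm exceeded that threshold. The sketch below only illustrates that relationship; the quantile bookkeeping is an assumption reconstructed from the logged numbers, not the project's actual optimizer code.

import torch

clipping_scale = 2.0
# Illustrative per-batch gradient norms, chosen so their quartiles match the
# first clipping entry of this section (2.864e+02 ... 8.426e+02).
grad_norms = torch.tensor([286.4, 365.0, 444.7, 535.5, 842.6])

# min / 25% / median / 75% / max, in the order the log prints them.
quartiles = torch.quantile(grad_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
threshold = clipping_scale * quartiles[2]  # 2 * median = 889.4, i.e. 8.894e+02
percent_clipped = 100.0 * (grad_norms > threshold).float().mean()

print(f"quartiles={quartiles.tolist()} threshold={threshold.item():.1f} "
      f"percent-clipped={percent_clipped.item():.1f}")  # no norm exceeds 889.4 -> 0.0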
+2023-03-29 09:18:41,825 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 09:18:58,490 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9018, 3.5930, 3.9143, 3.0020, 4.0961, 3.3316, 3.5767, 4.0554], + device='cuda:1'), covar=tensor([0.0589, 0.0441, 0.0461, 0.0778, 0.0405, 0.0399, 0.0480, 0.0288], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0091, 0.0087, 0.0114, 0.0083, 0.0086, 0.0084, 0.0078], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 09:19:11,078 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66792.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 09:19:27,961 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66800.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 09:20:03,798 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66816.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:20:31,170 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.683e+02 3.599e+02 4.155e+02 4.773e+02 9.045e+02, threshold=8.309e+02, percent-clipped=1.0 +2023-03-29 09:20:39,157 INFO [train.py:892] (1/4) Epoch 37, batch 50, loss[loss=0.1515, simple_loss=0.2321, pruned_loss=0.03547, over 19746.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.234, pruned_loss=0.03651, over 889035.67 frames. ], batch size: 273, lr: 4.25e-03, grad_scale: 16.0 +2023-03-29 09:21:04,047 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7362, 4.5662, 5.0957, 4.5996, 4.1396, 4.8208, 4.6622, 5.2181], + device='cuda:1'), covar=tensor([0.0783, 0.0368, 0.0348, 0.0376, 0.0914, 0.0483, 0.0497, 0.0305], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0228, 0.0226, 0.0239, 0.0209, 0.0247, 0.0239, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:21:15,271 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:21:51,432 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66864.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:22:19,776 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66877.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:22:30,252 INFO [train.py:892] (1/4) Epoch 37, batch 100, loss[loss=0.14, simple_loss=0.2223, pruned_loss=0.02884, over 19694.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2324, pruned_loss=0.03615, over 1568640.58 frames. ], batch size: 74, lr: 4.25e-03, grad_scale: 16.0 +2023-03-29 09:23:45,879 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66914.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:24:15,784 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.134e+02 3.769e+02 4.346e+02 5.384e+02 1.182e+03, threshold=8.692e+02, percent-clipped=4.0 +2023-03-29 09:24:22,077 INFO [train.py:892] (1/4) Epoch 37, batch 150, loss[loss=0.1507, simple_loss=0.2296, pruned_loss=0.0359, over 19787.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.2321, pruned_loss=0.0358, over 2095847.43 frames. ], batch size: 217, lr: 4.25e-03, grad_scale: 16.0 +2023-03-29 09:24:46,038 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.61 vs. 
limit=5.0 +2023-03-29 09:25:24,882 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66957.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:25:27,150 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6620, 2.1840, 2.5101, 2.9400, 3.3132, 3.4396, 3.3283, 3.3212], + device='cuda:1'), covar=tensor([0.1070, 0.1693, 0.1430, 0.0716, 0.0526, 0.0360, 0.0471, 0.0559], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0168, 0.0178, 0.0153, 0.0138, 0.0134, 0.0127, 0.0119], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 09:25:35,488 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66962.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:26:13,788 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2330, 4.3568, 4.6228, 4.4312, 4.5540, 4.1818, 4.3383, 4.1861], + device='cuda:1'), covar=tensor([0.1531, 0.1537, 0.0969, 0.1198, 0.0903, 0.0934, 0.2057, 0.2095], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0339, 0.0374, 0.0304, 0.0280, 0.0287, 0.0368, 0.0392], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:26:19,101 INFO [train.py:892] (1/4) Epoch 37, batch 200, loss[loss=0.1555, simple_loss=0.2392, pruned_loss=0.03588, over 19779.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2341, pruned_loss=0.03659, over 2506554.21 frames. ], batch size: 52, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:27:13,918 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-29 09:27:45,869 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67018.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 09:28:07,316 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.541e+02 3.516e+02 4.008e+02 4.634e+02 9.207e+02, threshold=8.015e+02, percent-clipped=1.0 +2023-03-29 09:28:13,846 INFO [train.py:892] (1/4) Epoch 37, batch 250, loss[loss=0.165, simple_loss=0.2355, pruned_loss=0.04727, over 19764.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2332, pruned_loss=0.03623, over 2827888.33 frames. ], batch size: 122, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:30:07,569 INFO [train.py:892] (1/4) Epoch 37, batch 300, loss[loss=0.1478, simple_loss=0.2326, pruned_loss=0.03151, over 19806.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2334, pruned_loss=0.03658, over 3077623.79 frames. ], batch size: 117, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:30:14,822 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67083.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:30:17,519 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.95 vs. limit=5.0 +2023-03-29 09:30:22,915 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67087.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:31:58,040 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.283e+02 3.982e+02 4.417e+02 5.136e+02 1.288e+03, threshold=8.835e+02, percent-clipped=3.0 +2023-03-29 09:32:05,858 INFO [train.py:892] (1/4) Epoch 37, batch 350, loss[loss=0.155, simple_loss=0.2201, pruned_loss=0.04498, over 19817.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2355, pruned_loss=0.03757, over 3269227.68 frames. 
], batch size: 133, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:32:34,372 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67144.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:33:47,459 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5571, 1.9983, 2.2996, 2.7289, 3.0951, 3.2026, 3.0890, 3.1043], + device='cuda:1'), covar=tensor([0.1060, 0.1860, 0.1504, 0.0804, 0.0560, 0.0390, 0.0510, 0.0593], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0171, 0.0181, 0.0155, 0.0141, 0.0136, 0.0129, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 09:33:49,495 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67177.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:33:54,161 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7390, 1.6228, 1.7708, 1.7227, 1.6696, 1.7530, 1.5899, 1.7650], + device='cuda:1'), covar=tensor([0.0408, 0.0390, 0.0380, 0.0388, 0.0528, 0.0351, 0.0548, 0.0374], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0087, 0.0089, 0.0083, 0.0097, 0.0089, 0.0106, 0.0078], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 09:33:57,485 INFO [train.py:892] (1/4) Epoch 37, batch 400, loss[loss=0.1388, simple_loss=0.2137, pruned_loss=0.03192, over 19889.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2375, pruned_loss=0.03854, over 3419063.60 frames. ], batch size: 61, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:35:05,879 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9632, 3.3287, 3.2059, 3.3108, 3.0906, 3.2654, 2.9707, 3.2671], + device='cuda:1'), covar=tensor([0.0380, 0.0253, 0.0395, 0.0254, 0.0381, 0.0347, 0.0389, 0.0397], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0087, 0.0089, 0.0083, 0.0097, 0.0089, 0.0106, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 09:35:34,613 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67225.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:35:41,240 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.424e+02 3.741e+02 4.233e+02 5.097e+02 1.151e+03, threshold=8.466e+02, percent-clipped=1.0 +2023-03-29 09:35:49,528 INFO [train.py:892] (1/4) Epoch 37, batch 450, loss[loss=0.1436, simple_loss=0.2317, pruned_loss=0.02772, over 19770.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2375, pruned_loss=0.03809, over 3535433.53 frames. ], batch size: 87, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:35:58,434 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2959, 2.4064, 2.6863, 2.3874, 2.8407, 2.8274, 3.1840, 3.4696], + device='cuda:1'), covar=tensor([0.0775, 0.1797, 0.1685, 0.2225, 0.1496, 0.1460, 0.0815, 0.0676], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0245, 0.0273, 0.0259, 0.0305, 0.0264, 0.0239, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:37:36,835 INFO [train.py:892] (1/4) Epoch 37, batch 500, loss[loss=0.1589, simple_loss=0.2405, pruned_loss=0.03868, over 19752.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2377, pruned_loss=0.03841, over 3626780.59 frames. 
], batch size: 253, lr: 4.23e-03, grad_scale: 16.0 +2023-03-29 09:38:02,600 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4352, 5.9810, 6.0378, 5.8729, 5.6555, 5.6841, 5.7939, 5.5868], + device='cuda:1'), covar=tensor([0.1412, 0.1266, 0.0846, 0.1057, 0.0656, 0.0718, 0.1729, 0.1817], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0339, 0.0375, 0.0304, 0.0281, 0.0287, 0.0369, 0.0392], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:38:51,674 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67313.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:38:58,494 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5748, 2.5777, 4.1544, 3.0172, 3.2755, 2.9863, 2.3055, 2.4349], + device='cuda:1'), covar=tensor([0.1366, 0.3999, 0.0570, 0.1257, 0.2204, 0.2022, 0.2916, 0.3024], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0393, 0.0350, 0.0291, 0.0377, 0.0385, 0.0381, 0.0351], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:39:24,436 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.430e+02 3.820e+02 4.510e+02 5.565e+02 1.076e+03, threshold=9.020e+02, percent-clipped=2.0 +2023-03-29 09:39:30,777 INFO [train.py:892] (1/4) Epoch 37, batch 550, loss[loss=0.1446, simple_loss=0.2194, pruned_loss=0.0349, over 19750.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2361, pruned_loss=0.03822, over 3699404.07 frames. ], batch size: 182, lr: 4.23e-03, grad_scale: 16.0 +2023-03-29 09:39:39,715 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7623, 4.5962, 4.5076, 4.2332, 4.8126, 2.9550, 3.7828, 2.1057], + device='cuda:1'), covar=tensor([0.0285, 0.0240, 0.0226, 0.0259, 0.0217, 0.1290, 0.1090, 0.2302], + device='cuda:1'), in_proj_covar=tensor([0.0106, 0.0148, 0.0115, 0.0136, 0.0120, 0.0136, 0.0143, 0.0129], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:41:10,390 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7377, 2.9118, 3.0173, 2.9403, 2.8161, 2.8709, 2.6784, 2.9291], + device='cuda:1'), covar=tensor([0.0422, 0.0357, 0.0289, 0.0277, 0.0383, 0.0333, 0.0410, 0.0426], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0086, 0.0088, 0.0083, 0.0096, 0.0089, 0.0105, 0.0078], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 09:41:12,780 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6398, 3.1600, 3.5318, 3.0514, 3.8112, 3.8096, 4.4209, 4.9419], + device='cuda:1'), covar=tensor([0.0480, 0.1681, 0.1411, 0.2256, 0.1656, 0.1416, 0.0600, 0.0448], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0244, 0.0273, 0.0258, 0.0304, 0.0263, 0.0238, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:41:25,505 INFO [train.py:892] (1/4) Epoch 37, batch 600, loss[loss=0.1279, simple_loss=0.2089, pruned_loss=0.02343, over 19870.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2364, pruned_loss=0.0384, over 3755369.39 frames. 
], batch size: 99, lr: 4.23e-03, grad_scale: 32.0 +2023-03-29 09:41:42,895 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67387.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:42:08,084 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2370, 3.5703, 3.2035, 2.6778, 3.1829, 3.3660, 3.4850, 3.4486], + device='cuda:1'), covar=tensor([0.0308, 0.0260, 0.0295, 0.0525, 0.0339, 0.0303, 0.0228, 0.0264], + device='cuda:1'), in_proj_covar=tensor([0.0109, 0.0102, 0.0103, 0.0104, 0.0108, 0.0092, 0.0092, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 09:43:07,894 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-29 09:43:17,840 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.189e+02 3.534e+02 4.252e+02 5.287e+02 1.120e+03, threshold=8.504e+02, percent-clipped=1.0 +2023-03-29 09:43:24,014 INFO [train.py:892] (1/4) Epoch 37, batch 650, loss[loss=0.1438, simple_loss=0.2302, pruned_loss=0.02867, over 19845.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2359, pruned_loss=0.03816, over 3799661.67 frames. ], batch size: 109, lr: 4.23e-03, grad_scale: 32.0 +2023-03-29 09:43:33,720 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67435.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:43:42,619 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67439.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:45:16,665 INFO [train.py:892] (1/4) Epoch 37, batch 700, loss[loss=0.1634, simple_loss=0.2339, pruned_loss=0.04647, over 19781.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.236, pruned_loss=0.03857, over 3833366.46 frames. ], batch size: 131, lr: 4.23e-03, grad_scale: 32.0 +2023-03-29 09:46:31,482 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67513.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:47:05,431 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.696e+02 4.491e+02 5.364e+02 8.859e+02, threshold=8.982e+02, percent-clipped=1.0 +2023-03-29 09:47:12,733 INFO [train.py:892] (1/4) Epoch 37, batch 750, loss[loss=0.1445, simple_loss=0.2246, pruned_loss=0.03218, over 19803.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2352, pruned_loss=0.03825, over 3861212.70 frames. ], batch size: 172, lr: 4.23e-03, grad_scale: 32.0 +2023-03-29 09:47:55,600 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1369, 2.6537, 4.3584, 3.7543, 4.1466, 4.3050, 4.1618, 3.9772], + device='cuda:1'), covar=tensor([0.0628, 0.0965, 0.0105, 0.0691, 0.0157, 0.0214, 0.0168, 0.0191], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0106, 0.0091, 0.0154, 0.0088, 0.0101, 0.0092, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 09:48:51,588 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67574.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 09:49:07,696 INFO [train.py:892] (1/4) Epoch 37, batch 800, loss[loss=0.1508, simple_loss=0.2333, pruned_loss=0.03411, over 19677.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2362, pruned_loss=0.03824, over 3879469.05 frames. 
], batch size: 64, lr: 4.22e-03, grad_scale: 32.0 +2023-03-29 09:49:18,133 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3133, 5.7159, 5.8638, 5.5246, 5.5720, 5.4863, 5.5080, 5.2452], + device='cuda:1'), covar=tensor([0.1494, 0.1159, 0.0814, 0.1128, 0.0675, 0.0754, 0.1917, 0.1865], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0342, 0.0378, 0.0309, 0.0284, 0.0290, 0.0373, 0.0396], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 09:50:18,603 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67613.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:50:51,536 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.125e+02 3.592e+02 4.129e+02 4.799e+02 7.012e+02, threshold=8.258e+02, percent-clipped=0.0 +2023-03-29 09:50:58,158 INFO [train.py:892] (1/4) Epoch 37, batch 850, loss[loss=0.1418, simple_loss=0.2281, pruned_loss=0.02771, over 19835.00 frames. ], tot_loss[loss=0.155, simple_loss=0.235, pruned_loss=0.03748, over 3896543.97 frames. ], batch size: 52, lr: 4.22e-03, grad_scale: 32.0 +2023-03-29 09:52:10,322 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67661.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:52:53,800 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67680.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:52:54,905 INFO [train.py:892] (1/4) Epoch 37, batch 900, loss[loss=0.145, simple_loss=0.2155, pruned_loss=0.03721, over 19814.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.234, pruned_loss=0.03721, over 3910288.22 frames. ], batch size: 123, lr: 4.22e-03, grad_scale: 16.0 +2023-03-29 09:53:21,587 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67692.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:53:40,668 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-29 09:54:45,463 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.385e+02 3.765e+02 4.292e+02 5.023e+02 7.542e+02, threshold=8.583e+02, percent-clipped=0.0 +2023-03-29 09:54:49,559 INFO [train.py:892] (1/4) Epoch 37, batch 950, loss[loss=0.158, simple_loss=0.2407, pruned_loss=0.03765, over 19758.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2348, pruned_loss=0.03701, over 3919454.02 frames. ], batch size: 89, lr: 4.22e-03, grad_scale: 16.0 +2023-03-29 09:55:09,685 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67739.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:55:14,055 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67741.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:55:41,432 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67753.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 09:56:41,366 INFO [train.py:892] (1/4) Epoch 37, batch 1000, loss[loss=0.225, simple_loss=0.2902, pruned_loss=0.07987, over 19697.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2357, pruned_loss=0.0377, over 3926496.75 frames. 
], batch size: 310, lr: 4.22e-03, grad_scale: 16.0 +2023-03-29 09:56:54,989 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67787.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:58:28,695 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.309e+02 3.606e+02 4.244e+02 4.990e+02 9.256e+02, threshold=8.487e+02, percent-clipped=3.0 +2023-03-29 09:58:34,534 INFO [train.py:892] (1/4) Epoch 37, batch 1050, loss[loss=0.141, simple_loss=0.214, pruned_loss=0.034, over 19837.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2365, pruned_loss=0.03784, over 3931012.95 frames. ], batch size: 146, lr: 4.22e-03, grad_scale: 16.0 +2023-03-29 09:59:36,053 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9627, 3.3710, 3.4721, 3.9518, 2.7526, 3.2619, 2.5095, 2.4690], + device='cuda:1'), covar=tensor([0.0596, 0.1685, 0.0972, 0.0429, 0.2008, 0.0911, 0.1427, 0.1679], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0329, 0.0251, 0.0207, 0.0249, 0.0213, 0.0223, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 10:00:00,866 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67869.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 10:00:25,292 INFO [train.py:892] (1/4) Epoch 37, batch 1100, loss[loss=0.1444, simple_loss=0.2228, pruned_loss=0.033, over 19878.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2357, pruned_loss=0.03763, over 3935382.15 frames. ], batch size: 88, lr: 4.22e-03, grad_scale: 16.0 +2023-03-29 10:01:26,861 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-29 10:02:13,702 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.252e+02 3.748e+02 4.417e+02 5.113e+02 1.132e+03, threshold=8.834e+02, percent-clipped=3.0 +2023-03-29 10:02:18,006 INFO [train.py:892] (1/4) Epoch 37, batch 1150, loss[loss=0.1511, simple_loss=0.2267, pruned_loss=0.03774, over 19836.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2354, pruned_loss=0.03781, over 3939932.29 frames. ], batch size: 171, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:04:11,699 INFO [train.py:892] (1/4) Epoch 37, batch 1200, loss[loss=0.1417, simple_loss=0.2195, pruned_loss=0.03201, over 19784.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2358, pruned_loss=0.03826, over 3942597.38 frames. ], batch size: 83, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:04:12,776 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67981.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:06:05,488 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.592e+02 3.677e+02 4.252e+02 5.217e+02 1.129e+03, threshold=8.505e+02, percent-clipped=2.0 +2023-03-29 10:06:09,653 INFO [train.py:892] (1/4) Epoch 37, batch 1250, loss[loss=0.153, simple_loss=0.2399, pruned_loss=0.03299, over 19944.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2357, pruned_loss=0.03818, over 3944900.21 frames. 
], batch size: 52, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:06:20,211 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68036.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:06:35,584 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68042.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:06:50,968 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68048.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 10:08:04,309 INFO [train.py:892] (1/4) Epoch 37, batch 1300, loss[loss=0.1667, simple_loss=0.2424, pruned_loss=0.04554, over 19777.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2358, pruned_loss=0.03792, over 3946829.77 frames. ], batch size: 70, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:08:18,880 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68087.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 10:09:25,155 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1271, 4.3086, 4.3664, 4.2383, 4.0906, 4.3246, 3.8984, 3.8959], + device='cuda:1'), covar=tensor([0.0560, 0.0573, 0.0521, 0.0474, 0.0696, 0.0522, 0.0689, 0.1006], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0299, 0.0306, 0.0268, 0.0280, 0.0260, 0.0275, 0.0322], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:09:54,441 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.204e+02 3.685e+02 4.439e+02 5.105e+02 9.466e+02, threshold=8.879e+02, percent-clipped=1.0 +2023-03-29 10:09:58,209 INFO [train.py:892] (1/4) Epoch 37, batch 1350, loss[loss=0.137, simple_loss=0.2144, pruned_loss=0.0298, over 19907.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2362, pruned_loss=0.03805, over 3948162.66 frames. ], batch size: 116, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:10:01,406 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7575, 2.8882, 3.6808, 3.0673, 3.8277, 3.7385, 4.6247, 5.0861], + device='cuda:1'), covar=tensor([0.0471, 0.2017, 0.1478, 0.2242, 0.1815, 0.1651, 0.0524, 0.0509], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0243, 0.0272, 0.0258, 0.0303, 0.0263, 0.0237, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:10:09,410 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4802, 2.6830, 3.8543, 3.0219, 3.2106, 3.0237, 2.2692, 2.4241], + device='cuda:1'), covar=tensor([0.1288, 0.3011, 0.0598, 0.1154, 0.1893, 0.1694, 0.2664, 0.2739], + device='cuda:1'), in_proj_covar=tensor([0.0357, 0.0397, 0.0353, 0.0291, 0.0380, 0.0389, 0.0384, 0.0355], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:10:35,180 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68148.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 10:11:24,106 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68169.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:11:48,281 INFO [train.py:892] (1/4) Epoch 37, batch 1400, loss[loss=0.1395, simple_loss=0.2246, pruned_loss=0.02714, over 19644.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2355, pruned_loss=0.0378, over 3948547.63 frames. 
], batch size: 79, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:13:09,640 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68217.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:13:35,439 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.616e+02 3.628e+02 4.259e+02 5.104e+02 1.150e+03, threshold=8.519e+02, percent-clipped=2.0 +2023-03-29 10:13:41,104 INFO [train.py:892] (1/4) Epoch 37, batch 1450, loss[loss=0.1503, simple_loss=0.2323, pruned_loss=0.03416, over 19729.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2356, pruned_loss=0.03751, over 3949769.58 frames. ], batch size: 104, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:13:59,907 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-29 10:15:37,195 INFO [train.py:892] (1/4) Epoch 37, batch 1500, loss[loss=0.1494, simple_loss=0.2374, pruned_loss=0.03068, over 19846.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2356, pruned_loss=0.03768, over 3948925.88 frames. ], batch size: 115, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:17:23,946 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.350e+02 3.594e+02 4.207e+02 5.059e+02 8.265e+02, threshold=8.414e+02, percent-clipped=0.0 +2023-03-29 10:17:29,440 INFO [train.py:892] (1/4) Epoch 37, batch 1550, loss[loss=0.1549, simple_loss=0.2361, pruned_loss=0.0369, over 19656.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2355, pruned_loss=0.03757, over 3950075.09 frames. ], batch size: 47, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:17:40,391 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68336.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:17:42,246 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68337.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:18:06,595 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68348.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:18:10,106 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6270, 3.0116, 2.5927, 2.1919, 2.6810, 2.8233, 2.8830, 2.8868], + device='cuda:1'), covar=tensor([0.0377, 0.0271, 0.0344, 0.0579, 0.0395, 0.0334, 0.0258, 0.0280], + device='cuda:1'), in_proj_covar=tensor([0.0110, 0.0103, 0.0105, 0.0105, 0.0108, 0.0093, 0.0093, 0.0093], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 10:19:17,874 INFO [train.py:892] (1/4) Epoch 37, batch 1600, loss[loss=0.1544, simple_loss=0.2241, pruned_loss=0.04238, over 19855.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2346, pruned_loss=0.03728, over 3950673.39 frames. ], batch size: 137, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:19:26,506 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68384.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:19:41,726 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. 
limit=2.0 +2023-03-29 10:19:54,637 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68396.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:20:01,402 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68399.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:21:10,224 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.336e+02 3.362e+02 4.148e+02 5.414e+02 8.370e+02, threshold=8.296e+02, percent-clipped=0.0 +2023-03-29 10:21:15,823 INFO [train.py:892] (1/4) Epoch 37, batch 1650, loss[loss=0.1425, simple_loss=0.2306, pruned_loss=0.02726, over 19647.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2338, pruned_loss=0.03706, over 3951283.36 frames. ], batch size: 47, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:21:44,666 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68443.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 10:21:49,494 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 10:22:24,179 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68460.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:23:09,504 INFO [train.py:892] (1/4) Epoch 37, batch 1700, loss[loss=0.1472, simple_loss=0.2314, pruned_loss=0.03145, over 19802.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2342, pruned_loss=0.03705, over 3952722.03 frames. ], batch size: 74, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:24:19,281 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8809, 3.9116, 2.3690, 4.0563, 4.2275, 1.9241, 3.4760, 3.2896], + device='cuda:1'), covar=tensor([0.0727, 0.0862, 0.2856, 0.0889, 0.0640, 0.3065, 0.1178, 0.0919], + device='cuda:1'), in_proj_covar=tensor([0.0239, 0.0263, 0.0234, 0.0281, 0.0261, 0.0206, 0.0242, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 10:24:53,967 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.100e+02 3.371e+02 4.024e+02 4.531e+02 9.295e+02, threshold=8.049e+02, percent-clipped=2.0 +2023-03-29 10:24:57,886 INFO [train.py:892] (1/4) Epoch 37, batch 1750, loss[loss=0.1422, simple_loss=0.2275, pruned_loss=0.02852, over 19711.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2341, pruned_loss=0.03711, over 3952167.83 frames. ], batch size: 85, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:25:06,101 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7171, 2.1601, 3.7756, 3.2781, 3.6805, 3.7152, 3.4523, 3.5209], + device='cuda:1'), covar=tensor([0.0756, 0.1204, 0.0132, 0.0514, 0.0182, 0.0256, 0.0233, 0.0220], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0105, 0.0091, 0.0154, 0.0088, 0.0100, 0.0092, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:26:31,163 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68578.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:26:35,981 INFO [train.py:892] (1/4) Epoch 37, batch 1800, loss[loss=0.1466, simple_loss=0.2255, pruned_loss=0.03383, over 19814.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2346, pruned_loss=0.03738, over 3950552.42 frames. ], batch size: 67, lr: 4.19e-03, grad_scale: 16.0 +2023-03-29 10:28:00,280 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.18 vs. 
limit=5.0 +2023-03-29 10:28:03,182 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.060e+02 3.457e+02 4.221e+02 5.089e+02 1.107e+03, threshold=8.442e+02, percent-clipped=2.0 +2023-03-29 10:28:03,753 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.5992, 5.9060, 5.9713, 5.8480, 5.6519, 5.9940, 5.2461, 5.3474], + device='cuda:1'), covar=tensor([0.0511, 0.0537, 0.0506, 0.0492, 0.0618, 0.0499, 0.0733, 0.1026], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0303, 0.0313, 0.0272, 0.0283, 0.0264, 0.0279, 0.0326], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:28:06,909 INFO [train.py:892] (1/4) Epoch 37, batch 1850, loss[loss=0.1741, simple_loss=0.2603, pruned_loss=0.04395, over 19681.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2358, pruned_loss=0.0375, over 3949749.19 frames. ], batch size: 56, lr: 4.19e-03, grad_scale: 16.0 +2023-03-29 10:28:12,291 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-29 10:29:10,991 INFO [train.py:892] (1/4) Epoch 38, batch 0, loss[loss=0.1665, simple_loss=0.251, pruned_loss=0.041, over 19862.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.251, pruned_loss=0.041, over 19862.00 frames. ], batch size: 48, lr: 4.14e-03, grad_scale: 16.0 +2023-03-29 10:29:10,992 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 10:29:46,257 INFO [train.py:926] (1/4) Epoch 38, validation: loss=0.1847, simple_loss=0.2497, pruned_loss=0.05979, over 2883724.00 frames. +2023-03-29 10:29:46,258 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 10:29:49,749 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68637.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:29:54,527 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68639.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:31:45,406 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:31:45,530 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:31:46,766 INFO [train.py:892] (1/4) Epoch 38, batch 50, loss[loss=0.1412, simple_loss=0.2191, pruned_loss=0.03169, over 19786.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2299, pruned_loss=0.03554, over 891722.85 frames. ], batch size: 193, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:33:14,603 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1439, 4.7830, 4.8456, 4.5298, 5.1050, 3.2789, 4.1797, 2.6403], + device='cuda:1'), covar=tensor([0.0172, 0.0201, 0.0146, 0.0200, 0.0151, 0.0977, 0.0905, 0.1415], + device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0138, 0.0122, 0.0138, 0.0145, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:33:21,840 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.214e+02 3.660e+02 4.318e+02 5.204e+02 7.882e+02, threshold=8.636e+02, percent-clipped=0.0 +2023-03-29 10:33:36,552 INFO [train.py:892] (1/4) Epoch 38, batch 100, loss[loss=0.1639, simple_loss=0.2486, pruned_loss=0.03956, over 19742.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2302, pruned_loss=0.03551, over 1571030.42 frames. 
], batch size: 291, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:33:54,256 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68743.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 10:34:01,143 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68746.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:34:22,494 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68755.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:35:24,036 INFO [train.py:892] (1/4) Epoch 38, batch 150, loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02986, over 19871.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2314, pruned_loss=0.03572, over 2099680.50 frames. ], batch size: 92, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:35:35,760 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0963, 3.0033, 3.1569, 2.5994, 3.2282, 2.7406, 3.0946, 3.1125], + device='cuda:1'), covar=tensor([0.0578, 0.0508, 0.0538, 0.0828, 0.0411, 0.0546, 0.0518, 0.0442], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0092, 0.0088, 0.0115, 0.0084, 0.0087, 0.0085, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 10:35:38,003 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68791.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 10:37:03,412 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.249e+02 3.607e+02 4.338e+02 5.110e+02 1.083e+03, threshold=8.676e+02, percent-clipped=2.0 +2023-03-29 10:37:19,972 INFO [train.py:892] (1/4) Epoch 38, batch 200, loss[loss=0.1631, simple_loss=0.2493, pruned_loss=0.0384, over 19811.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.2327, pruned_loss=0.03546, over 2508616.50 frames. ], batch size: 74, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:37:20,867 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5877, 4.7449, 5.0156, 4.7597, 4.8968, 4.4887, 4.7433, 4.4619], + device='cuda:1'), covar=tensor([0.1565, 0.1543, 0.0850, 0.1342, 0.0810, 0.0953, 0.1895, 0.2231], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0341, 0.0377, 0.0309, 0.0284, 0.0288, 0.0367, 0.0397], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 10:39:12,969 INFO [train.py:892] (1/4) Epoch 38, batch 250, loss[loss=0.1387, simple_loss=0.2206, pruned_loss=0.0284, over 19818.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2309, pruned_loss=0.0348, over 2828530.50 frames. ], batch size: 98, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:40:13,335 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-29 10:40:53,780 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.221e+02 3.576e+02 4.212e+02 4.903e+02 9.274e+02, threshold=8.425e+02, percent-clipped=1.0 +2023-03-29 10:41:06,843 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68934.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:41:10,027 INFO [train.py:892] (1/4) Epoch 38, batch 300, loss[loss=0.1363, simple_loss=0.2146, pruned_loss=0.02904, over 19799.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.2327, pruned_loss=0.03545, over 3075778.10 frames. ], batch size: 211, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:42:28,276 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.14 vs. 
limit=5.0 +2023-03-29 10:43:04,371 INFO [train.py:892] (1/4) Epoch 38, batch 350, loss[loss=0.1571, simple_loss=0.2322, pruned_loss=0.04099, over 19745.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2333, pruned_loss=0.03595, over 3270586.59 frames. ], batch size: 77, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:44:39,916 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.628e+02 4.401e+02 5.124e+02 1.005e+03, threshold=8.801e+02, percent-clipped=1.0 +2023-03-29 10:44:56,351 INFO [train.py:892] (1/4) Epoch 38, batch 400, loss[loss=0.1753, simple_loss=0.2519, pruned_loss=0.04938, over 19716.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2342, pruned_loss=0.03678, over 3420940.09 frames. ], batch size: 310, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:45:08,659 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69041.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:45:13,688 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69043.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:45:27,799 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2073, 4.0728, 4.4839, 4.0882, 3.8213, 4.3312, 4.1533, 4.5631], + device='cuda:1'), covar=tensor([0.0777, 0.0384, 0.0338, 0.0400, 0.1033, 0.0561, 0.0497, 0.0330], + device='cuda:1'), in_proj_covar=tensor([0.0288, 0.0229, 0.0228, 0.0240, 0.0209, 0.0251, 0.0242, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:45:34,646 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0 +2023-03-29 10:45:41,702 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69055.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:46:50,039 INFO [train.py:892] (1/4) Epoch 38, batch 450, loss[loss=0.1262, simple_loss=0.2044, pruned_loss=0.02401, over 19776.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2344, pruned_loss=0.03678, over 3538543.31 frames. 
], batch size: 113, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:47:28,751 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69103.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:47:31,092 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69104.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:48:23,826 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.342e+02 3.756e+02 4.232e+02 5.064e+02 8.468e+02, threshold=8.465e+02, percent-clipped=0.0 +2023-03-29 10:48:24,733 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3172, 4.1552, 4.6050, 4.2071, 3.9593, 4.4966, 4.2794, 4.7214], + device='cuda:1'), covar=tensor([0.0863, 0.0443, 0.0414, 0.0449, 0.1008, 0.0557, 0.0542, 0.0391], + device='cuda:1'), in_proj_covar=tensor([0.0288, 0.0230, 0.0228, 0.0240, 0.0210, 0.0252, 0.0242, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:48:30,161 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6950, 4.6999, 2.9060, 4.9925, 5.1782, 2.2647, 4.4064, 3.8616], + device='cuda:1'), covar=tensor([0.0531, 0.0699, 0.2368, 0.0631, 0.0439, 0.2629, 0.0803, 0.0769], + device='cuda:1'), in_proj_covar=tensor([0.0239, 0.0263, 0.0234, 0.0283, 0.0262, 0.0206, 0.0243, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 10:48:39,773 INFO [train.py:892] (1/4) Epoch 38, batch 500, loss[loss=0.1466, simple_loss=0.2254, pruned_loss=0.03393, over 19818.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2327, pruned_loss=0.03607, over 3631127.96 frames. ], batch size: 96, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:49:38,199 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69162.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:49:52,945 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69168.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:50:22,713 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3163, 4.5096, 4.5676, 4.5553, 4.2042, 4.5254, 4.1334, 3.8465], + device='cuda:1'), covar=tensor([0.1170, 0.1135, 0.0963, 0.0812, 0.1153, 0.1041, 0.1385, 0.2425], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0302, 0.0311, 0.0273, 0.0283, 0.0264, 0.0279, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:50:32,785 INFO [train.py:892] (1/4) Epoch 38, batch 550, loss[loss=0.1648, simple_loss=0.2484, pruned_loss=0.04057, over 19709.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2343, pruned_loss=0.03678, over 3702748.28 frames. ], batch size: 283, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:50:42,632 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.59 vs. 
limit=5.0 +2023-03-29 10:50:54,161 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7523, 5.0269, 5.0906, 4.9836, 4.7321, 5.0593, 4.6268, 4.6250], + device='cuda:1'), covar=tensor([0.0537, 0.0502, 0.0479, 0.0428, 0.0593, 0.0487, 0.0696, 0.1005], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0303, 0.0311, 0.0273, 0.0283, 0.0264, 0.0279, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:51:40,891 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9700, 2.9654, 3.1814, 3.1491, 2.9937, 2.9668, 2.9354, 3.2005], + device='cuda:1'), covar=tensor([0.0337, 0.0448, 0.0320, 0.0308, 0.0379, 0.0346, 0.0412, 0.0367], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0087, 0.0090, 0.0084, 0.0096, 0.0089, 0.0106, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 10:51:57,160 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69223.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:52:02,587 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69225.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:52:11,485 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.978e+02 3.709e+02 4.324e+02 5.706e+02 8.766e+02, threshold=8.649e+02, percent-clipped=1.0 +2023-03-29 10:52:12,553 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69229.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:52:13,009 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0 +2023-03-29 10:52:23,832 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69234.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:52:27,222 INFO [train.py:892] (1/4) Epoch 38, batch 600, loss[loss=0.1492, simple_loss=0.2319, pruned_loss=0.03326, over 19775.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2339, pruned_loss=0.03674, over 3758864.53 frames. ], batch size: 241, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:52:49,566 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69245.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 10:53:40,692 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5901, 4.7537, 4.8204, 4.8079, 4.4775, 4.7542, 4.2963, 4.0351], + device='cuda:1'), covar=tensor([0.1079, 0.1101, 0.0975, 0.0789, 0.1186, 0.1055, 0.1461, 0.2305], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0302, 0.0311, 0.0272, 0.0282, 0.0264, 0.0278, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:53:53,975 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:54:14,121 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69282.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:54:22,213 INFO [train.py:892] (1/4) Epoch 38, batch 650, loss[loss=0.1622, simple_loss=0.2507, pruned_loss=0.03686, over 19689.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2338, pruned_loss=0.03693, over 3801652.78 frames. 
], batch size: 59, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:54:23,330 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69286.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:54:42,504 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6024, 2.1772, 3.5689, 2.9841, 3.5642, 3.5889, 3.2848, 3.3740], + device='cuda:1'), covar=tensor([0.0781, 0.1115, 0.0129, 0.0486, 0.0160, 0.0248, 0.0228, 0.0221], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0105, 0.0091, 0.0153, 0.0088, 0.0100, 0.0091, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:55:09,541 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1530, 4.0409, 4.4310, 4.0396, 3.7718, 4.2998, 4.0955, 4.4762], + device='cuda:1'), covar=tensor([0.0728, 0.0369, 0.0341, 0.0378, 0.1004, 0.0523, 0.0482, 0.0357], + device='cuda:1'), in_proj_covar=tensor([0.0289, 0.0231, 0.0228, 0.0241, 0.0211, 0.0252, 0.0242, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 10:55:11,589 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69306.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 10:55:42,205 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:55:59,575 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.644e+02 3.609e+02 4.195e+02 5.280e+02 9.691e+02, threshold=8.389e+02, percent-clipped=2.0 +2023-03-29 10:56:15,752 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69334.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 10:56:18,448 INFO [train.py:892] (1/4) Epoch 38, batch 700, loss[loss=0.1631, simple_loss=0.2339, pruned_loss=0.04622, over 19803.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.235, pruned_loss=0.03721, over 3833703.88 frames. ], batch size: 126, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:56:30,234 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69341.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:57:30,706 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69368.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:57:36,649 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-29 10:58:00,790 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69381.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:58:10,271 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7701, 3.5250, 3.6231, 3.7833, 3.6160, 3.7602, 3.8668, 4.0244], + device='cuda:1'), covar=tensor([0.0741, 0.0539, 0.0595, 0.0444, 0.0759, 0.0635, 0.0487, 0.0368], + device='cuda:1'), in_proj_covar=tensor([0.0160, 0.0184, 0.0207, 0.0182, 0.0181, 0.0165, 0.0158, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 10:58:11,793 INFO [train.py:892] (1/4) Epoch 38, batch 750, loss[loss=0.1499, simple_loss=0.2329, pruned_loss=0.03347, over 19764.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2356, pruned_loss=0.03756, over 3859167.62 frames. 
], batch size: 244, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 10:58:19,034 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69389.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:58:29,472 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69394.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:58:40,557 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69399.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:59:09,184 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 10:59:32,813 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-29 10:59:43,601 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9598, 5.0863, 5.3635, 5.0462, 5.2404, 4.8964, 5.0812, 4.8431], + device='cuda:1'), covar=tensor([0.1391, 0.1502, 0.0847, 0.1363, 0.0743, 0.0885, 0.1853, 0.2143], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0345, 0.0379, 0.0312, 0.0288, 0.0291, 0.0371, 0.0399], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 10:59:51,961 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.082e+02 3.634e+02 4.546e+02 5.374e+02 9.782e+02, threshold=9.091e+02, percent-clipped=2.0 +2023-03-29 10:59:53,005 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69429.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:00:07,014 INFO [train.py:892] (1/4) Epoch 38, batch 800, loss[loss=0.1579, simple_loss=0.2321, pruned_loss=0.04182, over 19809.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2355, pruned_loss=0.03753, over 3878133.58 frames. ], batch size: 72, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 11:00:53,802 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69455.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:02:01,807 INFO [train.py:892] (1/4) Epoch 38, batch 850, loss[loss=0.1403, simple_loss=0.2162, pruned_loss=0.03222, over 19794.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2348, pruned_loss=0.03711, over 3894872.60 frames. ], batch size: 174, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 11:03:16,077 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69518.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:03:19,368 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-29 11:03:28,502 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69524.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:03:39,263 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.651e+02 4.238e+02 5.380e+02 9.885e+02, threshold=8.476e+02, percent-clipped=3.0 +2023-03-29 11:03:54,824 INFO [train.py:892] (1/4) Epoch 38, batch 900, loss[loss=0.1563, simple_loss=0.2326, pruned_loss=0.04001, over 19817.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.234, pruned_loss=0.03724, over 3908112.29 frames. ], batch size: 133, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 11:03:57,972 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.25 vs. 
limit=5.0 +2023-03-29 11:04:37,546 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8536, 2.3893, 2.7879, 3.0481, 3.5196, 3.7822, 3.5669, 3.5800], + device='cuda:1'), covar=tensor([0.1024, 0.1673, 0.1300, 0.0752, 0.0471, 0.0302, 0.0478, 0.0600], + device='cuda:1'), in_proj_covar=tensor([0.0163, 0.0169, 0.0179, 0.0154, 0.0140, 0.0135, 0.0130, 0.0120], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 11:05:35,622 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69581.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:05:45,043 INFO [train.py:892] (1/4) Epoch 38, batch 950, loss[loss=0.1614, simple_loss=0.2384, pruned_loss=0.04218, over 19787.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2342, pruned_loss=0.03705, over 3916592.76 frames. ], batch size: 73, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 11:06:21,709 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69601.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 11:07:25,031 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.229e+02 3.525e+02 4.148e+02 4.911e+02 8.887e+02, threshold=8.297e+02, percent-clipped=1.0 +2023-03-29 11:07:25,883 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69629.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 11:07:42,215 INFO [train.py:892] (1/4) Epoch 38, batch 1000, loss[loss=0.1493, simple_loss=0.2261, pruned_loss=0.0362, over 19899.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.236, pruned_loss=0.03812, over 3924137.44 frames. ], batch size: 113, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 11:08:07,984 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-29 11:09:09,932 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69674.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:09:14,193 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69676.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:09:41,228 INFO [train.py:892] (1/4) Epoch 38, batch 1050, loss[loss=0.1525, simple_loss=0.2347, pruned_loss=0.03514, over 19850.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2358, pruned_loss=0.03792, over 3929554.98 frames. ], batch size: 190, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:09:53,043 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 11:10:12,280 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69699.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:11:13,819 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69724.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:11:23,834 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.661e+02 3.738e+02 4.279e+02 5.183e+02 8.031e+02, threshold=8.559e+02, percent-clipped=0.0 +2023-03-29 11:11:41,132 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69735.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 11:11:42,240 INFO [train.py:892] (1/4) Epoch 38, batch 1100, loss[loss=0.1532, simple_loss=0.2293, pruned_loss=0.03853, over 19844.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2357, pruned_loss=0.03774, over 3934736.09 frames. 
], batch size: 145, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:12:07,672 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69747.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:12:15,060 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69750.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:13:36,906 INFO [train.py:892] (1/4) Epoch 38, batch 1150, loss[loss=0.1485, simple_loss=0.2317, pruned_loss=0.03259, over 19774.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2348, pruned_loss=0.03729, over 3938207.56 frames. ], batch size: 191, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:14:51,198 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69818.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:14:53,433 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5085, 3.5928, 2.2662, 4.2094, 3.8563, 4.1399, 4.2653, 3.2987], + device='cuda:1'), covar=tensor([0.0608, 0.0601, 0.1424, 0.0606, 0.0649, 0.0443, 0.0576, 0.0815], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0147, 0.0146, 0.0157, 0.0137, 0.0141, 0.0153, 0.0150], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 11:15:03,321 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69824.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:15:13,304 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.448e+02 3.531e+02 4.116e+02 4.879e+02 7.988e+02, threshold=8.232e+02, percent-clipped=0.0 +2023-03-29 11:15:27,916 INFO [train.py:892] (1/4) Epoch 38, batch 1200, loss[loss=0.1483, simple_loss=0.232, pruned_loss=0.03227, over 19480.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2359, pruned_loss=0.03738, over 3940731.45 frames. ], batch size: 43, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:16:39,618 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69866.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:16:53,563 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69872.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:17:15,056 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69881.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:17:26,311 INFO [train.py:892] (1/4) Epoch 38, batch 1250, loss[loss=0.1613, simple_loss=0.2503, pruned_loss=0.03618, over 19943.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2359, pruned_loss=0.03746, over 3943146.02 frames. 
], batch size: 52, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:17:44,133 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0283, 2.9598, 1.9224, 3.4830, 3.2401, 3.4206, 3.5357, 2.8554], + device='cuda:1'), covar=tensor([0.0688, 0.0787, 0.1824, 0.0718, 0.0681, 0.0507, 0.0535, 0.0847], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0148, 0.0146, 0.0158, 0.0137, 0.0141, 0.0153, 0.0150], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 11:18:00,537 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69901.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 11:18:19,757 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8220, 3.5756, 3.6916, 3.8384, 3.6612, 3.8338, 3.9178, 4.1086], + device='cuda:1'), covar=tensor([0.0751, 0.0500, 0.0578, 0.0444, 0.0768, 0.0625, 0.0486, 0.0346], + device='cuda:1'), in_proj_covar=tensor([0.0160, 0.0185, 0.0209, 0.0183, 0.0184, 0.0167, 0.0158, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 11:19:06,360 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69929.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:19:06,438 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69929.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 11:19:08,019 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.501e+02 3.622e+02 4.531e+02 5.693e+02 1.011e+03, threshold=9.061e+02, percent-clipped=2.0 +2023-03-29 11:19:20,443 INFO [train.py:892] (1/4) Epoch 38, batch 1300, loss[loss=0.143, simple_loss=0.2206, pruned_loss=0.03272, over 19775.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2362, pruned_loss=0.03765, over 3944872.98 frames. ], batch size: 168, lr: 4.10e-03, grad_scale: 16.0 +2023-03-29 11:19:38,874 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1189, 2.0266, 2.1234, 2.1024, 2.0737, 2.1730, 2.0466, 2.1893], + device='cuda:1'), covar=tensor([0.0406, 0.0387, 0.0358, 0.0360, 0.0502, 0.0409, 0.0523, 0.0337], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0087, 0.0090, 0.0085, 0.0096, 0.0090, 0.0106, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 11:19:46,459 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69949.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 11:20:47,583 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69976.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:20:49,291 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69977.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:20:55,631 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5508, 4.2495, 4.3228, 4.5421, 4.2841, 4.6017, 4.6254, 4.8468], + device='cuda:1'), covar=tensor([0.0704, 0.0463, 0.0592, 0.0402, 0.0761, 0.0581, 0.0470, 0.0331], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0185, 0.0209, 0.0184, 0.0184, 0.0168, 0.0159, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 11:21:07,869 INFO [train.py:892] (1/4) Epoch 38, batch 1350, loss[loss=0.1567, simple_loss=0.235, pruned_loss=0.03921, over 19790.00 frames. 
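Each [train.py:892] record above pairs the current batch's loss (over that batch's frames) with a frame-weighted running tot_loss; the fractional cumulative frame counts (e.g. 3944872.98) suggest older batches are down-weighted rather than summed exactly. A sketch under that assumption; the decay constant is illustrative, not taken from the source.

class RunningLoss:
    """Frame-weighted running average behind the tot_loss[...] fields."""

    def __init__(self, decay=0.995):
        self.decay = decay        # assumed per-batch forgetting factor
        self.loss_sum = 0.0       # decayed sum of loss * num_frames
        self.frame_sum = 0.0      # decayed sum of num_frames

    def update(self, loss, num_frames):
        self.loss_sum = self.decay * self.loss_sum + loss * num_frames
        self.frame_sum = self.decay * self.frame_sum + num_frames

    @property
    def tot_loss(self):
        return self.loss_sum / max(self.frame_sum, 1.0)

The same tracker applied separately to loss, simple_loss and pruned_loss would reproduce the three running values printed at each 50-batch interval.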
], tot_loss[loss=0.1551, simple_loss=0.2358, pruned_loss=0.03718, over 3945878.66 frames. ], batch size: 120, lr: 4.10e-03, grad_scale: 16.0 +2023-03-29 11:21:41,212 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.6649, 5.9560, 5.9915, 5.8684, 5.7263, 5.9729, 5.3252, 5.4009], + device='cuda:1'), covar=tensor([0.0388, 0.0426, 0.0473, 0.0409, 0.0497, 0.0478, 0.0689, 0.0919], + device='cuda:1'), in_proj_covar=tensor([0.0283, 0.0300, 0.0311, 0.0272, 0.0281, 0.0262, 0.0276, 0.0324], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 11:22:40,228 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70024.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:22:40,396 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70024.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:22:51,481 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.339e+02 3.460e+02 3.863e+02 4.574e+02 6.887e+02, threshold=7.727e+02, percent-clipped=0.0 +2023-03-29 11:22:52,386 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70030.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 11:23:05,929 INFO [train.py:892] (1/4) Epoch 38, batch 1400, loss[loss=0.1539, simple_loss=0.2345, pruned_loss=0.03662, over 19789.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2343, pruned_loss=0.03675, over 3947614.74 frames. ], batch size: 211, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:23:41,190 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70050.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:24:30,333 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70072.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:25:00,853 INFO [train.py:892] (1/4) Epoch 38, batch 1450, loss[loss=0.1373, simple_loss=0.2226, pruned_loss=0.02603, over 19710.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2338, pruned_loss=0.03625, over 3949349.15 frames. 
], batch size: 101, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:25:26,138 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70098.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:25:31,317 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2481, 3.1147, 4.7796, 3.5271, 3.8190, 3.5950, 2.6452, 2.8023], + device='cuda:1'), covar=tensor([0.0905, 0.3107, 0.0424, 0.1105, 0.1717, 0.1433, 0.2494, 0.2565], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0399, 0.0356, 0.0295, 0.0381, 0.0392, 0.0386, 0.0357], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 11:25:53,047 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70110.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:26:07,517 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9305, 2.8901, 3.2268, 2.7839, 3.2323, 3.2034, 3.8531, 4.1992], + device='cuda:1'), covar=tensor([0.0592, 0.1651, 0.1434, 0.2211, 0.1671, 0.1509, 0.0600, 0.0547], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0246, 0.0275, 0.0260, 0.0307, 0.0264, 0.0239, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 11:26:38,225 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.553e+02 3.599e+02 4.475e+02 5.242e+02 1.040e+03, threshold=8.950e+02, percent-clipped=4.0 +2023-03-29 11:26:50,789 INFO [train.py:892] (1/4) Epoch 38, batch 1500, loss[loss=0.1698, simple_loss=0.26, pruned_loss=0.03981, over 19864.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2343, pruned_loss=0.03641, over 3948941.67 frames. ], batch size: 51, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:28:06,732 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5215, 2.6106, 2.8167, 2.4922, 2.9494, 2.9339, 3.3895, 3.7048], + device='cuda:1'), covar=tensor([0.0714, 0.1672, 0.1656, 0.2209, 0.1575, 0.1486, 0.0692, 0.0653], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0245, 0.0274, 0.0259, 0.0306, 0.0264, 0.0239, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 11:28:12,771 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70171.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:28:47,254 INFO [train.py:892] (1/4) Epoch 38, batch 1550, loss[loss=0.1464, simple_loss=0.2226, pruned_loss=0.03514, over 19839.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2355, pruned_loss=0.03683, over 3947645.02 frames. ], batch size: 128, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:29:53,444 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.5380, 6.0130, 6.1197, 5.9158, 5.7874, 5.7589, 5.7717, 5.6451], + device='cuda:1'), covar=tensor([0.1422, 0.1029, 0.0806, 0.1116, 0.0648, 0.0750, 0.1786, 0.1874], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0344, 0.0378, 0.0309, 0.0288, 0.0290, 0.0369, 0.0400], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 11:30:25,194 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.741e+02 3.595e+02 4.117e+02 4.902e+02 9.648e+02, threshold=8.233e+02, percent-clipped=1.0 +2023-03-29 11:30:39,828 INFO [train.py:892] (1/4) Epoch 38, batch 1600, loss[loss=0.1526, simple_loss=0.2335, pruned_loss=0.03583, over 19595.00 frames. 
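The [scaling.py:679] "Whitening" records above and below compare a per-group whiteness statistic of the activations against a limit (2.0 or 5.0 here). A plausible reconstruction of such a metric, normalized so that perfectly white features score exactly 1.0; this formulation is an assumption, not the actual scaling.py code.

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """x: (num_frames, num_channels). Returns 1.0 iff each group's feature
    covariance is a multiple of the identity, larger otherwise."""
    n, c = x.shape
    d = c // num_groups
    g = x.reshape(n, num_groups, d).transpose(0, 1)      # (groups, frames, d)
    g = g - g.mean(dim=1, keepdim=True)
    cov = g.transpose(1, 2) @ g / n                      # per-group covariance
    num = (cov * cov).sum(dim=(1, 2)) * d                # d * trace(C @ C)
    den = cov.diagonal(dim1=1, dim2=2).sum(dim=1) ** 2   # trace(C) ** 2
    return (num / den).mean().item()

Under this reading, whitening_metric(feats, num_groups=8) yields the "metric=... vs. limit=..." comparison, and values such as metric=1.14 mean the activations are already close to white.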
], tot_loss[loss=0.1546, simple_loss=0.2357, pruned_loss=0.03671, over 3947793.42 frames. ], batch size: 45, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:31:18,073 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2761, 3.1422, 3.3185, 2.9275, 3.5790, 3.5220, 4.1273, 4.5576], + device='cuda:1'), covar=tensor([0.0581, 0.1602, 0.1660, 0.2327, 0.1687, 0.1385, 0.0647, 0.0574], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0246, 0.0275, 0.0260, 0.0307, 0.0264, 0.0238, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 11:31:34,414 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70260.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:32:29,337 INFO [train.py:892] (1/4) Epoch 38, batch 1650, loss[loss=0.1231, simple_loss=0.1996, pruned_loss=0.02331, over 19769.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2344, pruned_loss=0.03631, over 3948387.08 frames. ], batch size: 113, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:33:05,037 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-03-29 11:33:52,818 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70321.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:34:12,144 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.483e+02 3.682e+02 4.356e+02 5.206e+02 1.027e+03, threshold=8.712e+02, percent-clipped=2.0 +2023-03-29 11:34:13,255 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70330.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:34:27,858 INFO [train.py:892] (1/4) Epoch 38, batch 1700, loss[loss=0.1703, simple_loss=0.2453, pruned_loss=0.04763, over 19771.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2357, pruned_loss=0.03704, over 3948671.63 frames. ], batch size: 241, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:35:08,975 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-29 11:35:59,705 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70378.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 11:36:12,507 INFO [train.py:892] (1/4) Epoch 38, batch 1750, loss[loss=0.1408, simple_loss=0.227, pruned_loss=0.02733, over 19792.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2365, pruned_loss=0.03755, over 3949220.99 frames. ], batch size: 68, lr: 4.08e-03, grad_scale: 16.0 +2023-03-29 11:36:29,314 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-29 11:37:39,920 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.846e+02 3.811e+02 4.551e+02 5.592e+02 1.733e+03, threshold=9.102e+02, percent-clipped=3.0 +2023-03-29 11:37:40,771 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7036, 3.5161, 3.8254, 2.8380, 3.9442, 3.1805, 3.5232, 3.8269], + device='cuda:1'), covar=tensor([0.0957, 0.0414, 0.0473, 0.0847, 0.0434, 0.0507, 0.0484, 0.0406], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0092, 0.0089, 0.0115, 0.0084, 0.0087, 0.0085, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 11:37:51,294 INFO [train.py:892] (1/4) Epoch 38, batch 1800, loss[loss=0.1535, simple_loss=0.2345, pruned_loss=0.03626, over 19809.00 frames. 
], tot_loss[loss=0.1553, simple_loss=0.236, pruned_loss=0.03728, over 3950389.84 frames. ], batch size: 174, lr: 4.08e-03, grad_scale: 16.0 +2023-03-29 11:38:48,028 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70466.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:39:26,451 INFO [train.py:892] (1/4) Epoch 38, batch 1850, loss[loss=0.1484, simple_loss=0.2372, pruned_loss=0.02978, over 19851.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2373, pruned_loss=0.03698, over 3947633.98 frames. ], batch size: 58, lr: 4.08e-03, grad_scale: 16.0 +2023-03-29 11:40:29,567 INFO [train.py:892] (1/4) Epoch 39, batch 0, loss[loss=0.1425, simple_loss=0.2167, pruned_loss=0.03422, over 19729.00 frames. ], tot_loss[loss=0.1425, simple_loss=0.2167, pruned_loss=0.03422, over 19729.00 frames. ], batch size: 63, lr: 4.03e-03, grad_scale: 16.0 +2023-03-29 11:40:29,567 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 11:41:05,157 INFO [train.py:926] (1/4) Epoch 39, validation: loss=0.1858, simple_loss=0.25, pruned_loss=0.06079, over 2883724.00 frames. +2023-03-29 11:41:05,158 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 11:42:37,525 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.196e+02 3.428e+02 3.863e+02 4.713e+02 6.861e+02, threshold=7.726e+02, percent-clipped=0.0 +2023-03-29 11:42:50,041 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5964, 3.5631, 3.5626, 3.7288, 3.5357, 3.8065, 3.6478, 3.8270], + device='cuda:1'), covar=tensor([0.1051, 0.0639, 0.0795, 0.0588, 0.0966, 0.0780, 0.0736, 0.0697], + device='cuda:1'), in_proj_covar=tensor([0.0162, 0.0186, 0.0210, 0.0185, 0.0185, 0.0169, 0.0160, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 11:43:01,762 INFO [train.py:892] (1/4) Epoch 39, batch 50, loss[loss=0.1546, simple_loss=0.2329, pruned_loss=0.03814, over 19778.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2273, pruned_loss=0.0347, over 892532.83 frames. ], batch size: 66, lr: 4.03e-03, grad_scale: 16.0 +2023-03-29 11:44:59,632 INFO [train.py:892] (1/4) Epoch 39, batch 100, loss[loss=0.1457, simple_loss=0.2285, pruned_loss=0.03147, over 19751.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2317, pruned_loss=0.03619, over 1569820.19 frames. ], batch size: 102, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:45:57,277 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70616.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:46:29,517 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.491e+02 3.857e+02 4.460e+02 4.985e+02 1.010e+03, threshold=8.919e+02, percent-clipped=5.0 +2023-03-29 11:46:43,717 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2690, 3.5807, 3.7371, 4.3516, 2.9629, 3.2489, 2.8840, 2.7143], + device='cuda:1'), covar=tensor([0.0579, 0.1949, 0.1000, 0.0450, 0.2025, 0.1152, 0.1305, 0.1697], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0328, 0.0250, 0.0208, 0.0249, 0.0212, 0.0222, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 11:46:53,612 INFO [train.py:892] (1/4) Epoch 39, batch 150, loss[loss=0.1709, simple_loss=0.2504, pruned_loss=0.04568, over 19795.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2341, pruned_loss=0.03622, over 2096226.35 frames. 
], batch size: 185, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:48:51,192 INFO [train.py:892] (1/4) Epoch 39, batch 200, loss[loss=0.1513, simple_loss=0.2414, pruned_loss=0.03056, over 19652.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2356, pruned_loss=0.03632, over 2506668.56 frames. ], batch size: 57, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:49:57,835 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8384, 1.6410, 1.8267, 1.8325, 1.7323, 1.8368, 1.7016, 1.8433], + device='cuda:1'), covar=tensor([0.0393, 0.0373, 0.0374, 0.0350, 0.0487, 0.0329, 0.0510, 0.0365], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0088, 0.0090, 0.0085, 0.0097, 0.0090, 0.0106, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 11:50:22,138 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.699e+02 3.379e+02 4.143e+02 5.094e+02 9.478e+02, threshold=8.286e+02, percent-clipped=1.0 +2023-03-29 11:50:49,029 INFO [train.py:892] (1/4) Epoch 39, batch 250, loss[loss=0.1467, simple_loss=0.2259, pruned_loss=0.0337, over 19823.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2345, pruned_loss=0.03615, over 2827528.32 frames. ], batch size: 146, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:51:47,985 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70766.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:52:29,878 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0057, 3.3369, 3.2604, 3.2845, 3.1091, 3.1759, 3.1175, 3.2190], + device='cuda:1'), covar=tensor([0.0294, 0.0277, 0.0295, 0.0269, 0.0361, 0.0301, 0.0321, 0.0402], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0087, 0.0090, 0.0084, 0.0097, 0.0090, 0.0106, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 11:52:42,429 INFO [train.py:892] (1/4) Epoch 39, batch 300, loss[loss=0.1455, simple_loss=0.2233, pruned_loss=0.03385, over 19834.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2361, pruned_loss=0.03658, over 3074701.95 frames. ], batch size: 52, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:52:55,433 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9092, 4.5887, 4.5995, 4.3418, 4.8630, 3.1642, 3.9763, 2.3958], + device='cuda:1'), covar=tensor([0.0164, 0.0188, 0.0136, 0.0191, 0.0130, 0.1003, 0.0778, 0.1508], + device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0138, 0.0122, 0.0137, 0.0145, 0.0130], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 11:53:37,631 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70814.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:53:42,830 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-29 11:54:14,736 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.476e+02 3.994e+02 4.897e+02 8.420e+02, threshold=7.989e+02, percent-clipped=1.0 +2023-03-29 11:54:43,441 INFO [train.py:892] (1/4) Epoch 39, batch 350, loss[loss=0.1468, simple_loss=0.2228, pruned_loss=0.03538, over 19784.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2361, pruned_loss=0.03676, over 3268885.31 frames. 
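The [zipformer.py:625] records list, per encoder stack, a warmup window in batches (warmup_begin/warmup_end), the global batch_count, and which layers are stochastically dropped this step; this late in training num_to_drop is usually 0 with an occasional 1, which suggests a small residual drop probability after warmup. A hedged sketch of such a schedule; the probabilities below are assumptions, not values read out of zipformer.py.

import random

def pick_layers_to_drop(batch_count, warmup_begin, warmup_end,
                        num_layers, base_prob=0.5, residual_prob=0.075):
    """Return the set of layer indices to drop for this step."""
    span = max(warmup_end - warmup_begin, 1.0)
    frac_left = max(0.0, min(1.0, (warmup_end - batch_count) / span))
    p = residual_prob + (base_prob - residual_prob) * frac_left  # decays to floor
    return {i for i in range(num_layers) if random.random() < p}
    # e.g. set() on most late-training steps, occasionally {3} as logged above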
], batch size: 152, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:56:39,300 INFO [train.py:892] (1/4) Epoch 39, batch 400, loss[loss=0.1473, simple_loss=0.2266, pruned_loss=0.03399, over 19792.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.236, pruned_loss=0.03668, over 3420055.62 frames. ], batch size: 224, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:57:38,250 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70916.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:58:08,797 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 3.611e+02 4.279e+02 5.158e+02 1.128e+03, threshold=8.559e+02, percent-clipped=5.0 +2023-03-29 11:58:31,799 INFO [train.py:892] (1/4) Epoch 39, batch 450, loss[loss=0.1619, simple_loss=0.2282, pruned_loss=0.04776, over 19795.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2362, pruned_loss=0.03681, over 3536032.41 frames. ], batch size: 185, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:59:27,836 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70964.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:00:29,101 INFO [train.py:892] (1/4) Epoch 39, batch 500, loss[loss=0.1476, simple_loss=0.2235, pruned_loss=0.03583, over 19559.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2364, pruned_loss=0.03692, over 3625963.30 frames. ], batch size: 47, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:00:32,170 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70992.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:02:01,495 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.739e+02 3.809e+02 4.349e+02 5.400e+02 1.199e+03, threshold=8.697e+02, percent-clipped=2.0 +2023-03-29 12:02:02,620 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71030.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:02:27,667 INFO [train.py:892] (1/4) Epoch 39, batch 550, loss[loss=0.1539, simple_loss=0.2446, pruned_loss=0.03161, over 19731.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2369, pruned_loss=0.03719, over 3697068.77 frames. ], batch size: 63, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:02:54,040 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71053.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:04:21,951 INFO [train.py:892] (1/4) Epoch 39, batch 600, loss[loss=0.1446, simple_loss=0.2258, pruned_loss=0.03176, over 19860.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2363, pruned_loss=0.03722, over 3754857.43 frames. ], batch size: 46, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:04:22,886 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71091.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:04:30,366 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.95 vs. 
limit=5.0 +2023-03-29 12:05:55,695 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.519e+02 3.574e+02 4.295e+02 5.387e+02 1.048e+03, threshold=8.591e+02, percent-clipped=4.0 +2023-03-29 12:05:59,044 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9039, 2.7578, 3.1215, 2.7121, 3.2438, 3.1191, 3.7525, 4.1039], + device='cuda:1'), covar=tensor([0.0644, 0.1782, 0.1591, 0.2086, 0.1758, 0.1594, 0.0616, 0.0617], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0244, 0.0271, 0.0258, 0.0305, 0.0262, 0.0236, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:06:26,060 INFO [train.py:892] (1/4) Epoch 39, batch 650, loss[loss=0.1575, simple_loss=0.2384, pruned_loss=0.03825, over 19736.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2351, pruned_loss=0.03675, over 3799081.02 frames. ], batch size: 63, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:07:21,464 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-29 12:07:51,171 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8242, 2.7631, 3.0163, 2.6637, 3.1778, 3.0933, 3.6453, 4.0178], + device='cuda:1'), covar=tensor([0.0596, 0.1690, 0.1582, 0.2174, 0.1516, 0.1485, 0.0698, 0.0558], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0243, 0.0270, 0.0257, 0.0303, 0.0261, 0.0235, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:07:51,552 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0 +2023-03-29 12:08:25,809 INFO [train.py:892] (1/4) Epoch 39, batch 700, loss[loss=0.1532, simple_loss=0.2277, pruned_loss=0.03938, over 19781.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2347, pruned_loss=0.0365, over 3833664.14 frames. ], batch size: 131, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:08:29,755 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.53 vs. limit=5.0 +2023-03-29 12:09:56,524 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.218e+02 3.479e+02 4.128e+02 5.224e+02 8.144e+02, threshold=8.256e+02, percent-clipped=0.0 +2023-03-29 12:10:19,503 INFO [train.py:892] (1/4) Epoch 39, batch 750, loss[loss=0.1485, simple_loss=0.2231, pruned_loss=0.03697, over 19811.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2343, pruned_loss=0.0363, over 3859126.68 frames. ], batch size: 96, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:12:12,852 INFO [train.py:892] (1/4) Epoch 39, batch 800, loss[loss=0.1384, simple_loss=0.2161, pruned_loss=0.0303, over 19804.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2335, pruned_loss=0.03608, over 3878902.33 frames. ], batch size: 40, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:13:43,657 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.446e+02 3.401e+02 4.193e+02 5.158e+02 9.890e+02, threshold=8.386e+02, percent-clipped=3.0 +2023-03-29 12:14:08,927 INFO [train.py:892] (1/4) Epoch 39, batch 850, loss[loss=0.1733, simple_loss=0.2582, pruned_loss=0.04422, over 19650.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2339, pruned_loss=0.03605, over 3894426.98 frames. 
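The [zipformer.py:1454] dumps scattered through this log report, per attention head, the entropy of the attention distribution (alongside diagnostic covariance tensors): a quick health check that heads neither collapse onto a single key (entropy near 0) nor stay uniform. A minimal sketch of the statistic itself; the surrounding covar/in_proj_covar bookkeeping is separate diagnostics and is omitted.

import torch

def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    """attn_weights: (num_heads, query_len, key_len), each row a softmax
    distribution over keys. Returns mean entropy per head, shape (num_heads,)."""
    p = attn_weights.clamp(min=1e-20)       # guard log(0)
    entropy = -(p * p.log()).sum(dim=-1)    # entropy per (head, query)
    return entropy.mean(dim=-1)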
], batch size: 330, lr: 4.00e-03, grad_scale: 16.0 +2023-03-29 12:14:25,042 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71348.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:15:11,068 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9998, 2.5913, 3.0666, 3.2306, 3.7291, 4.1375, 3.9933, 4.0323], + device='cuda:1'), covar=tensor([0.0988, 0.1619, 0.1298, 0.0693, 0.0413, 0.0260, 0.0387, 0.0459], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0169, 0.0182, 0.0155, 0.0140, 0.0136, 0.0130, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 12:15:33,192 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5670, 1.9935, 2.2447, 2.7073, 3.0857, 3.1860, 3.0874, 3.1369], + device='cuda:1'), covar=tensor([0.1106, 0.1800, 0.1628, 0.0839, 0.0563, 0.0396, 0.0524, 0.0564], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0169, 0.0181, 0.0155, 0.0140, 0.0136, 0.0130, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 12:15:53,711 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71386.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:16:03,937 INFO [train.py:892] (1/4) Epoch 39, batch 900, loss[loss=0.1416, simple_loss=0.2119, pruned_loss=0.03562, over 19821.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2335, pruned_loss=0.03581, over 3906037.06 frames. ], batch size: 123, lr: 4.00e-03, grad_scale: 16.0 +2023-03-29 12:17:36,398 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.339e+02 3.295e+02 3.866e+02 4.930e+02 9.721e+02, threshold=7.731e+02, percent-clipped=3.0 +2023-03-29 12:18:00,865 INFO [train.py:892] (1/4) Epoch 39, batch 950, loss[loss=0.1313, simple_loss=0.2166, pruned_loss=0.02305, over 19890.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2338, pruned_loss=0.03566, over 3915702.87 frames. ], batch size: 87, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:19:56,190 INFO [train.py:892] (1/4) Epoch 39, batch 1000, loss[loss=0.1574, simple_loss=0.2312, pruned_loss=0.04178, over 19833.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2336, pruned_loss=0.03553, over 3921379.19 frames. ], batch size: 101, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:21:18,015 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-29 12:21:28,780 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.429e+02 3.687e+02 4.367e+02 5.594e+02 8.534e+02, threshold=8.735e+02, percent-clipped=3.0 +2023-03-29 12:21:52,909 INFO [train.py:892] (1/4) Epoch 39, batch 1050, loss[loss=0.1462, simple_loss=0.2277, pruned_loss=0.0324, over 19749.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2338, pruned_loss=0.03593, over 3928819.97 frames. ], batch size: 44, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:23:09,915 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. 
limit=2.0 +2023-03-29 12:23:33,757 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4784, 3.3294, 5.0845, 3.7358, 3.9815, 3.8547, 2.7403, 2.9774], + device='cuda:1'), covar=tensor([0.0920, 0.3184, 0.0372, 0.1073, 0.1882, 0.1393, 0.2794, 0.2593], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0394, 0.0352, 0.0293, 0.0377, 0.0387, 0.0382, 0.0355], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:23:45,725 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4722, 2.4378, 2.4852, 2.4940, 2.5450, 2.5857, 2.4407, 2.6014], + device='cuda:1'), covar=tensor([0.0370, 0.0397, 0.0436, 0.0359, 0.0443, 0.0352, 0.0490, 0.0380], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0088, 0.0091, 0.0086, 0.0098, 0.0090, 0.0106, 0.0080], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 12:23:51,087 INFO [train.py:892] (1/4) Epoch 39, batch 1100, loss[loss=0.1504, simple_loss=0.2445, pruned_loss=0.02818, over 19740.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2343, pruned_loss=0.03619, over 3933794.31 frames. ], batch size: 92, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:24:28,088 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3317, 2.8318, 3.3662, 2.8022, 3.4067, 3.3369, 4.2110, 4.5821], + device='cuda:1'), covar=tensor([0.0597, 0.1943, 0.1573, 0.2502, 0.2052, 0.1768, 0.0550, 0.0568], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0246, 0.0274, 0.0261, 0.0307, 0.0264, 0.0239, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:24:43,329 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.71 vs. limit=5.0 +2023-03-29 12:24:48,543 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71616.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:25:24,248 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.877e+02 3.477e+02 4.222e+02 5.368e+02 8.846e+02, threshold=8.443e+02, percent-clipped=1.0 +2023-03-29 12:25:46,053 INFO [train.py:892] (1/4) Epoch 39, batch 1150, loss[loss=0.1545, simple_loss=0.2381, pruned_loss=0.03546, over 19734.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2344, pruned_loss=0.03637, over 3937077.68 frames. 
], batch size: 47, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:26:02,269 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71648.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:26:28,943 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2984, 3.3031, 2.0606, 3.8288, 3.5281, 3.7959, 3.8880, 3.1263], + device='cuda:1'), covar=tensor([0.0634, 0.0670, 0.1624, 0.0651, 0.0585, 0.0494, 0.0560, 0.0804], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0148, 0.0146, 0.0158, 0.0137, 0.0142, 0.0152, 0.0151], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:27:08,319 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71677.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:27:26,124 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71686.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:27:38,332 INFO [train.py:892] (1/4) Epoch 39, batch 1200, loss[loss=0.1588, simple_loss=0.2399, pruned_loss=0.03884, over 19802.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2335, pruned_loss=0.03608, over 3941279.59 frames. ], batch size: 288, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:27:51,687 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71696.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:29:12,302 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.421e+02 3.452e+02 4.248e+02 4.889e+02 8.041e+02, threshold=8.496e+02, percent-clipped=0.0 +2023-03-29 12:29:21,153 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71734.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:29:35,343 INFO [train.py:892] (1/4) Epoch 39, batch 1250, loss[loss=0.1631, simple_loss=0.2293, pruned_loss=0.04846, over 19731.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2324, pruned_loss=0.03589, over 3944353.77 frames. ], batch size: 140, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:31:32,673 INFO [train.py:892] (1/4) Epoch 39, batch 1300, loss[loss=0.1878, simple_loss=0.2631, pruned_loss=0.05628, over 19750.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.233, pruned_loss=0.03624, over 3945773.48 frames. ], batch size: 250, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:33:04,693 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 3.787e+02 4.345e+02 5.084e+02 1.281e+03, threshold=8.689e+02, percent-clipped=1.0 +2023-03-29 12:33:29,414 INFO [train.py:892] (1/4) Epoch 39, batch 1350, loss[loss=0.1485, simple_loss=0.2215, pruned_loss=0.0378, over 19822.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2332, pruned_loss=0.0364, over 3947447.07 frames. ], batch size: 187, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:35:29,326 INFO [train.py:892] (1/4) Epoch 39, batch 1400, loss[loss=0.134, simple_loss=0.215, pruned_loss=0.02654, over 19738.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2334, pruned_loss=0.03612, over 3947047.25 frames. 
], batch size: 140, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:36:08,886 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9253, 3.6710, 3.7849, 3.9434, 3.6956, 3.9291, 3.9882, 4.2094], + device='cuda:1'), covar=tensor([0.0734, 0.0536, 0.0586, 0.0464, 0.0836, 0.0635, 0.0500, 0.0344], + device='cuda:1'), in_proj_covar=tensor([0.0159, 0.0185, 0.0207, 0.0182, 0.0182, 0.0165, 0.0158, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 12:36:38,181 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71919.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:37:03,110 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.539e+02 3.590e+02 4.169e+02 5.056e+02 8.405e+02, threshold=8.337e+02, percent-clipped=0.0 +2023-03-29 12:37:17,493 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6823, 4.8984, 2.7952, 5.0634, 5.3012, 2.2865, 4.6038, 3.6094], + device='cuda:1'), covar=tensor([0.0556, 0.0698, 0.2582, 0.0468, 0.0456, 0.2676, 0.0788, 0.0900], + device='cuda:1'), in_proj_covar=tensor([0.0241, 0.0267, 0.0236, 0.0286, 0.0266, 0.0207, 0.0245, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 12:37:21,719 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9477, 3.2670, 2.8469, 2.3866, 2.8576, 3.2248, 3.1709, 3.1585], + device='cuda:1'), covar=tensor([0.0322, 0.0319, 0.0320, 0.0534, 0.0347, 0.0267, 0.0224, 0.0249], + device='cuda:1'), in_proj_covar=tensor([0.0113, 0.0106, 0.0107, 0.0107, 0.0111, 0.0095, 0.0096, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 12:37:24,642 INFO [train.py:892] (1/4) Epoch 39, batch 1450, loss[loss=0.1487, simple_loss=0.238, pruned_loss=0.02976, over 19803.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2329, pruned_loss=0.03593, over 3948371.42 frames. ], batch size: 65, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:37:47,193 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-29 12:38:39,262 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71972.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:38:58,372 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71980.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:39:08,193 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-29 12:39:24,728 INFO [train.py:892] (1/4) Epoch 39, batch 1500, loss[loss=0.1486, simple_loss=0.2205, pruned_loss=0.03834, over 19813.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.2326, pruned_loss=0.03553, over 3949265.90 frames. ], batch size: 148, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:41:04,303 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.372e+02 3.694e+02 4.331e+02 4.992e+02 8.331e+02, threshold=8.661e+02, percent-clipped=0.0 +2023-03-29 12:41:29,081 INFO [train.py:892] (1/4) Epoch 39, batch 1550, loss[loss=0.1866, simple_loss=0.2711, pruned_loss=0.05105, over 19782.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.2326, pruned_loss=0.03534, over 3949053.14 frames. 
], batch size: 263, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:41:48,096 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-29 12:42:00,388 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7661, 2.7807, 2.9599, 2.6333, 3.0647, 2.9877, 3.5884, 3.9164], + device='cuda:1'), covar=tensor([0.0630, 0.1735, 0.1611, 0.2258, 0.1662, 0.1688, 0.0670, 0.0628], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0247, 0.0275, 0.0262, 0.0308, 0.0266, 0.0240, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:42:02,606 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.40 vs. limit=2.0 +2023-03-29 12:42:38,335 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-03-29 12:42:40,613 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.4408, 1.1987, 1.3960, 1.4152, 1.2704, 1.3380, 1.1522, 1.4047], + device='cuda:1'), covar=tensor([0.0413, 0.0437, 0.0377, 0.0369, 0.0519, 0.0375, 0.0587, 0.0357], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0089, 0.0091, 0.0085, 0.0098, 0.0091, 0.0107, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 12:43:25,416 INFO [train.py:892] (1/4) Epoch 39, batch 1600, loss[loss=0.1609, simple_loss=0.2446, pruned_loss=0.03863, over 19666.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2348, pruned_loss=0.03593, over 3945643.98 frames. ], batch size: 51, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:44:37,451 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2056, 3.0127, 3.2766, 2.6576, 3.3213, 2.7498, 3.1491, 3.2310], + device='cuda:1'), covar=tensor([0.0542, 0.0564, 0.0468, 0.0784, 0.0375, 0.0572, 0.0538, 0.0377], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0092, 0.0088, 0.0114, 0.0085, 0.0087, 0.0085, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 12:44:50,172 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72129.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:44:53,595 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.272e+02 3.496e+02 4.151e+02 4.701e+02 9.677e+02, threshold=8.302e+02, percent-clipped=1.0 +2023-03-29 12:44:58,204 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.6812, 5.9006, 5.9927, 5.8209, 5.6327, 5.9143, 5.2566, 5.3427], + device='cuda:1'), covar=tensor([0.0396, 0.0457, 0.0418, 0.0472, 0.0572, 0.0524, 0.0743, 0.0996], + device='cuda:1'), in_proj_covar=tensor([0.0288, 0.0306, 0.0316, 0.0277, 0.0285, 0.0266, 0.0282, 0.0331], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:45:19,116 INFO [train.py:892] (1/4) Epoch 39, batch 1650, loss[loss=0.1444, simple_loss=0.222, pruned_loss=0.03336, over 19832.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2335, pruned_loss=0.03535, over 3946993.27 frames. 
], batch size: 202, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:47:09,820 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72190.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:47:10,956 INFO [train.py:892] (1/4) Epoch 39, batch 1700, loss[loss=0.1406, simple_loss=0.2225, pruned_loss=0.02935, over 19740.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2337, pruned_loss=0.03545, over 3946097.46 frames. ], batch size: 99, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:48:41,501 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.323e+02 3.706e+02 4.408e+02 5.454e+02 1.029e+03, threshold=8.816e+02, percent-clipped=3.0 +2023-03-29 12:48:53,957 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6140, 2.6339, 4.7124, 4.0628, 4.5198, 4.6481, 4.4855, 4.3171], + device='cuda:1'), covar=tensor([0.0568, 0.1149, 0.0112, 0.0905, 0.0159, 0.0212, 0.0171, 0.0181], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0107, 0.0092, 0.0154, 0.0089, 0.0102, 0.0092, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:49:01,612 INFO [train.py:892] (1/4) Epoch 39, batch 1750, loss[loss=0.1308, simple_loss=0.2049, pruned_loss=0.02832, over 19903.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.234, pruned_loss=0.03573, over 3946945.57 frames. ], batch size: 116, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:50:03,164 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:50:09,087 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72275.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:50:40,148 INFO [train.py:892] (1/4) Epoch 39, batch 1800, loss[loss=0.1355, simple_loss=0.2155, pruned_loss=0.02768, over 19617.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2346, pruned_loss=0.03611, over 3948122.96 frames. ], batch size: 51, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:50:47,618 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 12:51:36,781 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:51:47,512 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72326.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:51:57,377 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 3.449e+02 3.996e+02 5.050e+02 9.624e+02, threshold=7.991e+02, percent-clipped=1.0 +2023-03-29 12:52:15,295 INFO [train.py:892] (1/4) Epoch 39, batch 1850, loss[loss=0.1425, simple_loss=0.229, pruned_loss=0.02796, over 19821.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2357, pruned_loss=0.03582, over 3948476.16 frames. ], batch size: 57, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:53:18,928 INFO [train.py:892] (1/4) Epoch 40, batch 0, loss[loss=0.1344, simple_loss=0.2191, pruned_loss=0.02486, over 19906.00 frames. ], tot_loss[loss=0.1344, simple_loss=0.2191, pruned_loss=0.02486, over 19906.00 frames. 
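The "Computing validation loss" / "Epoch 40, validation: ..." / "Maximum memory allocated" records that follow correspond to the periodic dev-set pass run at each epoch boundary (and every valid_interval batches). A sketch of that loop; compute_loss here is a hypothetical stand-in for the script's actual loss call.

import torch

def run_validation(model, valid_dl, device):
    model.eval()
    loss_sum, frame_sum = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_dl:
            loss, num_frames = compute_loss(model, batch, device)  # hypothetical helper
            loss_sum += loss.item() * num_frames
            frame_sum += num_frames
    model.train()
    mem_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"Maximum memory allocated so far is {mem_mb}MB")
    return loss_sum / max(frame_sum, 1.0)   # frame-weighted validation loss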
], batch size: 116, lr: 3.93e-03, grad_scale: 8.0 +2023-03-29 12:53:18,929 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 12:53:42,431 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5818, 2.6077, 4.0360, 3.0643, 3.3596, 3.0089, 2.3215, 2.3767], + device='cuda:1'), covar=tensor([0.1351, 0.3508, 0.0619, 0.1224, 0.2028, 0.1855, 0.3024, 0.3368], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0394, 0.0352, 0.0294, 0.0377, 0.0388, 0.0384, 0.0357], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:53:44,413 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7787, 3.3360, 3.6246, 3.1085, 3.7643, 3.7537, 4.5332, 5.0124], + device='cuda:1'), covar=tensor([0.0410, 0.1714, 0.1357, 0.2277, 0.1613, 0.1574, 0.0503, 0.0391], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0245, 0.0274, 0.0260, 0.0307, 0.0265, 0.0239, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:53:52,920 INFO [train.py:926] (1/4) Epoch 40, validation: loss=0.1851, simple_loss=0.2491, pruned_loss=0.0605, over 2883724.00 frames. +2023-03-29 12:53:52,921 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 12:54:43,606 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0 +2023-03-29 12:55:00,994 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8191, 4.8497, 5.2232, 4.9065, 5.0769, 4.6969, 4.9789, 4.6765], + device='cuda:1'), covar=tensor([0.1535, 0.1487, 0.0846, 0.1395, 0.0798, 0.0903, 0.1760, 0.2086], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0346, 0.0381, 0.0313, 0.0287, 0.0294, 0.0372, 0.0401], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 12:55:30,566 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72387.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:55:53,219 INFO [train.py:892] (1/4) Epoch 40, batch 50, loss[loss=0.151, simple_loss=0.2342, pruned_loss=0.03392, over 19824.00 frames. ], tot_loss[loss=0.1463, simple_loss=0.2258, pruned_loss=0.03339, over 890652.02 frames. ], batch size: 93, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 12:57:04,556 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3424, 4.8702, 4.9529, 4.7133, 5.2625, 3.2829, 4.2733, 2.5636], + device='cuda:1'), covar=tensor([0.0157, 0.0196, 0.0144, 0.0195, 0.0130, 0.0988, 0.0832, 0.1481], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0151, 0.0116, 0.0137, 0.0122, 0.0137, 0.0145, 0.0130], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 12:57:12,956 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.328e+02 3.429e+02 4.154e+02 4.820e+02 1.115e+03, threshold=8.307e+02, percent-clipped=2.0 +2023-03-29 12:57:46,523 INFO [train.py:892] (1/4) Epoch 40, batch 100, loss[loss=0.1497, simple_loss=0.2276, pruned_loss=0.03586, over 19838.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2282, pruned_loss=0.03408, over 1569593.65 frames. ], batch size: 137, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 12:58:34,272 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-03-29 12:59:12,875 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 12:59:14,253 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72485.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:59:37,246 INFO [train.py:892] (1/4) Epoch 40, batch 150, loss[loss=0.1486, simple_loss=0.2346, pruned_loss=0.03129, over 19829.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2292, pruned_loss=0.03415, over 2097735.72 frames. ], batch size: 75, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:00:58,604 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.053e+02 3.352e+02 3.945e+02 4.917e+02 7.870e+02, threshold=7.891e+02, percent-clipped=0.0 +2023-03-29 13:01:01,984 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8805, 2.9501, 1.8250, 3.3916, 3.0811, 3.2775, 3.3667, 2.6943], + device='cuda:1'), covar=tensor([0.0699, 0.0783, 0.1732, 0.0703, 0.0715, 0.0604, 0.0629, 0.0888], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0151, 0.0147, 0.0161, 0.0139, 0.0144, 0.0155, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:01:36,371 INFO [train.py:892] (1/4) Epoch 40, batch 200, loss[loss=0.1462, simple_loss=0.2231, pruned_loss=0.03464, over 19812.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2288, pruned_loss=0.03383, over 2509119.59 frames. ], batch size: 167, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:02:44,222 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72575.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:03:24,722 INFO [train.py:892] (1/4) Epoch 40, batch 250, loss[loss=0.1678, simple_loss=0.2585, pruned_loss=0.03854, over 19656.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2302, pruned_loss=0.0342, over 2829317.29 frames. ], batch size: 57, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:04:28,482 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72623.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:04:44,496 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.269e+02 3.429e+02 3.885e+02 4.899e+02 8.561e+02, threshold=7.769e+02, percent-clipped=1.0 +2023-03-29 13:05:16,284 INFO [train.py:892] (1/4) Epoch 40, batch 300, loss[loss=0.1503, simple_loss=0.2341, pruned_loss=0.03322, over 19885.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2317, pruned_loss=0.03439, over 3076063.42 frames. ], batch size: 52, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:06:40,532 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72682.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:07:13,660 INFO [train.py:892] (1/4) Epoch 40, batch 350, loss[loss=0.1439, simple_loss=0.2287, pruned_loss=0.02958, over 19803.00 frames. ], tot_loss[loss=0.151, simple_loss=0.2324, pruned_loss=0.03479, over 3270409.33 frames. ], batch size: 51, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:08:29,530 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.535e+02 3.481e+02 4.095e+02 5.199e+02 1.202e+03, threshold=8.190e+02, percent-clipped=3.0 +2023-03-29 13:08:53,005 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-03-29 13:09:02,640 INFO [train.py:892] (1/4) Epoch 40, batch 400, loss[loss=0.1287, simple_loss=0.2068, pruned_loss=0.02526, over 19753.00 frames. 
], tot_loss[loss=0.1494, simple_loss=0.2308, pruned_loss=0.03399, over 3422586.12 frames. ], batch size: 188, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:09:03,539 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3327, 4.2119, 4.5812, 4.2292, 3.8986, 4.4324, 4.2712, 4.6470], + device='cuda:1'), covar=tensor([0.0695, 0.0359, 0.0337, 0.0359, 0.0967, 0.0524, 0.0486, 0.0326], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0228, 0.0228, 0.0241, 0.0211, 0.0253, 0.0242, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:09:05,829 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3303, 2.8954, 3.4355, 3.4753, 3.8525, 4.4242, 4.2269, 4.1958], + device='cuda:1'), covar=tensor([0.0862, 0.1494, 0.1157, 0.0628, 0.0496, 0.0221, 0.0374, 0.0446], + device='cuda:1'), in_proj_covar=tensor([0.0166, 0.0171, 0.0184, 0.0156, 0.0143, 0.0137, 0.0131, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 13:10:20,225 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3448, 4.1314, 4.1227, 3.8971, 4.3371, 3.0615, 3.6312, 2.1734], + device='cuda:1'), covar=tensor([0.0200, 0.0241, 0.0157, 0.0210, 0.0151, 0.1035, 0.0679, 0.1606], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0151, 0.0117, 0.0139, 0.0123, 0.0138, 0.0146, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 13:10:34,213 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72785.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:10:56,582 INFO [train.py:892] (1/4) Epoch 40, batch 450, loss[loss=0.1423, simple_loss=0.2252, pruned_loss=0.0297, over 19846.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2311, pruned_loss=0.03447, over 3540468.86 frames. ], batch size: 49, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:11:20,965 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4412, 3.6596, 3.9043, 4.4970, 2.9399, 3.3087, 3.0061, 2.7484], + device='cuda:1'), covar=tensor([0.0556, 0.2008, 0.0925, 0.0453, 0.2133, 0.1218, 0.1282, 0.1730], + device='cuda:1'), in_proj_covar=tensor([0.0251, 0.0327, 0.0251, 0.0208, 0.0250, 0.0213, 0.0222, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 13:12:04,948 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3989, 5.6676, 5.7272, 5.5291, 5.4142, 5.6578, 5.0853, 5.0674], + device='cuda:1'), covar=tensor([0.0472, 0.0441, 0.0445, 0.0459, 0.0656, 0.0515, 0.0680, 0.1065], + device='cuda:1'), in_proj_covar=tensor([0.0284, 0.0303, 0.0312, 0.0273, 0.0281, 0.0262, 0.0279, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:12:17,043 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.344e+02 3.525e+02 4.138e+02 5.025e+02 1.485e+03, threshold=8.275e+02, percent-clipped=3.0 +2023-03-29 13:12:23,691 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72833.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:12:56,035 INFO [train.py:892] (1/4) Epoch 40, batch 500, loss[loss=0.1481, simple_loss=0.2219, pruned_loss=0.03717, over 19823.00 frames. 
], tot_loss[loss=0.1509, simple_loss=0.2318, pruned_loss=0.03495, over 3630256.59 frames. ], batch size: 147, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:14:11,258 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2480, 3.4831, 3.3243, 3.4643, 3.3092, 3.3114, 3.0449, 3.4671], + device='cuda:1'), covar=tensor([0.0307, 0.0268, 0.0341, 0.0251, 0.0285, 0.0298, 0.0384, 0.0371], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0088, 0.0090, 0.0085, 0.0098, 0.0090, 0.0107, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 13:14:44,552 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72894.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:14:47,529 INFO [train.py:892] (1/4) Epoch 40, batch 550, loss[loss=0.1509, simple_loss=0.2336, pruned_loss=0.03406, over 19721.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.2313, pruned_loss=0.03527, over 3702486.58 frames. ], batch size: 62, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:16:09,031 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.263e+02 3.703e+02 4.460e+02 5.204e+02 8.287e+02, threshold=8.920e+02, percent-clipped=1.0 +2023-03-29 13:16:45,774 INFO [train.py:892] (1/4) Epoch 40, batch 600, loss[loss=0.1568, simple_loss=0.2428, pruned_loss=0.03537, over 19666.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2324, pruned_loss=0.03532, over 3755187.28 frames. ], batch size: 43, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:17:06,946 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72955.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:17:26,556 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72964.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 13:18:08,816 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72982.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:18:38,516 INFO [train.py:892] (1/4) Epoch 40, batch 650, loss[loss=0.1805, simple_loss=0.2756, pruned_loss=0.04272, over 19842.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2324, pruned_loss=0.03521, over 3797719.74 frames. ], batch size: 56, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:19:43,125 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73025.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 13:19:43,791 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-29 13:19:47,005 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7377, 2.9165, 4.9097, 4.2731, 4.5454, 4.8324, 4.6611, 4.5239], + device='cuda:1'), covar=tensor([0.0539, 0.0990, 0.0105, 0.0798, 0.0141, 0.0180, 0.0161, 0.0150], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0106, 0.0091, 0.0154, 0.0090, 0.0102, 0.0092, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:19:55,719 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73030.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:19:57,045 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.367e+02 3.602e+02 4.054e+02 4.696e+02 9.081e+02, threshold=8.108e+02, percent-clipped=1.0 +2023-03-29 13:20:29,386 INFO [train.py:892] (1/4) Epoch 40, batch 700, loss[loss=0.1449, simple_loss=0.226, pruned_loss=0.0319, over 19822.00 frames. 
], tot_loss[loss=0.1509, simple_loss=0.2319, pruned_loss=0.03491, over 3831756.11 frames. ], batch size: 187, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:21:47,684 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2857, 3.0926, 4.8074, 3.6176, 3.7373, 3.6439, 2.5991, 2.7756], + device='cuda:1'), covar=tensor([0.0910, 0.3460, 0.0445, 0.1132, 0.1956, 0.1552, 0.2881, 0.2867], + device='cuda:1'), in_proj_covar=tensor([0.0359, 0.0401, 0.0358, 0.0298, 0.0384, 0.0393, 0.0389, 0.0361], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:22:25,649 INFO [train.py:892] (1/4) Epoch 40, batch 750, loss[loss=0.1572, simple_loss=0.2481, pruned_loss=0.03309, over 19697.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2319, pruned_loss=0.03475, over 3859041.59 frames. ], batch size: 325, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:23:43,988 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.740e+02 3.647e+02 4.174e+02 4.953e+02 1.017e+03, threshold=8.348e+02, percent-clipped=2.0 +2023-03-29 13:24:17,046 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6192, 4.6117, 2.8464, 4.8882, 5.1148, 2.3759, 4.4768, 3.8743], + device='cuda:1'), covar=tensor([0.0570, 0.0861, 0.2633, 0.0723, 0.0471, 0.2619, 0.0831, 0.0806], + device='cuda:1'), in_proj_covar=tensor([0.0241, 0.0267, 0.0237, 0.0285, 0.0266, 0.0207, 0.0245, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 13:24:18,106 INFO [train.py:892] (1/4) Epoch 40, batch 800, loss[loss=0.1436, simple_loss=0.219, pruned_loss=0.03415, over 19535.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.2321, pruned_loss=0.03481, over 3879537.83 frames. ], batch size: 46, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:26:08,380 INFO [train.py:892] (1/4) Epoch 40, batch 850, loss[loss=0.1491, simple_loss=0.2269, pruned_loss=0.03562, over 19872.00 frames. ], tot_loss[loss=0.1504, simple_loss=0.2318, pruned_loss=0.03445, over 3895355.29 frames. ], batch size: 138, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:26:55,207 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0558, 3.8846, 3.8302, 3.6278, 4.0376, 2.9617, 3.3712, 1.9941], + device='cuda:1'), covar=tensor([0.0213, 0.0247, 0.0173, 0.0210, 0.0174, 0.1062, 0.0719, 0.1652], + device='cuda:1'), in_proj_covar=tensor([0.0109, 0.0152, 0.0117, 0.0139, 0.0123, 0.0138, 0.0146, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 13:27:30,137 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.270e+02 3.428e+02 4.036e+02 4.943e+02 8.954e+02, threshold=8.072e+02, percent-clipped=2.0 +2023-03-29 13:27:39,959 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.02 vs. limit=5.0 +2023-03-29 13:28:06,255 INFO [train.py:892] (1/4) Epoch 40, batch 900, loss[loss=0.1512, simple_loss=0.228, pruned_loss=0.0372, over 19761.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2328, pruned_loss=0.03531, over 3906716.09 frames. 
], batch size: 217, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:28:17,278 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73250.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:29:53,994 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73293.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:29:59,525 INFO [train.py:892] (1/4) Epoch 40, batch 950, loss[loss=0.1471, simple_loss=0.2162, pruned_loss=0.03895, over 19844.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2335, pruned_loss=0.03554, over 3915663.58 frames. ], batch size: 142, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:30:56,411 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73320.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 13:31:21,679 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.304e+02 3.673e+02 4.396e+02 5.131e+02 1.124e+03, threshold=8.792e+02, percent-clipped=1.0 +2023-03-29 13:31:53,775 INFO [train.py:892] (1/4) Epoch 40, batch 1000, loss[loss=0.1402, simple_loss=0.2273, pruned_loss=0.02658, over 19831.00 frames. ], tot_loss[loss=0.1522, simple_loss=0.2338, pruned_loss=0.03535, over 3921953.81 frames. ], batch size: 75, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:32:03,370 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4644, 1.8271, 2.1637, 2.6266, 2.9707, 3.0751, 2.9252, 3.0043], + device='cuda:1'), covar=tensor([0.1039, 0.1902, 0.1551, 0.0835, 0.0522, 0.0407, 0.0521, 0.0526], + device='cuda:1'), in_proj_covar=tensor([0.0166, 0.0172, 0.0185, 0.0157, 0.0144, 0.0138, 0.0132, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 13:32:15,176 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73354.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:32:53,975 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0591, 2.5197, 2.9601, 3.1405, 3.6582, 3.9938, 3.8613, 3.9273], + device='cuda:1'), covar=tensor([0.0892, 0.1593, 0.1299, 0.0757, 0.0413, 0.0245, 0.0361, 0.0366], + device='cuda:1'), in_proj_covar=tensor([0.0166, 0.0172, 0.0185, 0.0157, 0.0143, 0.0138, 0.0132, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 13:33:43,262 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-29 13:33:44,989 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6864, 2.7038, 1.7499, 3.1011, 2.8755, 2.9902, 3.0878, 2.5289], + device='cuda:1'), covar=tensor([0.0763, 0.0802, 0.1636, 0.0770, 0.0661, 0.0628, 0.0690, 0.0911], + device='cuda:1'), in_proj_covar=tensor([0.0149, 0.0149, 0.0146, 0.0160, 0.0138, 0.0143, 0.0154, 0.0152], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:33:49,230 INFO [train.py:892] (1/4) Epoch 40, batch 1050, loss[loss=0.147, simple_loss=0.2229, pruned_loss=0.03555, over 19899.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2347, pruned_loss=0.03576, over 3924509.25 frames. 
], batch size: 94, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:35:09,643 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.437e+02 3.473e+02 4.249e+02 4.984e+02 7.164e+02, threshold=8.499e+02, percent-clipped=0.0 +2023-03-29 13:35:39,491 INFO [train.py:892] (1/4) Epoch 40, batch 1100, loss[loss=0.1233, simple_loss=0.2008, pruned_loss=0.02291, over 19851.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2339, pruned_loss=0.03593, over 3931730.16 frames. ], batch size: 112, lr: 3.90e-03, grad_scale: 16.0 +2023-03-29 13:35:59,822 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.42 vs. limit=5.0 +2023-03-29 13:36:55,163 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7045, 4.7946, 5.1128, 4.8893, 4.9597, 4.6828, 4.8853, 4.6358], + device='cuda:1'), covar=tensor([0.1412, 0.1604, 0.0803, 0.1228, 0.0803, 0.0925, 0.1688, 0.2101], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0348, 0.0379, 0.0313, 0.0288, 0.0296, 0.0373, 0.0404], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 13:37:10,043 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4381, 2.6146, 4.5239, 3.9447, 4.1898, 4.4971, 4.2426, 4.2163], + device='cuda:1'), covar=tensor([0.0594, 0.1055, 0.0113, 0.0627, 0.0178, 0.0222, 0.0203, 0.0186], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:37:31,469 INFO [train.py:892] (1/4) Epoch 40, batch 1150, loss[loss=0.1337, simple_loss=0.213, pruned_loss=0.02724, over 19864.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2337, pruned_loss=0.03574, over 3937098.21 frames. ], batch size: 64, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:38:51,538 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.496e+02 3.768e+02 4.402e+02 5.430e+02 9.903e+02, threshold=8.803e+02, percent-clipped=3.0 +2023-03-29 13:39:11,635 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6645, 4.5272, 5.0238, 4.5239, 4.1228, 4.7914, 4.6426, 5.1563], + device='cuda:1'), covar=tensor([0.0791, 0.0363, 0.0341, 0.0386, 0.0870, 0.0489, 0.0465, 0.0300], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0229, 0.0227, 0.0241, 0.0211, 0.0253, 0.0242, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:39:27,939 INFO [train.py:892] (1/4) Epoch 40, batch 1200, loss[loss=0.1366, simple_loss=0.2196, pruned_loss=0.02682, over 19800.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2341, pruned_loss=0.03554, over 3939458.33 frames. ], batch size: 200, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:39:36,971 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73550.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:40:43,661 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.50 vs. limit=5.0 +2023-03-29 13:41:06,772 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73590.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:41:18,984 INFO [train.py:892] (1/4) Epoch 40, batch 1250, loss[loss=0.1311, simple_loss=0.2137, pruned_loss=0.02427, over 19883.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2338, pruned_loss=0.03552, over 3942232.24 frames. 
], batch size: 88, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:41:23,978 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73598.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:41:32,818 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4048, 4.6236, 4.6753, 4.5254, 4.3696, 4.6356, 4.1679, 4.1943], + device='cuda:1'), covar=tensor([0.0535, 0.0529, 0.0487, 0.0516, 0.0664, 0.0516, 0.0707, 0.1009], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0305, 0.0313, 0.0274, 0.0284, 0.0264, 0.0279, 0.0326], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:42:14,280 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73620.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 13:42:14,479 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1263, 3.2476, 2.0329, 3.3329, 3.4174, 1.6538, 2.8295, 2.6295], + device='cuda:1'), covar=tensor([0.0894, 0.0841, 0.2776, 0.0813, 0.0664, 0.2607, 0.1195, 0.1109], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0267, 0.0237, 0.0287, 0.0267, 0.0209, 0.0245, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 13:42:38,866 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.357e+02 3.537e+02 4.071e+02 5.247e+02 9.229e+02, threshold=8.143e+02, percent-clipped=1.0 +2023-03-29 13:43:11,853 INFO [train.py:892] (1/4) Epoch 40, batch 1300, loss[loss=0.135, simple_loss=0.2173, pruned_loss=0.0264, over 19786.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2347, pruned_loss=0.03548, over 3941621.00 frames. ], batch size: 87, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:43:18,810 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73649.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:43:23,224 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73651.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:43:59,952 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 13:44:03,517 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73668.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 13:44:29,915 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.22 vs. limit=5.0 +2023-03-29 13:44:43,111 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-29 13:45:04,575 INFO [train.py:892] (1/4) Epoch 40, batch 1350, loss[loss=0.2039, simple_loss=0.2828, pruned_loss=0.06254, over 19630.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2344, pruned_loss=0.0354, over 3943722.84 frames. ], batch size: 367, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:45:50,842 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4900, 3.6100, 2.4306, 4.2051, 3.8339, 4.1271, 4.1876, 3.2443], + device='cuda:1'), covar=tensor([0.0642, 0.0625, 0.1428, 0.0668, 0.0539, 0.0427, 0.0580, 0.0832], + device='cuda:1'), in_proj_covar=tensor([0.0149, 0.0149, 0.0146, 0.0160, 0.0138, 0.0143, 0.0154, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:46:03,772 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. 
limit=2.0 +2023-03-29 13:46:21,765 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.112e+02 3.360e+02 4.018e+02 4.695e+02 9.172e+02, threshold=8.036e+02, percent-clipped=1.0 +2023-03-29 13:46:24,758 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73732.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:46:46,643 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4607, 1.9719, 2.2209, 2.6682, 3.0102, 3.1084, 2.9737, 3.1135], + device='cuda:1'), covar=tensor([0.1113, 0.1707, 0.1531, 0.0808, 0.0578, 0.0401, 0.0521, 0.0458], + device='cuda:1'), in_proj_covar=tensor([0.0165, 0.0170, 0.0182, 0.0156, 0.0142, 0.0136, 0.0131, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 13:46:56,650 INFO [train.py:892] (1/4) Epoch 40, batch 1400, loss[loss=0.1625, simple_loss=0.2362, pruned_loss=0.04444, over 19875.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2328, pruned_loss=0.03497, over 3944303.57 frames. ], batch size: 64, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:48:43,155 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73793.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:48:49,045 INFO [train.py:892] (1/4) Epoch 40, batch 1450, loss[loss=0.1569, simple_loss=0.2383, pruned_loss=0.03771, over 19793.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2328, pruned_loss=0.03496, over 3946210.57 frames. ], batch size: 211, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:49:59,935 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1227, 4.3040, 2.5202, 4.5668, 4.8022, 2.0240, 3.8936, 3.4776], + device='cuda:1'), covar=tensor([0.0733, 0.0756, 0.2802, 0.0650, 0.0454, 0.2967, 0.1105, 0.0963], + device='cuda:1'), in_proj_covar=tensor([0.0241, 0.0266, 0.0236, 0.0286, 0.0265, 0.0207, 0.0244, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 13:50:11,122 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.549e+02 3.468e+02 4.023e+02 4.768e+02 7.367e+02, threshold=8.047e+02, percent-clipped=0.0 +2023-03-29 13:50:45,956 INFO [train.py:892] (1/4) Epoch 40, batch 1500, loss[loss=0.1588, simple_loss=0.2351, pruned_loss=0.04127, over 19791.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2326, pruned_loss=0.03478, over 3947317.27 frames. ], batch size: 224, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:52:34,858 INFO [train.py:892] (1/4) Epoch 40, batch 1550, loss[loss=0.1636, simple_loss=0.2409, pruned_loss=0.04311, over 19891.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2329, pruned_loss=0.03503, over 3947148.27 frames. ], batch size: 62, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:53:51,998 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.479e+02 3.617e+02 4.156e+02 5.212e+02 8.023e+02, threshold=8.312e+02, percent-clipped=0.0 +2023-03-29 13:54:26,945 INFO [train.py:892] (1/4) Epoch 40, batch 1600, loss[loss=0.1413, simple_loss=0.221, pruned_loss=0.03076, over 19801.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2337, pruned_loss=0.03528, over 3947916.95 frames. 
], batch size: 191, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:54:27,679 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73946.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:54:33,769 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73949.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:55:23,359 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7411, 3.9460, 2.6309, 4.4995, 4.1426, 4.4908, 4.4957, 3.4872], + device='cuda:1'), covar=tensor([0.0588, 0.0556, 0.1489, 0.0643, 0.0510, 0.0374, 0.0564, 0.0745], + device='cuda:1'), in_proj_covar=tensor([0.0149, 0.0150, 0.0146, 0.0160, 0.0137, 0.0143, 0.0154, 0.0152], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 13:55:39,434 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.62 vs. limit=5.0 +2023-03-29 13:56:21,331 INFO [train.py:892] (1/4) Epoch 40, batch 1650, loss[loss=0.1376, simple_loss=0.223, pruned_loss=0.0261, over 19832.00 frames. ], tot_loss[loss=0.1512, simple_loss=0.2327, pruned_loss=0.03486, over 3949542.82 frames. ], batch size: 101, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:56:27,282 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73997.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:57:48,767 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.164e+02 3.493e+02 4.073e+02 5.082e+02 8.769e+02, threshold=8.147e+02, percent-clipped=1.0 +2023-03-29 13:58:18,998 INFO [train.py:892] (1/4) Epoch 40, batch 1700, loss[loss=0.1434, simple_loss=0.2188, pruned_loss=0.03396, over 19726.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2331, pruned_loss=0.03498, over 3948176.40 frames. ], batch size: 221, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:59:01,445 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74064.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:59:50,931 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74088.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:00:06,296 INFO [train.py:892] (1/4) Epoch 40, batch 1750, loss[loss=0.141, simple_loss=0.2096, pruned_loss=0.03619, over 19818.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2338, pruned_loss=0.03554, over 3947284.37 frames. 
], batch size: 123, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 14:00:43,317 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0684, 3.0259, 1.9738, 3.5466, 3.2952, 3.4768, 3.5842, 2.9220], + device='cuda:1'), covar=tensor([0.0686, 0.0699, 0.1710, 0.0635, 0.0589, 0.0516, 0.0573, 0.0809], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0150, 0.0147, 0.0161, 0.0138, 0.0144, 0.0155, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:01:00,647 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4487, 3.0893, 3.4553, 2.9994, 3.7375, 3.7710, 4.2307, 4.6974], + device='cuda:1'), covar=tensor([0.0577, 0.1772, 0.1529, 0.2309, 0.1612, 0.1252, 0.0617, 0.0652], + device='cuda:1'), in_proj_covar=tensor([0.0262, 0.0248, 0.0275, 0.0261, 0.0307, 0.0267, 0.0241, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:01:03,986 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74125.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:01:13,844 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.104e+02 3.603e+02 4.226e+02 5.080e+02 8.016e+02, threshold=8.451e+02, percent-clipped=0.0 +2023-03-29 14:01:40,437 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7875, 2.7132, 2.7962, 2.3425, 2.9118, 2.5273, 2.8350, 2.7726], + device='cuda:1'), covar=tensor([0.0609, 0.0595, 0.0604, 0.0878, 0.0496, 0.0514, 0.0501, 0.0444], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0092, 0.0089, 0.0114, 0.0085, 0.0088, 0.0085, 0.0080], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:01:43,816 INFO [train.py:892] (1/4) Epoch 40, batch 1800, loss[loss=0.2022, simple_loss=0.2846, pruned_loss=0.05995, over 19639.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2345, pruned_loss=0.0361, over 3946153.34 frames. ], batch size: 343, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 14:01:57,011 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2121, 5.5264, 5.6922, 5.4572, 5.4594, 5.3500, 5.3924, 5.1838], + device='cuda:1'), covar=tensor([0.1428, 0.1195, 0.0793, 0.1158, 0.0635, 0.0720, 0.1752, 0.1707], + device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0345, 0.0378, 0.0313, 0.0286, 0.0295, 0.0374, 0.0403], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 14:03:18,061 INFO [train.py:892] (1/4) Epoch 40, batch 1850, loss[loss=0.1503, simple_loss=0.2432, pruned_loss=0.0287, over 19841.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2359, pruned_loss=0.0361, over 3946138.92 frames. ], batch size: 58, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 14:04:24,216 INFO [train.py:892] (1/4) Epoch 41, batch 0, loss[loss=0.1321, simple_loss=0.2075, pruned_loss=0.02836, over 19772.00 frames. ], tot_loss[loss=0.1321, simple_loss=0.2075, pruned_loss=0.02836, over 19772.00 frames. 
], batch size: 46, lr: 3.83e-03, grad_scale: 16.0 +2023-03-29 14:04:24,217 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 14:04:45,463 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7558, 2.4158, 2.9570, 2.6058, 3.1499, 3.2762, 2.9465, 3.1517], + device='cuda:1'), covar=tensor([0.0579, 0.0869, 0.0138, 0.0350, 0.0155, 0.0242, 0.0217, 0.0224], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:04:57,764 INFO [train.py:926] (1/4) Epoch 41, validation: loss=0.1869, simple_loss=0.2502, pruned_loss=0.06181, over 2883724.00 frames. +2023-03-29 14:04:57,765 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 14:06:10,072 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.348e+02 3.328e+02 3.806e+02 4.731e+02 7.088e+02, threshold=7.612e+02, percent-clipped=0.0 +2023-03-29 14:06:30,758 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74240.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:06:46,865 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74246.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:06:57,520 INFO [train.py:892] (1/4) Epoch 41, batch 50, loss[loss=0.1323, simple_loss=0.2145, pruned_loss=0.02504, over 19751.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.2283, pruned_loss=0.03441, over 891506.99 frames. ], batch size: 139, lr: 3.83e-03, grad_scale: 16.0 +2023-03-29 14:07:15,946 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 14:08:04,732 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-29 14:08:22,596 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.8812, 6.1463, 6.1950, 6.0912, 5.9546, 6.1799, 5.5160, 5.5666], + device='cuda:1'), covar=tensor([0.0401, 0.0404, 0.0440, 0.0423, 0.0528, 0.0416, 0.0630, 0.0906], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0307, 0.0316, 0.0276, 0.0284, 0.0267, 0.0281, 0.0327], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:08:35,494 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74294.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:08:49,408 INFO [train.py:892] (1/4) Epoch 41, batch 100, loss[loss=0.1627, simple_loss=0.2473, pruned_loss=0.03905, over 19687.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2281, pruned_loss=0.03325, over 1569614.34 frames. 
], batch size: 325, lr: 3.83e-03, grad_scale: 16.0 +2023-03-29 14:08:50,569 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74301.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:09:37,323 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.6652, 5.9715, 6.0516, 5.9229, 5.7499, 6.0158, 5.3257, 5.3906], + device='cuda:1'), covar=tensor([0.0459, 0.0491, 0.0464, 0.0479, 0.0496, 0.0532, 0.0763, 0.1061], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0307, 0.0317, 0.0276, 0.0285, 0.0268, 0.0281, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:09:54,183 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.051e+02 3.488e+02 4.125e+02 4.651e+02 7.521e+02, threshold=8.249e+02, percent-clipped=0.0 +2023-03-29 14:09:55,143 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.6340, 4.3535, 4.4071, 4.7048, 4.3691, 4.8012, 4.7742, 4.9446], + device='cuda:1'), covar=tensor([0.0725, 0.0418, 0.0536, 0.0369, 0.0827, 0.0440, 0.0457, 0.0300], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0187, 0.0209, 0.0184, 0.0185, 0.0167, 0.0160, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 14:10:04,557 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-03-29 14:10:38,042 INFO [train.py:892] (1/4) Epoch 41, batch 150, loss[loss=0.137, simple_loss=0.2196, pruned_loss=0.02716, over 19767.00 frames. ], tot_loss[loss=0.148, simple_loss=0.2287, pruned_loss=0.03367, over 2098063.90 frames. ], batch size: 247, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:11:50,673 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8972, 2.7798, 5.0363, 4.3595, 4.6538, 4.9418, 4.8731, 4.6409], + device='cuda:1'), covar=tensor([0.0517, 0.1021, 0.0099, 0.0813, 0.0151, 0.0186, 0.0140, 0.0142], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:12:02,985 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74388.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:12:35,709 INFO [train.py:892] (1/4) Epoch 41, batch 200, loss[loss=0.137, simple_loss=0.2234, pruned_loss=0.02526, over 19675.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2309, pruned_loss=0.03416, over 2507517.87 frames. ], batch size: 52, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:13:19,123 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74420.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:13:44,025 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.532e+02 3.589e+02 4.027e+02 4.762e+02 8.323e+02, threshold=8.054e+02, percent-clipped=1.0 +2023-03-29 14:13:55,055 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74436.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:14:27,156 INFO [train.py:892] (1/4) Epoch 41, batch 250, loss[loss=0.2186, simple_loss=0.2931, pruned_loss=0.07204, over 19707.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2316, pruned_loss=0.03381, over 2826718.36 frames. 
], batch size: 337, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:14:39,937 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1686, 5.4423, 5.5212, 5.3621, 5.1827, 5.4869, 4.9299, 4.9429], + device='cuda:1'), covar=tensor([0.0441, 0.0426, 0.0398, 0.0417, 0.0530, 0.0416, 0.0649, 0.0930], + device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0305, 0.0314, 0.0274, 0.0283, 0.0266, 0.0279, 0.0325], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:16:20,806 INFO [train.py:892] (1/4) Epoch 41, batch 300, loss[loss=0.1585, simple_loss=0.2347, pruned_loss=0.0411, over 19800.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2322, pruned_loss=0.03415, over 3074493.62 frames. ], batch size: 150, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:17:27,567 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.493e+02 3.756e+02 4.487e+02 5.432e+02 9.829e+02, threshold=8.974e+02, percent-clipped=6.0 +2023-03-29 14:18:14,850 INFO [train.py:892] (1/4) Epoch 41, batch 350, loss[loss=0.1614, simple_loss=0.2421, pruned_loss=0.04034, over 19780.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2336, pruned_loss=0.03486, over 3268443.74 frames. ], batch size: 215, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:19:56,179 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74595.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:19:59,628 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74596.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:20:06,650 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 14:20:09,427 INFO [train.py:892] (1/4) Epoch 41, batch 400, loss[loss=0.1432, simple_loss=0.2228, pruned_loss=0.03178, over 19809.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2341, pruned_loss=0.03504, over 3418987.17 frames. ], batch size: 147, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:20:44,982 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0021, 3.3708, 3.5002, 3.9819, 2.8245, 3.2441, 2.5092, 2.5421], + device='cuda:1'), covar=tensor([0.0542, 0.1676, 0.0921, 0.0442, 0.1812, 0.0953, 0.1408, 0.1602], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0328, 0.0253, 0.0211, 0.0252, 0.0215, 0.0223, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 14:20:56,764 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-29 14:21:17,911 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.495e+02 3.791e+02 4.431e+02 5.113e+02 8.392e+02, threshold=8.862e+02, percent-clipped=0.0 +2023-03-29 14:21:54,660 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9748, 2.8490, 3.1217, 2.6998, 3.2274, 3.2308, 3.7964, 4.2318], + device='cuda:1'), covar=tensor([0.0636, 0.1752, 0.1635, 0.2322, 0.1826, 0.1543, 0.0671, 0.0586], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0246, 0.0274, 0.0261, 0.0306, 0.0264, 0.0240, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:22:06,035 INFO [train.py:892] (1/4) Epoch 41, batch 450, loss[loss=0.142, simple_loss=0.2288, pruned_loss=0.02765, over 19744.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2353, pruned_loss=0.03562, over 3535898.32 frames. 
], batch size: 89, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:22:17,853 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74656.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:24:01,238 INFO [train.py:892] (1/4) Epoch 41, batch 500, loss[loss=0.1402, simple_loss=0.2248, pruned_loss=0.02784, over 19798.00 frames. ], tot_loss[loss=0.1522, simple_loss=0.234, pruned_loss=0.03514, over 3626643.94 frames. ], batch size: 236, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:24:47,953 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74720.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:25:12,467 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.085e+02 3.392e+02 3.989e+02 4.649e+02 9.968e+02, threshold=7.978e+02, percent-clipped=1.0 +2023-03-29 14:25:55,692 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0212, 3.8780, 4.2775, 3.9151, 3.6125, 4.1466, 3.9832, 4.3277], + device='cuda:1'), covar=tensor([0.0768, 0.0352, 0.0356, 0.0398, 0.1174, 0.0583, 0.0484, 0.0371], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0229, 0.0228, 0.0240, 0.0212, 0.0253, 0.0241, 0.0227], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:25:56,882 INFO [train.py:892] (1/4) Epoch 41, batch 550, loss[loss=0.1411, simple_loss=0.2144, pruned_loss=0.0339, over 19833.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2341, pruned_loss=0.03556, over 3698450.24 frames. ], batch size: 190, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:26:35,990 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74768.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:27:02,423 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7266, 2.7933, 2.9215, 2.5817, 3.0661, 3.0086, 3.5538, 3.9147], + device='cuda:1'), covar=tensor([0.0662, 0.1661, 0.1598, 0.2337, 0.1710, 0.1668, 0.0702, 0.0583], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0248, 0.0276, 0.0263, 0.0309, 0.0266, 0.0241, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:27:49,004 INFO [train.py:892] (1/4) Epoch 41, batch 600, loss[loss=0.1489, simple_loss=0.2281, pruned_loss=0.03489, over 19673.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.233, pruned_loss=0.03521, over 3755455.39 frames. ], batch size: 73, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:28:17,243 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2416, 2.6236, 4.4915, 3.9376, 4.2804, 4.4478, 4.2896, 4.1268], + device='cuda:1'), covar=tensor([0.0641, 0.1091, 0.0112, 0.0719, 0.0176, 0.0244, 0.0181, 0.0194], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0103, 0.0094, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:28:53,132 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.538e+02 3.664e+02 4.346e+02 5.320e+02 9.628e+02, threshold=8.692e+02, percent-clipped=3.0 +2023-03-29 14:29:38,230 INFO [train.py:892] (1/4) Epoch 41, batch 650, loss[loss=0.1482, simple_loss=0.2306, pruned_loss=0.03287, over 19768.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2338, pruned_loss=0.03571, over 3798053.53 frames. 
], batch size: 155, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:30:02,512 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7445, 3.8625, 2.2837, 3.9833, 4.1480, 1.9049, 3.4046, 3.1930], + device='cuda:1'), covar=tensor([0.0767, 0.0869, 0.2865, 0.0963, 0.0699, 0.2891, 0.1221, 0.0941], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0268, 0.0237, 0.0288, 0.0266, 0.0209, 0.0246, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 14:31:21,294 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74896.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:31:26,157 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2442, 2.7308, 3.4335, 3.3596, 3.8711, 4.3676, 4.2270, 4.3263], + device='cuda:1'), covar=tensor([0.0963, 0.1611, 0.1182, 0.0736, 0.0432, 0.0294, 0.0391, 0.0482], + device='cuda:1'), in_proj_covar=tensor([0.0166, 0.0171, 0.0183, 0.0158, 0.0142, 0.0138, 0.0131, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 14:31:32,904 INFO [train.py:892] (1/4) Epoch 41, batch 700, loss[loss=0.172, simple_loss=0.2668, pruned_loss=0.03861, over 19679.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2341, pruned_loss=0.03577, over 3832457.39 frames. ], batch size: 56, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:31:51,788 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74909.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:32:42,144 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.552e+02 3.676e+02 4.188e+02 5.143e+02 8.176e+02, threshold=8.375e+02, percent-clipped=0.0 +2023-03-29 14:33:13,569 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74944.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:33:30,569 INFO [train.py:892] (1/4) Epoch 41, batch 750, loss[loss=0.1215, simple_loss=0.2095, pruned_loss=0.0167, over 19804.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.234, pruned_loss=0.03592, over 3856931.57 frames. ], batch size: 86, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:33:31,467 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74951.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:34:02,569 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74964.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:34:15,029 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74970.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:35:26,034 INFO [train.py:892] (1/4) Epoch 41, batch 800, loss[loss=0.1329, simple_loss=0.2161, pruned_loss=0.0249, over 19810.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2352, pruned_loss=0.03641, over 3876307.16 frames. ], batch size: 149, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:36:21,553 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75025.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:36:36,277 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.639e+02 3.669e+02 4.402e+02 5.474e+02 1.379e+03, threshold=8.804e+02, percent-clipped=2.0 +2023-03-29 14:37:20,380 INFO [train.py:892] (1/4) Epoch 41, batch 850, loss[loss=0.1346, simple_loss=0.218, pruned_loss=0.02558, over 19847.00 frames. 
], tot_loss[loss=0.1541, simple_loss=0.2356, pruned_loss=0.03633, over 3892889.64 frames. ], batch size: 109, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:37:42,848 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5268, 2.6882, 3.8436, 3.1502, 3.2323, 3.0463, 2.2628, 2.4496], + device='cuda:1'), covar=tensor([0.1135, 0.3160, 0.0671, 0.1198, 0.1897, 0.1689, 0.2898, 0.2793], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0401, 0.0355, 0.0297, 0.0382, 0.0395, 0.0388, 0.0361], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:37:56,187 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 14:39:13,355 INFO [train.py:892] (1/4) Epoch 41, batch 900, loss[loss=0.1717, simple_loss=0.2567, pruned_loss=0.04336, over 19689.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2348, pruned_loss=0.0361, over 3905760.81 frames. ], batch size: 315, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:39:44,939 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8633, 2.8070, 2.8971, 2.4206, 3.0486, 2.5550, 2.9868, 2.9709], + device='cuda:1'), covar=tensor([0.0580, 0.0520, 0.0641, 0.0827, 0.0411, 0.0529, 0.0434, 0.0385], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0093, 0.0090, 0.0115, 0.0086, 0.0089, 0.0085, 0.0080], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:39:50,651 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8992, 3.7316, 4.1618, 3.7966, 3.5363, 4.0414, 3.8977, 4.2261], + device='cuda:1'), covar=tensor([0.0803, 0.0399, 0.0368, 0.0404, 0.1171, 0.0556, 0.0501, 0.0367], + device='cuda:1'), in_proj_covar=tensor([0.0288, 0.0230, 0.0230, 0.0241, 0.0212, 0.0254, 0.0243, 0.0229], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:40:21,667 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.262e+02 3.745e+02 4.337e+02 5.272e+02 8.502e+02, threshold=8.673e+02, percent-clipped=0.0 +2023-03-29 14:40:30,132 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9594, 3.6951, 3.8157, 3.9811, 3.7815, 4.0127, 4.0528, 4.2419], + device='cuda:1'), covar=tensor([0.0790, 0.0543, 0.0599, 0.0473, 0.0850, 0.0664, 0.0481, 0.0349], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0190, 0.0211, 0.0186, 0.0186, 0.0170, 0.0161, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 14:41:05,566 INFO [train.py:892] (1/4) Epoch 41, batch 950, loss[loss=0.1402, simple_loss=0.2201, pruned_loss=0.03019, over 19749.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2346, pruned_loss=0.03588, over 3916488.93 frames. 
], batch size: 97, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:42:07,694 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0414, 1.9226, 2.0626, 2.1121, 2.0419, 2.0647, 2.0234, 2.1055], + device='cuda:1'), covar=tensor([0.0440, 0.0412, 0.0386, 0.0347, 0.0492, 0.0385, 0.0489, 0.0372], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0090, 0.0092, 0.0087, 0.0099, 0.0093, 0.0108, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 14:42:10,276 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.08 vs. limit=5.0 +2023-03-29 14:42:59,218 INFO [train.py:892] (1/4) Epoch 41, batch 1000, loss[loss=0.1306, simple_loss=0.2074, pruned_loss=0.02686, over 19731.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.2333, pruned_loss=0.03525, over 3925006.42 frames. ], batch size: 51, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:44:01,298 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75228.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:44:07,910 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.478e+02 3.475e+02 3.874e+02 4.705e+02 1.241e+03, threshold=7.748e+02, percent-clipped=2.0 +2023-03-29 14:44:48,144 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1126, 5.2347, 5.5078, 5.3288, 5.3496, 5.0454, 5.2864, 5.1258], + device='cuda:1'), covar=tensor([0.1401, 0.1765, 0.0930, 0.1316, 0.0781, 0.1099, 0.1877, 0.1867], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0351, 0.0385, 0.0318, 0.0291, 0.0297, 0.0376, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 14:44:53,974 INFO [train.py:892] (1/4) Epoch 41, batch 1050, loss[loss=0.15, simple_loss=0.2366, pruned_loss=0.03167, over 19652.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2343, pruned_loss=0.03589, over 3928796.05 frames. 
], batch size: 79, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:44:54,783 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75251.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:45:01,270 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0039, 4.1179, 2.4447, 4.2483, 4.4442, 2.0222, 3.6628, 3.5665], + device='cuda:1'), covar=tensor([0.0688, 0.0777, 0.2726, 0.0775, 0.0571, 0.2784, 0.1027, 0.0774], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0268, 0.0237, 0.0288, 0.0266, 0.0208, 0.0246, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 14:45:13,429 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5642, 2.6427, 4.7038, 4.0630, 4.4716, 4.7483, 4.5148, 4.3402], + device='cuda:1'), covar=tensor([0.0582, 0.1073, 0.0107, 0.0857, 0.0153, 0.0191, 0.0163, 0.0167], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:45:20,329 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9034, 2.7273, 4.9236, 3.9834, 4.7060, 4.9029, 4.7269, 4.4875], + device='cuda:1'), covar=tensor([0.0543, 0.1100, 0.0100, 0.1044, 0.0138, 0.0195, 0.0160, 0.0174], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:45:26,094 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75265.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:45:38,191 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75270.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:46:19,867 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75289.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:46:40,117 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75299.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:46:43,190 INFO [train.py:892] (1/4) Epoch 41, batch 1100, loss[loss=0.1712, simple_loss=0.2546, pruned_loss=0.04386, over 19650.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.235, pruned_loss=0.03608, over 3932369.32 frames. 
], batch size: 299, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:46:53,601 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2249, 2.4816, 2.2608, 1.6843, 2.3123, 2.4430, 2.3610, 2.4328], + device='cuda:1'), covar=tensor([0.0455, 0.0352, 0.0386, 0.0639, 0.0424, 0.0377, 0.0368, 0.0329], + device='cuda:1'), in_proj_covar=tensor([0.0114, 0.0106, 0.0108, 0.0108, 0.0111, 0.0096, 0.0097, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 14:47:27,565 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:47:51,229 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.168e+02 3.537e+02 4.080e+02 4.857e+02 1.316e+03, threshold=8.160e+02, percent-clipped=3.0 +2023-03-29 14:47:53,853 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75331.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 14:48:39,015 INFO [train.py:892] (1/4) Epoch 41, batch 1150, loss[loss=0.1854, simple_loss=0.2681, pruned_loss=0.05139, over 19712.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2356, pruned_loss=0.03655, over 3932968.82 frames. ], batch size: 325, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:48:46,372 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.8795, 6.1551, 6.2191, 6.0289, 5.9566, 6.1748, 5.5207, 5.5467], + device='cuda:1'), covar=tensor([0.0433, 0.0424, 0.0422, 0.0450, 0.0488, 0.0432, 0.0634, 0.1020], + device='cuda:1'), in_proj_covar=tensor([0.0289, 0.0307, 0.0317, 0.0276, 0.0286, 0.0268, 0.0280, 0.0327], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:49:19,667 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-29 14:50:32,180 INFO [train.py:892] (1/4) Epoch 41, batch 1200, loss[loss=0.1903, simple_loss=0.28, pruned_loss=0.05029, over 19611.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2351, pruned_loss=0.03601, over 3935031.46 frames. ], batch size: 359, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:50:43,860 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-29 14:51:41,593 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 3.559e+02 4.233e+02 5.176e+02 1.064e+03, threshold=8.467e+02, percent-clipped=2.0 +2023-03-29 14:52:27,177 INFO [train.py:892] (1/4) Epoch 41, batch 1250, loss[loss=0.143, simple_loss=0.2175, pruned_loss=0.03421, over 19866.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2352, pruned_loss=0.03611, over 3938882.95 frames. ], batch size: 129, lr: 3.80e-03, grad_scale: 32.0 +2023-03-29 14:52:57,658 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4203, 3.6774, 1.9863, 4.1721, 3.7622, 4.2030, 4.2120, 3.1758], + device='cuda:1'), covar=tensor([0.0652, 0.0565, 0.1843, 0.0712, 0.0599, 0.0448, 0.0590, 0.0847], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0150, 0.0146, 0.0160, 0.0139, 0.0145, 0.0155, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:54:02,935 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. 
limit=2.0 +2023-03-29 14:54:20,110 INFO [train.py:892] (1/4) Epoch 41, batch 1300, loss[loss=0.1436, simple_loss=0.2304, pruned_loss=0.02842, over 19953.00 frames. ], tot_loss[loss=0.1522, simple_loss=0.2337, pruned_loss=0.03534, over 3942637.63 frames. ], batch size: 53, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 14:55:27,415 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.667e+02 3.652e+02 4.285e+02 4.954e+02 9.983e+02, threshold=8.569e+02, percent-clipped=1.0 +2023-03-29 14:55:54,764 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2049, 5.3197, 5.3466, 5.4055, 5.0306, 5.3318, 4.7986, 4.5631], + device='cuda:1'), covar=tensor([0.0929, 0.1112, 0.0990, 0.0772, 0.1199, 0.0995, 0.1364, 0.2555], + device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0308, 0.0318, 0.0277, 0.0288, 0.0270, 0.0283, 0.0329], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 14:56:10,886 INFO [train.py:892] (1/4) Epoch 41, batch 1350, loss[loss=0.1388, simple_loss=0.2122, pruned_loss=0.03264, over 19821.00 frames. ], tot_loss[loss=0.1525, simple_loss=0.2338, pruned_loss=0.03556, over 3943457.23 frames. ], batch size: 127, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 14:56:46,463 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75565.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:57:28,137 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75584.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:58:05,445 INFO [train.py:892] (1/4) Epoch 41, batch 1400, loss[loss=0.1607, simple_loss=0.2362, pruned_loss=0.04264, over 19840.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.2329, pruned_loss=0.03518, over 3944056.90 frames. ], batch size: 161, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 14:58:33,032 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75613.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:58:49,305 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75620.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:59:04,030 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75626.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 14:59:14,854 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.057e+02 3.782e+02 4.383e+02 5.452e+02 9.029e+02, threshold=8.766e+02, percent-clipped=1.0 +2023-03-29 15:00:00,084 INFO [train.py:892] (1/4) Epoch 41, batch 1450, loss[loss=0.1752, simple_loss=0.2531, pruned_loss=0.0486, over 19769.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.2331, pruned_loss=0.03503, over 3945848.97 frames. 
], batch size: 233, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:00:40,611 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75668.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:01:17,239 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3276, 2.6925, 4.5400, 3.9747, 4.3808, 4.5069, 4.2999, 4.1983], + device='cuda:1'), covar=tensor([0.0608, 0.1023, 0.0106, 0.0607, 0.0149, 0.0199, 0.0181, 0.0182], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0091, 0.0103, 0.0093, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 15:01:52,766 INFO [train.py:892] (1/4) Epoch 41, batch 1500, loss[loss=0.1466, simple_loss=0.2247, pruned_loss=0.03423, over 19773.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.234, pruned_loss=0.03556, over 3944984.50 frames. ], batch size: 169, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:03:00,953 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 3.753e+02 4.206e+02 5.021e+02 8.487e+02, threshold=8.413e+02, percent-clipped=0.0 +2023-03-29 15:03:47,906 INFO [train.py:892] (1/4) Epoch 41, batch 1550, loss[loss=0.2528, simple_loss=0.3235, pruned_loss=0.09111, over 19175.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2332, pruned_loss=0.03534, over 3946774.78 frames. ], batch size: 452, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:05:06,415 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5628, 3.4309, 3.7651, 2.8627, 3.8449, 3.1998, 3.4630, 3.8709], + device='cuda:1'), covar=tensor([0.0721, 0.0501, 0.0521, 0.0835, 0.0460, 0.0478, 0.0512, 0.0292], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0094, 0.0091, 0.0115, 0.0086, 0.0090, 0.0086, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 15:05:34,411 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-29 15:05:37,492 INFO [train.py:892] (1/4) Epoch 41, batch 1600, loss[loss=0.1563, simple_loss=0.2442, pruned_loss=0.03418, over 19954.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2333, pruned_loss=0.03511, over 3947094.94 frames. ], batch size: 53, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:06:45,419 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.043e+02 3.453e+02 4.213e+02 4.846e+02 1.121e+03, threshold=8.426e+02, percent-clipped=1.0 +2023-03-29 15:07:20,750 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 15:07:30,000 INFO [train.py:892] (1/4) Epoch 41, batch 1650, loss[loss=0.1449, simple_loss=0.2298, pruned_loss=0.03, over 19591.00 frames. ], tot_loss[loss=0.1508, simple_loss=0.2324, pruned_loss=0.03463, over 3947972.99 frames. 
], batch size: 44, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:08:01,838 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7806, 3.2804, 3.6766, 3.1646, 3.8042, 3.8549, 4.4764, 5.0101], + device='cuda:1'), covar=tensor([0.0486, 0.1548, 0.1332, 0.2176, 0.1762, 0.1372, 0.0560, 0.0521], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0246, 0.0274, 0.0262, 0.0307, 0.0265, 0.0241, 0.0269], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 15:08:44,709 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75884.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:08:51,532 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3626, 4.6169, 4.6808, 4.5112, 4.3605, 4.6318, 4.1884, 4.1587], + device='cuda:1'), covar=tensor([0.0552, 0.0486, 0.0468, 0.0500, 0.0649, 0.0503, 0.0706, 0.1021], + device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0309, 0.0319, 0.0278, 0.0288, 0.0269, 0.0282, 0.0331], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 15:09:22,817 INFO [train.py:892] (1/4) Epoch 41, batch 1700, loss[loss=0.1377, simple_loss=0.2226, pruned_loss=0.02639, over 19836.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2319, pruned_loss=0.03463, over 3949289.97 frames. ], batch size: 52, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:09:35,977 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8891, 2.8197, 2.8951, 2.5167, 2.9966, 2.5599, 2.9636, 2.9261], + device='cuda:1'), covar=tensor([0.0614, 0.0596, 0.0612, 0.0806, 0.0448, 0.0589, 0.0500, 0.0396], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0094, 0.0091, 0.0116, 0.0086, 0.0090, 0.0086, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 15:10:20,683 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75926.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:10:32,267 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.390e+02 3.529e+02 4.151e+02 4.845e+02 7.414e+02, threshold=8.303e+02, percent-clipped=0.0 +2023-03-29 15:10:35,229 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75932.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:11:15,235 INFO [train.py:892] (1/4) Epoch 41, batch 1750, loss[loss=0.1858, simple_loss=0.2631, pruned_loss=0.05422, over 19703.00 frames. ], tot_loss[loss=0.151, simple_loss=0.2321, pruned_loss=0.03499, over 3947929.04 frames. ], batch size: 283, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:12:01,832 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75974.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:12:53,810 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.35 vs. limit=5.0 +2023-03-29 15:13:01,526 INFO [train.py:892] (1/4) Epoch 41, batch 1800, loss[loss=0.1421, simple_loss=0.2173, pruned_loss=0.03342, over 19861.00 frames. ], tot_loss[loss=0.1508, simple_loss=0.232, pruned_loss=0.03478, over 3947303.60 frames. 
], batch size: 46, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:13:57,893 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.757e+02 3.667e+02 4.191e+02 5.134e+02 9.957e+02, threshold=8.381e+02, percent-clipped=3.0 +2023-03-29 15:14:33,339 INFO [train.py:892] (1/4) Epoch 41, batch 1850, loss[loss=0.143, simple_loss=0.234, pruned_loss=0.02598, over 19827.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2328, pruned_loss=0.03472, over 3947950.47 frames. ], batch size: 57, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:15:35,917 INFO [train.py:892] (1/4) Epoch 42, batch 0, loss[loss=0.1422, simple_loss=0.2253, pruned_loss=0.02956, over 19899.00 frames. ], tot_loss[loss=0.1422, simple_loss=0.2253, pruned_loss=0.02956, over 19899.00 frames. ], batch size: 91, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:15:35,918 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 15:16:08,347 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2785, 2.2494, 2.4214, 2.3138, 2.3541, 2.3488, 2.3333, 2.3943], + device='cuda:1'), covar=tensor([0.0414, 0.0401, 0.0350, 0.0385, 0.0495, 0.0417, 0.0499, 0.0425], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0090, 0.0093, 0.0088, 0.0101, 0.0093, 0.0109, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 15:16:12,643 INFO [train.py:926] (1/4) Epoch 42, validation: loss=0.1864, simple_loss=0.2496, pruned_loss=0.06163, over 2883724.00 frames. +2023-03-29 15:16:12,646 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 15:17:12,410 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3292, 3.2063, 3.4115, 2.7997, 3.4121, 2.9197, 3.2971, 3.3520], + device='cuda:1'), covar=tensor([0.0691, 0.0499, 0.0518, 0.0780, 0.0423, 0.0551, 0.0506, 0.0481], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0093, 0.0090, 0.0114, 0.0085, 0.0089, 0.0086, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 15:18:08,445 INFO [train.py:892] (1/4) Epoch 42, batch 50, loss[loss=0.1464, simple_loss=0.2318, pruned_loss=0.03045, over 19789.00 frames. ], tot_loss[loss=0.1442, simple_loss=0.2244, pruned_loss=0.03197, over 892522.64 frames. ], batch size: 241, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:19:07,899 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.581e+02 3.605e+02 4.013e+02 4.746e+02 9.116e+02, threshold=8.026e+02, percent-clipped=1.0 +2023-03-29 15:19:17,072 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-29 15:20:05,975 INFO [train.py:892] (1/4) Epoch 42, batch 100, loss[loss=0.1471, simple_loss=0.2325, pruned_loss=0.03089, over 19704.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2325, pruned_loss=0.03578, over 1569086.36 frames. ], batch size: 78, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:21:58,768 INFO [train.py:892] (1/4) Epoch 42, batch 150, loss[loss=0.1564, simple_loss=0.2394, pruned_loss=0.03674, over 19667.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2305, pruned_loss=0.03462, over 2097826.92 frames. 
], batch size: 330, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:22:57,751 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.293e+02 3.477e+02 4.297e+02 5.129e+02 1.202e+03, threshold=8.594e+02, percent-clipped=2.0 +2023-03-29 15:23:17,358 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3039, 2.5519, 2.3410, 1.8188, 2.4252, 2.5325, 2.4922, 2.5760], + device='cuda:1'), covar=tensor([0.0463, 0.0363, 0.0368, 0.0652, 0.0432, 0.0370, 0.0349, 0.0299], + device='cuda:1'), in_proj_covar=tensor([0.0115, 0.0108, 0.0108, 0.0109, 0.0112, 0.0097, 0.0099, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 15:23:55,608 INFO [train.py:892] (1/4) Epoch 42, batch 200, loss[loss=0.1402, simple_loss=0.2141, pruned_loss=0.03315, over 19875.00 frames. ], tot_loss[loss=0.151, simple_loss=0.2321, pruned_loss=0.03492, over 2508528.91 frames. ], batch size: 159, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:24:27,099 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76269.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 15:25:30,168 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76294.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:25:57,512 INFO [train.py:892] (1/4) Epoch 42, batch 250, loss[loss=0.1544, simple_loss=0.2249, pruned_loss=0.04192, over 19750.00 frames. ], tot_loss[loss=0.151, simple_loss=0.2321, pruned_loss=0.03496, over 2829070.79 frames. ], batch size: 129, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:26:38,873 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76323.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 15:26:55,393 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76330.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 15:26:57,685 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.384e+02 3.381e+02 3.992e+02 4.681e+02 8.709e+02, threshold=7.984e+02, percent-clipped=1.0 +2023-03-29 15:26:58,944 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3670, 4.4514, 2.7133, 4.6468, 4.8354, 2.0786, 4.1683, 3.6226], + device='cuda:1'), covar=tensor([0.0668, 0.0685, 0.2514, 0.0788, 0.0506, 0.2963, 0.0926, 0.0888], + device='cuda:1'), in_proj_covar=tensor([0.0245, 0.0271, 0.0240, 0.0292, 0.0270, 0.0210, 0.0248, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 15:27:53,463 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-29 15:27:54,571 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76355.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:27:55,584 INFO [train.py:892] (1/4) Epoch 42, batch 300, loss[loss=0.1294, simple_loss=0.2092, pruned_loss=0.0248, over 19836.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2321, pruned_loss=0.03456, over 3075803.08 frames. 
], batch size: 171, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:28:02,920 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3488, 4.5467, 2.6929, 4.7201, 4.9701, 2.1209, 4.2615, 3.7610], + device='cuda:1'), covar=tensor([0.0693, 0.0715, 0.2623, 0.0786, 0.0433, 0.2791, 0.0898, 0.0792], + device='cuda:1'), in_proj_covar=tensor([0.0245, 0.0271, 0.0240, 0.0291, 0.0269, 0.0210, 0.0247, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 15:28:18,282 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3220, 2.3002, 2.4461, 2.3909, 2.4010, 2.4179, 2.4017, 2.5129], + device='cuda:1'), covar=tensor([0.0436, 0.0421, 0.0364, 0.0361, 0.0504, 0.0397, 0.0522, 0.0382], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0092, 0.0094, 0.0088, 0.0102, 0.0094, 0.0110, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 15:29:02,742 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76384.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:29:53,466 INFO [train.py:892] (1/4) Epoch 42, batch 350, loss[loss=0.1602, simple_loss=0.2524, pruned_loss=0.03398, over 19691.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2317, pruned_loss=0.03445, over 3269415.34 frames. ], batch size: 55, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:30:28,799 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2857, 5.5183, 5.6035, 5.4800, 5.3128, 5.5790, 5.0368, 5.0184], + device='cuda:1'), covar=tensor([0.0447, 0.0470, 0.0452, 0.0408, 0.0551, 0.0461, 0.0656, 0.0954], + device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0309, 0.0319, 0.0278, 0.0288, 0.0268, 0.0282, 0.0331], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 15:30:58,173 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.636e+02 3.511e+02 4.196e+02 4.977e+02 1.197e+03, threshold=8.393e+02, percent-clipped=1.0 +2023-03-29 15:31:51,713 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0707, 3.0511, 4.7359, 3.6651, 3.7254, 3.5580, 2.6309, 2.7238], + device='cuda:1'), covar=tensor([0.1024, 0.3032, 0.0414, 0.1049, 0.1850, 0.1369, 0.2632, 0.2621], + device='cuda:1'), in_proj_covar=tensor([0.0360, 0.0402, 0.0357, 0.0298, 0.0382, 0.0397, 0.0389, 0.0363], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 15:32:05,172 INFO [train.py:892] (1/4) Epoch 42, batch 400, loss[loss=0.1938, simple_loss=0.2737, pruned_loss=0.05698, over 19604.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2322, pruned_loss=0.0344, over 3420578.53 frames. ], batch size: 367, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:32:14,167 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7206, 4.3907, 4.4568, 4.6570, 4.4159, 4.7525, 4.8202, 4.9716], + device='cuda:1'), covar=tensor([0.0641, 0.0407, 0.0507, 0.0352, 0.0748, 0.0475, 0.0435, 0.0302], + device='cuda:1'), in_proj_covar=tensor([0.0160, 0.0185, 0.0207, 0.0183, 0.0183, 0.0166, 0.0158, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 15:32:42,889 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. 
limit=2.0 +2023-03-29 15:34:11,913 INFO [train.py:892] (1/4) Epoch 42, batch 450, loss[loss=0.1656, simple_loss=0.2408, pruned_loss=0.04516, over 19812.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2326, pruned_loss=0.0344, over 3538394.53 frames. ], batch size: 123, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:34:46,446 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-03-29 15:35:17,339 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.526e+02 3.433e+02 4.142e+02 5.148e+02 8.158e+02, threshold=8.284e+02, percent-clipped=0.0 +2023-03-29 15:35:19,414 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-29 15:35:23,365 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 15:36:19,149 INFO [train.py:892] (1/4) Epoch 42, batch 500, loss[loss=0.2308, simple_loss=0.3098, pruned_loss=0.0759, over 19444.00 frames. ], tot_loss[loss=0.1512, simple_loss=0.2334, pruned_loss=0.03453, over 3629264.67 frames. ], batch size: 396, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:36:38,427 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-29 15:37:31,972 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.4489, 4.4720, 4.8133, 4.5839, 4.7418, 4.4380, 4.5468, 4.3615], + device='cuda:1'), covar=tensor([0.1451, 0.1650, 0.0822, 0.1264, 0.0819, 0.0871, 0.1697, 0.1935], + device='cuda:1'), in_proj_covar=tensor([0.0306, 0.0349, 0.0383, 0.0316, 0.0291, 0.0295, 0.0371, 0.0403], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 15:38:21,691 INFO [train.py:892] (1/4) Epoch 42, batch 550, loss[loss=0.1412, simple_loss=0.2207, pruned_loss=0.03088, over 19900.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.2336, pruned_loss=0.03501, over 3700592.73 frames. ], batch size: 71, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:38:38,425 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2562, 2.5700, 3.2118, 3.4555, 3.8776, 4.4509, 4.3137, 4.4404], + device='cuda:1'), covar=tensor([0.0961, 0.2085, 0.1530, 0.0703, 0.0484, 0.0265, 0.0386, 0.0444], + device='cuda:1'), in_proj_covar=tensor([0.0168, 0.0171, 0.0184, 0.0158, 0.0143, 0.0139, 0.0132, 0.0123], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 15:39:05,734 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76625.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:39:18,275 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.079e+02 3.230e+02 3.815e+02 4.831e+02 8.297e+02, threshold=7.631e+02, percent-clipped=1.0 +2023-03-29 15:40:08,078 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76650.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:40:21,755 INFO [train.py:892] (1/4) Epoch 42, batch 600, loss[loss=0.1475, simple_loss=0.2196, pruned_loss=0.03773, over 19782.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2322, pruned_loss=0.03457, over 3756607.83 frames. 
], batch size: 163, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:41:18,890 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76679.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 15:41:55,827 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7878, 4.5242, 4.5524, 4.7957, 4.4878, 4.9385, 4.8631, 5.0790], + device='cuda:1'), covar=tensor([0.0623, 0.0409, 0.0459, 0.0385, 0.0673, 0.0409, 0.0427, 0.0279], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0186, 0.0209, 0.0184, 0.0184, 0.0167, 0.0159, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 15:42:25,360 INFO [train.py:892] (1/4) Epoch 42, batch 650, loss[loss=0.1376, simple_loss=0.2149, pruned_loss=0.03016, over 19900.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2314, pruned_loss=0.03448, over 3799477.84 frames. ], batch size: 113, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:43:30,261 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.354e+02 4.128e+02 5.229e+02 1.136e+03, threshold=8.256e+02, percent-clipped=6.0 +2023-03-29 15:44:09,602 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2539, 3.3457, 1.9446, 3.9849, 3.5066, 3.9381, 3.9907, 3.0638], + device='cuda:1'), covar=tensor([0.0671, 0.0739, 0.1762, 0.0603, 0.0697, 0.0477, 0.0716, 0.0904], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0151, 0.0146, 0.0160, 0.0139, 0.0144, 0.0155, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 15:44:32,850 INFO [train.py:892] (1/4) Epoch 42, batch 700, loss[loss=0.1486, simple_loss=0.229, pruned_loss=0.03412, over 19737.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2317, pruned_loss=0.03443, over 3832778.50 frames. ], batch size: 71, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:46:12,168 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-29 15:46:33,806 INFO [train.py:892] (1/4) Epoch 42, batch 750, loss[loss=0.164, simple_loss=0.2478, pruned_loss=0.04007, over 19798.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.2306, pruned_loss=0.03411, over 3859224.16 frames. ], batch size: 65, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:47:36,699 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.174e+02 3.338e+02 3.927e+02 4.685e+02 7.677e+02, threshold=7.855e+02, percent-clipped=0.0 +2023-03-29 15:47:59,347 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76839.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:48:42,201 INFO [train.py:892] (1/4) Epoch 42, batch 800, loss[loss=0.1453, simple_loss=0.2257, pruned_loss=0.03243, over 19683.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2317, pruned_loss=0.03447, over 3877243.94 frames. ], batch size: 59, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:50:35,649 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76900.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:50:50,203 INFO [train.py:892] (1/4) Epoch 42, batch 850, loss[loss=0.1445, simple_loss=0.2313, pruned_loss=0.02889, over 19749.00 frames. ], tot_loss[loss=0.1508, simple_loss=0.2322, pruned_loss=0.03471, over 3894143.68 frames. 
], batch size: 84, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:51:39,921 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76925.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:51:54,882 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.468e+02 3.585e+02 3.941e+02 4.611e+02 9.107e+02, threshold=7.882e+02, percent-clipped=2.0 +2023-03-29 15:52:43,949 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76950.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:52:58,365 INFO [train.py:892] (1/4) Epoch 42, batch 900, loss[loss=0.1529, simple_loss=0.2283, pruned_loss=0.03871, over 19794.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2317, pruned_loss=0.03442, over 3905362.98 frames. ], batch size: 191, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:53:41,333 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76973.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:53:57,267 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76979.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:54:37,653 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-03-29 15:54:43,529 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76998.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:55:01,544 INFO [train.py:892] (1/4) Epoch 42, batch 950, loss[loss=0.1577, simple_loss=0.2345, pruned_loss=0.04043, over 19674.00 frames. ], tot_loss[loss=0.1504, simple_loss=0.2318, pruned_loss=0.03446, over 3917029.62 frames. ], batch size: 73, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:55:19,870 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-29 15:55:38,607 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9161, 4.9799, 5.2745, 5.0302, 5.1716, 4.7889, 5.0003, 4.7897], + device='cuda:1'), covar=tensor([0.1476, 0.1582, 0.0852, 0.1260, 0.0748, 0.0951, 0.1793, 0.1905], + device='cuda:1'), in_proj_covar=tensor([0.0310, 0.0356, 0.0388, 0.0320, 0.0295, 0.0299, 0.0377, 0.0410], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 15:55:45,019 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77027.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 15:55:53,605 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.093e+02 3.626e+02 4.310e+02 4.989e+02 8.639e+02, threshold=8.620e+02, percent-clipped=2.0 +2023-03-29 15:56:11,105 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3926, 4.9390, 5.0277, 4.7354, 5.3201, 3.2320, 4.1479, 2.6518], + device='cuda:1'), covar=tensor([0.0186, 0.0231, 0.0170, 0.0212, 0.0148, 0.1079, 0.1028, 0.1614], + device='cuda:1'), in_proj_covar=tensor([0.0109, 0.0153, 0.0117, 0.0139, 0.0123, 0.0139, 0.0144, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 15:56:59,664 INFO [train.py:892] (1/4) Epoch 42, batch 1000, loss[loss=0.1305, simple_loss=0.2091, pruned_loss=0.02595, over 19882.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.232, pruned_loss=0.03453, over 3924953.15 frames. 
], batch size: 87, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:58:37,629 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77095.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:58:46,753 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.63 vs. limit=2.0 +2023-03-29 15:59:03,001 INFO [train.py:892] (1/4) Epoch 42, batch 1050, loss[loss=0.1236, simple_loss=0.2031, pruned_loss=0.02202, over 19728.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2331, pruned_loss=0.03477, over 3929808.88 frames. ], batch size: 71, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:59:43,600 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1750, 3.1103, 3.4257, 3.1273, 2.9757, 3.4051, 3.2266, 3.4819], + device='cuda:1'), covar=tensor([0.0848, 0.0401, 0.0420, 0.0487, 0.1882, 0.0585, 0.0529, 0.0425], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0229, 0.0229, 0.0241, 0.0211, 0.0253, 0.0242, 0.0228], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:00:02,993 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.508e+02 3.618e+02 4.248e+02 5.006e+02 7.833e+02, threshold=8.496e+02, percent-clipped=0.0 +2023-03-29 16:01:07,329 INFO [train.py:892] (1/4) Epoch 42, batch 1100, loss[loss=0.1496, simple_loss=0.2313, pruned_loss=0.03395, over 19740.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2336, pruned_loss=0.03524, over 3933597.95 frames. ], batch size: 80, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 16:01:08,484 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77156.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 16:02:45,773 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77195.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:03:12,276 INFO [train.py:892] (1/4) Epoch 42, batch 1150, loss[loss=0.1455, simple_loss=0.228, pruned_loss=0.03147, over 19703.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2327, pruned_loss=0.03515, over 3936294.58 frames. ], batch size: 85, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 16:03:27,267 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77211.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:04:11,570 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8853, 2.4571, 4.3585, 3.9444, 4.4158, 4.3361, 4.1804, 4.2021], + device='cuda:1'), covar=tensor([0.0947, 0.1363, 0.0150, 0.0793, 0.0164, 0.0307, 0.0238, 0.0213], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0107, 0.0092, 0.0155, 0.0091, 0.0103, 0.0094, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:04:16,659 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.545e+02 3.808e+02 4.258e+02 4.963e+02 8.566e+02, threshold=8.515e+02, percent-clipped=1.0 +2023-03-29 16:04:59,856 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-29 16:05:19,683 INFO [train.py:892] (1/4) Epoch 42, batch 1200, loss[loss=0.1319, simple_loss=0.2167, pruned_loss=0.02354, over 19654.00 frames. ], tot_loss[loss=0.151, simple_loss=0.2321, pruned_loss=0.03491, over 3940245.64 frames. 
], batch size: 47, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 16:05:59,855 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6751, 2.9480, 2.5964, 2.2004, 2.7092, 2.9658, 2.8954, 2.9385], + device='cuda:1'), covar=tensor([0.0407, 0.0344, 0.0340, 0.0576, 0.0387, 0.0297, 0.0310, 0.0258], + device='cuda:1'), in_proj_covar=tensor([0.0116, 0.0108, 0.0109, 0.0109, 0.0112, 0.0098, 0.0100, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 16:06:02,033 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:06:04,232 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:06:16,100 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77277.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:06:29,575 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-29 16:07:30,261 INFO [train.py:892] (1/4) Epoch 42, batch 1250, loss[loss=0.1408, simple_loss=0.2183, pruned_loss=0.03163, over 19637.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2316, pruned_loss=0.03473, over 3943066.74 frames. ], batch size: 68, lr: 3.70e-03, grad_scale: 32.0 +2023-03-29 16:08:30,830 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.435e+02 3.557e+02 4.199e+02 4.989e+02 7.948e+02, threshold=8.398e+02, percent-clipped=0.0 +2023-03-29 16:08:39,823 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77334.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:08:52,595 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77338.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:09:33,979 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0640, 3.2052, 3.3079, 3.2553, 2.9957, 3.1284, 2.9772, 3.2321], + device='cuda:1'), covar=tensor([0.0324, 0.0372, 0.0299, 0.0305, 0.0413, 0.0355, 0.0399, 0.0402], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0091, 0.0093, 0.0087, 0.0100, 0.0094, 0.0109, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 16:09:35,076 INFO [train.py:892] (1/4) Epoch 42, batch 1300, loss[loss=0.1481, simple_loss=0.2379, pruned_loss=0.02917, over 19674.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2317, pruned_loss=0.03478, over 3944879.66 frames. ], batch size: 51, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:10:28,687 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7472, 2.7453, 2.9085, 2.9471, 2.8006, 2.8418, 2.8089, 2.8860], + device='cuda:1'), covar=tensor([0.0394, 0.0421, 0.0373, 0.0293, 0.0397, 0.0391, 0.0410, 0.0359], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0090, 0.0093, 0.0087, 0.0100, 0.0093, 0.0109, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 16:11:31,851 INFO [train.py:892] (1/4) Epoch 42, batch 1350, loss[loss=0.1346, simple_loss=0.2189, pruned_loss=0.02519, over 19687.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2317, pruned_loss=0.03475, over 3945836.96 frames. 
], batch size: 56, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:12:30,502 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.502e+02 3.333e+02 3.969e+02 4.891e+02 7.952e+02, threshold=7.937e+02, percent-clipped=0.0 +2023-03-29 16:13:20,652 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77451.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 16:13:34,043 INFO [train.py:892] (1/4) Epoch 42, batch 1400, loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02936, over 19772.00 frames. ], tot_loss[loss=0.1504, simple_loss=0.2313, pruned_loss=0.03479, over 3947484.73 frames. ], batch size: 163, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:14:41,865 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.57 vs. limit=5.0 +2023-03-29 16:15:10,482 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77495.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:15:36,598 INFO [train.py:892] (1/4) Epoch 42, batch 1450, loss[loss=0.1506, simple_loss=0.2449, pruned_loss=0.02811, over 19883.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2328, pruned_loss=0.03491, over 3944022.11 frames. ], batch size: 52, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:16:40,348 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.038e+02 3.421e+02 4.179e+02 5.316e+02 8.524e+02, threshold=8.358e+02, percent-clipped=3.0 +2023-03-29 16:17:10,173 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77543.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:17:44,866 INFO [train.py:892] (1/4) Epoch 42, batch 1500, loss[loss=0.1708, simple_loss=0.2391, pruned_loss=0.05125, over 19735.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2319, pruned_loss=0.03455, over 3945625.78 frames. ], batch size: 77, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:18:16,123 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77567.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:18:33,363 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7321, 1.6036, 1.7516, 1.7807, 1.6691, 1.7790, 1.5554, 1.8016], + device='cuda:1'), covar=tensor([0.0440, 0.0410, 0.0376, 0.0376, 0.0522, 0.0359, 0.0563, 0.0364], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0091, 0.0093, 0.0088, 0.0100, 0.0094, 0.0110, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 16:19:13,611 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7596, 3.6534, 3.9478, 2.9068, 4.0573, 3.3507, 3.4152, 3.9697], + device='cuda:1'), covar=tensor([0.0747, 0.0384, 0.0450, 0.0786, 0.0365, 0.0429, 0.0675, 0.0318], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0094, 0.0090, 0.0114, 0.0085, 0.0089, 0.0087, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:19:49,160 INFO [train.py:892] (1/4) Epoch 42, batch 1550, loss[loss=0.1521, simple_loss=0.2439, pruned_loss=0.03015, over 19786.00 frames. ], tot_loss[loss=0.1508, simple_loss=0.2324, pruned_loss=0.03463, over 3947147.18 frames. 
], batch size: 48, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:20:15,246 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77616.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:20:50,271 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77629.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:20:56,259 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.329e+02 3.286e+02 3.992e+02 4.919e+02 1.011e+03, threshold=7.983e+02, percent-clipped=1.0 +2023-03-29 16:20:59,711 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77633.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:21:54,228 INFO [train.py:892] (1/4) Epoch 42, batch 1600, loss[loss=0.1467, simple_loss=0.2161, pruned_loss=0.0386, over 19788.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2321, pruned_loss=0.03422, over 3947217.84 frames. ], batch size: 172, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:22:46,456 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77677.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:22:46,509 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77677.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:23:39,994 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4223, 2.6646, 3.8915, 3.0539, 3.1739, 3.0497, 2.3188, 2.4693], + device='cuda:1'), covar=tensor([0.1241, 0.3118, 0.0642, 0.1248, 0.1989, 0.1639, 0.2671, 0.2891], + device='cuda:1'), in_proj_covar=tensor([0.0357, 0.0401, 0.0357, 0.0298, 0.0380, 0.0396, 0.0388, 0.0363], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:23:59,748 INFO [train.py:892] (1/4) Epoch 42, batch 1650, loss[loss=0.1568, simple_loss=0.2409, pruned_loss=0.03631, over 19575.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2308, pruned_loss=0.03411, over 3948162.40 frames. ], batch size: 53, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:25:04,066 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.346e+02 3.655e+02 4.276e+02 5.251e+02 1.145e+03, threshold=8.553e+02, percent-clipped=2.0 +2023-03-29 16:25:19,095 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77738.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:25:51,913 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77751.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 16:26:01,331 INFO [train.py:892] (1/4) Epoch 42, batch 1700, loss[loss=0.1697, simple_loss=0.2587, pruned_loss=0.04038, over 19708.00 frames. ], tot_loss[loss=0.1504, simple_loss=0.2317, pruned_loss=0.03457, over 3947819.37 frames. ], batch size: 78, lr: 3.69e-03, grad_scale: 16.0 +2023-03-29 16:26:21,956 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. 
limit=2.0 +2023-03-29 16:27:11,491 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4450, 2.6424, 4.5163, 3.9082, 4.3512, 4.5134, 4.3614, 4.2363], + device='cuda:1'), covar=tensor([0.0568, 0.1035, 0.0112, 0.0743, 0.0184, 0.0203, 0.0174, 0.0191], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0107, 0.0092, 0.0154, 0.0091, 0.0103, 0.0094, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:27:35,847 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77799.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:27:51,349 INFO [train.py:892] (1/4) Epoch 42, batch 1750, loss[loss=0.1242, simple_loss=0.2058, pruned_loss=0.02132, over 19763.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2311, pruned_loss=0.03447, over 3948566.90 frames. ], batch size: 102, lr: 3.69e-03, grad_scale: 16.0 +2023-03-29 16:28:04,387 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.9198, 6.2066, 6.2502, 6.0972, 5.9637, 6.2183, 5.5815, 5.6063], + device='cuda:1'), covar=tensor([0.0403, 0.0470, 0.0448, 0.0403, 0.0485, 0.0441, 0.0604, 0.0937], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0312, 0.0322, 0.0281, 0.0291, 0.0272, 0.0286, 0.0336], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:28:49,557 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.255e+02 3.364e+02 4.068e+02 4.963e+02 7.822e+02, threshold=8.136e+02, percent-clipped=0.0 +2023-03-29 16:29:41,593 INFO [train.py:892] (1/4) Epoch 42, batch 1800, loss[loss=0.1813, simple_loss=0.2653, pruned_loss=0.0486, over 19724.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2319, pruned_loss=0.03478, over 3947353.74 frames. ], batch size: 310, lr: 3.69e-03, grad_scale: 16.0 +2023-03-29 16:29:42,456 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0624, 3.0078, 2.0758, 3.4731, 3.2260, 3.4744, 3.5312, 2.9072], + device='cuda:1'), covar=tensor([0.0665, 0.0766, 0.1549, 0.0747, 0.0648, 0.0529, 0.0601, 0.0856], + device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0153, 0.0148, 0.0163, 0.0143, 0.0147, 0.0157, 0.0155], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 16:30:04,911 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77867.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:31:23,162 INFO [train.py:892] (1/4) Epoch 42, batch 1850, loss[loss=0.1623, simple_loss=0.2535, pruned_loss=0.03559, over 19831.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.2339, pruned_loss=0.03481, over 3947436.34 frames. ], batch size: 57, lr: 3.69e-03, grad_scale: 16.0 +2023-03-29 16:32:27,845 INFO [train.py:892] (1/4) Epoch 43, batch 0, loss[loss=0.1781, simple_loss=0.27, pruned_loss=0.04307, over 19612.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.27, pruned_loss=0.04307, over 19612.00 frames. ], batch size: 351, lr: 3.65e-03, grad_scale: 16.0 +2023-03-29 16:32:27,846 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 16:33:04,434 INFO [train.py:926] (1/4) Epoch 43, validation: loss=0.1873, simple_loss=0.2496, pruned_loss=0.06254, over 2883724.00 frames. 
+2023-03-29 16:33:04,436 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 16:33:16,461 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77915.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:33:39,041 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1681, 2.0507, 3.2583, 2.6944, 3.3492, 3.3939, 3.1062, 3.2551], + device='cuda:1'), covar=tensor([0.1068, 0.1249, 0.0144, 0.0397, 0.0170, 0.0259, 0.0239, 0.0239], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0155, 0.0092, 0.0104, 0.0095, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:33:52,353 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77929.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:33:58,721 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.066e+02 3.128e+02 3.651e+02 4.371e+02 8.409e+02, threshold=7.303e+02, percent-clipped=1.0 +2023-03-29 16:34:01,743 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77933.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:35:13,090 INFO [train.py:892] (1/4) Epoch 43, batch 50, loss[loss=0.1638, simple_loss=0.2449, pruned_loss=0.04139, over 19758.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2278, pruned_loss=0.03342, over 892006.36 frames. ], batch size: 256, lr: 3.65e-03, grad_scale: 16.0 +2023-03-29 16:35:44,861 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77972.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:35:55,688 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77977.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:36:05,063 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77981.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:37:24,633 INFO [train.py:892] (1/4) Epoch 43, batch 100, loss[loss=0.1445, simple_loss=0.2344, pruned_loss=0.02734, over 19758.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2299, pruned_loss=0.03416, over 1570112.04 frames. ], batch size: 88, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:37:32,301 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 16:38:14,008 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.210e+02 3.457e+02 4.175e+02 5.002e+02 8.622e+02, threshold=8.350e+02, percent-clipped=3.0 +2023-03-29 16:38:19,896 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78033.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:39:26,822 INFO [train.py:892] (1/4) Epoch 43, batch 150, loss[loss=0.1551, simple_loss=0.2317, pruned_loss=0.03922, over 19759.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2301, pruned_loss=0.03397, over 2095915.13 frames. ], batch size: 182, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:41:35,068 INFO [train.py:892] (1/4) Epoch 43, batch 200, loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03114, over 19817.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2293, pruned_loss=0.03405, over 2508121.40 frames. 
], batch size: 133, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:42:26,474 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.115e+02 3.506e+02 4.148e+02 4.888e+02 9.722e+02, threshold=8.295e+02, percent-clipped=1.0 +2023-03-29 16:42:32,247 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3864, 2.2930, 3.6085, 3.0960, 3.6423, 3.6832, 3.4038, 3.4695], + device='cuda:1'), covar=tensor([0.0872, 0.1091, 0.0125, 0.0461, 0.0172, 0.0251, 0.0237, 0.0220], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0156, 0.0092, 0.0105, 0.0095, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:42:49,313 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0 +2023-03-29 16:42:51,713 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6432, 3.5067, 3.8298, 2.9725, 4.0092, 3.2583, 3.6298, 3.9865], + device='cuda:1'), covar=tensor([0.0720, 0.0412, 0.0451, 0.0764, 0.0360, 0.0460, 0.0413, 0.0294], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0094, 0.0090, 0.0114, 0.0085, 0.0089, 0.0087, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:43:34,482 INFO [train.py:892] (1/4) Epoch 43, batch 250, loss[loss=0.1497, simple_loss=0.2307, pruned_loss=0.03432, over 19830.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.2284, pruned_loss=0.03396, over 2828281.64 frames. ], batch size: 204, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:44:12,193 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5698, 3.3345, 3.6510, 2.8447, 3.6482, 3.0987, 3.4820, 3.6630], + device='cuda:1'), covar=tensor([0.0519, 0.0456, 0.0562, 0.0731, 0.0456, 0.0433, 0.0447, 0.0315], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0093, 0.0090, 0.0114, 0.0085, 0.0089, 0.0086, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:44:43,962 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.68 vs. limit=5.0 +2023-03-29 16:45:33,956 INFO [train.py:892] (1/4) Epoch 43, batch 300, loss[loss=0.132, simple_loss=0.2147, pruned_loss=0.02467, over 19617.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2292, pruned_loss=0.03433, over 3078418.57 frames. ], batch size: 46, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:46:28,827 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.098e+02 3.467e+02 4.146e+02 4.960e+02 1.292e+03, threshold=8.293e+02, percent-clipped=2.0 +2023-03-29 16:46:31,182 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78232.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:47:06,611 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8679, 4.1327, 4.4478, 5.0728, 3.2532, 3.6229, 3.1616, 2.9972], + device='cuda:1'), covar=tensor([0.0407, 0.1725, 0.0660, 0.0302, 0.1872, 0.1059, 0.1124, 0.1509], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0326, 0.0253, 0.0212, 0.0250, 0.0215, 0.0223, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 16:47:40,850 INFO [train.py:892] (1/4) Epoch 43, batch 350, loss[loss=0.1499, simple_loss=0.2219, pruned_loss=0.03894, over 19856.00 frames. 
], tot_loss[loss=0.1486, simple_loss=0.2292, pruned_loss=0.03397, over 3273046.73 frames. ], batch size: 64, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:48:09,167 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:48:14,183 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6494, 2.0925, 2.4022, 2.8236, 3.1817, 3.3042, 3.1947, 3.2620], + device='cuda:1'), covar=tensor([0.1137, 0.1873, 0.1544, 0.0861, 0.0621, 0.0434, 0.0532, 0.0564], + device='cuda:1'), in_proj_covar=tensor([0.0167, 0.0171, 0.0184, 0.0157, 0.0143, 0.0140, 0.0132, 0.0123], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 16:48:59,525 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78293.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 16:49:45,892 INFO [train.py:892] (1/4) Epoch 43, batch 400, loss[loss=0.2055, simple_loss=0.2926, pruned_loss=0.05921, over 19581.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2302, pruned_loss=0.03407, over 3422897.01 frames. ], batch size: 376, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:50:09,282 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:50:37,820 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.238e+02 3.252e+02 3.876e+02 4.451e+02 9.041e+02, threshold=7.752e+02, percent-clipped=1.0 +2023-03-29 16:50:42,496 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78333.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:51:50,708 INFO [train.py:892] (1/4) Epoch 43, batch 450, loss[loss=0.1443, simple_loss=0.228, pruned_loss=0.03028, over 19874.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2304, pruned_loss=0.03376, over 3540455.84 frames. ], batch size: 47, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:52:43,036 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78381.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:53:55,350 INFO [train.py:892] (1/4) Epoch 43, batch 500, loss[loss=0.1604, simple_loss=0.2349, pruned_loss=0.04297, over 19780.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2302, pruned_loss=0.03401, over 3631331.39 frames. ], batch size: 233, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 16:54:44,933 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.422e+02 3.338e+02 4.015e+02 4.887e+02 8.174e+02, threshold=8.030e+02, percent-clipped=1.0 +2023-03-29 16:55:02,207 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 16:55:57,336 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78460.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:55:58,528 INFO [train.py:892] (1/4) Epoch 43, batch 550, loss[loss=0.1491, simple_loss=0.2346, pruned_loss=0.03178, over 19760.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.231, pruned_loss=0.03412, over 3702176.19 frames. 
], batch size: 233, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 16:57:09,708 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6657, 2.3311, 3.7254, 3.2243, 3.6799, 3.7565, 3.5579, 3.4884], + device='cuda:1'), covar=tensor([0.0739, 0.1065, 0.0126, 0.0482, 0.0175, 0.0263, 0.0217, 0.0218], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0107, 0.0092, 0.0153, 0.0090, 0.0103, 0.0094, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:57:58,439 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-29 16:58:01,164 INFO [train.py:892] (1/4) Epoch 43, batch 600, loss[loss=0.1462, simple_loss=0.2247, pruned_loss=0.03385, over 19800.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.2308, pruned_loss=0.03398, over 3756794.44 frames. ], batch size: 151, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 16:58:04,721 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8320, 3.8570, 2.3864, 4.0466, 4.1933, 1.9112, 3.4562, 3.2724], + device='cuda:1'), covar=tensor([0.0739, 0.0855, 0.2758, 0.0845, 0.0581, 0.2805, 0.1085, 0.0907], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0269, 0.0240, 0.0290, 0.0270, 0.0209, 0.0246, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 16:58:09,230 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8074, 2.7699, 2.8902, 2.3982, 2.9919, 2.5174, 2.8879, 2.8725], + device='cuda:1'), covar=tensor([0.0602, 0.0603, 0.0636, 0.0816, 0.0389, 0.0520, 0.0521, 0.0414], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0094, 0.0090, 0.0115, 0.0085, 0.0089, 0.0086, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:58:12,167 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7427, 2.8408, 3.0054, 2.9380, 2.8271, 2.9008, 2.8287, 3.0021], + device='cuda:1'), covar=tensor([0.0370, 0.0365, 0.0314, 0.0304, 0.0409, 0.0408, 0.0374, 0.0414], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0091, 0.0094, 0.0088, 0.0100, 0.0094, 0.0109, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 16:58:31,047 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78521.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:58:40,643 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8076, 4.5312, 4.5461, 4.3439, 4.8048, 3.2473, 3.9809, 2.4526], + device='cuda:1'), covar=tensor([0.0165, 0.0196, 0.0148, 0.0186, 0.0132, 0.0942, 0.0671, 0.1362], + device='cuda:1'), in_proj_covar=tensor([0.0109, 0.0152, 0.0117, 0.0139, 0.0123, 0.0138, 0.0145, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 16:58:57,259 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.184e+02 3.257e+02 4.249e+02 5.407e+02 1.207e+03, threshold=8.497e+02, percent-clipped=3.0 +2023-03-29 16:59:52,282 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78556.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:00:06,573 INFO [train.py:892] (1/4) Epoch 43, batch 650, loss[loss=0.1777, simple_loss=0.2661, pruned_loss=0.0447, over 19637.00 frames. 
], tot_loss[loss=0.1499, simple_loss=0.2313, pruned_loss=0.03425, over 3798271.32 frames. ], batch size: 343, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:01:08,192 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78588.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:02:02,180 INFO [train.py:892] (1/4) Epoch 43, batch 700, loss[loss=0.1519, simple_loss=0.2337, pruned_loss=0.03509, over 19750.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.232, pruned_loss=0.03486, over 3832780.51 frames. ], batch size: 250, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:02:16,674 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78617.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:02:27,650 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8081, 3.1210, 2.7283, 2.3069, 2.7969, 3.1176, 2.9596, 3.1210], + device='cuda:1'), covar=tensor([0.0374, 0.0320, 0.0363, 0.0575, 0.0357, 0.0286, 0.0289, 0.0243], + device='cuda:1'), in_proj_covar=tensor([0.0114, 0.0107, 0.0109, 0.0108, 0.0111, 0.0097, 0.0099, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 17:02:54,401 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.290e+02 3.555e+02 4.216e+02 4.848e+02 9.327e+02, threshold=8.432e+02, percent-clipped=1.0 +2023-03-29 17:03:25,505 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9226, 2.9700, 4.4262, 3.4858, 3.5514, 3.4240, 2.4745, 2.7376], + device='cuda:1'), covar=tensor([0.1029, 0.3183, 0.0506, 0.1104, 0.1927, 0.1643, 0.2761, 0.2603], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0403, 0.0357, 0.0297, 0.0381, 0.0398, 0.0390, 0.0364], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 17:03:59,444 INFO [train.py:892] (1/4) Epoch 43, batch 750, loss[loss=0.1378, simple_loss=0.2155, pruned_loss=0.03003, over 19848.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2333, pruned_loss=0.03485, over 3857516.07 frames. ], batch size: 145, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:04:45,986 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.8430, 1.6925, 1.8564, 1.8556, 1.7724, 1.8834, 1.7269, 1.9036], + device='cuda:1'), covar=tensor([0.0460, 0.0420, 0.0419, 0.0375, 0.0526, 0.0385, 0.0520, 0.0345], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0092, 0.0095, 0.0089, 0.0101, 0.0094, 0.0110, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 17:05:55,909 INFO [train.py:892] (1/4) Epoch 43, batch 800, loss[loss=0.1219, simple_loss=0.2026, pruned_loss=0.02054, over 19789.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.2322, pruned_loss=0.03479, over 3878374.92 frames. ], batch size: 94, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:06:42,299 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.297e+02 3.365e+02 3.945e+02 4.793e+02 1.520e+03, threshold=7.889e+02, percent-clipped=1.0 +2023-03-29 17:07:52,418 INFO [train.py:892] (1/4) Epoch 43, batch 850, loss[loss=0.1322, simple_loss=0.2118, pruned_loss=0.02636, over 19781.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2324, pruned_loss=0.03446, over 3892564.64 frames. 
], batch size: 193, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:09:51,498 INFO [train.py:892] (1/4) Epoch 43, batch 900, loss[loss=0.1412, simple_loss=0.2153, pruned_loss=0.03355, over 19770.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2317, pruned_loss=0.03439, over 3906120.52 frames. ], batch size: 113, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:10:05,174 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78816.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:10:39,598 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.520e+02 3.462e+02 4.060e+02 5.051e+02 1.168e+03, threshold=8.120e+02, percent-clipped=4.0 +2023-03-29 17:11:49,226 INFO [train.py:892] (1/4) Epoch 43, batch 950, loss[loss=0.1429, simple_loss=0.2306, pruned_loss=0.02763, over 19635.00 frames. ], tot_loss[loss=0.1504, simple_loss=0.2319, pruned_loss=0.03442, over 3915597.07 frames. ], batch size: 68, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:12:56,238 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78888.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:13:45,676 INFO [train.py:892] (1/4) Epoch 43, batch 1000, loss[loss=0.1536, simple_loss=0.2373, pruned_loss=0.03496, over 19824.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2315, pruned_loss=0.03439, over 3922860.91 frames. ], batch size: 57, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:13:50,597 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78912.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:14:06,762 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7627, 3.7907, 2.3566, 3.9473, 4.0676, 1.9419, 3.3658, 3.2381], + device='cuda:1'), covar=tensor([0.0768, 0.0919, 0.2756, 0.0850, 0.0707, 0.2791, 0.1192, 0.0930], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0269, 0.0239, 0.0289, 0.0270, 0.0208, 0.0245, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 17:14:35,060 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.508e+02 3.436e+02 3.973e+02 4.908e+02 9.421e+02, threshold=7.947e+02, percent-clipped=2.0 +2023-03-29 17:14:44,268 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:15:39,026 INFO [train.py:892] (1/4) Epoch 43, batch 1050, loss[loss=0.1533, simple_loss=0.2468, pruned_loss=0.02992, over 19850.00 frames. ], tot_loss[loss=0.1497, simple_loss=0.2311, pruned_loss=0.03418, over 3929861.05 frames. ], batch size: 58, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:15:49,031 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78965.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:17:33,116 INFO [train.py:892] (1/4) Epoch 43, batch 1100, loss[loss=0.1556, simple_loss=0.2317, pruned_loss=0.0397, over 19807.00 frames. ], tot_loss[loss=0.1504, simple_loss=0.2317, pruned_loss=0.03456, over 3935042.05 frames. 
], batch size: 67, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:17:42,470 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5260, 3.5504, 2.3619, 4.1537, 3.7987, 4.0756, 4.2010, 3.2564], + device='cuda:1'), covar=tensor([0.0634, 0.0654, 0.1546, 0.0635, 0.0607, 0.0500, 0.0503, 0.0824], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0152, 0.0149, 0.0163, 0.0142, 0.0147, 0.0158, 0.0155], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 17:17:57,638 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7753, 3.9163, 4.1830, 4.8179, 3.3776, 3.5346, 2.9567, 2.9723], + device='cuda:1'), covar=tensor([0.0386, 0.1928, 0.0759, 0.0319, 0.1735, 0.1039, 0.1214, 0.1512], + device='cuda:1'), in_proj_covar=tensor([0.0252, 0.0326, 0.0254, 0.0213, 0.0249, 0.0216, 0.0224, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 17:18:09,915 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79026.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:18:23,401 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.685e+02 3.705e+02 4.386e+02 5.336e+02 1.015e+03, threshold=8.773e+02, percent-clipped=4.0 +2023-03-29 17:19:32,155 INFO [train.py:892] (1/4) Epoch 43, batch 1150, loss[loss=0.1381, simple_loss=0.2138, pruned_loss=0.03123, over 19801.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2321, pruned_loss=0.0345, over 3937979.84 frames. ], batch size: 126, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:21:27,644 INFO [train.py:892] (1/4) Epoch 43, batch 1200, loss[loss=0.1358, simple_loss=0.2119, pruned_loss=0.02988, over 19816.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.232, pruned_loss=0.03429, over 3939544.48 frames. ], batch size: 72, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:21:40,508 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:21:40,796 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8990, 3.3071, 3.6808, 3.1401, 3.9960, 4.0303, 4.6032, 5.1593], + device='cuda:1'), covar=tensor([0.0473, 0.1514, 0.1466, 0.2202, 0.1556, 0.1217, 0.0552, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0263, 0.0247, 0.0275, 0.0263, 0.0307, 0.0267, 0.0241, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 17:22:16,934 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 3.557e+02 4.069e+02 4.739e+02 8.583e+02, threshold=8.137e+02, percent-clipped=0.0 +2023-03-29 17:22:27,288 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.07 vs. limit=2.0 +2023-03-29 17:23:25,139 INFO [train.py:892] (1/4) Epoch 43, batch 1250, loss[loss=0.1572, simple_loss=0.2452, pruned_loss=0.03466, over 19608.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2316, pruned_loss=0.03431, over 3940609.55 frames. 
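A hedged reading of the recurring "Clipping_scale=2.0, grad-norm quartiles ... threshold=... percent-clipped=..." lines: the five numbers are the min / 25% / median / 75% / max of recent gradient norms, and the threshold is clipping_scale times the median (in the entry above, 2.0 * 4.386e+02 = 8.773e+02, exactly the printed threshold). The sketch below illustrates such a mechanism; it is not the actual optim.py implementation:

from collections import deque

import torch

class GradNormClipper:
    """Clips gradients against a multiple of the median of recent norms."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent total gradient norms
        self.num_clipped = 0
        self.num_steps = 0

    def clip_(self, parameters) -> None:
        grads = [p.grad.detach() for p in parameters if p.grad is not None]
        total_norm = torch.norm(torch.stack([g.norm(2) for g in grads]), 2).item()
        self.norms.append(total_norm)
        self.num_steps += 1
        hist = torch.tensor(list(self.norms))
        # min / 25% / median / 75% / max, as printed in the log:
        quartiles = torch.quantile(hist, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * quartiles[2].item()
        if total_norm > threshold:
            self.num_clipped += 1  # feeds the "percent-clipped" statistic
            for g in grads:
                g.mul_(threshold / (total_norm + 1e-6))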
], batch size: 51, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:23:32,272 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79164.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:24:11,704 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6409, 3.4294, 3.6621, 2.9225, 3.8434, 3.2014, 3.4609, 3.8470], + device='cuda:1'), covar=tensor([0.0703, 0.0450, 0.0659, 0.0763, 0.0390, 0.0473, 0.0529, 0.0316], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0095, 0.0092, 0.0115, 0.0086, 0.0090, 0.0087, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 17:25:19,905 INFO [train.py:892] (1/4) Epoch 43, batch 1300, loss[loss=0.1305, simple_loss=0.2148, pruned_loss=0.02312, over 19852.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2314, pruned_loss=0.03419, over 3943514.83 frames. ], batch size: 85, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:25:22,824 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79212.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:26:08,979 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.005e+02 3.409e+02 4.094e+02 5.234e+02 1.153e+03, threshold=8.189e+02, percent-clipped=5.0 +2023-03-29 17:27:14,214 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79260.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:27:15,388 INFO [train.py:892] (1/4) Epoch 43, batch 1350, loss[loss=0.1446, simple_loss=0.2227, pruned_loss=0.03324, over 19761.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2316, pruned_loss=0.03442, over 3943470.33 frames. ], batch size: 113, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:28:09,860 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79285.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 17:29:06,887 INFO [train.py:892] (1/4) Epoch 43, batch 1400, loss[loss=0.1543, simple_loss=0.2324, pruned_loss=0.0381, over 19810.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2306, pruned_loss=0.03433, over 3945356.08 frames. ], batch size: 98, lr: 3.61e-03, grad_scale: 16.0 +2023-03-29 17:29:30,300 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79321.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:29:56,356 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 3.305e+02 4.038e+02 5.143e+02 6.996e+02, threshold=8.075e+02, percent-clipped=0.0 +2023-03-29 17:29:59,659 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79333.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:30:29,460 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.0006, 1.8871, 2.9607, 2.2773, 3.1429, 3.1841, 2.8280, 3.0627], + device='cuda:1'), covar=tensor([0.1322, 0.1492, 0.0161, 0.0448, 0.0179, 0.0269, 0.0283, 0.0241], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0155, 0.0091, 0.0105, 0.0095, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 17:30:29,535 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79346.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 17:31:04,336 INFO [train.py:892] (1/4) Epoch 43, batch 1450, loss[loss=0.1528, simple_loss=0.2425, pruned_loss=0.03157, over 19763.00 frames. 
], tot_loss[loss=0.1515, simple_loss=0.2329, pruned_loss=0.03508, over 3946018.89 frames. ], batch size: 100, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:31:12,890 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79364.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:31:37,384 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79375.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:32:20,056 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79394.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:32:59,384 INFO [train.py:892] (1/4) Epoch 43, batch 1500, loss[loss=0.1365, simple_loss=0.2185, pruned_loss=0.02725, over 19898.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2323, pruned_loss=0.03497, over 3947251.80 frames. ], batch size: 94, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:33:29,317 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79425.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:33:41,752 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.542e+02 3.517e+02 4.248e+02 5.342e+02 8.488e+02, threshold=8.496e+02, percent-clipped=1.0 +2023-03-29 17:33:47,574 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3664, 4.9038, 4.9691, 4.7114, 5.2850, 3.3513, 4.2157, 2.4923], + device='cuda:1'), covar=tensor([0.0150, 0.0188, 0.0133, 0.0189, 0.0132, 0.0965, 0.0858, 0.1599], + device='cuda:1'), in_proj_covar=tensor([0.0110, 0.0153, 0.0117, 0.0140, 0.0124, 0.0139, 0.0145, 0.0132], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 17:33:52,812 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79436.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 17:34:27,446 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7404, 3.8502, 2.3475, 4.0295, 4.1273, 1.8925, 3.4121, 3.1592], + device='cuda:1'), covar=tensor([0.0824, 0.0815, 0.2717, 0.0752, 0.0622, 0.2732, 0.1134, 0.0957], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0268, 0.0238, 0.0289, 0.0269, 0.0207, 0.0246, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 17:34:48,452 INFO [train.py:892] (1/4) Epoch 43, batch 1550, loss[loss=0.1409, simple_loss=0.2292, pruned_loss=0.0263, over 19767.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2342, pruned_loss=0.03563, over 3946746.10 frames. ], batch size: 226, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:36:45,430 INFO [train.py:892] (1/4) Epoch 43, batch 1600, loss[loss=0.1487, simple_loss=0.2303, pruned_loss=0.03351, over 19785.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2334, pruned_loss=0.035, over 3947044.47 frames. ], batch size: 213, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:37:34,257 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.484e+02 3.446e+02 3.965e+02 4.827e+02 6.779e+02, threshold=7.930e+02, percent-clipped=0.0 +2023-03-29 17:38:42,073 INFO [train.py:892] (1/4) Epoch 43, batch 1650, loss[loss=0.1751, simple_loss=0.2442, pruned_loss=0.05301, over 19789.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2319, pruned_loss=0.03418, over 3946928.02 frames. 
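The grad_scale column is the dynamic loss scale used for mixed-precision training: it doubles from 16.0 to 32.0 at Epoch 43, batch 1450 after a long run of overflow-free steps, and drops back to 16.0 by Epoch 44, batch 250, the halving that follows an overflowing step. A minimal sketch of such a loop with torch.cuda.amp; illustrative, not the project's exact code:

import torch

scaler = torch.cuda.amp.GradScaler()  # doubles after growth_interval clean steps,
                                      # halves on inf/nan gradients

def training_step(model, optimizer, batch, compute_loss):
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = compute_loss(model, batch)
    scaler.scale(loss).backward()  # gradients carry the current scale
    scaler.step(optimizer)         # unscales; skips the step on overflow
    scaler.update()                # adjusts the scale logged as grad_scale
    return loss.detach(), scaler.get_scale()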
], batch size: 211, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:39:40,548 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79585.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:40:14,604 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9638, 3.2184, 2.8707, 2.4081, 2.8618, 3.2344, 3.1893, 3.1701], + device='cuda:1'), covar=tensor([0.0352, 0.0295, 0.0308, 0.0584, 0.0382, 0.0255, 0.0219, 0.0230], + device='cuda:1'), in_proj_covar=tensor([0.0114, 0.0108, 0.0108, 0.0108, 0.0111, 0.0097, 0.0099, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 17:40:37,534 INFO [train.py:892] (1/4) Epoch 43, batch 1700, loss[loss=0.1488, simple_loss=0.2302, pruned_loss=0.03364, over 19652.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.232, pruned_loss=0.03452, over 3947980.39 frames. ], batch size: 43, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:41:03,029 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79621.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:41:26,786 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.071e+02 3.435e+02 4.157e+02 5.071e+02 8.490e+02, threshold=8.314e+02, percent-clipped=3.0 +2023-03-29 17:41:49,593 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79641.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 17:42:00,396 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79646.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:42:21,529 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79657.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:42:28,890 INFO [train.py:892] (1/4) Epoch 43, batch 1750, loss[loss=0.1579, simple_loss=0.2342, pruned_loss=0.04077, over 19842.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2328, pruned_loss=0.03468, over 3945875.02 frames. ], batch size: 144, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:42:46,743 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79669.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:43:21,627 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5291, 3.4526, 5.2325, 3.9687, 4.0544, 3.8972, 2.8125, 3.0412], + device='cuda:1'), covar=tensor([0.0826, 0.2822, 0.0339, 0.1027, 0.1695, 0.1284, 0.2606, 0.2584], + device='cuda:1'), in_proj_covar=tensor([0.0359, 0.0404, 0.0358, 0.0298, 0.0381, 0.0400, 0.0390, 0.0365], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 17:43:29,978 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79689.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:44:13,219 INFO [train.py:892] (1/4) Epoch 43, batch 1800, loss[loss=0.1439, simple_loss=0.2266, pruned_loss=0.0306, over 19767.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.233, pruned_loss=0.0348, over 3947400.74 frames. 
], batch size: 49, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:44:20,719 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1119, 1.9767, 2.1382, 2.1954, 2.1358, 2.1229, 2.0934, 2.2127], + device='cuda:1'), covar=tensor([0.0458, 0.0445, 0.0458, 0.0358, 0.0526, 0.0446, 0.0529, 0.0415], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0091, 0.0094, 0.0088, 0.0100, 0.0094, 0.0110, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 17:44:26,154 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79718.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:44:31,258 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79720.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:44:52,926 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79731.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:44:53,951 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.513e+02 3.637e+02 4.285e+02 5.199e+02 9.167e+02, threshold=8.570e+02, percent-clipped=2.0 +2023-03-29 17:45:00,426 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4385, 5.7578, 5.9439, 5.6833, 5.6038, 5.5910, 5.6009, 5.4371], + device='cuda:1'), covar=tensor([0.1414, 0.1763, 0.0844, 0.1241, 0.0668, 0.0836, 0.1815, 0.1946], + device='cuda:1'), in_proj_covar=tensor([0.0306, 0.0355, 0.0382, 0.0318, 0.0293, 0.0297, 0.0375, 0.0406], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 17:45:18,864 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6593, 3.4472, 3.7749, 2.7945, 3.9296, 3.2533, 3.4867, 3.7469], + device='cuda:1'), covar=tensor([0.0729, 0.0470, 0.0621, 0.0851, 0.0414, 0.0445, 0.0529, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0095, 0.0091, 0.0115, 0.0085, 0.0090, 0.0087, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 17:45:48,626 INFO [train.py:892] (1/4) Epoch 43, batch 1850, loss[loss=0.1544, simple_loss=0.2423, pruned_loss=0.03326, over 19813.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2333, pruned_loss=0.03463, over 3947689.99 frames. ], batch size: 57, lr: 3.60e-03, grad_scale: 32.0 +2023-03-29 17:45:50,929 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79762.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:46:59,245 INFO [train.py:892] (1/4) Epoch 44, batch 0, loss[loss=0.1307, simple_loss=0.2102, pruned_loss=0.02556, over 19854.00 frames. ], tot_loss[loss=0.1307, simple_loss=0.2102, pruned_loss=0.02556, over 19854.00 frames. 
], batch size: 106, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:46:59,246 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 17:47:20,131 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0705, 4.0971, 2.4173, 4.2577, 4.4327, 2.0323, 3.7228, 3.3638], + device='cuda:1'), covar=tensor([0.0677, 0.0782, 0.3010, 0.0764, 0.0657, 0.2819, 0.1004, 0.0931], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0271, 0.0241, 0.0293, 0.0272, 0.0210, 0.0247, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 17:47:26,457 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8442, 2.7770, 5.0052, 4.1864, 4.7195, 4.9246, 4.6382, 4.6181], + device='cuda:1'), covar=tensor([0.0553, 0.1117, 0.0096, 0.0753, 0.0146, 0.0185, 0.0165, 0.0160], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0156, 0.0091, 0.0104, 0.0095, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 17:47:38,449 INFO [train.py:926] (1/4) Epoch 44, validation: loss=0.1877, simple_loss=0.2498, pruned_loss=0.06277, over 2883724.00 frames. +2023-03-29 17:47:38,451 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 17:49:41,899 INFO [train.py:892] (1/4) Epoch 44, batch 50, loss[loss=0.1556, simple_loss=0.2384, pruned_loss=0.03642, over 19778.00 frames. ], tot_loss[loss=0.1422, simple_loss=0.2234, pruned_loss=0.0305, over 891054.94 frames. ], batch size: 224, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:49:58,288 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79823.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:50:15,365 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.212e+02 3.427e+02 4.163e+02 5.028e+02 9.332e+02, threshold=8.326e+02, percent-clipped=1.0 +2023-03-29 17:51:32,089 INFO [train.py:892] (1/4) Epoch 44, batch 100, loss[loss=0.1409, simple_loss=0.2183, pruned_loss=0.03173, over 19793.00 frames. ], tot_loss[loss=0.1443, simple_loss=0.2269, pruned_loss=0.03087, over 1567926.77 frames. ], batch size: 168, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:51:58,277 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-29 17:52:09,857 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
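Each validation block follows the same pattern: "Computing validation loss", a frame-weighted average over the full dev set (the frame count, 2883724.00, is identical at every validation), then the peak CUDA memory. A plausible sketch of such a loop; the helper names are assumptions:

import torch

def run_validation(model, valid_loader, compute_loss) -> str:
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            # compute_loss is assumed to return the per-frame loss and frame count
            loss, num_frames = compute_loss(model, batch)
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    peak_mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
    return (f"validation: loss={tot_loss / tot_frames:.4g}, "
            f"over {tot_frames:.2f} frames. Peak memory: {peak_mb}MB")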
limit=2.0 +2023-03-29 17:52:12,089 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4710, 5.8533, 5.9387, 5.7394, 5.6010, 5.7000, 5.5666, 5.5466], + device='cuda:1'), covar=tensor([0.1309, 0.1259, 0.0762, 0.1185, 0.0615, 0.0713, 0.1920, 0.1811], + device='cuda:1'), in_proj_covar=tensor([0.0306, 0.0354, 0.0382, 0.0317, 0.0293, 0.0297, 0.0375, 0.0406], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 17:52:14,332 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1911, 2.0709, 2.2336, 2.2441, 2.2765, 2.2867, 2.2198, 2.3659], + device='cuda:1'), covar=tensor([0.0423, 0.0424, 0.0389, 0.0358, 0.0475, 0.0370, 0.0513, 0.0321], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0091, 0.0094, 0.0088, 0.0100, 0.0094, 0.0109, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 17:53:02,104 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1328, 4.0064, 4.0477, 4.2453, 4.0870, 4.4064, 4.1167, 4.3047], + device='cuda:1'), covar=tensor([0.0875, 0.0542, 0.0628, 0.0515, 0.0713, 0.0592, 0.0711, 0.0584], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0187, 0.0208, 0.0186, 0.0184, 0.0167, 0.0161, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 17:53:33,219 INFO [train.py:892] (1/4) Epoch 44, batch 150, loss[loss=0.1559, simple_loss=0.233, pruned_loss=0.03937, over 19851.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.23, pruned_loss=0.03246, over 2095841.41 frames. ], batch size: 145, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:54:11,531 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.056e+02 3.280e+02 3.825e+02 4.551e+02 8.712e+02, threshold=7.650e+02, percent-clipped=2.0 +2023-03-29 17:54:33,942 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79941.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:54:34,000 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79941.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 17:54:37,901 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7175, 2.1622, 2.5515, 2.8860, 3.3119, 3.4365, 3.3103, 3.4116], + device='cuda:1'), covar=tensor([0.1074, 0.1785, 0.1429, 0.0813, 0.0561, 0.0359, 0.0533, 0.0554], + device='cuda:1'), in_proj_covar=tensor([0.0165, 0.0171, 0.0183, 0.0156, 0.0142, 0.0138, 0.0133, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 17:55:19,486 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3603, 3.6794, 3.8471, 4.4329, 2.9641, 3.3561, 2.6203, 2.5800], + device='cuda:1'), covar=tensor([0.0494, 0.1858, 0.0848, 0.0401, 0.1997, 0.1029, 0.1415, 0.1732], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0329, 0.0256, 0.0214, 0.0252, 0.0217, 0.0225, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 17:55:37,670 INFO [train.py:892] (1/4) Epoch 44, batch 200, loss[loss=0.1422, simple_loss=0.2321, pruned_loss=0.02617, over 19677.00 frames. ], tot_loss[loss=0.1464, simple_loss=0.2284, pruned_loss=0.03223, over 2508384.06 frames. 
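The attn_weights_entropy dumps report a per-head statistic of the attention distributions, alongside running covariance estimates of the projections. One natural definition of such a diagnostic, sketched here with illustrative shapes, is the mean entropy of each head's softmax rows: low for heads that collapse onto single positions, high for heads that stay near uniform:

import torch

def attn_weights_entropy(attn: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    # attn: (num_heads, tgt_len, src_len); each row sums to 1 after softmax.
    entropy = -(attn * (attn + eps).log()).sum(dim=-1)  # (num_heads, tgt_len)
    return entropy.mean(dim=-1)                         # one value per head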
], batch size: 52, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:56:34,709 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79989.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:56:34,748 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79989.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:57:32,853 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80013.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:57:40,407 INFO [train.py:892] (1/4) Epoch 44, batch 250, loss[loss=0.2398, simple_loss=0.3205, pruned_loss=0.07961, over 19423.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2292, pruned_loss=0.03285, over 2828353.91 frames. ], batch size: 431, lr: 3.56e-03, grad_scale: 16.0 +2023-03-29 17:57:53,569 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80020.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:58:17,209 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80031.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 17:58:20,262 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.104e+02 3.486e+02 4.096e+02 4.877e+02 1.090e+03, threshold=8.193e+02, percent-clipped=2.0 +2023-03-29 17:58:30,434 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80037.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:59:38,664 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9649, 2.8986, 1.9934, 3.4781, 3.2222, 3.3869, 3.4884, 2.8683], + device='cuda:1'), covar=tensor([0.0721, 0.0844, 0.1818, 0.0692, 0.0626, 0.0560, 0.0641, 0.0836], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0151, 0.0147, 0.0161, 0.0141, 0.0147, 0.0156, 0.0155], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 17:59:39,668 INFO [train.py:892] (1/4) Epoch 44, batch 300, loss[loss=0.1173, simple_loss=0.1891, pruned_loss=0.02281, over 19840.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.2291, pruned_loss=0.033, over 3077228.10 frames. ], batch size: 144, lr: 3.56e-03, grad_scale: 16.0 +2023-03-29 17:59:44,409 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80068.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:00:09,785 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80079.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 18:01:38,227 INFO [train.py:892] (1/4) Epoch 44, batch 350, loss[loss=0.1519, simple_loss=0.2323, pruned_loss=0.03574, over 19733.00 frames. ], tot_loss[loss=0.1471, simple_loss=0.2287, pruned_loss=0.03272, over 3272731.94 frames. 
], batch size: 221, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:01:44,028 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80118.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:02:20,932 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.499e+02 3.590e+02 4.276e+02 5.127e+02 9.462e+02, threshold=8.552e+02, percent-clipped=1.0 +2023-03-29 18:03:14,204 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0392, 3.0407, 3.2422, 2.5532, 3.2532, 2.8961, 3.2178, 3.1841], + device='cuda:1'), covar=tensor([0.0680, 0.0526, 0.0512, 0.0855, 0.0439, 0.0450, 0.0418, 0.0431], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0094, 0.0091, 0.0114, 0.0086, 0.0089, 0.0086, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 18:03:35,973 INFO [train.py:892] (1/4) Epoch 44, batch 400, loss[loss=0.1368, simple_loss=0.2208, pruned_loss=0.0264, over 19855.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2291, pruned_loss=0.03297, over 3423188.08 frames. ], batch size: 85, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:05:38,840 INFO [train.py:892] (1/4) Epoch 44, batch 450, loss[loss=0.1597, simple_loss=0.233, pruned_loss=0.0432, over 19836.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2294, pruned_loss=0.03307, over 3540468.26 frames. ], batch size: 144, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:05:55,512 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3666, 3.2849, 3.5141, 2.6544, 3.5589, 2.9926, 3.2677, 3.4227], + device='cuda:1'), covar=tensor([0.0670, 0.0467, 0.0548, 0.0826, 0.0452, 0.0534, 0.0593, 0.0422], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0094, 0.0091, 0.0114, 0.0086, 0.0089, 0.0086, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 18:06:18,569 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.753e+02 3.555e+02 3.976e+02 4.443e+02 7.591e+02, threshold=7.952e+02, percent-clipped=1.0 +2023-03-29 18:06:39,084 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80241.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:07:34,147 INFO [train.py:892] (1/4) Epoch 44, batch 500, loss[loss=0.1549, simple_loss=0.2262, pruned_loss=0.04184, over 19798.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2307, pruned_loss=0.03382, over 3631924.35 frames. 
], batch size: 83, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:08:14,328 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80283.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:08:28,164 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80289.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:08:58,237 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6785, 3.0700, 2.6465, 2.2383, 2.7211, 2.9390, 2.9355, 2.9977], + device='cuda:1'), covar=tensor([0.0399, 0.0287, 0.0358, 0.0574, 0.0395, 0.0309, 0.0291, 0.0269], + device='cuda:1'), in_proj_covar=tensor([0.0115, 0.0108, 0.0108, 0.0109, 0.0111, 0.0098, 0.0099, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 18:09:27,876 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80313.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:09:33,216 INFO [train.py:892] (1/4) Epoch 44, batch 550, loss[loss=0.1541, simple_loss=0.2302, pruned_loss=0.03897, over 19842.00 frames. ], tot_loss[loss=0.1498, simple_loss=0.2311, pruned_loss=0.03423, over 3702051.30 frames. ], batch size: 75, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:09:46,972 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.61 vs. limit=5.0 +2023-03-29 18:10:13,756 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 3.379e+02 4.084e+02 4.890e+02 8.912e+02, threshold=8.167e+02, percent-clipped=1.0 +2023-03-29 18:10:41,257 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80344.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:10:52,086 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0 +2023-03-29 18:11:17,937 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.9084, 4.7257, 5.2997, 4.8142, 4.2832, 5.0227, 4.8935, 5.4281], + device='cuda:1'), covar=tensor([0.0882, 0.0467, 0.0352, 0.0381, 0.0822, 0.0519, 0.0485, 0.0298], + device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0232, 0.0232, 0.0245, 0.0212, 0.0257, 0.0245, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 18:11:21,906 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80361.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:11:31,182 INFO [train.py:892] (1/4) Epoch 44, batch 600, loss[loss=0.1387, simple_loss=0.2207, pruned_loss=0.02835, over 19713.00 frames. ], tot_loss[loss=0.1498, simple_loss=0.2314, pruned_loss=0.0341, over 3756340.59 frames. 
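The "Whitening: ... metric=... vs. limit=..." lines compare a decorrelation statistic against a limit (2.0 for the grouped cases, 5.0 for the single-group case); by this stage of training the metric mostly sits near or below its limit (e.g. 1.07 vs. 2.0, 4.79 vs. 5.0 above). One way to define such a metric, equal to 1.0 exactly when each group's channel covariance is a multiple of the identity, is sketched below; this is an illustrative definition, not necessarily the one in scaling.py:

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (num_frames, num_channels); channels are split into equal groups.
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x.transpose(0, 1)                          # (groups, frames, chans)
    cov = torch.matmul(x.transpose(1, 2), x) / num_frames
    d = cov.shape[-1]
    tr_c = cov.diagonal(dim1=-2, dim2=-1).sum(-1)  # trace(C)
    tr_c2 = (cov * cov).sum((-2, -1))              # trace(C^2), C symmetric
    # (trace(C^2)/d) / (trace(C)/d)^2 >= 1, with equality iff C = c * I.
    return ((tr_c2 / d) / (tr_c / d) ** 2).mean()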
], batch size: 101, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:11:54,050 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8235, 2.3729, 3.8496, 3.3323, 3.8572, 3.9109, 3.6835, 3.6572], + device='cuda:1'), covar=tensor([0.0745, 0.1142, 0.0134, 0.0546, 0.0174, 0.0239, 0.0211, 0.0215], + device='cuda:1'), in_proj_covar=tensor([0.0106, 0.0109, 0.0093, 0.0157, 0.0092, 0.0105, 0.0096, 0.0093], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 18:12:34,650 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80393.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:13:27,012 INFO [train.py:892] (1/4) Epoch 44, batch 650, loss[loss=0.1701, simple_loss=0.248, pruned_loss=0.04608, over 19773.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2296, pruned_loss=0.03372, over 3800962.85 frames. ], batch size: 70, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:13:32,698 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80418.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:14:07,141 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.620e+02 3.666e+02 4.287e+02 5.079e+02 9.209e+02, threshold=8.573e+02, percent-clipped=4.0 +2023-03-29 18:14:32,908 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-03-29 18:14:49,025 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4269, 2.1502, 3.5566, 2.9657, 3.6060, 3.6061, 3.3937, 3.4212], + device='cuda:1'), covar=tensor([0.0952, 0.1294, 0.0149, 0.0586, 0.0185, 0.0274, 0.0244, 0.0218], + device='cuda:1'), in_proj_covar=tensor([0.0106, 0.0108, 0.0093, 0.0157, 0.0092, 0.0105, 0.0096, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 18:14:55,318 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80454.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:15:14,884 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80463.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:15:20,989 INFO [train.py:892] (1/4) Epoch 44, batch 700, loss[loss=0.193, simple_loss=0.2742, pruned_loss=0.0559, over 19704.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2303, pruned_loss=0.0338, over 3832203.19 frames. ], batch size: 305, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:15:23,435 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80466.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:16:26,802 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80494.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:17:18,263 INFO [train.py:892] (1/4) Epoch 44, batch 750, loss[loss=0.1548, simple_loss=0.2446, pruned_loss=0.0325, over 19702.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2302, pruned_loss=0.03361, over 3858619.53 frames. 
], batch size: 315, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:17:38,282 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80524.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:17:57,764 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.299e+02 3.592e+02 4.231e+02 5.310e+02 9.600e+02, threshold=8.463e+02, percent-clipped=2.0 +2023-03-29 18:18:50,482 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80555.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:19:14,286 INFO [train.py:892] (1/4) Epoch 44, batch 800, loss[loss=0.1591, simple_loss=0.2591, pruned_loss=0.02957, over 19934.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2308, pruned_loss=0.03382, over 3877711.54 frames. ], batch size: 51, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:19:37,325 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7683, 4.4976, 4.5361, 4.7442, 4.4355, 4.8856, 4.8910, 5.0658], + device='cuda:1'), covar=tensor([0.0635, 0.0437, 0.0482, 0.0375, 0.0767, 0.0485, 0.0426, 0.0311], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0187, 0.0208, 0.0186, 0.0183, 0.0167, 0.0161, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 18:20:30,969 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5061, 2.2417, 3.6901, 3.1763, 3.6781, 3.7359, 3.4873, 3.5057], + device='cuda:1'), covar=tensor([0.0904, 0.1200, 0.0143, 0.0465, 0.0197, 0.0248, 0.0227, 0.0226], + device='cuda:1'), in_proj_covar=tensor([0.0106, 0.0109, 0.0093, 0.0156, 0.0092, 0.0105, 0.0096, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 18:20:39,760 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0 +2023-03-29 18:20:42,030 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-29 18:21:15,053 INFO [train.py:892] (1/4) Epoch 44, batch 850, loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03311, over 19744.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2309, pruned_loss=0.03356, over 3892653.69 frames. ], batch size: 139, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:21:26,245 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. 
limit=2.0 +2023-03-29 18:21:31,643 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8501, 2.8152, 3.0165, 2.4150, 3.0049, 2.5960, 2.9730, 3.0289], + device='cuda:1'), covar=tensor([0.0637, 0.0665, 0.0480, 0.0851, 0.0538, 0.0591, 0.0528, 0.0375], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0095, 0.0091, 0.0115, 0.0086, 0.0090, 0.0086, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 18:21:54,781 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.578e+02 3.349e+02 3.795e+02 4.586e+02 1.230e+03, threshold=7.591e+02, percent-clipped=1.0 +2023-03-29 18:22:09,674 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80639.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:22:31,730 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3926, 4.4980, 2.7129, 4.7202, 4.9299, 2.2205, 4.1551, 3.6379], + device='cuda:1'), covar=tensor([0.0635, 0.0717, 0.2604, 0.0733, 0.0462, 0.2802, 0.0925, 0.0889], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0270, 0.0241, 0.0291, 0.0272, 0.0210, 0.0247, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 18:23:11,194 INFO [train.py:892] (1/4) Epoch 44, batch 900, loss[loss=0.1444, simple_loss=0.2192, pruned_loss=0.03478, over 19794.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2311, pruned_loss=0.03352, over 3905611.91 frames. ], batch size: 167, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:24:18,699 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 18:24:56,072 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0881, 5.3825, 5.4430, 5.3046, 5.0283, 5.4051, 4.8874, 4.9091], + device='cuda:1'), covar=tensor([0.0495, 0.0530, 0.0504, 0.0478, 0.0655, 0.0500, 0.0702, 0.1038], + device='cuda:1'), in_proj_covar=tensor([0.0298, 0.0316, 0.0325, 0.0282, 0.0295, 0.0277, 0.0287, 0.0338], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 18:25:06,283 INFO [train.py:892] (1/4) Epoch 44, batch 950, loss[loss=0.1291, simple_loss=0.2082, pruned_loss=0.02494, over 19712.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2309, pruned_loss=0.03324, over 3915730.96 frames. 
], batch size: 81, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:25:43,636 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5473, 4.7406, 2.8282, 4.9550, 5.0735, 2.2232, 4.3676, 3.8018], + device='cuda:1'), covar=tensor([0.0601, 0.0649, 0.2582, 0.0651, 0.0568, 0.2742, 0.0846, 0.0882], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0270, 0.0241, 0.0292, 0.0273, 0.0210, 0.0247, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 18:25:49,368 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.433e+02 3.452e+02 4.047e+02 4.823e+02 9.247e+02, threshold=8.094e+02, percent-clipped=4.0 +2023-03-29 18:25:56,924 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80736.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:26:26,168 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80749.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:27:06,659 INFO [train.py:892] (1/4) Epoch 44, batch 1000, loss[loss=0.1345, simple_loss=0.2104, pruned_loss=0.02931, over 19804.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2304, pruned_loss=0.03332, over 3923767.97 frames. ], batch size: 174, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:27:24,499 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80773.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:28:18,974 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80797.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:29:01,259 INFO [train.py:892] (1/4) Epoch 44, batch 1050, loss[loss=0.1504, simple_loss=0.2271, pruned_loss=0.03683, over 19836.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2316, pruned_loss=0.03407, over 3930026.79 frames. ], batch size: 208, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:29:09,758 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80819.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:29:41,369 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.092e+02 3.498e+02 3.993e+02 4.688e+02 1.348e+03, threshold=7.986e+02, percent-clipped=2.0 +2023-03-29 18:29:44,665 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80834.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:30:22,008 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80850.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:30:46,962 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.2921, 5.6884, 5.7940, 5.6411, 5.5442, 5.5287, 5.4777, 5.3866], + device='cuda:1'), covar=tensor([0.1530, 0.1573, 0.0866, 0.1204, 0.0710, 0.0719, 0.1882, 0.1954], + device='cuda:1'), in_proj_covar=tensor([0.0308, 0.0358, 0.0385, 0.0320, 0.0293, 0.0301, 0.0379, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 18:30:57,643 INFO [train.py:892] (1/4) Epoch 44, batch 1100, loss[loss=0.1265, simple_loss=0.1979, pruned_loss=0.02761, over 19787.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.232, pruned_loss=0.03413, over 3934948.71 frames. ], batch size: 168, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:32:57,579 INFO [train.py:892] (1/4) Epoch 44, batch 1150, loss[loss=0.1351, simple_loss=0.2176, pruned_loss=0.02634, over 19872.00 frames. 
], tot_loss[loss=0.1505, simple_loss=0.2321, pruned_loss=0.03442, over 3938764.84 frames. ], batch size: 108, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:33:35,851 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.488e+02 3.446e+02 4.222e+02 4.835e+02 9.572e+02, threshold=8.444e+02, percent-clipped=1.0 +2023-03-29 18:33:49,593 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80939.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:34:56,384 INFO [train.py:892] (1/4) Epoch 44, batch 1200, loss[loss=0.1425, simple_loss=0.2236, pruned_loss=0.03072, over 19786.00 frames. ], tot_loss[loss=0.1508, simple_loss=0.2328, pruned_loss=0.03437, over 3939835.26 frames. ], batch size: 193, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:35:47,230 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80987.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:36:53,890 INFO [train.py:892] (1/4) Epoch 44, batch 1250, loss[loss=0.1408, simple_loss=0.2224, pruned_loss=0.02961, over 19835.00 frames. ], tot_loss[loss=0.1504, simple_loss=0.2325, pruned_loss=0.03413, over 3941878.20 frames. ], batch size: 161, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:37:34,761 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.062e+02 3.416e+02 3.866e+02 4.633e+02 7.617e+02, threshold=7.733e+02, percent-clipped=0.0 +2023-03-29 18:38:13,343 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81049.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:38:51,613 INFO [train.py:892] (1/4) Epoch 44, batch 1300, loss[loss=0.1359, simple_loss=0.2193, pruned_loss=0.02625, over 19865.00 frames. ], tot_loss[loss=0.1504, simple_loss=0.2329, pruned_loss=0.03393, over 3942674.60 frames. ], batch size: 104, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:39:18,968 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-29 18:39:52,050 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81092.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:40:04,663 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81097.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:40:48,662 INFO [train.py:892] (1/4) Epoch 44, batch 1350, loss[loss=0.1913, simple_loss=0.2897, pruned_loss=0.04645, over 19526.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2339, pruned_loss=0.03449, over 3944648.92 frames. ], batch size: 54, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:40:49,817 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:40:55,647 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81119.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:41:13,496 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. 
limit=2.0 +2023-03-29 18:41:18,991 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81129.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:41:27,829 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.454e+02 3.446e+02 4.149e+02 4.801e+02 1.146e+03, threshold=8.297e+02, percent-clipped=2.0 +2023-03-29 18:42:04,480 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81150.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:42:39,485 INFO [train.py:892] (1/4) Epoch 44, batch 1400, loss[loss=0.1562, simple_loss=0.2475, pruned_loss=0.03245, over 19659.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2324, pruned_loss=0.034, over 3946101.31 frames. ], batch size: 50, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:42:42,746 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81167.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:43:08,373 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81177.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:43:58,141 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81198.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:44:41,835 INFO [train.py:892] (1/4) Epoch 44, batch 1450, loss[loss=0.1304, simple_loss=0.2101, pruned_loss=0.02532, over 19806.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2323, pruned_loss=0.03372, over 3946626.57 frames. ], batch size: 47, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:45:20,208 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.287e+02 3.615e+02 3.975e+02 5.064e+02 7.888e+02, threshold=7.950e+02, percent-clipped=0.0 +2023-03-29 18:45:56,483 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1247, 2.6818, 4.4277, 3.8385, 4.2566, 4.3971, 4.2292, 4.0965], + device='cuda:1'), covar=tensor([0.0701, 0.1026, 0.0120, 0.0715, 0.0172, 0.0214, 0.0187, 0.0185], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0108, 0.0092, 0.0155, 0.0091, 0.0104, 0.0095, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 18:46:39,816 INFO [train.py:892] (1/4) Epoch 44, batch 1500, loss[loss=0.1351, simple_loss=0.213, pruned_loss=0.02863, over 19768.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2305, pruned_loss=0.03307, over 3948606.23 frames. ], batch size: 155, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:48:01,797 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81299.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:48:05,979 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81301.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:48:19,478 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3493, 2.7136, 4.6179, 4.0414, 4.3962, 4.5782, 4.4792, 4.3099], + device='cuda:1'), covar=tensor([0.0645, 0.1055, 0.0109, 0.0688, 0.0168, 0.0207, 0.0160, 0.0178], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0108, 0.0092, 0.0155, 0.0091, 0.0104, 0.0095, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 18:48:39,253 INFO [train.py:892] (1/4) Epoch 44, batch 1550, loss[loss=0.1606, simple_loss=0.2579, pruned_loss=0.03171, over 19678.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2307, pruned_loss=0.03295, over 3947665.47 frames. 
], batch size: 55, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:49:19,884 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.242e+02 3.436e+02 3.963e+02 4.737e+02 1.006e+03, threshold=7.927e+02, percent-clipped=2.0 +2023-03-29 18:50:16,970 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1460, 2.4703, 2.2808, 1.7081, 2.3061, 2.4294, 2.3220, 2.3727], + device='cuda:1'), covar=tensor([0.0458, 0.0347, 0.0358, 0.0609, 0.0415, 0.0338, 0.0351, 0.0328], + device='cuda:1'), in_proj_covar=tensor([0.0115, 0.0108, 0.0108, 0.0108, 0.0111, 0.0098, 0.0099, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 18:50:23,890 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81360.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:50:28,414 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81362.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:50:36,260 INFO [train.py:892] (1/4) Epoch 44, batch 1600, loss[loss=0.1439, simple_loss=0.2332, pruned_loss=0.02733, over 19854.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2305, pruned_loss=0.03309, over 3949076.57 frames. ], batch size: 58, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:51:24,573 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-29 18:51:40,581 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81392.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:52:37,029 INFO [train.py:892] (1/4) Epoch 44, batch 1650, loss[loss=0.1613, simple_loss=0.2451, pruned_loss=0.0387, over 19753.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2313, pruned_loss=0.0332, over 3944865.60 frames. ], batch size: 291, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:53:06,861 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81429.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:53:16,899 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.426e+02 3.380e+02 3.975e+02 4.611e+02 1.350e+03, threshold=7.949e+02, percent-clipped=2.0 +2023-03-29 18:53:33,573 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81440.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:54:29,477 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.28 vs. limit=5.0 +2023-03-29 18:54:32,128 INFO [train.py:892] (1/4) Epoch 44, batch 1700, loss[loss=0.1321, simple_loss=0.2088, pruned_loss=0.02769, over 19835.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2314, pruned_loss=0.03315, over 3946645.81 frames. 
], batch size: 177, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:54:49,107 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81472.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:54:59,862 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81477.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:56:07,471 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3955, 1.8335, 2.1063, 2.6430, 2.9911, 3.0875, 2.9520, 2.9928], + device='cuda:1'), covar=tensor([0.1151, 0.1975, 0.1666, 0.0847, 0.0551, 0.0431, 0.0519, 0.0529], + device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0172, 0.0185, 0.0159, 0.0146, 0.0140, 0.0134, 0.0124], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 18:56:28,374 INFO [train.py:892] (1/4) Epoch 44, batch 1750, loss[loss=0.1419, simple_loss=0.2262, pruned_loss=0.02878, over 19784.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2313, pruned_loss=0.03322, over 3946419.74 frames. ], batch size: 73, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:56:33,570 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-29 18:57:03,575 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 3.354e+02 3.892e+02 4.607e+02 1.019e+03, threshold=7.783e+02, percent-clipped=2.0 +2023-03-29 18:57:38,493 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-29 18:58:09,167 INFO [train.py:892] (1/4) Epoch 44, batch 1800, loss[loss=0.1336, simple_loss=0.2142, pruned_loss=0.02646, over 19849.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2313, pruned_loss=0.03358, over 3947304.47 frames. ], batch size: 197, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:58:38,145 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0 +2023-03-29 18:59:40,686 INFO [train.py:892] (1/4) Epoch 44, batch 1850, loss[loss=0.1396, simple_loss=0.2217, pruned_loss=0.02869, over 19824.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2324, pruned_loss=0.03386, over 3948027.77 frames. ], batch size: 57, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:59:46,328 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-03-29 19:00:42,763 INFO [train.py:892] (1/4) Epoch 45, batch 0, loss[loss=0.1391, simple_loss=0.2187, pruned_loss=0.02979, over 19538.00 frames. ], tot_loss[loss=0.1391, simple_loss=0.2187, pruned_loss=0.02979, over 19538.00 frames. ], batch size: 46, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:00:42,763 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 19:01:20,027 INFO [train.py:926] (1/4) Epoch 45, validation: loss=0.1889, simple_loss=0.2504, pruned_loss=0.0637, over 2883724.00 frames. +2023-03-29 19:01:20,030 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 19:01:47,734 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.448e+02 3.358e+02 4.011e+02 4.800e+02 7.460e+02, threshold=8.023e+02, percent-clipped=0.0 +2023-03-29 19:01:56,003 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-29 19:02:13,582 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
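The lr column decays smoothly within an epoch (3.63e-03 down to 3.61e-03 across Epoch 43) and steps down at each epoch boundary (3.56e-03 at the start of Epoch 44, 3.48e-03 at Epoch 45). This is consistent with an Eden-style schedule that decays roughly as an inverse square root in both batches and epochs; the sketch below, with assumed constants base_lr=0.05, lr_batches=5000, lr_epochs=3.5 and a 0-based epoch index, reproduces the printed values:

def eden_lr(base_lr: float, batch: float, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Each factor is ~1 early on and decays like x**-0.5 past its "knee".
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# eden_lr(0.05, 78588, 42) ~= 3.63e-03 and eden_lr(0.05, 81600, 44) ~= 3.48e-03,
# matching the values printed for Epoch 43 and Epoch 45.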
limit=2.0 +2023-03-29 19:02:42,319 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81655.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:02:48,835 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81657.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:03:18,008 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81670.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:03:18,995 INFO [train.py:892] (1/4) Epoch 45, batch 50, loss[loss=0.1483, simple_loss=0.229, pruned_loss=0.03384, over 19830.00 frames. ], tot_loss[loss=0.142, simple_loss=0.2228, pruned_loss=0.03059, over 890986.05 frames. ], batch size: 128, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:04:26,784 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1864, 5.2453, 5.5570, 5.3230, 5.3602, 5.1072, 5.3060, 5.0853], + device='cuda:1'), covar=tensor([0.1398, 0.1540, 0.0835, 0.1165, 0.0703, 0.0833, 0.1711, 0.1880], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0358, 0.0387, 0.0320, 0.0294, 0.0301, 0.0379, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 19:05:21,202 INFO [train.py:892] (1/4) Epoch 45, batch 100, loss[loss=0.1236, simple_loss=0.2036, pruned_loss=0.02181, over 19735.00 frames. ], tot_loss[loss=0.1477, simple_loss=0.2284, pruned_loss=0.03344, over 1567998.29 frames. ], batch size: 118, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:05:44,650 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81731.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 19:05:49,386 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.145e+02 3.372e+02 3.916e+02 5.007e+02 1.092e+03, threshold=7.832e+02, percent-clipped=2.0 +2023-03-29 19:06:03,088 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.8334, 2.4371, 2.6590, 3.0417, 3.4453, 3.7156, 3.5418, 3.5508], + device='cuda:1'), covar=tensor([0.1037, 0.1594, 0.1414, 0.0795, 0.0507, 0.0322, 0.0525, 0.0553], + device='cuda:1'), in_proj_covar=tensor([0.0166, 0.0169, 0.0183, 0.0157, 0.0144, 0.0138, 0.0133, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 19:06:13,926 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2229, 3.0876, 4.9962, 3.5456, 3.8941, 3.5738, 2.6096, 2.7543], + device='cuda:1'), covar=tensor([0.0895, 0.2991, 0.0325, 0.1158, 0.1790, 0.1482, 0.2584, 0.2771], + device='cuda:1'), in_proj_covar=tensor([0.0361, 0.0406, 0.0360, 0.0300, 0.0382, 0.0404, 0.0392, 0.0369], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 19:07:18,294 INFO [train.py:892] (1/4) Epoch 45, batch 150, loss[loss=0.1507, simple_loss=0.231, pruned_loss=0.03513, over 19780.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2302, pruned_loss=0.03332, over 2096296.38 frames. 
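Most of the zipformer warmup messages at this point show num_to_drop=0, with an occasional single layer dropped (layers_to_drop={3} just above). This looks like stochastic layer skipping whose probability decays over each stack's (warmup_begin, warmup_end) window, measured in batch counts, and keeps a small floor afterwards. The schedule below is purely an assumption chosen to match that qualitative behaviour:

import random

def pick_layers_to_drop(num_layers: int, batch_count: float,
                        warmup_begin: float, warmup_end: float) -> set:
    # Hypothetical probabilities; only the shape of the schedule matters here.
    if batch_count < warmup_begin:
        drop_prob = 0.5
    elif batch_count < warmup_end:
        t = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        drop_prob = 0.5 * (1.0 - t) + 0.05 * t   # linear decay across warmup
    else:
        drop_prob = 0.05                         # rare drops late in training
    return {i for i in range(num_layers) if random.random() < drop_prob}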
], batch size: 215, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:07:21,504 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81772.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:09:11,565 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81820.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:09:12,766 INFO [train.py:892] (1/4) Epoch 45, batch 200, loss[loss=0.1566, simple_loss=0.2363, pruned_loss=0.03842, over 19841.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2303, pruned_loss=0.03323, over 2507463.73 frames. ], batch size: 145, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:09:39,834 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.431e+02 3.452e+02 4.222e+02 4.821e+02 9.570e+02, threshold=8.444e+02, percent-clipped=2.0 +2023-03-29 19:10:54,808 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-03-29 19:11:08,727 INFO [train.py:892] (1/4) Epoch 45, batch 250, loss[loss=0.1556, simple_loss=0.24, pruned_loss=0.03555, over 19905.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2309, pruned_loss=0.03293, over 2827812.58 frames. ], batch size: 71, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:12:49,237 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.48 vs. limit=5.0 +2023-03-29 19:13:09,789 INFO [train.py:892] (1/4) Epoch 45, batch 300, loss[loss=0.1806, simple_loss=0.2666, pruned_loss=0.04724, over 19636.00 frames. ], tot_loss[loss=0.148, simple_loss=0.2299, pruned_loss=0.03304, over 3077585.40 frames. ], batch size: 351, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:13:37,295 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.189e+02 3.336e+02 4.013e+02 4.745e+02 8.684e+02, threshold=8.026e+02, percent-clipped=2.0 +2023-03-29 19:14:30,295 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81955.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:14:34,780 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81957.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:15:04,601 INFO [train.py:892] (1/4) Epoch 45, batch 350, loss[loss=0.1526, simple_loss=0.2415, pruned_loss=0.03192, over 19745.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2293, pruned_loss=0.03288, over 3272916.68 frames. ], batch size: 84, lr: 3.47e-03, grad_scale: 16.0 +2023-03-29 19:15:45,267 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81987.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:16:22,835 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82003.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:16:26,982 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82005.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:17:03,738 INFO [train.py:892] (1/4) Epoch 45, batch 400, loss[loss=0.1389, simple_loss=0.2245, pruned_loss=0.02667, over 19813.00 frames. ], tot_loss[loss=0.146, simple_loss=0.2277, pruned_loss=0.03214, over 3424139.81 frames. 
], batch size: 72, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:17:15,207 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82026.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 19:17:33,548 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.143e+02 3.235e+02 3.968e+02 4.770e+02 8.639e+02, threshold=7.936e+02, percent-clipped=1.0 +2023-03-29 19:18:09,211 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82048.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:18:26,179 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82055.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:18:39,998 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8503, 4.5586, 4.5973, 4.8523, 4.6617, 4.9869, 4.9219, 5.1616], + device='cuda:1'), covar=tensor([0.0686, 0.0402, 0.0468, 0.0366, 0.0542, 0.0450, 0.0443, 0.0311], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0186, 0.0207, 0.0185, 0.0183, 0.0167, 0.0161, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 19:19:03,143 INFO [train.py:892] (1/4) Epoch 45, batch 450, loss[loss=0.1543, simple_loss=0.2388, pruned_loss=0.03491, over 19756.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2291, pruned_loss=0.03272, over 3539836.54 frames. ], batch size: 259, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:19:06,969 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.58 vs. limit=5.0 +2023-03-29 19:20:10,824 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6775, 2.1721, 2.4918, 2.9511, 3.2713, 3.4686, 3.3163, 3.3454], + device='cuda:1'), covar=tensor([0.1137, 0.1724, 0.1468, 0.0789, 0.0562, 0.0402, 0.0577, 0.0503], + device='cuda:1'), in_proj_covar=tensor([0.0167, 0.0169, 0.0183, 0.0157, 0.0144, 0.0138, 0.0133, 0.0123], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 19:20:51,328 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:21:01,432 INFO [train.py:892] (1/4) Epoch 45, batch 500, loss[loss=0.1426, simple_loss=0.2179, pruned_loss=0.03363, over 19831.00 frames. ], tot_loss[loss=0.1462, simple_loss=0.2274, pruned_loss=0.03253, over 3631594.77 frames. ], batch size: 177, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:21:29,424 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 3.621e+02 4.307e+02 5.011e+02 9.871e+02, threshold=8.613e+02, percent-clipped=3.0 +2023-03-29 19:21:30,559 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7865, 3.8712, 2.3672, 4.0479, 4.1860, 1.9106, 3.4374, 3.1987], + device='cuda:1'), covar=tensor([0.0771, 0.0822, 0.2799, 0.0753, 0.0575, 0.2957, 0.1196, 0.0980], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0268, 0.0239, 0.0290, 0.0270, 0.0209, 0.0246, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 19:22:58,964 INFO [train.py:892] (1/4) Epoch 45, batch 550, loss[loss=0.1399, simple_loss=0.2224, pruned_loss=0.02873, over 19804.00 frames. ], tot_loss[loss=0.1466, simple_loss=0.228, pruned_loss=0.03255, over 3703175.21 frames. 
], batch size: 67, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:24:01,294 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2557, 1.7640, 1.8987, 2.5160, 2.6734, 2.8211, 2.6396, 2.7351], + device='cuda:1'), covar=tensor([0.1237, 0.1973, 0.1871, 0.0856, 0.0694, 0.0494, 0.0641, 0.0579], + device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0172, 0.0185, 0.0158, 0.0146, 0.0140, 0.0135, 0.0124], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 19:24:56,169 INFO [train.py:892] (1/4) Epoch 45, batch 600, loss[loss=0.143, simple_loss=0.2183, pruned_loss=0.0338, over 19835.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2285, pruned_loss=0.03277, over 3758698.02 frames. ], batch size: 128, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:25:22,517 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.595e+02 3.638e+02 4.311e+02 5.322e+02 1.783e+03, threshold=8.623e+02, percent-clipped=2.0 +2023-03-29 19:25:33,367 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7885, 2.4065, 2.7714, 3.0738, 3.4596, 3.7835, 3.5960, 3.6341], + device='cuda:1'), covar=tensor([0.1090, 0.1745, 0.1390, 0.0768, 0.0515, 0.0304, 0.0504, 0.0497], + device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0172, 0.0185, 0.0158, 0.0146, 0.0140, 0.0134, 0.0124], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 19:26:49,036 INFO [train.py:892] (1/4) Epoch 45, batch 650, loss[loss=0.1555, simple_loss=0.2361, pruned_loss=0.03741, over 19840.00 frames. ], tot_loss[loss=0.1463, simple_loss=0.228, pruned_loss=0.0323, over 3802303.39 frames. ], batch size: 142, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:28:46,110 INFO [train.py:892] (1/4) Epoch 45, batch 700, loss[loss=0.1274, simple_loss=0.1985, pruned_loss=0.0282, over 19862.00 frames. ], tot_loss[loss=0.1462, simple_loss=0.2281, pruned_loss=0.03217, over 3832278.48 frames. 
], batch size: 142, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:28:57,050 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0182, 4.1719, 2.4390, 4.3330, 4.5190, 2.0121, 3.7837, 3.4128], + device='cuda:1'), covar=tensor([0.0816, 0.0828, 0.2960, 0.0716, 0.0539, 0.2839, 0.0959, 0.0926], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0267, 0.0239, 0.0289, 0.0269, 0.0208, 0.0246, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 19:29:01,247 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82326.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:29:15,665 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.176e+02 3.457e+02 4.012e+02 4.787e+02 1.008e+03, threshold=8.024e+02, percent-clipped=2.0 +2023-03-29 19:29:40,064 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82343.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:29:46,440 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82346.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:29:46,473 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5310, 2.7073, 2.5110, 2.0395, 2.5914, 2.7179, 2.6981, 2.7759], + device='cuda:1'), covar=tensor([0.0459, 0.0426, 0.0386, 0.0586, 0.0427, 0.0360, 0.0348, 0.0294], + device='cuda:1'), in_proj_covar=tensor([0.0116, 0.0109, 0.0109, 0.0110, 0.0113, 0.0099, 0.0101, 0.0099], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 19:30:08,243 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7602, 4.6084, 5.2715, 4.7268, 4.2908, 5.0745, 4.8980, 5.4632], + device='cuda:1'), covar=tensor([0.1101, 0.0489, 0.0395, 0.0423, 0.0849, 0.0505, 0.0568, 0.0364], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0231, 0.0232, 0.0244, 0.0213, 0.0257, 0.0245, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 19:30:42,587 INFO [train.py:892] (1/4) Epoch 45, batch 750, loss[loss=0.1441, simple_loss=0.2265, pruned_loss=0.03082, over 19840.00 frames. ], tot_loss[loss=0.1457, simple_loss=0.2278, pruned_loss=0.03182, over 3859139.14 frames. ], batch size: 60, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:30:49,661 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82374.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:32:10,223 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82407.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:32:19,251 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82411.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:32:38,391 INFO [train.py:892] (1/4) Epoch 45, batch 800, loss[loss=0.1509, simple_loss=0.2393, pruned_loss=0.03129, over 19625.00 frames. ], tot_loss[loss=0.1461, simple_loss=0.2281, pruned_loss=0.03205, over 3878899.95 frames. 
], batch size: 65, lr: 3.46e-03, grad_scale: 32.0 +2023-03-29 19:33:03,753 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.510e+02 3.458e+02 4.305e+02 5.045e+02 7.264e+02, threshold=8.610e+02, percent-clipped=0.0 +2023-03-29 19:33:55,131 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0651, 3.2247, 3.2603, 3.2802, 3.1794, 3.2778, 3.0435, 3.3611], + device='cuda:1'), covar=tensor([0.0348, 0.0331, 0.0302, 0.0280, 0.0362, 0.0303, 0.0412, 0.0419], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0092, 0.0095, 0.0090, 0.0102, 0.0095, 0.0111, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 19:34:36,606 INFO [train.py:892] (1/4) Epoch 45, batch 850, loss[loss=0.142, simple_loss=0.2186, pruned_loss=0.03271, over 19819.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2289, pruned_loss=0.03244, over 3895293.37 frames. ], batch size: 202, lr: 3.46e-03, grad_scale: 32.0 +2023-03-29 19:35:25,115 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82493.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 19:36:29,767 INFO [train.py:892] (1/4) Epoch 45, batch 900, loss[loss=0.145, simple_loss=0.2198, pruned_loss=0.03507, over 19817.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2291, pruned_loss=0.0327, over 3907146.66 frames. ], batch size: 133, lr: 3.46e-03, grad_scale: 32.0 +2023-03-29 19:36:57,904 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.335e+02 3.589e+02 4.303e+02 4.859e+02 1.125e+03, threshold=8.606e+02, percent-clipped=1.0 +2023-03-29 19:37:49,545 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82554.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 19:38:23,818 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82569.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:38:27,698 INFO [train.py:892] (1/4) Epoch 45, batch 950, loss[loss=0.1455, simple_loss=0.224, pruned_loss=0.03349, over 19776.00 frames. ], tot_loss[loss=0.1471, simple_loss=0.2293, pruned_loss=0.03241, over 3915296.09 frames. ], batch size: 52, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:40:27,144 INFO [train.py:892] (1/4) Epoch 45, batch 1000, loss[loss=0.2264, simple_loss=0.3044, pruned_loss=0.0742, over 19151.00 frames. ], tot_loss[loss=0.1466, simple_loss=0.2287, pruned_loss=0.03223, over 3922593.76 frames. ], batch size: 452, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:40:51,636 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82630.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:41:01,297 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.945e+02 3.409e+02 3.840e+02 4.535e+02 7.731e+02, threshold=7.679e+02, percent-clipped=0.0 +2023-03-29 19:41:20,704 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-29 19:41:22,504 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82643.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:42:26,387 INFO [train.py:892] (1/4) Epoch 45, batch 1050, loss[loss=0.1378, simple_loss=0.2126, pruned_loss=0.03149, over 19844.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2292, pruned_loss=0.03271, over 3929326.31 frames. 
], batch size: 124, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:42:44,694 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4530, 3.3557, 3.5987, 2.7208, 3.6744, 3.0780, 3.3142, 3.5697], + device='cuda:1'), covar=tensor([0.0618, 0.0408, 0.0493, 0.0808, 0.0349, 0.0503, 0.0515, 0.0396], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0095, 0.0091, 0.0115, 0.0087, 0.0090, 0.0087, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 19:43:12,529 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82691.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:43:32,936 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0707, 2.8139, 3.2327, 2.7027, 3.3256, 3.3265, 3.8697, 4.2611], + device='cuda:1'), covar=tensor([0.0637, 0.1708, 0.1591, 0.2381, 0.1714, 0.1429, 0.0698, 0.0605], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0248, 0.0276, 0.0264, 0.0309, 0.0267, 0.0242, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 19:43:39,244 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82702.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:43:59,344 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82711.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:44:18,907 INFO [train.py:892] (1/4) Epoch 45, batch 1100, loss[loss=0.1335, simple_loss=0.2176, pruned_loss=0.0247, over 19893.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2302, pruned_loss=0.0327, over 3932013.84 frames. ], batch size: 87, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:44:49,681 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.363e+02 3.616e+02 4.255e+02 4.865e+02 1.038e+03, threshold=8.510e+02, percent-clipped=5.0 +2023-03-29 19:45:48,337 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82759.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:46:17,010 INFO [train.py:892] (1/4) Epoch 45, batch 1150, loss[loss=0.1421, simple_loss=0.2274, pruned_loss=0.02838, over 19831.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.2303, pruned_loss=0.0329, over 3935727.59 frames. ], batch size: 43, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:46:44,045 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3535, 3.1744, 3.4747, 2.5416, 3.4680, 2.9604, 3.1584, 3.4459], + device='cuda:1'), covar=tensor([0.0646, 0.0479, 0.0559, 0.0937, 0.0430, 0.0530, 0.0573, 0.0379], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0095, 0.0092, 0.0116, 0.0087, 0.0091, 0.0087, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 19:47:22,124 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6297, 2.6661, 2.8682, 2.5034, 3.0105, 3.0362, 3.4820, 3.7797], + device='cuda:1'), covar=tensor([0.0712, 0.1701, 0.1768, 0.2323, 0.1630, 0.1444, 0.0718, 0.0663], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0248, 0.0276, 0.0263, 0.0310, 0.0267, 0.0242, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 19:48:14,766 INFO [train.py:892] (1/4) Epoch 45, batch 1200, loss[loss=0.1736, simple_loss=0.2486, pruned_loss=0.04925, over 19696.00 frames. 
], tot_loss[loss=0.149, simple_loss=0.2311, pruned_loss=0.03352, over 3939585.05 frames. ], batch size: 283, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:48:45,774 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.607e+02 4.196e+02 5.114e+02 1.465e+03, threshold=8.391e+02, percent-clipped=1.0 +2023-03-29 19:49:22,096 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82849.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 19:49:39,152 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7848, 3.5601, 3.9840, 2.9231, 4.1248, 3.3067, 3.4943, 3.8310], + device='cuda:1'), covar=tensor([0.0714, 0.0454, 0.0513, 0.0831, 0.0357, 0.0469, 0.0551, 0.0366], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0096, 0.0092, 0.0116, 0.0088, 0.0091, 0.0088, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 19:50:06,271 INFO [train.py:892] (1/4) Epoch 45, batch 1250, loss[loss=0.1184, simple_loss=0.1954, pruned_loss=0.02071, over 19801.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.23, pruned_loss=0.03341, over 3941752.48 frames. ], batch size: 114, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:50:32,599 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7625, 4.8535, 5.1423, 4.9183, 5.0519, 4.7414, 4.8692, 4.7101], + device='cuda:1'), covar=tensor([0.1386, 0.1578, 0.0852, 0.1270, 0.0811, 0.0884, 0.1795, 0.1961], + device='cuda:1'), in_proj_covar=tensor([0.0309, 0.0357, 0.0386, 0.0321, 0.0296, 0.0301, 0.0379, 0.0409], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 19:51:22,363 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7229, 3.0302, 3.2451, 3.6074, 2.6004, 3.0835, 2.3825, 2.4051], + device='cuda:1'), covar=tensor([0.0546, 0.1656, 0.1001, 0.0493, 0.1863, 0.0843, 0.1410, 0.1631], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0328, 0.0255, 0.0214, 0.0250, 0.0218, 0.0224, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 19:52:02,760 INFO [train.py:892] (1/4) Epoch 45, batch 1300, loss[loss=0.1439, simple_loss=0.2253, pruned_loss=0.03125, over 19784.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.2302, pruned_loss=0.03344, over 3942950.92 frames. ], batch size: 154, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:52:15,020 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82925.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:52:34,741 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.586e+02 3.425e+02 4.088e+02 4.844e+02 1.609e+03, threshold=8.176e+02, percent-clipped=1.0 +2023-03-29 19:53:43,487 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82963.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:54:02,345 INFO [train.py:892] (1/4) Epoch 45, batch 1350, loss[loss=0.1329, simple_loss=0.2132, pruned_loss=0.02631, over 19798.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2304, pruned_loss=0.03323, over 3942412.07 frames. ], batch size: 45, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:55:08,712 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. 
limit=2.0 +2023-03-29 19:55:17,035 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83002.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:55:59,009 INFO [train.py:892] (1/4) Epoch 45, batch 1400, loss[loss=0.1597, simple_loss=0.2445, pruned_loss=0.0374, over 19699.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.2298, pruned_loss=0.03296, over 3943567.29 frames. ], batch size: 101, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:56:09,238 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83024.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:56:29,961 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.035e+02 3.427e+02 4.004e+02 4.659e+02 9.090e+02, threshold=8.008e+02, percent-clipped=1.0 +2023-03-29 19:57:07,386 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83050.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:57:56,912 INFO [train.py:892] (1/4) Epoch 45, batch 1450, loss[loss=0.1577, simple_loss=0.2427, pruned_loss=0.03629, over 19697.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2299, pruned_loss=0.03288, over 3944402.81 frames. ], batch size: 265, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:59:49,374 INFO [train.py:892] (1/4) Epoch 45, batch 1500, loss[loss=0.1811, simple_loss=0.2674, pruned_loss=0.04738, over 19567.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2302, pruned_loss=0.03324, over 3945561.06 frames. ], batch size: 376, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:59:56,678 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8018, 3.7075, 3.6811, 3.4208, 3.7775, 2.7916, 3.1104, 1.7512], + device='cuda:1'), covar=tensor([0.0223, 0.0250, 0.0165, 0.0226, 0.0163, 0.1198, 0.0602, 0.1816], + device='cuda:1'), in_proj_covar=tensor([0.0111, 0.0155, 0.0120, 0.0142, 0.0126, 0.0140, 0.0146, 0.0133], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 20:00:17,873 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.217e+02 3.289e+02 3.880e+02 4.653e+02 8.208e+02, threshold=7.760e+02, percent-clipped=2.0 +2023-03-29 20:00:55,591 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83149.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 20:01:45,158 INFO [train.py:892] (1/4) Epoch 45, batch 1550, loss[loss=0.1459, simple_loss=0.2333, pruned_loss=0.0292, over 19721.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2301, pruned_loss=0.03323, over 3946861.22 frames. 
], batch size: 104, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:02:21,208 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8752, 2.8596, 4.3855, 3.3514, 3.5354, 3.2242, 2.4572, 2.6128], + device='cuda:1'), covar=tensor([0.1144, 0.3136, 0.0516, 0.1212, 0.1884, 0.1705, 0.2791, 0.2896], + device='cuda:1'), in_proj_covar=tensor([0.0361, 0.0407, 0.0360, 0.0302, 0.0384, 0.0407, 0.0394, 0.0370], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 20:02:46,118 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83196.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:02:48,042 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83197.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 20:03:45,841 INFO [train.py:892] (1/4) Epoch 45, batch 1600, loss[loss=0.1359, simple_loss=0.2242, pruned_loss=0.02373, over 19864.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2309, pruned_loss=0.0334, over 3948280.27 frames. ], batch size: 89, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:03:51,207 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1820, 2.9770, 3.0540, 3.2060, 3.1249, 3.0821, 3.2326, 3.3968], + device='cuda:1'), covar=tensor([0.0792, 0.0592, 0.0620, 0.0510, 0.0813, 0.0895, 0.0552, 0.0426], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0190, 0.0210, 0.0189, 0.0187, 0.0170, 0.0164, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 20:03:57,082 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83225.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:03:59,289 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.9836, 6.3053, 6.3084, 6.1725, 6.0097, 6.2972, 5.6546, 5.6185], + device='cuda:1'), covar=tensor([0.0432, 0.0391, 0.0421, 0.0390, 0.0531, 0.0448, 0.0612, 0.0967], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0312, 0.0321, 0.0281, 0.0292, 0.0274, 0.0285, 0.0335], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 20:04:15,361 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.230e+02 3.453e+02 4.061e+02 4.762e+02 8.632e+02, threshold=8.122e+02, percent-clipped=1.0 +2023-03-29 20:05:10,892 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83257.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:05:41,841 INFO [train.py:892] (1/4) Epoch 45, batch 1650, loss[loss=0.125, simple_loss=0.2025, pruned_loss=0.02377, over 19808.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2295, pruned_loss=0.03301, over 3948661.94 frames. ], batch size: 132, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:05:47,000 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:06:11,720 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83284.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:07:29,136 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83319.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:07:33,427 INFO [train.py:892] (1/4) Epoch 45, batch 1700, loss[loss=0.1503, simple_loss=0.2314, pruned_loss=0.03463, over 19763.00 frames. 
], tot_loss[loss=0.1489, simple_loss=0.2307, pruned_loss=0.03353, over 3949025.19 frames. ], batch size: 70, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:08:02,059 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.889e+02 3.340e+02 3.879e+02 4.594e+02 1.209e+03, threshold=7.757e+02, percent-clipped=3.0 +2023-03-29 20:08:28,527 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83345.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:09:13,521 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5201, 3.2498, 3.3741, 3.5248, 3.4383, 3.4604, 3.5807, 3.7639], + device='cuda:1'), covar=tensor([0.0755, 0.0597, 0.0595, 0.0494, 0.0694, 0.0769, 0.0537, 0.0387], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0189, 0.0209, 0.0188, 0.0186, 0.0169, 0.0163, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 20:09:20,243 INFO [train.py:892] (1/4) Epoch 45, batch 1750, loss[loss=0.14, simple_loss=0.2144, pruned_loss=0.03277, over 19874.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2302, pruned_loss=0.03364, over 3949242.51 frames. ], batch size: 157, lr: 3.44e-03, grad_scale: 16.0 +2023-03-29 20:10:59,580 INFO [train.py:892] (1/4) Epoch 45, batch 1800, loss[loss=0.1347, simple_loss=0.2185, pruned_loss=0.02542, over 19759.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2307, pruned_loss=0.03381, over 3948427.36 frames. ], batch size: 88, lr: 3.44e-03, grad_scale: 16.0 +2023-03-29 20:11:23,980 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.557e+02 3.459e+02 4.040e+02 5.185e+02 7.885e+02, threshold=8.081e+02, percent-clipped=1.0 +2023-03-29 20:11:41,244 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.0552, 5.2611, 5.4878, 5.2293, 5.3243, 5.0590, 5.2142, 4.9904], + device='cuda:1'), covar=tensor([0.1502, 0.1463, 0.0903, 0.1324, 0.0722, 0.0982, 0.1947, 0.2014], + device='cuda:1'), in_proj_covar=tensor([0.0309, 0.0358, 0.0388, 0.0322, 0.0297, 0.0303, 0.0382, 0.0411], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 20:12:35,797 INFO [train.py:892] (1/4) Epoch 45, batch 1850, loss[loss=0.1783, simple_loss=0.2663, pruned_loss=0.04513, over 19829.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2323, pruned_loss=0.03377, over 3947828.22 frames. ], batch size: 57, lr: 3.44e-03, grad_scale: 16.0 +2023-03-29 20:13:42,306 INFO [train.py:892] (1/4) Epoch 46, batch 0, loss[loss=0.1399, simple_loss=0.2109, pruned_loss=0.03441, over 19773.00 frames. ], tot_loss[loss=0.1399, simple_loss=0.2109, pruned_loss=0.03441, over 19773.00 frames. ], batch size: 66, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:13:42,307 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 20:14:19,282 INFO [train.py:926] (1/4) Epoch 46, validation: loss=0.1879, simple_loss=0.2498, pruned_loss=0.06295, over 2883724.00 frames. +2023-03-29 20:14:19,285 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 20:16:21,253 INFO [train.py:892] (1/4) Epoch 46, batch 50, loss[loss=0.1304, simple_loss=0.2146, pruned_loss=0.02311, over 19874.00 frames. ], tot_loss[loss=0.1431, simple_loss=0.2238, pruned_loss=0.03122, over 890400.60 frames. 
], batch size: 97, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:16:38,121 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.206e+02 3.234e+02 3.698e+02 4.952e+02 1.024e+03, threshold=7.395e+02, percent-clipped=5.0 +2023-03-29 20:16:50,938 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-29 20:17:20,610 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83552.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:17:24,118 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8390, 3.3269, 3.5911, 3.1939, 4.0065, 3.9892, 4.5201, 5.1291], + device='cuda:1'), covar=tensor([0.0425, 0.1528, 0.1335, 0.2054, 0.1355, 0.1161, 0.0619, 0.0343], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0249, 0.0278, 0.0265, 0.0312, 0.0268, 0.0243, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 20:18:14,841 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0677, 2.1824, 4.4649, 3.8872, 4.3451, 4.3554, 4.1481, 4.2100], + device='cuda:1'), covar=tensor([0.0903, 0.1543, 0.0151, 0.0770, 0.0175, 0.0292, 0.0237, 0.0213], + device='cuda:1'), in_proj_covar=tensor([0.0106, 0.0108, 0.0093, 0.0155, 0.0091, 0.0105, 0.0095, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 20:18:16,081 INFO [train.py:892] (1/4) Epoch 46, batch 100, loss[loss=0.147, simple_loss=0.2339, pruned_loss=0.03007, over 19893.00 frames. ], tot_loss[loss=0.145, simple_loss=0.227, pruned_loss=0.03154, over 1569050.16 frames. ], batch size: 91, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:19:28,001 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.1523, 2.6211, 3.1326, 3.2609, 3.7091, 4.0683, 3.9390, 3.9689], + device='cuda:1'), covar=tensor([0.0906, 0.1644, 0.1218, 0.0730, 0.0472, 0.0308, 0.0396, 0.0486], + device='cuda:1'), in_proj_covar=tensor([0.0168, 0.0171, 0.0183, 0.0158, 0.0145, 0.0139, 0.0134, 0.0123], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 20:19:59,434 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83619.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:20:13,487 INFO [train.py:892] (1/4) Epoch 46, batch 150, loss[loss=0.1273, simple_loss=0.2066, pruned_loss=0.02399, over 19769.00 frames. ], tot_loss[loss=0.1458, simple_loss=0.2278, pruned_loss=0.03193, over 2098176.26 frames. 
], batch size: 113, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:20:31,958 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.341e+02 3.214e+02 3.787e+02 4.556e+02 6.937e+02, threshold=7.575e+02, percent-clipped=0.0 +2023-03-29 20:20:51,226 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83640.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:21:17,894 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5311, 3.5593, 2.3576, 4.1880, 3.8353, 4.1408, 4.2094, 3.3224], + device='cuda:1'), covar=tensor([0.0595, 0.0562, 0.1383, 0.0474, 0.0507, 0.0413, 0.0498, 0.0766], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0152, 0.0149, 0.0163, 0.0141, 0.0148, 0.0157, 0.0155], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 20:21:25,261 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7064, 4.8339, 2.8839, 5.1084, 5.2950, 2.3677, 4.5103, 3.8416], + device='cuda:1'), covar=tensor([0.0553, 0.0573, 0.2547, 0.0523, 0.0360, 0.2518, 0.0844, 0.0821], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0270, 0.0242, 0.0292, 0.0271, 0.0209, 0.0249, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 20:21:54,615 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83667.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:22:16,395 INFO [train.py:892] (1/4) Epoch 46, batch 200, loss[loss=0.1388, simple_loss=0.2245, pruned_loss=0.02653, over 19776.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2295, pruned_loss=0.03243, over 2509699.60 frames. ], batch size: 66, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:24:05,077 INFO [train.py:892] (1/4) Epoch 46, batch 250, loss[loss=0.151, simple_loss=0.2318, pruned_loss=0.03505, over 19778.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2293, pruned_loss=0.03252, over 2828238.33 frames. ], batch size: 198, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:24:21,763 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.227e+02 3.614e+02 4.201e+02 5.267e+02 1.212e+03, threshold=8.403e+02, percent-clipped=3.0 +2023-03-29 20:24:30,436 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7102, 2.6833, 2.9478, 2.6014, 3.0633, 3.0678, 3.5336, 3.8791], + device='cuda:1'), covar=tensor([0.0782, 0.1905, 0.1811, 0.2366, 0.1717, 0.1596, 0.0820, 0.0787], + device='cuda:1'), in_proj_covar=tensor([0.0264, 0.0249, 0.0277, 0.0265, 0.0312, 0.0268, 0.0243, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 20:25:25,596 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83760.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:25:48,273 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 20:26:06,563 INFO [train.py:892] (1/4) Epoch 46, batch 300, loss[loss=0.1441, simple_loss=0.2278, pruned_loss=0.03022, over 19648.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.231, pruned_loss=0.03297, over 3077038.86 frames. 
], batch size: 79, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:26:15,096 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0296, 3.2406, 2.8699, 2.3844, 2.8950, 3.1885, 3.1303, 3.1887], + device='cuda:1'), covar=tensor([0.0311, 0.0345, 0.0312, 0.0557, 0.0350, 0.0274, 0.0280, 0.0231], + device='cuda:1'), in_proj_covar=tensor([0.0116, 0.0109, 0.0110, 0.0109, 0.0113, 0.0099, 0.0101, 0.0099], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 20:28:00,055 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83821.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:28:02,732 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.47 vs. limit=2.0 +2023-03-29 20:28:11,298 INFO [train.py:892] (1/4) Epoch 46, batch 350, loss[loss=0.1342, simple_loss=0.2126, pruned_loss=0.02784, over 19867.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2317, pruned_loss=0.0334, over 3269628.64 frames. ], batch size: 158, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:28:31,194 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.272e+02 3.501e+02 4.159e+02 5.209e+02 9.522e+02, threshold=8.317e+02, percent-clipped=2.0 +2023-03-29 20:29:18,757 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83852.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:30:17,336 INFO [train.py:892] (1/4) Epoch 46, batch 400, loss[loss=0.1348, simple_loss=0.2168, pruned_loss=0.02638, over 19768.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2312, pruned_loss=0.03325, over 3420363.15 frames. ], batch size: 69, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:30:28,487 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.3108, 5.5817, 5.6767, 5.5336, 5.3341, 5.6261, 5.1155, 5.0670], + device='cuda:1'), covar=tensor([0.0444, 0.0470, 0.0438, 0.0403, 0.0582, 0.0463, 0.0692, 0.0984], + device='cuda:1'), in_proj_covar=tensor([0.0296, 0.0314, 0.0322, 0.0284, 0.0295, 0.0275, 0.0288, 0.0337], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 20:31:21,551 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83900.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:32:25,449 INFO [train.py:892] (1/4) Epoch 46, batch 450, loss[loss=0.1633, simple_loss=0.2417, pruned_loss=0.04247, over 19699.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.232, pruned_loss=0.03339, over 3536124.25 frames. 
], batch size: 283, lr: 3.40e-03, grad_scale: 8.0 +2023-03-29 20:32:48,216 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.948e+02 3.420e+02 3.775e+02 4.552e+02 8.094e+02, threshold=7.550e+02, percent-clipped=0.0 +2023-03-29 20:33:01,205 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83940.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:33:22,576 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83949.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:33:35,020 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1715, 4.2984, 2.5717, 4.4555, 4.6651, 2.0431, 3.9210, 3.4936], + device='cuda:1'), covar=tensor([0.0719, 0.0739, 0.2704, 0.0768, 0.0546, 0.2842, 0.0984, 0.0924], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0269, 0.0240, 0.0291, 0.0269, 0.0208, 0.0247, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 20:34:18,963 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3975, 3.1814, 3.2644, 3.4272, 3.2751, 3.4444, 3.4209, 3.6227], + device='cuda:1'), covar=tensor([0.0993, 0.0770, 0.0719, 0.0671, 0.0980, 0.0834, 0.0878, 0.0579], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0190, 0.0210, 0.0188, 0.0187, 0.0169, 0.0164, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 20:34:29,670 INFO [train.py:892] (1/4) Epoch 46, batch 500, loss[loss=0.1355, simple_loss=0.2199, pruned_loss=0.02552, over 19674.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2313, pruned_loss=0.03325, over 3628029.80 frames. ], batch size: 64, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:34:51,490 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83985.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:34:58,661 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83988.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:35:23,586 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4262, 5.7030, 5.8982, 5.6299, 5.6258, 5.5748, 5.6072, 5.4070], + device='cuda:1'), covar=tensor([0.1345, 0.1338, 0.0780, 0.1142, 0.0646, 0.0684, 0.1647, 0.1697], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0355, 0.0385, 0.0320, 0.0294, 0.0301, 0.0379, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 20:35:59,587 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84010.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 20:36:35,409 INFO [train.py:892] (1/4) Epoch 46, batch 550, loss[loss=0.1505, simple_loss=0.2343, pruned_loss=0.03332, over 19894.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2315, pruned_loss=0.03326, over 3696724.46 frames. 
], batch size: 87, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:36:57,155 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2831, 3.3688, 2.1789, 3.4615, 3.5467, 1.7762, 3.0013, 2.7972], + device='cuda:1'), covar=tensor([0.0882, 0.0904, 0.2700, 0.0832, 0.0668, 0.2552, 0.1085, 0.0999], + device='cuda:1'), in_proj_covar=tensor([0.0241, 0.0268, 0.0239, 0.0289, 0.0268, 0.0207, 0.0246, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 20:36:58,608 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.358e+02 3.554e+02 4.105e+02 5.072e+02 7.939e+02, threshold=8.210e+02, percent-clipped=2.0 +2023-03-29 20:37:17,226 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 20:37:27,054 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84046.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:38:42,383 INFO [train.py:892] (1/4) Epoch 46, batch 600, loss[loss=0.1439, simple_loss=0.2258, pruned_loss=0.03097, over 19797.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.231, pruned_loss=0.03362, over 3753703.29 frames. ], batch size: 40, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:39:34,313 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84097.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:40:15,554 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:40:40,124 INFO [train.py:892] (1/4) Epoch 46, batch 650, loss[loss=0.1527, simple_loss=0.2376, pruned_loss=0.03393, over 19672.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.2299, pruned_loss=0.0331, over 3797851.35 frames. ], batch size: 73, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:41:04,402 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.462e+02 3.455e+02 4.092e+02 4.989e+02 7.207e+02, threshold=8.185e+02, percent-clipped=0.0 +2023-03-29 20:41:39,402 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-29 20:41:39,473 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.13 vs. limit=5.0 +2023-03-29 20:41:46,557 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-29 20:42:03,376 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84158.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:42:46,668 INFO [train.py:892] (1/4) Epoch 46, batch 700, loss[loss=0.1407, simple_loss=0.2278, pruned_loss=0.02685, over 19764.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2313, pruned_loss=0.0332, over 3830554.73 frames. 
], batch size: 236, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:43:45,081 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84199.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:44:02,924 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2323, 2.5403, 2.3245, 1.7702, 2.2962, 2.4298, 2.3933, 2.5158], + device='cuda:1'), covar=tensor([0.0476, 0.0374, 0.0381, 0.0664, 0.0473, 0.0433, 0.0365, 0.0322], + device='cuda:1'), in_proj_covar=tensor([0.0117, 0.0110, 0.0110, 0.0110, 0.0114, 0.0100, 0.0102, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 20:44:53,965 INFO [train.py:892] (1/4) Epoch 46, batch 750, loss[loss=0.1225, simple_loss=0.2028, pruned_loss=0.02109, over 19818.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2307, pruned_loss=0.03299, over 3858009.98 frames. ], batch size: 103, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:45:18,700 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.966e+02 3.322e+02 4.189e+02 5.032e+02 8.486e+02, threshold=8.378e+02, percent-clipped=1.0 +2023-03-29 20:45:22,225 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84236.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:46:21,595 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84260.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:46:37,038 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8363, 3.8583, 2.2938, 4.1664, 4.3425, 1.9082, 3.6002, 3.2815], + device='cuda:1'), covar=tensor([0.0830, 0.1034, 0.3023, 0.0817, 0.0582, 0.2887, 0.1005, 0.1007], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0268, 0.0240, 0.0289, 0.0269, 0.0208, 0.0247, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 20:46:56,844 INFO [train.py:892] (1/4) Epoch 46, batch 800, loss[loss=0.1438, simple_loss=0.2218, pruned_loss=0.03288, over 19795.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2306, pruned_loss=0.03308, over 3878148.34 frames. 
], batch size: 120, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:47:33,122 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7110, 3.7459, 2.3231, 3.9058, 4.0548, 1.9212, 3.3897, 3.1594], + device='cuda:1'), covar=tensor([0.0838, 0.0976, 0.2798, 0.0967, 0.0712, 0.2769, 0.1137, 0.0966], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0269, 0.0241, 0.0290, 0.0270, 0.0208, 0.0247, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 20:47:48,293 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84297.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:48:06,622 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84305.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 20:48:30,143 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1163, 4.9533, 5.5549, 5.0863, 4.3864, 5.2390, 5.2018, 5.7222], + device='cuda:1'), covar=tensor([0.0785, 0.0363, 0.0312, 0.0327, 0.0776, 0.0460, 0.0453, 0.0280], + device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0233, 0.0235, 0.0246, 0.0215, 0.0260, 0.0248, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 20:48:59,455 INFO [train.py:892] (1/4) Epoch 46, batch 850, loss[loss=0.1275, simple_loss=0.2031, pruned_loss=0.02595, over 19796.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2295, pruned_loss=0.03276, over 3894840.09 frames. ], batch size: 151, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:49:22,797 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.218e+02 3.307e+02 4.048e+02 4.791e+02 1.064e+03, threshold=8.096e+02, percent-clipped=1.0 +2023-03-29 20:49:26,811 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84336.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:49:41,038 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84341.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:49:51,145 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-29 20:50:07,459 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-29 20:51:08,385 INFO [train.py:892] (1/4) Epoch 46, batch 900, loss[loss=0.1503, simple_loss=0.2336, pruned_loss=0.03351, over 19851.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2288, pruned_loss=0.03253, over 3908809.84 frames. ], batch size: 60, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:51:40,588 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6258, 3.5607, 3.8510, 3.0647, 4.0112, 3.1977, 3.5079, 3.8349], + device='cuda:1'), covar=tensor([0.0845, 0.0385, 0.0591, 0.0757, 0.0369, 0.0497, 0.0482, 0.0359], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0096, 0.0093, 0.0116, 0.0088, 0.0092, 0.0088, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 20:51:59,742 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84397.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:52:43,865 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.40 vs. 
limit=5.0 +2023-03-29 20:52:45,728 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84416.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:52:53,463 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9615, 2.5463, 2.9648, 3.1955, 3.6585, 4.0837, 3.9394, 3.8780], + device='cuda:1'), covar=tensor([0.1064, 0.1664, 0.1349, 0.0737, 0.0481, 0.0277, 0.0413, 0.0610], + device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0173, 0.0185, 0.0160, 0.0146, 0.0140, 0.0136, 0.0125], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 20:52:55,868 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7440, 2.7118, 2.9618, 2.5900, 3.0923, 3.0739, 3.5940, 3.8851], + device='cuda:1'), covar=tensor([0.0695, 0.1772, 0.1662, 0.2387, 0.1675, 0.1558, 0.0763, 0.0709], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0250, 0.0278, 0.0266, 0.0313, 0.0269, 0.0244, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 20:53:09,157 INFO [train.py:892] (1/4) Epoch 46, batch 950, loss[loss=0.1298, simple_loss=0.2126, pruned_loss=0.02352, over 19732.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.229, pruned_loss=0.03215, over 3916507.27 frames. ], batch size: 51, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:53:10,221 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.6728, 1.8271, 1.6493, 1.0984, 1.6642, 1.7972, 1.7220, 1.7333], + device='cuda:1'), covar=tensor([0.0461, 0.0338, 0.0401, 0.0597, 0.0443, 0.0351, 0.0338, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0116, 0.0109, 0.0110, 0.0109, 0.0113, 0.0099, 0.0101, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 20:53:32,115 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.409e+02 3.247e+02 3.846e+02 4.740e+02 1.002e+03, threshold=7.692e+02, percent-clipped=2.0 +2023-03-29 20:54:04,284 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.5590, 5.0325, 5.1818, 4.8931, 5.4843, 3.4069, 4.4671, 2.7807], + device='cuda:1'), covar=tensor([0.0125, 0.0200, 0.0127, 0.0178, 0.0123, 0.0916, 0.0778, 0.1384], + device='cuda:1'), in_proj_covar=tensor([0.0110, 0.0155, 0.0119, 0.0141, 0.0125, 0.0140, 0.0146, 0.0133], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 20:54:17,718 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84453.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:54:42,919 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84464.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:55:13,000 INFO [train.py:892] (1/4) Epoch 46, batch 1000, loss[loss=0.1383, simple_loss=0.2188, pruned_loss=0.02896, over 19822.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2291, pruned_loss=0.03243, over 3923701.87 frames. ], batch size: 187, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 20:55:26,554 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. 
limit=2.0 +2023-03-29 20:55:47,948 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2959, 2.9292, 3.3094, 2.9088, 3.5083, 3.4441, 4.1178, 4.5535], + device='cuda:1'), covar=tensor([0.0582, 0.1838, 0.1633, 0.2336, 0.1743, 0.1556, 0.0612, 0.0527], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0250, 0.0277, 0.0266, 0.0312, 0.0269, 0.0244, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 20:55:53,750 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1809, 3.9118, 3.9559, 4.1589, 3.9176, 4.1420, 4.2267, 4.4023], + device='cuda:1'), covar=tensor([0.0658, 0.0445, 0.0515, 0.0435, 0.0729, 0.0577, 0.0435, 0.0341], + device='cuda:1'), in_proj_covar=tensor([0.0165, 0.0191, 0.0211, 0.0190, 0.0188, 0.0170, 0.0165, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 20:57:09,398 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84525.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:57:10,352 INFO [train.py:892] (1/4) Epoch 46, batch 1050, loss[loss=0.1426, simple_loss=0.2342, pruned_loss=0.02548, over 19744.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2288, pruned_loss=0.03225, over 3929075.43 frames. ], batch size: 110, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 20:57:31,619 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.128e+02 3.422e+02 4.030e+02 5.116e+02 8.679e+02, threshold=8.059e+02, percent-clipped=4.0 +2023-03-29 20:58:20,453 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84555.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:59:10,807 INFO [train.py:892] (1/4) Epoch 46, batch 1100, loss[loss=0.1403, simple_loss=0.2157, pruned_loss=0.03243, over 19767.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2307, pruned_loss=0.03297, over 3932706.86 frames. ], batch size: 155, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 20:59:32,059 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1837, 4.7821, 4.7904, 4.5126, 5.1331, 3.2535, 4.0961, 2.7099], + device='cuda:1'), covar=tensor([0.0165, 0.0206, 0.0152, 0.0206, 0.0128, 0.0973, 0.0936, 0.1440], + device='cuda:1'), in_proj_covar=tensor([0.0111, 0.0156, 0.0120, 0.0142, 0.0126, 0.0141, 0.0147, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 20:59:34,409 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84586.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:59:48,840 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84592.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:00:22,870 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84605.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:01:12,900 INFO [train.py:892] (1/4) Epoch 46, batch 1150, loss[loss=0.1339, simple_loss=0.2122, pruned_loss=0.02781, over 19837.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.231, pruned_loss=0.03341, over 3936380.23 frames. 
], batch size: 128, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:01:31,339 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84633.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:01:34,946 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.372e+02 3.414e+02 3.912e+02 4.793e+02 1.001e+03, threshold=7.825e+02, percent-clipped=1.0 +2023-03-29 21:01:52,695 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84641.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:02:22,238 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84653.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:03:17,247 INFO [train.py:892] (1/4) Epoch 46, batch 1200, loss[loss=0.1454, simple_loss=0.2292, pruned_loss=0.03076, over 19837.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2306, pruned_loss=0.03344, over 3940285.53 frames. ], batch size: 239, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:03:49,813 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84689.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:03:56,567 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84692.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:04:01,709 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84694.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:05:19,012 INFO [train.py:892] (1/4) Epoch 46, batch 1250, loss[loss=0.1561, simple_loss=0.2408, pruned_loss=0.03567, over 19756.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2309, pruned_loss=0.03339, over 3940627.81 frames. ], batch size: 256, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:05:19,914 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.3289, 3.2904, 3.1947, 2.9154, 3.2826, 2.6148, 2.5386, 1.6460], + device='cuda:1'), covar=tensor([0.0243, 0.0273, 0.0197, 0.0241, 0.0187, 0.1229, 0.0619, 0.1861], + device='cuda:1'), in_proj_covar=tensor([0.0110, 0.0156, 0.0119, 0.0142, 0.0125, 0.0141, 0.0147, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 21:05:39,515 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 3.466e+02 3.877e+02 4.842e+02 1.121e+03, threshold=7.755e+02, percent-clipped=3.0 +2023-03-29 21:06:28,412 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84753.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:06:39,273 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5323, 2.8207, 2.5792, 2.1113, 2.5855, 2.7900, 2.6996, 2.7935], + device='cuda:1'), covar=tensor([0.0442, 0.0332, 0.0361, 0.0574, 0.0406, 0.0326, 0.0324, 0.0281], + device='cuda:1'), in_proj_covar=tensor([0.0117, 0.0109, 0.0110, 0.0110, 0.0113, 0.0100, 0.0101, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 21:07:12,993 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7895, 4.0733, 4.1684, 4.8873, 3.1023, 3.5023, 3.2136, 3.0111], + device='cuda:1'), covar=tensor([0.0484, 0.1865, 0.0882, 0.0348, 0.2039, 0.1192, 0.1171, 0.1572], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0332, 0.0258, 0.0216, 0.0252, 0.0220, 0.0227, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 21:07:25,101 INFO 
[train.py:892] (1/4) Epoch 46, batch 1300, loss[loss=0.1637, simple_loss=0.2515, pruned_loss=0.03792, over 19608.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.2318, pruned_loss=0.0335, over 3941607.20 frames. ], batch size: 51, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:07:38,270 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.9956, 2.2456, 1.9977, 1.4585, 2.0303, 2.2077, 2.1008, 2.1610], + device='cuda:1'), covar=tensor([0.0454, 0.0357, 0.0410, 0.0637, 0.0468, 0.0368, 0.0380, 0.0369], + device='cuda:1'), in_proj_covar=tensor([0.0117, 0.0109, 0.0110, 0.0110, 0.0114, 0.0100, 0.0101, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-29 21:08:25,745 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84800.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:08:27,635 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84801.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:09:27,291 INFO [train.py:892] (1/4) Epoch 46, batch 1350, loss[loss=0.1437, simple_loss=0.2307, pruned_loss=0.02838, over 19856.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2318, pruned_loss=0.03336, over 3942769.92 frames. ], batch size: 104, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:09:37,812 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1717, 4.0681, 4.4612, 4.0700, 3.8183, 4.3144, 4.1816, 4.5311], + device='cuda:1'), covar=tensor([0.0756, 0.0359, 0.0337, 0.0397, 0.0996, 0.0549, 0.0487, 0.0327], + device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0233, 0.0235, 0.0246, 0.0216, 0.0261, 0.0248, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:09:42,884 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2241, 4.3972, 2.5614, 4.6439, 4.8397, 2.0945, 4.0825, 3.4768], + device='cuda:1'), covar=tensor([0.0714, 0.0709, 0.2750, 0.0603, 0.0426, 0.2792, 0.0895, 0.0938], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0271, 0.0241, 0.0291, 0.0272, 0.0210, 0.0248, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 21:09:50,711 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.135e+02 3.546e+02 4.239e+02 5.224e+02 8.019e+02, threshold=8.477e+02, percent-clipped=4.0 +2023-03-29 21:10:04,872 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-03-29 21:10:45,657 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84855.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:10:48,097 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9550, 2.8345, 3.0937, 2.6596, 3.2030, 3.1277, 3.7744, 4.0755], + device='cuda:1'), covar=tensor([0.0596, 0.1729, 0.1634, 0.2356, 0.1566, 0.1673, 0.0679, 0.0617], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0248, 0.0276, 0.0265, 0.0311, 0.0268, 0.0242, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:11:00,402 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84861.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:11:35,524 INFO [train.py:892] (1/4) Epoch 46, batch 1400, loss[loss=0.1539, simple_loss=0.2414, pruned_loss=0.03323, over 19796.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2312, pruned_loss=0.03335, over 3945648.52 frames. ], batch size: 79, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:11:46,545 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84881.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:11:46,799 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6616, 2.9341, 3.1191, 3.5274, 2.4835, 2.9112, 2.4360, 2.3803], + device='cuda:1'), covar=tensor([0.0657, 0.1725, 0.1162, 0.0589, 0.2046, 0.1100, 0.1368, 0.1687], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0333, 0.0259, 0.0216, 0.0252, 0.0220, 0.0227, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 21:11:55,611 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2285, 2.8260, 3.2071, 3.4214, 3.9033, 4.3990, 4.1916, 4.2727], + device='cuda:1'), covar=tensor([0.0938, 0.1627, 0.1396, 0.0726, 0.0411, 0.0241, 0.0420, 0.0510], + device='cuda:1'), in_proj_covar=tensor([0.0167, 0.0171, 0.0183, 0.0159, 0.0145, 0.0139, 0.0135, 0.0124], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 21:12:08,943 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84892.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:12:20,224 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5714, 4.3314, 4.3422, 4.0755, 4.5529, 3.0480, 3.7759, 2.2119], + device='cuda:1'), covar=tensor([0.0172, 0.0222, 0.0157, 0.0209, 0.0143, 0.1096, 0.0714, 0.1636], + device='cuda:1'), in_proj_covar=tensor([0.0111, 0.0156, 0.0119, 0.0143, 0.0125, 0.0140, 0.0147, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 21:12:38,243 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84903.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:12:43,123 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4215, 2.6275, 4.8224, 4.2371, 4.4808, 4.7132, 4.5721, 4.3526], + device='cuda:1'), covar=tensor([0.0629, 0.1084, 0.0099, 0.0731, 0.0170, 0.0228, 0.0173, 0.0199], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0107, 0.0092, 0.0153, 0.0091, 0.0105, 0.0094, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:13:32,191 INFO [train.py:892] (1/4) Epoch 46, batch 
1450, loss[loss=0.1325, simple_loss=0.2146, pruned_loss=0.02525, over 19479.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2306, pruned_loss=0.03289, over 3946713.55 frames. ], batch size: 43, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:13:55,748 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.367e+02 3.403e+02 4.016e+02 4.651e+02 7.911e+02, threshold=8.032e+02, percent-clipped=0.0 +2023-03-29 21:14:07,995 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84940.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:14:36,234 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84951.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:15:44,105 INFO [train.py:892] (1/4) Epoch 46, batch 1500, loss[loss=0.1901, simple_loss=0.2782, pruned_loss=0.05099, over 19590.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2309, pruned_loss=0.03299, over 3947622.49 frames. ], batch size: 376, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:16:15,239 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84989.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:16:22,591 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84992.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:17:16,767 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85012.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:17:49,528 INFO [train.py:892] (1/4) Epoch 46, batch 1550, loss[loss=0.1361, simple_loss=0.2077, pruned_loss=0.03232, over 19764.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2303, pruned_loss=0.03332, over 3947612.45 frames. ], batch size: 122, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:17:58,180 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2904, 4.1321, 4.1470, 3.9093, 4.3230, 3.0494, 3.6300, 2.0860], + device='cuda:1'), covar=tensor([0.0178, 0.0230, 0.0146, 0.0193, 0.0136, 0.1009, 0.0607, 0.1511], + device='cuda:1'), in_proj_covar=tensor([0.0111, 0.0156, 0.0119, 0.0143, 0.0125, 0.0140, 0.0147, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 21:18:12,455 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.101e+02 3.669e+02 4.198e+02 5.006e+02 8.856e+02, threshold=8.396e+02, percent-clipped=3.0 +2023-03-29 21:18:26,094 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85040.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:19:20,390 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2793, 4.1818, 4.7327, 4.2368, 4.0562, 4.6191, 4.4546, 4.8687], + device='cuda:1'), covar=tensor([0.1097, 0.0474, 0.0489, 0.0479, 0.0970, 0.0609, 0.0535, 0.0428], + device='cuda:1'), in_proj_covar=tensor([0.0291, 0.0233, 0.0236, 0.0247, 0.0215, 0.0261, 0.0247, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:19:59,494 INFO [train.py:892] (1/4) Epoch 46, batch 1600, loss[loss=0.1339, simple_loss=0.2195, pruned_loss=0.02412, over 19764.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.2312, pruned_loss=0.03381, over 3948657.85 frames. 
], batch size: 119, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:21:21,172 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85107.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:21:42,041 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85115.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:22:09,241 INFO [train.py:892] (1/4) Epoch 46, batch 1650, loss[loss=0.1241, simple_loss=0.2101, pruned_loss=0.01905, over 19801.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2309, pruned_loss=0.03357, over 3949087.82 frames. ], batch size: 107, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:22:31,375 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.611e+02 3.617e+02 4.144e+02 5.075e+02 9.786e+02, threshold=8.288e+02, percent-clipped=2.0 +2023-03-29 21:23:27,031 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85156.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:23:56,938 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85168.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 21:24:15,558 INFO [train.py:892] (1/4) Epoch 46, batch 1700, loss[loss=0.1368, simple_loss=0.212, pruned_loss=0.03081, over 19846.00 frames. ], tot_loss[loss=0.1497, simple_loss=0.232, pruned_loss=0.03371, over 3948104.02 frames. ], batch size: 137, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:24:16,638 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85176.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:24:29,218 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85181.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:26:14,800 INFO [train.py:892] (1/4) Epoch 46, batch 1750, loss[loss=0.1319, simple_loss=0.2097, pruned_loss=0.0271, over 19693.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2316, pruned_loss=0.03352, over 3948590.22 frames. ], batch size: 46, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:26:22,227 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85229.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:26:35,386 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.122e+02 3.389e+02 3.888e+02 4.671e+02 1.140e+03, threshold=7.776e+02, percent-clipped=1.0 +2023-03-29 21:28:03,272 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7819, 2.8028, 1.8685, 3.2132, 3.0088, 3.1429, 3.2448, 2.6046], + device='cuda:1'), covar=tensor([0.0746, 0.0814, 0.1553, 0.0784, 0.0652, 0.0597, 0.0632, 0.0949], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0152, 0.0148, 0.0162, 0.0140, 0.0147, 0.0156, 0.0154], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:28:04,556 INFO [train.py:892] (1/4) Epoch 46, batch 1800, loss[loss=0.1474, simple_loss=0.2252, pruned_loss=0.03479, over 19794.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2314, pruned_loss=0.03345, over 3948004.71 frames. 
], batch size: 191, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:28:25,726 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4689, 2.6461, 3.8743, 3.1054, 3.2192, 2.9808, 2.3341, 2.4537], + device='cuda:1'), covar=tensor([0.1331, 0.3245, 0.0631, 0.1231, 0.2064, 0.1865, 0.2879, 0.2812], + device='cuda:1'), in_proj_covar=tensor([0.0361, 0.0407, 0.0358, 0.0301, 0.0383, 0.0405, 0.0393, 0.0370], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:28:30,676 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85289.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:29:00,283 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85307.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:29:37,874 INFO [train.py:892] (1/4) Epoch 46, batch 1850, loss[loss=0.1391, simple_loss=0.2228, pruned_loss=0.02773, over 19833.00 frames. ], tot_loss[loss=0.1498, simple_loss=0.2325, pruned_loss=0.03359, over 3947871.70 frames. ], batch size: 57, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:30:45,950 INFO [train.py:892] (1/4) Epoch 47, batch 0, loss[loss=0.1467, simple_loss=0.2246, pruned_loss=0.03441, over 19724.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2246, pruned_loss=0.03441, over 19724.00 frames. ], batch size: 71, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:30:45,950 INFO [train.py:917] (1/4) Computing validation loss +2023-03-29 21:31:18,357 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.1274, 2.9478, 4.8571, 3.5560, 3.8321, 3.3597, 2.5464, 2.6947], + device='cuda:1'), covar=tensor([0.1025, 0.3624, 0.0402, 0.1080, 0.1957, 0.1767, 0.3166, 0.2962], + device='cuda:1'), in_proj_covar=tensor([0.0361, 0.0408, 0.0358, 0.0301, 0.0383, 0.0406, 0.0394, 0.0371], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:31:22,863 INFO [train.py:926] (1/4) Epoch 47, validation: loss=0.1894, simple_loss=0.2504, pruned_loss=0.06424, over 2883724.00 frames. +2023-03-29 21:31:22,864 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB +2023-03-29 21:31:31,552 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.328e+02 3.387e+02 4.010e+02 4.758e+02 1.602e+03, threshold=8.020e+02, percent-clipped=2.0 +2023-03-29 21:31:38,921 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85337.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:33:29,576 INFO [train.py:892] (1/4) Epoch 47, batch 50, loss[loss=0.1316, simple_loss=0.217, pruned_loss=0.02311, over 19556.00 frames. ], tot_loss[loss=0.1453, simple_loss=0.2258, pruned_loss=0.03243, over 891335.34 frames. ], batch size: 47, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:35:26,211 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85428.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:35:31,476 INFO [train.py:892] (1/4) Epoch 47, batch 100, loss[loss=0.1401, simple_loss=0.2116, pruned_loss=0.0343, over 19768.00 frames. ], tot_loss[loss=0.1443, simple_loss=0.2256, pruned_loss=0.03151, over 1570485.92 frames. 
], batch size: 152, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:35:42,068 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.536e+02 3.357e+02 4.311e+02 4.946e+02 7.786e+02, threshold=8.621e+02, percent-clipped=0.0 +2023-03-29 21:36:33,901 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85456.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:36:49,856 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85463.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 21:37:08,368 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85471.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:37:33,160 INFO [train.py:892] (1/4) Epoch 47, batch 150, loss[loss=0.1625, simple_loss=0.2486, pruned_loss=0.0382, over 19654.00 frames. ], tot_loss[loss=0.1454, simple_loss=0.2269, pruned_loss=0.03191, over 2098718.77 frames. ], batch size: 57, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:37:37,318 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.2178, 4.5613, 2.5489, 4.7036, 4.9015, 2.1801, 3.9291, 3.3343], + device='cuda:1'), covar=tensor([0.0729, 0.0586, 0.2696, 0.0645, 0.0480, 0.2952, 0.1100, 0.1007], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0271, 0.0240, 0.0292, 0.0271, 0.0210, 0.0248, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 21:37:39,829 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85483.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:37:55,825 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85489.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:38:35,524 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:38:40,564 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4997, 3.5600, 2.1936, 3.6531, 3.7869, 1.7992, 3.1862, 2.9239], + device='cuda:1'), covar=tensor([0.0829, 0.0933, 0.2941, 0.0948, 0.0705, 0.2713, 0.1185, 0.1040], + device='cuda:1'), in_proj_covar=tensor([0.0245, 0.0272, 0.0241, 0.0293, 0.0272, 0.0210, 0.0249, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 21:39:40,463 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85530.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:39:42,115 INFO [train.py:892] (1/4) Epoch 47, batch 200, loss[loss=0.1284, simple_loss=0.2056, pruned_loss=0.02566, over 19811.00 frames. ], tot_loss[loss=0.1459, simple_loss=0.2276, pruned_loss=0.03211, over 2509888.54 frames. 
], batch size: 202, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:39:43,237 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7131, 4.6873, 2.8104, 4.9415, 5.1639, 2.1779, 4.4461, 3.7986], + device='cuda:1'), covar=tensor([0.0557, 0.0671, 0.2717, 0.0656, 0.0510, 0.3023, 0.0815, 0.0888], + device='cuda:1'), in_proj_covar=tensor([0.0246, 0.0273, 0.0242, 0.0295, 0.0274, 0.0212, 0.0251, 0.0214], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 21:39:50,775 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.307e+02 3.468e+02 4.248e+02 4.914e+02 6.825e+02, threshold=8.497e+02, percent-clipped=0.0 +2023-03-29 21:40:12,873 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85544.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:40:26,125 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.44 vs. limit=2.0 +2023-03-29 21:40:39,094 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.32 vs. limit=5.0 +2023-03-29 21:41:45,831 INFO [train.py:892] (1/4) Epoch 47, batch 250, loss[loss=0.1501, simple_loss=0.2341, pruned_loss=0.03307, over 19743.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2287, pruned_loss=0.03258, over 2827535.16 frames. ], batch size: 134, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:42:12,305 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85591.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:42:16,843 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7816, 4.0248, 4.3281, 4.8765, 2.9948, 3.6961, 2.9195, 2.9655], + device='cuda:1'), covar=tensor([0.0446, 0.1738, 0.0710, 0.0347, 0.2148, 0.1075, 0.1317, 0.1585], + device='cuda:1'), in_proj_covar=tensor([0.0257, 0.0330, 0.0258, 0.0216, 0.0253, 0.0219, 0.0226, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 21:42:42,703 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.2347, 2.2692, 1.5971, 2.3348, 2.2753, 2.2427, 2.3116, 1.9099], + device='cuda:1'), covar=tensor([0.0781, 0.0889, 0.1320, 0.0802, 0.0817, 0.0758, 0.0722, 0.1159], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0153, 0.0149, 0.0163, 0.0142, 0.0148, 0.0158, 0.0156], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0004], + device='cuda:1') +2023-03-29 21:42:54,669 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85607.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:42:57,088 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8346, 3.8423, 2.3321, 4.0342, 4.2235, 1.9308, 3.4419, 3.1706], + device='cuda:1'), covar=tensor([0.0741, 0.0827, 0.2862, 0.0875, 0.0586, 0.2990, 0.1149, 0.1057], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0271, 0.0240, 0.0292, 0.0272, 0.0210, 0.0248, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 21:43:40,587 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85625.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:43:54,576 INFO [train.py:892] (1/4) Epoch 47, batch 300, loss[loss=0.139, simple_loss=0.223, pruned_loss=0.02745, over 19852.00 frames. 
], tot_loss[loss=0.1463, simple_loss=0.228, pruned_loss=0.03231, over 3077031.02 frames. ], batch size: 85, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:44:06,338 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.290e+02 3.428e+02 3.993e+02 4.702e+02 7.624e+02, threshold=7.986e+02, percent-clipped=0.0 +2023-03-29 21:44:50,531 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85655.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:45:49,326 INFO [train.py:892] (1/4) Epoch 47, batch 350, loss[loss=0.1507, simple_loss=0.2265, pruned_loss=0.03744, over 19805.00 frames. ], tot_loss[loss=0.1454, simple_loss=0.2269, pruned_loss=0.03197, over 3271161.02 frames. ], batch size: 132, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:46:02,159 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85686.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 21:46:28,102 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.5229, 3.1651, 3.4419, 2.9455, 3.6131, 3.6527, 4.3221, 4.7658], + device='cuda:1'), covar=tensor([0.0486, 0.1543, 0.1500, 0.2293, 0.1743, 0.1324, 0.0575, 0.0436], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0250, 0.0278, 0.0266, 0.0313, 0.0269, 0.0243, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:47:07,357 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([1.7825, 1.6550, 1.8074, 1.8301, 1.7526, 1.8246, 1.6685, 1.7966], + device='cuda:1'), covar=tensor([0.0446, 0.0433, 0.0393, 0.0394, 0.0511, 0.0398, 0.0554, 0.0368], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0093, 0.0096, 0.0091, 0.0104, 0.0096, 0.0112, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 21:47:20,704 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-29 21:47:46,175 INFO [train.py:892] (1/4) Epoch 47, batch 400, loss[loss=0.15, simple_loss=0.2383, pruned_loss=0.03089, over 19887.00 frames. ], tot_loss[loss=0.1465, simple_loss=0.2284, pruned_loss=0.03225, over 3419719.09 frames. ], batch size: 62, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:47:51,579 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7038, 3.7932, 2.2678, 3.9207, 4.0616, 1.8312, 3.3254, 3.0826], + device='cuda:1'), covar=tensor([0.0808, 0.0838, 0.2873, 0.0811, 0.0607, 0.2791, 0.1182, 0.0993], + device='cuda:1'), in_proj_covar=tensor([0.0245, 0.0271, 0.0241, 0.0293, 0.0273, 0.0210, 0.0249, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 21:47:56,632 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.491e+02 3.390e+02 3.969e+02 4.785e+02 1.194e+03, threshold=7.938e+02, percent-clipped=3.0 +2023-03-29 21:49:02,467 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85763.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:49:20,569 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85771.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:49:46,489 INFO [train.py:892] (1/4) Epoch 47, batch 450, loss[loss=0.1447, simple_loss=0.234, pruned_loss=0.02773, over 19759.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2292, pruned_loss=0.03257, over 3538147.74 frames. 
], batch size: 233, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:49:53,645 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85784.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:50:56,289 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85811.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:51:16,880 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85819.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:51:43,853 INFO [train.py:892] (1/4) Epoch 47, batch 500, loss[loss=0.1425, simple_loss=0.2266, pruned_loss=0.02923, over 19654.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2292, pruned_loss=0.03268, over 3628173.43 frames. ], batch size: 43, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:51:52,517 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.359e+02 3.439e+02 4.112e+02 5.000e+02 9.207e+02, threshold=8.225e+02, percent-clipped=1.0 +2023-03-29 21:52:01,461 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85839.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:53:17,973 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1054, 5.3766, 5.4399, 5.2751, 5.1153, 5.3898, 4.8542, 4.8700], + device='cuda:1'), covar=tensor([0.0478, 0.0469, 0.0426, 0.0435, 0.0590, 0.0481, 0.0687, 0.0949], + device='cuda:1'), in_proj_covar=tensor([0.0298, 0.0318, 0.0326, 0.0285, 0.0297, 0.0277, 0.0289, 0.0337], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:53:23,911 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85875.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:53:36,870 INFO [train.py:892] (1/4) Epoch 47, batch 550, loss[loss=0.1445, simple_loss=0.2267, pruned_loss=0.03119, over 19775.00 frames. ], tot_loss[loss=0.148, simple_loss=0.2299, pruned_loss=0.03309, over 3699942.85 frames. ], batch size: 241, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:53:50,109 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85886.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:55:33,602 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85930.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 21:55:34,545 INFO [train.py:892] (1/4) Epoch 47, batch 600, loss[loss=0.1603, simple_loss=0.2329, pruned_loss=0.04391, over 19781.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2304, pruned_loss=0.03324, over 3755160.14 frames. 
], batch size: 131, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 21:55:42,768 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.322e+02 3.636e+02 4.237e+02 5.117e+02 1.424e+03, threshold=8.474e+02, percent-clipped=3.0 +2023-03-29 21:55:45,847 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:56:37,112 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85957.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:56:49,172 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.5534, 2.6812, 4.0108, 3.1555, 3.3008, 3.0569, 2.3473, 2.4934], + device='cuda:1'), covar=tensor([0.1238, 0.3230, 0.0585, 0.1236, 0.1970, 0.1779, 0.2820, 0.2840], + device='cuda:1'), in_proj_covar=tensor([0.0360, 0.0408, 0.0358, 0.0301, 0.0382, 0.0406, 0.0393, 0.0370], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:57:01,861 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.1537, 5.4221, 5.6083, 5.3233, 5.3732, 5.2947, 5.3052, 5.1018], + device='cuda:1'), covar=tensor([0.1589, 0.1874, 0.0923, 0.1387, 0.0768, 0.0888, 0.1853, 0.2035], + device='cuda:1'), in_proj_covar=tensor([0.0309, 0.0359, 0.0389, 0.0320, 0.0296, 0.0301, 0.0380, 0.0412], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 21:57:31,425 INFO [train.py:892] (1/4) Epoch 47, batch 650, loss[loss=0.1588, simple_loss=0.2399, pruned_loss=0.03886, over 19761.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2304, pruned_loss=0.03352, over 3798801.65 frames. ], batch size: 182, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 21:57:32,316 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85981.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 21:57:58,236 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85991.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 21:58:06,868 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8266, 3.6820, 4.0998, 3.7285, 3.5191, 3.9560, 3.8303, 4.1285], + device='cuda:1'), covar=tensor([0.0809, 0.0418, 0.0366, 0.0452, 0.1208, 0.0622, 0.0522, 0.0407], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0234, 0.0237, 0.0249, 0.0217, 0.0264, 0.0249, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 21:58:30,308 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86002.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:58:47,822 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. 
limit=2.0 +2023-03-29 21:59:05,200 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86018.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:59:14,495 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.9967, 3.2378, 3.2325, 3.2919, 3.0829, 3.2646, 3.0590, 3.1890], + device='cuda:1'), covar=tensor([0.0378, 0.0364, 0.0344, 0.0292, 0.0424, 0.0297, 0.0371, 0.0407], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0093, 0.0096, 0.0091, 0.0103, 0.0096, 0.0112, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 21:59:35,657 INFO [train.py:892] (1/4) Epoch 47, batch 700, loss[loss=0.1485, simple_loss=0.244, pruned_loss=0.02647, over 19602.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2308, pruned_loss=0.03339, over 3832587.57 frames. ], batch size: 50, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 21:59:44,313 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.350e+02 3.531e+02 4.077e+02 5.107e+02 6.974e+02, threshold=8.153e+02, percent-clipped=0.0 +2023-03-29 22:00:24,760 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86052.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:00:51,391 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86063.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:01:00,306 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.6870, 3.5345, 3.9345, 3.0740, 4.1131, 3.3286, 3.6003, 4.0379], + device='cuda:1'), covar=tensor([0.0885, 0.0499, 0.0626, 0.0807, 0.0400, 0.0499, 0.0541, 0.0282], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0097, 0.0093, 0.0117, 0.0089, 0.0092, 0.0089, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 22:01:07,085 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86069.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:01:36,225 INFO [train.py:892] (1/4) Epoch 47, batch 750, loss[loss=0.1538, simple_loss=0.2356, pruned_loss=0.03599, over 19742.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2314, pruned_loss=0.03338, over 3856911.07 frames. 
], batch size: 209, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 22:01:43,149 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86084.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:01:49,029 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7216, 4.4387, 4.4667, 4.2092, 4.7399, 3.1807, 3.8810, 2.3121], + device='cuda:1'), covar=tensor([0.0208, 0.0238, 0.0170, 0.0228, 0.0150, 0.1017, 0.0821, 0.1572], + device='cuda:1'), in_proj_covar=tensor([0.0111, 0.0157, 0.0120, 0.0143, 0.0126, 0.0141, 0.0148, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 22:02:29,444 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.0981, 3.0298, 4.7192, 3.4544, 3.6795, 3.5170, 2.6402, 2.7926], + device='cuda:1'), covar=tensor([0.1023, 0.3438, 0.0471, 0.1207, 0.2032, 0.1622, 0.2654, 0.2745], + device='cuda:1'), in_proj_covar=tensor([0.0363, 0.0411, 0.0361, 0.0303, 0.0385, 0.0409, 0.0396, 0.0372], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 22:02:47,816 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86113.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:03:24,201 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86130.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:03:25,081 INFO [train.py:892] (1/4) Epoch 47, batch 800, loss[loss=0.1348, simple_loss=0.2186, pruned_loss=0.02546, over 19822.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.2319, pruned_loss=0.03346, over 3875744.00 frames. ], batch size: 187, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 22:03:27,850 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86132.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:03:35,178 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.984e+02 3.596e+02 4.313e+02 5.169e+02 1.005e+03, threshold=8.627e+02, percent-clipped=4.0 +2023-03-29 22:03:46,698 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86139.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:05:09,930 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.3772, 2.2562, 2.3758, 2.3619, 2.4450, 2.4493, 2.3684, 2.4290], + device='cuda:1'), covar=tensor([0.0438, 0.0442, 0.0424, 0.0431, 0.0500, 0.0402, 0.0527, 0.0453], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0094, 0.0097, 0.0091, 0.0104, 0.0097, 0.0112, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 22:05:24,301 INFO [train.py:892] (1/4) Epoch 47, batch 850, loss[loss=0.1294, simple_loss=0.2128, pruned_loss=0.02306, over 19760.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2316, pruned_loss=0.03294, over 3889162.79 frames. ], batch size: 102, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:05:37,593 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86186.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:05:41,000 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86187.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:07:21,651 INFO [train.py:892] (1/4) Epoch 47, batch 900, loss[loss=0.1393, simple_loss=0.2279, pruned_loss=0.02539, over 19838.00 frames. 
], tot_loss[loss=0.1475, simple_loss=0.2299, pruned_loss=0.03256, over 3902326.23 frames. ], batch size: 59, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:07:22,527 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86231.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:07:29,077 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86234.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:07:30,581 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.206e+02 3.419e+02 4.069e+02 4.713e+02 1.195e+03, threshold=8.139e+02, percent-clipped=3.0 +2023-03-29 22:07:56,346 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86246.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:09:15,427 INFO [train.py:892] (1/4) Epoch 47, batch 950, loss[loss=0.1355, simple_loss=0.2141, pruned_loss=0.02842, over 19817.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2294, pruned_loss=0.03227, over 3913513.94 frames. ], batch size: 202, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:09:16,293 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86281.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:09:16,564 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8919, 3.3755, 3.6536, 3.1419, 3.9470, 4.0053, 4.6031, 5.1479], + device='cuda:1'), covar=tensor([0.0399, 0.1608, 0.1453, 0.2231, 0.1608, 0.1400, 0.0616, 0.0443], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0249, 0.0278, 0.0265, 0.0311, 0.0268, 0.0243, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 22:09:24,363 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.8176, 4.6083, 4.6289, 4.8484, 4.6604, 5.0309, 4.9056, 5.1426], + device='cuda:1'), covar=tensor([0.0781, 0.0459, 0.0431, 0.0455, 0.0705, 0.0484, 0.0486, 0.0357], + device='cuda:1'), in_proj_covar=tensor([0.0166, 0.0192, 0.0213, 0.0192, 0.0189, 0.0172, 0.0166, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-29 22:09:27,145 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. 
limit=2.0 +2023-03-29 22:09:28,518 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86286.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 22:10:16,865 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86307.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:10:18,965 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4575, 3.5902, 2.3310, 4.1196, 3.8005, 4.1305, 4.1498, 3.3466], + device='cuda:1'), covar=tensor([0.0616, 0.0588, 0.1397, 0.0635, 0.0589, 0.0384, 0.0567, 0.0774], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0152, 0.0148, 0.0162, 0.0141, 0.0148, 0.0157, 0.0155], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-29 22:10:29,344 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86313.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:10:44,074 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4435, 2.6488, 3.9860, 3.0813, 3.1826, 3.0296, 2.3131, 2.4661], + device='cuda:1'), covar=tensor([0.1370, 0.3509, 0.0601, 0.1262, 0.2100, 0.1805, 0.2867, 0.2951], + device='cuda:1'), in_proj_covar=tensor([0.0362, 0.0411, 0.0360, 0.0302, 0.0384, 0.0408, 0.0396, 0.0372], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 22:11:06,031 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-29 22:11:07,298 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86329.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:11:10,615 INFO [train.py:892] (1/4) Epoch 47, batch 1000, loss[loss=0.1423, simple_loss=0.2291, pruned_loss=0.02777, over 19807.00 frames. ], tot_loss[loss=0.1462, simple_loss=0.2286, pruned_loss=0.0319, over 3920828.55 frames. ], batch size: 195, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:11:20,546 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.209e+02 3.279e+02 4.003e+02 4.723e+02 7.348e+02, threshold=8.007e+02, percent-clipped=0.0 +2023-03-29 22:12:16,487 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86358.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:13:07,770 INFO [train.py:892] (1/4) Epoch 47, batch 1050, loss[loss=0.1618, simple_loss=0.2519, pruned_loss=0.03581, over 19743.00 frames. ], tot_loss[loss=0.1466, simple_loss=0.229, pruned_loss=0.03208, over 3928398.86 frames. 
], batch size: 134, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:14:11,571 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86408.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:14:51,456 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86425.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:14:58,760 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7410, 3.2638, 3.6583, 3.0888, 3.8667, 3.8168, 4.4855, 4.9766], + device='cuda:1'), covar=tensor([0.0455, 0.1527, 0.1427, 0.2215, 0.1427, 0.1322, 0.0646, 0.0427], + device='cuda:1'), in_proj_covar=tensor([0.0267, 0.0250, 0.0280, 0.0267, 0.0314, 0.0269, 0.0244, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 22:15:05,567 INFO [train.py:892] (1/4) Epoch 47, batch 1100, loss[loss=0.1257, simple_loss=0.1971, pruned_loss=0.02713, over 19843.00 frames. ], tot_loss[loss=0.146, simple_loss=0.2283, pruned_loss=0.03187, over 3934239.56 frames. ], batch size: 124, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:15:16,012 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.572e+02 3.379e+02 4.084e+02 4.897e+02 1.462e+03, threshold=8.168e+02, percent-clipped=5.0 +2023-03-29 22:15:27,629 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86440.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:15:40,224 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86445.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:17:00,644 INFO [train.py:892] (1/4) Epoch 47, batch 1150, loss[loss=0.1505, simple_loss=0.2362, pruned_loss=0.03241, over 19653.00 frames. ], tot_loss[loss=0.1465, simple_loss=0.229, pruned_loss=0.03204, over 3936159.49 frames. ], batch size: 66, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:17:19,449 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.9876, 2.9775, 4.6601, 3.4578, 3.6523, 3.4075, 2.4909, 2.7280], + device='cuda:1'), covar=tensor([0.1069, 0.3317, 0.0428, 0.1139, 0.1940, 0.1677, 0.2919, 0.2546], + device='cuda:1'), in_proj_covar=tensor([0.0360, 0.0408, 0.0357, 0.0300, 0.0382, 0.0405, 0.0393, 0.0370], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 22:17:46,903 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86501.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:17:59,769 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86506.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 22:18:58,210 INFO [train.py:892] (1/4) Epoch 47, batch 1200, loss[loss=0.1384, simple_loss=0.2223, pruned_loss=0.02727, over 19890.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.2299, pruned_loss=0.03265, over 3938310.57 frames. ], batch size: 47, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:18:59,304 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86531.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:19:07,250 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.118e+02 3.301e+02 3.835e+02 4.475e+02 8.063e+02, threshold=7.670e+02, percent-clipped=0.0 +2023-03-29 22:19:40,097 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-03-29 22:20:44,572 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86579.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:20:47,311 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86580.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:20:49,866 INFO [train.py:892] (1/4) Epoch 47, batch 1250, loss[loss=0.1591, simple_loss=0.2359, pruned_loss=0.04108, over 19780.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.229, pruned_loss=0.03225, over 3941076.25 frames. ], batch size: 247, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:21:04,072 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86586.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 22:21:41,883 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86602.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:22:06,395 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86613.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:22:49,115 INFO [train.py:892] (1/4) Epoch 47, batch 1300, loss[loss=0.1479, simple_loss=0.227, pruned_loss=0.0344, over 19785.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2299, pruned_loss=0.03235, over 3941383.83 frames. ], batch size: 213, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:22:57,922 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86634.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 22:22:59,162 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.242e+02 3.251e+02 3.887e+02 4.565e+02 9.819e+02, threshold=7.775e+02, percent-clipped=3.0 +2023-03-29 22:23:12,330 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86641.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:23:21,584 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.4430, 2.7246, 2.4758, 2.0165, 2.5246, 2.7238, 2.6949, 2.6965], + device='cuda:1'), covar=tensor([0.0467, 0.0380, 0.0396, 0.0647, 0.0412, 0.0345, 0.0343, 0.0335], + device='cuda:1'), in_proj_covar=tensor([0.0119, 0.0112, 0.0112, 0.0112, 0.0116, 0.0102, 0.0104, 0.0102], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-29 22:23:53,677 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86658.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:24:00,031 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86661.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:24:43,570 INFO [train.py:892] (1/4) Epoch 47, batch 1350, loss[loss=0.1286, simple_loss=0.2108, pruned_loss=0.02324, over 19882.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2293, pruned_loss=0.03204, over 3944064.36 frames. ], batch size: 88, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:25:44,664 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86706.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:25:48,890 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86708.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:26:27,122 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86725.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:26:40,903 INFO [train.py:892] (1/4) Epoch 47, batch 1400, loss[loss=0.1314, simple_loss=0.2113, pruned_loss=0.02576, over 19863.00 frames. 
], tot_loss[loss=0.1458, simple_loss=0.2282, pruned_loss=0.03176, over 3945785.11 frames. ], batch size: 106, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:26:49,073 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.330e+02 3.187e+02 3.920e+02 5.050e+02 7.981e+02, threshold=7.841e+02, percent-clipped=2.0 +2023-03-29 22:27:34,539 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86756.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:28:14,250 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86773.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:28:29,191 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-29 22:28:33,034 INFO [train.py:892] (1/4) Epoch 47, batch 1450, loss[loss=0.1301, simple_loss=0.21, pruned_loss=0.02511, over 19809.00 frames. ], tot_loss[loss=0.1466, simple_loss=0.2293, pruned_loss=0.03201, over 3945013.08 frames. ], batch size: 114, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:29:06,599 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86796.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:29:20,762 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86801.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 22:29:49,591 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86814.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:30:28,323 INFO [train.py:892] (1/4) Epoch 47, batch 1500, loss[loss=0.1586, simple_loss=0.2423, pruned_loss=0.0375, over 19776.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.23, pruned_loss=0.0326, over 3944947.16 frames. ], batch size: 182, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:30:37,006 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.591e+02 3.342e+02 4.163e+02 4.864e+02 6.569e+02, threshold=8.326e+02, percent-clipped=0.0 +2023-03-29 22:31:04,648 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.3394, 4.1983, 4.6011, 4.2203, 3.9184, 4.3952, 4.3085, 4.6521], + device='cuda:1'), covar=tensor([0.0767, 0.0382, 0.0353, 0.0403, 0.0964, 0.0631, 0.0486, 0.0388], + device='cuda:1'), in_proj_covar=tensor([0.0292, 0.0234, 0.0236, 0.0246, 0.0215, 0.0263, 0.0248, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-29 22:31:22,759 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.4760, 5.8413, 6.0364, 5.7443, 5.6989, 5.7172, 5.7582, 5.4928], + device='cuda:1'), covar=tensor([0.1517, 0.1309, 0.0774, 0.1182, 0.0644, 0.0618, 0.1618, 0.1955], + device='cuda:1'), in_proj_covar=tensor([0.0310, 0.0360, 0.0390, 0.0322, 0.0297, 0.0304, 0.0382, 0.0412], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-29 22:32:11,769 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86875.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:32:18,252 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86878.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:32:23,373 INFO [train.py:892] (1/4) Epoch 47, batch 1550, loss[loss=0.1617, simple_loss=0.2371, pruned_loss=0.04315, over 19813.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2298, pruned_loss=0.03249, over 3946427.50 frames. 
], batch size: 231, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:33:10,874 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86902.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:33:57,153 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-03-29 22:34:20,004 INFO [train.py:892] (1/4) Epoch 47, batch 1600, loss[loss=0.1304, simple_loss=0.2048, pruned_loss=0.02797, over 19815.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2288, pruned_loss=0.0323, over 3947553.70 frames. ], batch size: 148, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:34:27,991 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.263e+02 3.386e+02 4.039e+02 4.750e+02 1.112e+03, threshold=8.078e+02, percent-clipped=1.0 +2023-03-29 22:34:30,939 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:34:37,154 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86939.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:35:03,423 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86950.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:36:13,806 INFO [train.py:892] (1/4) Epoch 47, batch 1650, loss[loss=0.1406, simple_loss=0.2183, pruned_loss=0.03143, over 19647.00 frames. ], tot_loss[loss=0.1457, simple_loss=0.2278, pruned_loss=0.03182, over 3947534.27 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:37:07,394 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87002.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 22:38:05,809 INFO [train.py:892] (1/4) Epoch 47, batch 1700, loss[loss=0.1617, simple_loss=0.2442, pruned_loss=0.03955, over 19665.00 frames. ], tot_loss[loss=0.1456, simple_loss=0.228, pruned_loss=0.03165, over 3948221.35 frames. ], batch size: 55, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:38:14,358 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.986e+02 3.322e+02 4.069e+02 4.865e+02 7.645e+02, threshold=8.138e+02, percent-clipped=0.0 +2023-03-29 22:39:20,672 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87063.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 22:39:56,682 INFO [train.py:892] (1/4) Epoch 47, batch 1750, loss[loss=0.1396, simple_loss=0.2238, pruned_loss=0.02772, over 19620.00 frames. ], tot_loss[loss=0.1454, simple_loss=0.2272, pruned_loss=0.03177, over 3948835.73 frames. ], batch size: 52, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:40:26,965 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87096.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:40:36,178 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87101.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 22:41:33,778 INFO [train.py:892] (1/4) Epoch 47, batch 1800, loss[loss=0.1702, simple_loss=0.2572, pruned_loss=0.04155, over 19576.00 frames. ], tot_loss[loss=0.1463, simple_loss=0.2283, pruned_loss=0.03216, over 3949914.07 frames. 
+2023-03-29 22:41:41,045 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.449e+02 3.587e+02 4.161e+02 4.922e+02 1.323e+03, threshold=8.323e+02, percent-clipped=3.0
+2023-03-29 22:41:59,480 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87144.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:42:08,356 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87149.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:42:12,225 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7442, 3.4590, 3.7891, 2.9495, 3.9501, 3.2802, 3.5243, 3.8941],
+       device='cuda:1'), covar=tensor([0.0590, 0.0483, 0.0579, 0.0814, 0.0376, 0.0524, 0.0541, 0.0420],
+       device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0097, 0.0093, 0.0117, 0.0089, 0.0092, 0.0088, 0.0084],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-29 22:42:36,040 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([4.7217, 3.1802, 3.5831, 3.1112, 3.9173, 3.9005, 4.5257, 5.0404],
+       device='cuda:1'), covar=tensor([0.0469, 0.1691, 0.1445, 0.2261, 0.1563, 0.1306, 0.0545, 0.0445],
+       device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0251, 0.0281, 0.0267, 0.0315, 0.0271, 0.0244, 0.0278],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-29 22:42:47,095 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87170.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:43:06,741 INFO [train.py:892] (1/4) Epoch 47, batch 1850, loss[loss=0.146, simple_loss=0.2278, pruned_loss=0.03208, over 19814.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2301, pruned_loss=0.03216, over 3949001.63 frames. ], batch size: 57, lr: 3.30e-03, grad_scale: 16.0
+2023-03-29 22:44:09,130 INFO [train.py:892] (1/4) Epoch 48, batch 0, loss[loss=0.166, simple_loss=0.2546, pruned_loss=0.03869, over 19745.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2546, pruned_loss=0.03869, over 19745.00 frames. ], batch size: 259, lr: 3.26e-03, grad_scale: 16.0
+2023-03-29 22:44:09,130 INFO [train.py:917] (1/4) Computing validation loss
+2023-03-29 22:44:44,075 INFO [train.py:926] (1/4) Epoch 48, validation: loss=0.1901, simple_loss=0.2508, pruned_loss=0.06469, over 2883724.00 frames.
+2023-03-29 22:44:44,077 INFO [train.py:927] (1/4) Maximum memory allocated so far is 22859MB
+2023-03-29 22:46:30,436 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87230.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:46:40,123 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87234.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:46:41,368 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.137e+02 3.310e+02 3.743e+02 4.381e+02 8.999e+02, threshold=7.487e+02, percent-clipped=2.0
+2023-03-29 22:46:45,732 INFO [train.py:892] (1/4) Epoch 48, batch 50, loss[loss=0.1527, simple_loss=0.241, pruned_loss=0.03223, over 19670.00 frames. ], tot_loss[loss=0.1401, simple_loss=0.2222, pruned_loss=0.02898, over 891516.75 frames. ], batch size: 73, lr: 3.26e-03, grad_scale: 16.0
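The "Maximum memory allocated" line printed by train.py:927 after each validation pass matches what torch.cuda's allocator statistics provide; a minimal sketch of that kind of report (the exact call site and formatting in train.py are assumptions):

import torch

def log_peak_memory(device="cuda:1"):
    # Peak bytes allocated on this device since startup (or since the last
    # torch.cuda.reset_peak_memory_stats call), converted to MB as in the log.
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"Maximum memory allocated so far is {peak_mb}MB")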
+2023-03-29 22:46:46,512 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87236.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:48:04,637 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.1819, 1.6012, 1.7877, 2.3922, 2.5697, 2.7385, 2.5757, 2.6571],
+       device='cuda:1'), covar=tensor([0.1191, 0.2102, 0.1847, 0.0911, 0.0671, 0.0500, 0.0579, 0.0552],
+       device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0173, 0.0184, 0.0161, 0.0147, 0.0142, 0.0137, 0.0126],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+       device='cuda:1')
+2023-03-29 22:48:04,705 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2862, 2.4651, 2.6928, 3.1058, 2.1033, 2.7942, 2.1513, 2.0980],
+       device='cuda:1'), covar=tensor([0.0658, 0.1286, 0.1236, 0.0679, 0.2246, 0.0945, 0.1444, 0.1652],
+       device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0331, 0.0258, 0.0217, 0.0253, 0.0218, 0.0227, 0.0223],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003],
+       device='cuda:1')
+2023-03-29 22:48:38,626 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87284.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:48:42,216 INFO [train.py:892] (1/4) Epoch 48, batch 100, loss[loss=0.1437, simple_loss=0.2228, pruned_loss=0.03226, over 19706.00 frames. ], tot_loss[loss=0.143, simple_loss=0.2257, pruned_loss=0.03021, over 1569470.33 frames. ], batch size: 48, lr: 3.26e-03, grad_scale: 16.0
+2023-03-29 22:48:53,635 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87291.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:49:13,920 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.55 vs. limit=5.0
+2023-03-29 22:50:36,648 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.455e+02 3.250e+02 3.994e+02 4.855e+02 8.208e+02, threshold=7.988e+02, percent-clipped=2.0
+2023-03-29 22:50:39,106 INFO [train.py:892] (1/4) Epoch 48, batch 150, loss[loss=0.1387, simple_loss=0.2167, pruned_loss=0.03036, over 19828.00 frames. ], tot_loss[loss=0.1423, simple_loss=0.2243, pruned_loss=0.03017, over 2097381.11 frames. ], batch size: 177, lr: 3.26e-03, grad_scale: 16.0
+2023-03-29 22:51:32,765 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87358.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 22:51:41,630 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87362.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:52:28,771 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.7805, 2.8537, 1.6966, 3.2139, 2.8978, 3.1775, 3.2040, 2.6410],
+       device='cuda:1'), covar=tensor([0.0811, 0.0819, 0.1981, 0.0770, 0.0899, 0.0635, 0.0769, 0.0952],
+       device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0153, 0.0149, 0.0163, 0.0143, 0.0149, 0.0159, 0.0155],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003],
+       device='cuda:1')
+2023-03-29 22:52:33,964 INFO [train.py:892] (1/4) Epoch 48, batch 200, loss[loss=0.1191, simple_loss=0.1933, pruned_loss=0.0225, over 19870.00 frames. ], tot_loss[loss=0.1427, simple_loss=0.2246, pruned_loss=0.03041, over 2508898.18 frames. ], batch size: 136, lr: 3.26e-03, grad_scale: 16.0
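The [zipformer.py:1454] dumps report attn_weights_entropy per attention head, alongside covariance diagnostics for the projections. A sketch of the standard entropy computation such a diagnostic implies (not the actual zipformer code; the tensor shape is an assumption):

import torch

def attention_entropy(attn_weights, eps=1.0e-20):
    # attn_weights: (num_heads, num_queries, num_keys), post-softmax, so each
    # row along the last dim is a probability distribution.  Returns entropy
    # per head, averaged over query positions; values near log(num_keys)
    # mean nearly uniform attention, values near 0 mean sharply peaked attention.
    ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return ent.mean(dim=-1)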
+2023-03-29 22:53:59,070 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.7586, 3.5227, 3.7936, 3.1094, 3.9753, 3.3794, 3.6181, 3.9176],
+       device='cuda:1'), covar=tensor([0.0823, 0.0463, 0.0730, 0.0751, 0.0402, 0.0408, 0.0481, 0.0378],
+       device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0096, 0.0093, 0.0116, 0.0089, 0.0092, 0.0088, 0.0084],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-29 22:54:00,842 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87423.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:54:24,399 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.416e+02 4.017e+02 4.841e+02 7.070e+02, threshold=8.034e+02, percent-clipped=0.0
+2023-03-29 22:54:26,337 INFO [train.py:892] (1/4) Epoch 48, batch 250, loss[loss=0.1275, simple_loss=0.2006, pruned_loss=0.02719, over 19766.00 frames. ], tot_loss[loss=0.1431, simple_loss=0.2249, pruned_loss=0.03064, over 2828823.55 frames. ], batch size: 125, lr: 3.26e-03, grad_scale: 16.0
+2023-03-29 22:55:23,157 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87461.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:55:44,951 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87470.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:56:19,968 INFO [train.py:892] (1/4) Epoch 48, batch 300, loss[loss=0.1395, simple_loss=0.219, pruned_loss=0.03006, over 19799.00 frames. ], tot_loss[loss=0.1443, simple_loss=0.2264, pruned_loss=0.03116, over 3078212.55 frames. ], batch size: 148, lr: 3.25e-03, grad_scale: 16.0
+2023-03-29 22:56:49,983 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.51 vs. limit=5.0
+2023-03-29 22:57:07,889 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87505.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:57:37,237 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87518.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:57:45,572 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87522.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:58:14,385 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87534.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 22:58:15,390 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.562e+02 3.502e+02 4.019e+02 4.844e+02 7.845e+02, threshold=8.037e+02, percent-clipped=0.0
+2023-03-29 22:58:16,294 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.8466, 3.7740, 4.1464, 3.7979, 3.6064, 4.0433, 3.8867, 4.2054],
+       device='cuda:1'), covar=tensor([0.0867, 0.0397, 0.0414, 0.0448, 0.1094, 0.0595, 0.0494, 0.0385],
+       device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0235, 0.0237, 0.0248, 0.0217, 0.0265, 0.0250, 0.0236],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-29 22:58:18,644 INFO [train.py:892] (1/4) Epoch 48, batch 350, loss[loss=0.1413, simple_loss=0.2324, pruned_loss=0.02507, over 19780.00 frames. ], tot_loss[loss=0.1456, simple_loss=0.2278, pruned_loss=0.0317, over 3268559.27 frames. ], batch size: 215, lr: 3.25e-03, grad_scale: 16.0
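The [scaling.py:679] "Whitening" lines compare a per-group metric against a fixed limit (2.0 or 5.0 here). One plausible definition consistent with these magnitudes, offered as a guess at the idea rather than icefall's exact formula: for the channel covariance C of a group, take num_channels * trace(C @ C) / trace(C)^2, which equals 1.0 when C is isotropic (perfectly "white") and grows as variance concentrates in fewer directions:

import torch

def whitening_metric(x, num_groups):
    # x: (num_frames, num_channels); channels are split into num_groups groups
    # and the metric is averaged over groups.
    num_frames, num_channels = x.shape
    assert num_channels % num_groups == 0
    xg = x.reshape(num_frames, num_groups, -1).transpose(0, 1)  # (groups, frames, chans)
    cov = xg.transpose(1, 2) @ xg / num_frames                  # per-group covariance
    trace_c2 = (cov * cov).sum(dim=(1, 2))       # trace(C @ C) since C is symmetric
    trace_c = torch.diagonal(cov, dim1=1, dim2=2).sum(dim=1)
    metric = (num_channels // num_groups) * trace_c2 / trace_c.pow(2)
    return metric.mean()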
+2023-03-29 22:59:29,752 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87566.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 23:00:10,262 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87582.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:00:14,754 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87584.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:00:17,793 INFO [train.py:892] (1/4) Epoch 48, batch 400, loss[loss=0.1448, simple_loss=0.2256, pruned_loss=0.03194, over 19888.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2288, pruned_loss=0.03228, over 3419056.37 frames. ], batch size: 176, lr: 3.25e-03, grad_scale: 16.0
+2023-03-29 23:00:18,737 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87586.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:00:25,676 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87589.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:01:03,106 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87605.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:01:34,836 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.74 vs. limit=5.0
+2023-03-29 23:02:09,690 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.5884, 2.8927, 2.5756, 2.1342, 2.6159, 2.7756, 2.8227, 2.8355],
+       device='cuda:1'), covar=tensor([0.0437, 0.0373, 0.0410, 0.0616, 0.0424, 0.0419, 0.0341, 0.0331],
+       device='cuda:1'), in_proj_covar=tensor([0.0118, 0.0111, 0.0110, 0.0110, 0.0114, 0.0101, 0.0103, 0.0101],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+       device='cuda:1')
+2023-03-29 23:02:10,881 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.523e+02 3.389e+02 3.932e+02 4.658e+02 8.787e+02, threshold=7.864e+02, percent-clipped=1.0
+2023-03-29 23:02:13,003 INFO [train.py:892] (1/4) Epoch 48, batch 450, loss[loss=0.1487, simple_loss=0.2291, pruned_loss=0.03416, over 19780.00 frames. ], tot_loss[loss=0.1463, simple_loss=0.2285, pruned_loss=0.03202, over 3536600.91 frames. ], batch size: 215, lr: 3.25e-03, grad_scale: 16.0
+2023-03-29 23:02:36,807 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87645.0, num_to_drop=1, layers_to_drop={2}
+2023-03-29 23:02:48,787 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87650.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:03:07,035 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87658.0, num_to_drop=1, layers_to_drop={2}
+2023-03-29 23:03:24,674 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87666.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 23:03:29,202 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-03-29 23:04:11,528 INFO [train.py:892] (1/4) Epoch 48, batch 500, loss[loss=0.1947, simple_loss=0.2817, pruned_loss=0.0539, over 19613.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2289, pruned_loss=0.03242, over 3628228.63 frames. ], batch size: 387, lr: 3.25e-03, grad_scale: 16.0
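The [zipformer.py:625] messages track stochastic layer dropping: each encoder stack logs its warmup window and which of its layers, if any, are dropped this batch. Note that at batch_count around 87600, far past warmup_end=4000.0, single layers are still occasionally dropped, so the drop probability evidently keeps a small floor after warmup. A hypothetical schedule with that shape (the constants and annealing rule are made up, not icefall's actual logic):

import random

def pick_layers_to_drop(num_layers, batch_count, warmup_begin, warmup_end,
                        initial_p=0.5, final_p=0.05, rng=random):
    # Linearly anneal the per-layer drop probability over the warmup window,
    # then hold it at a small floor so rare drops persist late in training.
    if batch_count < warmup_begin:
        p = initial_p
    elif batch_count < warmup_end:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        p = initial_p + frac * (final_p - initial_p)
    else:
        p = final_p
    return {i for i in range(num_layers) if rng.random() < p}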
+2023-03-29 23:04:12,492 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.2317, 2.6116, 4.4922, 3.8730, 4.2473, 4.4322, 4.1611, 4.0774],
+       device='cuda:1'), covar=tensor([0.0594, 0.1043, 0.0105, 0.0638, 0.0162, 0.0214, 0.0195, 0.0183],
+       device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0154, 0.0092, 0.0105, 0.0094, 0.0091],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-29 23:04:58,641 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87706.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 23:05:28,038 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87718.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:05:40,539 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0240, 3.3397, 2.9181, 2.4820, 2.9317, 3.2635, 3.2104, 3.2466],
+       device='cuda:1'), covar=tensor([0.0367, 0.0264, 0.0340, 0.0541, 0.0391, 0.0288, 0.0282, 0.0259],
+       device='cuda:1'), in_proj_covar=tensor([0.0118, 0.0111, 0.0111, 0.0111, 0.0114, 0.0101, 0.0103, 0.0101],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+       device='cuda:1')
+2023-03-29 23:06:06,503 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.147e+02 3.363e+02 3.924e+02 4.897e+02 8.586e+02, threshold=7.848e+02, percent-clipped=2.0
+2023-03-29 23:06:09,079 INFO [train.py:892] (1/4) Epoch 48, batch 550, loss[loss=0.1063, simple_loss=0.1867, pruned_loss=0.01297, over 19905.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2291, pruned_loss=0.03268, over 3700335.80 frames. ], batch size: 116, lr: 3.25e-03, grad_scale: 16.0
+2023-03-29 23:06:18,669 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.0231, 2.6660, 4.3134, 3.8011, 4.1213, 4.2625, 3.9972, 3.9414],
+       device='cuda:1'), covar=tensor([0.0678, 0.1020, 0.0116, 0.0586, 0.0164, 0.0244, 0.0202, 0.0201],
+       device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0154, 0.0092, 0.0106, 0.0094, 0.0092],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-29 23:08:07,526 INFO [train.py:892] (1/4) Epoch 48, batch 600, loss[loss=0.1282, simple_loss=0.2077, pruned_loss=0.02435, over 19769.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2292, pruned_loss=0.03274, over 3755429.27 frames. ], batch size: 119, lr: 3.25e-03, grad_scale: 16.0
+2023-03-29 23:09:12,920 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-03-29 23:09:18,936 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87817.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:09:59,422 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.332e+02 3.329e+02 4.109e+02 5.019e+02 8.243e+02, threshold=8.218e+02, percent-clipped=1.0
+2023-03-29 23:10:01,417 INFO [train.py:892] (1/4) Epoch 48, batch 650, loss[loss=0.1397, simple_loss=0.2139, pruned_loss=0.03282, over 19839.00 frames. ], tot_loss[loss=0.1471, simple_loss=0.2293, pruned_loss=0.03244, over 3798015.74 frames. ], batch size: 144, lr: 3.25e-03, grad_scale: 16.0
+2023-03-29 23:10:17,850 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.58 vs. limit=5.0
+2023-03-29 23:10:59,814 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87861.0, num_to_drop=1, layers_to_drop={2}
+2023-03-29 23:11:56,093 INFO [train.py:892] (1/4) Epoch 48, batch 700, loss[loss=0.1478, simple_loss=0.226, pruned_loss=0.03477, over 19780.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2294, pruned_loss=0.03222, over 3830779.63 frames. ], batch size: 46, lr: 3.25e-03, grad_scale: 16.0
+2023-03-29 23:11:57,076 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87886.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:12:35,766 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([5.5995, 5.1016, 5.1289, 4.8185, 5.5005, 3.4520, 4.2573, 2.8552],
+       device='cuda:1'), covar=tensor([0.0136, 0.0191, 0.0144, 0.0215, 0.0154, 0.0876, 0.1009, 0.1426],
+       device='cuda:1'), in_proj_covar=tensor([0.0111, 0.0156, 0.0120, 0.0143, 0.0126, 0.0140, 0.0148, 0.0134],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003],
+       device='cuda:1')
+2023-03-29 23:12:50,905 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0
+2023-03-29 23:13:45,615 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87934.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:13:48,369 INFO [optim.py:368] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.639e+02 3.556e+02 4.149e+02 4.938e+02 1.091e+03, threshold=8.298e+02, percent-clipped=2.0
+2023-03-29 23:13:50,816 INFO [train.py:892] (1/4) Epoch 48, batch 750, loss[loss=0.1591, simple_loss=0.2476, pruned_loss=0.03525, over 19672.00 frames. ], tot_loss[loss=0.1464, simple_loss=0.2293, pruned_loss=0.03181, over 3856937.00 frames. ], batch size: 73, lr: 3.25e-03, grad_scale: 32.0
+2023-03-29 23:14:38,870 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87940.0, num_to_drop=1, layers_to_drop={3}
+2023-03-29 23:14:50,629 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87945.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 23:15:26,368 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87961.0, num_to_drop=1, layers_to_drop={3}
+2023-03-29 23:17:04,532 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([2.6347, 2.0520, 2.4028, 2.8081, 3.1806, 3.3110, 3.1690, 3.1847],
+       device='cuda:1'), covar=tensor([0.1088, 0.1824, 0.1453, 0.0828, 0.0577, 0.0414, 0.0498, 0.0612],
+       device='cuda:1'), in_proj_covar=tensor([0.0170, 0.0174, 0.0186, 0.0162, 0.0148, 0.0143, 0.0138, 0.0126],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+       device='cuda:1')
+2023-03-29 23:17:10,151 INFO [train.py:892] (1/4) Epoch 48, batch 800, loss[loss=0.1264, simple_loss=0.2072, pruned_loss=0.02285, over 19863.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2297, pruned_loss=0.03214, over 3875598.44 frames. ], batch size: 46, lr: 3.25e-03, grad_scale: 32.0
+2023-03-29 23:17:40,177 INFO [zipformer.py:1454] (1/4) attn_weights_entropy = tensor([3.4860, 2.6764, 3.9223, 3.1137, 3.2549, 3.0166, 2.3290, 2.4613],
+       device='cuda:1'), covar=tensor([0.1249, 0.3282, 0.0644, 0.1273, 0.1972, 0.1897, 0.3003, 0.3047],
+       device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0411, 0.0359, 0.0304, 0.0385, 0.0410, 0.0397, 0.0374],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:1')
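As a usage example, the hypothetical parse_tot_loss helper above turns a slice of this log into a loss curve; the file path below is illustrative:

points = []
with open("exp/log/log-train") as f:  # illustrative path, not the real filename
    for line in f:
        rec = parse_tot_loss(line)
        if rec is not None:
            points.append(rec)
# For this excerpt the tail would be (48, 800, 0.147, 0.2297, 0.03214),
# i.e. tot_loss is roughly flat around 0.145-0.147 through epochs 47-48.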