diff --git "a/exp/log/log-train-2023-03-27-14-47-20-2" "b/exp/log/log-train-2023-03-27-14-47-20-2" new file mode 100644--- /dev/null +++ "b/exp/log/log-train-2023-03-27-14-47-20-2" @@ -0,0 +1,12062 @@ +2023-03-27 14:47:20,926 INFO [train.py:962] (2/4) Training started +2023-03-27 14:47:20,926 INFO [train.py:972] (2/4) Device: cuda:2 +2023-03-27 14:47:20,929 INFO [train.py:981] (2/4) {'frame_shift_ms': 10.0, 'allowed_excess_duration_ratio': 0.1, 'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '9426c9f730820d291f5dcb06be337662595fa7b4', 'k2-git-date': 'Sun Feb 5 17:35:01 2023', 'lhotse-version': '1.13.0.dev+git.4cbd1bde.clean', 'torch-version': '1.10.0+cu102', 'torch-cuda-available': True, 'torch-cuda-version': '10.2', 'python-version': '3.8', 'icefall-git-branch': 'bbpe', 'icefall-git-sha1': 'e03c10a-dirty', 'icefall-git-date': 'Mon Mar 27 00:05:03 2023', 'icefall-path': '/ceph-kw/kangwei/code/icefall_bbpe', 'k2-path': '/ceph-hw/kangwei/code/k2_release/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-hw/kangwei/dev_tools/anaconda3/envs/rnnt2/lib/python3.8/site-packages/lhotse-1.13.0.dev0+git.4cbd1bde.clean-py3.8.egg/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-9-0208143539-7dcb6bfd79-b6fdq', 'IP address': '10.177.13.150'}, 'world_size': 4, 'master_port': 12535, 'tensorboard': True, 'num_epochs': 50, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless7_bbpe/exp'), 'bbpe_model': 'data/lang_bbpe_500/bbpe.model', 'base_lr': 0.05, 'lr_batches': 5000, 'lr_epochs': 3.5, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 2000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,4,3,2,4', 'feedforward_dims': '1024,1024,2048,2048,1024', 'nhead': '8,8,8,8,8', 'encoder_dims': '384,384,384,384,384', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '256,256,256,256,256', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'manifest_dir': PosixPath('data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 300, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2023-03-27 14:47:20,930 INFO [train.py:983] (2/4) About to create model +2023-03-27 14:47:21,847 INFO [zipformer.py:178] (2/4) At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. +2023-03-27 14:47:21,871 INFO [train.py:987] (2/4) Number of model parameters: 70369391 +2023-03-27 14:47:28,355 INFO [train.py:1002] (2/4) Using DDP +2023-03-27 14:47:28,704 INFO [asr_datamodule.py:407] (2/4) About to get train cuts +2023-03-27 14:47:28,707 INFO [train.py:1083] (2/4) Filtering short and long utterances. +2023-03-27 14:47:28,707 INFO [train.py:1086] (2/4) Tokenizing and encoding texts in train cuts. 
+2023-03-27 14:47:28,707 INFO [asr_datamodule.py:224] (2/4) About to get Musan cuts
+2023-03-27 14:47:31,947 INFO [asr_datamodule.py:229] (2/4) Enable MUSAN
+2023-03-27 14:47:31,947 INFO [asr_datamodule.py:252] (2/4) Enable SpecAugment
+2023-03-27 14:47:31,947 INFO [asr_datamodule.py:253] (2/4) Time warp factor: 80
+2023-03-27 14:47:31,948 INFO [asr_datamodule.py:263] (2/4) Num frame mask: 10
+2023-03-27 14:47:31,948 INFO [asr_datamodule.py:276] (2/4) About to create train dataset
+2023-03-27 14:47:31,948 INFO [asr_datamodule.py:303] (2/4) Using DynamicBucketingSampler.
+2023-03-27 14:47:42,171 INFO [asr_datamodule.py:320] (2/4) About to create train dataloader
+2023-03-27 14:47:42,172 INFO [asr_datamodule.py:414] (2/4) About to get dev cuts
+2023-03-27 14:47:42,174 INFO [train.py:1102] (2/4) Tokenizing and encoding texts in valid cuts.
+2023-03-27 14:47:42,182 INFO [asr_datamodule.py:351] (2/4) About to create dev dataset
+2023-03-27 14:47:43,001 INFO [asr_datamodule.py:370] (2/4) About to create dev dataloader
+2023-03-27 14:48:25,410 INFO [train.py:892] (2/4) Epoch 1, batch 0, loss[loss=7.432, simple_loss=6.748, pruned_loss=6.819, over 19704.00 frames. ], tot_loss[loss=7.432, simple_loss=6.748, pruned_loss=6.819, over 19704.00 frames. ], batch size: 305, lr: 2.50e-02, grad_scale: 2.0
+2023-03-27 14:48:25,410 INFO [train.py:917] (2/4) Computing validation loss
+2023-03-27 14:48:53,173 INFO [train.py:926] (2/4) Epoch 1, validation: loss=6.85, simple_loss=6.179, pruned_loss=6.691, over 2883724.00 frames.
+2023-03-27 14:48:53,174 INFO [train.py:927] (2/4) Maximum memory allocated so far is 12219MB
+2023-03-27 14:49:01,349 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5.0, num_to_drop=2, layers_to_drop={1, 3}
+2023-03-27 14:49:05,158 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=3.82 vs. limit=2.0
+2023-03-27 14:49:31,451 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 14:50:04,826 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=10.68 vs. limit=2.0
+2023-03-27 14:50:15,322 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=176.59 vs. limit=5.0
+2023-03-27 14:50:17,299 INFO [train.py:892] (2/4) Epoch 1, batch 50, loss[loss=1.197, simple_loss=1.048, pruned_loss=1.324, over 19834.00 frames. ], tot_loss[loss=2.19, simple_loss=1.984, pruned_loss=1.979, over 892449.41 frames. ], batch size: 75, lr: 2.75e-02, grad_scale: 0.5
+2023-03-27 14:50:26,606 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7792, 4.7197, 4.6965, 4.7216, 4.8037, 4.8324, 4.8515, 4.7202],
+       device='cuda:2'), covar=tensor([0.0015, 0.0012, 0.0025, 0.0023, 0.0011, 0.0023, 0.0017, 0.0018],
+       device='cuda:2'), in_proj_covar=tensor([0.0009, 0.0010, 0.0010, 0.0010, 0.0009, 0.0010, 0.0010, 0.0010],
+       device='cuda:2'), out_proj_covar=tensor([9.3050e-06, 9.5459e-06, 9.4556e-06, 9.7150e-06, 9.3893e-06, 9.4014e-06,
+        9.4669e-06, 9.5368e-06], device='cuda:2')
+2023-03-27 14:50:51,205 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=75.89 vs. limit=5.0
+2023-03-27 14:51:13,796 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 14:51:21,849 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=8.14 vs. limit=2.0
+2023-03-27 14:51:39,749 INFO [train.py:892] (2/4) Epoch 1, batch 100, loss[loss=0.8474, simple_loss=0.7229, pruned_loss=0.981, over 19729.00 frames. ], tot_loss[loss=1.538, simple_loss=1.366, pruned_loss=1.545, over 1571113.54 frames. ], batch size: 118, lr: 3.00e-02, grad_scale: 1.0
+2023-03-27 14:51:42,874 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.423e+01 2.042e+02 3.545e+02 1.360e+03 1.838e+04, threshold=7.089e+02, percent-clipped=0.0
+2023-03-27 14:52:20,805 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=5.69 vs. limit=2.0
+2023-03-27 14:52:28,211 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=6.90 vs. limit=2.0
+2023-03-27 14:52:37,171 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=3.71 vs. limit=2.0
+2023-03-27 14:52:40,382 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=33.43 vs. limit=5.0
+2023-03-27 14:52:51,599 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144.0, num_to_drop=2, layers_to_drop={1, 3}
+2023-03-27 14:53:04,045 INFO [train.py:892] (2/4) Epoch 1, batch 150, loss[loss=0.7813, simple_loss=0.6676, pruned_loss=0.8272, over 19811.00 frames. ], tot_loss[loss=1.258, simple_loss=1.104, pruned_loss=1.302, over 2097523.74 frames. ], batch size: 123, lr: 3.25e-02, grad_scale: 1.0
+2023-03-27 14:53:30,976 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2228, 4.5219, 5.1376, 5.4102, 5.3870, 5.4365, 5.5116, 1.7573],
+       device='cuda:2'), covar=tensor([0.0060, 0.0116, 0.0103, 0.0090, 0.0067, 0.0097, 0.0064, 0.0307],
+       device='cuda:2'), in_proj_covar=tensor([0.0009, 0.0009, 0.0010, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009],
+       device='cuda:2'), out_proj_covar=tensor([8.8604e-06, 9.0396e-06, 8.9186e-06, 9.1321e-06, 8.8662e-06, 8.8654e-06,
+        8.9414e-06, 9.1245e-06], device='cuda:2')
+2023-03-27 14:53:39,991 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=20.50 vs. limit=5.0
+2023-03-27 14:53:47,638 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=13.57 vs. limit=5.0
+2023-03-27 14:54:26,339 INFO [train.py:892] (2/4) Epoch 1, batch 200, loss[loss=0.8241, simple_loss=0.7071, pruned_loss=0.8007, over 19768.00 frames. ], tot_loss[loss=1.11, simple_loss=0.967, pruned_loss=1.148, over 2507854.33 frames. ], batch size: 247, lr: 3.50e-02, grad_scale: 1.0
+2023-03-27 14:54:29,333 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.392e+01 1.785e+02 2.449e+02 3.359e+02 7.299e+02, threshold=4.898e+02, percent-clipped=1.0
+2023-03-27 14:55:46,824 INFO [train.py:892] (2/4) Epoch 1, batch 250, loss[loss=0.7621, simple_loss=0.6409, pruned_loss=0.7577, over 19770.00 frames. ], tot_loss[loss=1.017, simple_loss=0.8798, pruned_loss=1.042, over 2826798.28 frames. ], batch size: 87, lr: 3.75e-02, grad_scale: 1.0
+2023-03-27 14:56:52,040 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.8285, 5.8673, 5.8534, 5.8615, 5.8641, 5.8698, 5.8493, 5.8708],
+       device='cuda:2'), covar=tensor([0.0032, 0.0027, 0.0028, 0.0028, 0.0024, 0.0028, 0.0030, 0.0025],
+       device='cuda:2'), in_proj_covar=tensor([0.0009, 0.0009, 0.0010, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009],
+       device='cuda:2'), out_proj_covar=tensor([9.6384e-06, 9.6659e-06, 9.4889e-06, 9.3242e-06, 9.3729e-06, 9.5085e-06,
+        9.3456e-06, 9.4820e-06], device='cuda:2')
+2023-03-27 14:57:03,638 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=296.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 14:57:10,700 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=300.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 14:57:11,404 INFO [train.py:892] (2/4) Epoch 1, batch 300, loss[loss=0.8077, simple_loss=0.6693, pruned_loss=0.8015, over 19580.00 frames. ], tot_loss[loss=0.9532, simple_loss=0.8191, pruned_loss=0.9648, over 3076575.70 frames. ], batch size: 42, lr: 4.00e-02, grad_scale: 1.0
+2023-03-27 14:57:14,294 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 9.056e+01 1.487e+02 2.142e+02 2.914e+02 5.641e+02, threshold=4.285e+02, percent-clipped=2.0
+2023-03-27 14:58:31,285 INFO [train.py:892] (2/4) Epoch 1, batch 350, loss[loss=0.7283, simple_loss=0.61, pruned_loss=0.666, over 19751.00 frames. ], tot_loss[loss=0.9091, simple_loss=0.776, pruned_loss=0.9056, over 3270842.09 frames. ], batch size: 110, lr: 4.25e-02, grad_scale: 1.0
+2023-03-27 14:58:41,006 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=357.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 14:59:30,286 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=387.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 14:59:51,357 INFO [train.py:892] (2/4) Epoch 1, batch 400, loss[loss=0.7825, simple_loss=0.6545, pruned_loss=0.6903, over 19797.00 frames. ], tot_loss[loss=0.8794, simple_loss=0.7465, pruned_loss=0.8584, over 3421030.55 frames. ], batch size: 173, lr: 4.50e-02, grad_scale: 2.0
+2023-03-27 14:59:54,326 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.540e+02 2.069e+02 2.975e+02 6.292e+02, threshold=4.137e+02, percent-clipped=2.0
+2023-03-27 15:00:51,898 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=439.0, num_to_drop=2, layers_to_drop={0, 3}
+2023-03-27 15:01:07,647 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=448.0, num_to_drop=2, layers_to_drop={0, 3}
+2023-03-27 15:01:12,545 INFO [train.py:892] (2/4) Epoch 1, batch 450, loss[loss=0.7795, simple_loss=0.6514, pruned_loss=0.6655, over 19810.00 frames. ], tot_loss[loss=0.8546, simple_loss=0.7225, pruned_loss=0.8145, over 3538595.32 frames. ], batch size: 288, lr: 4.75e-02, grad_scale: 2.0
+2023-03-27 15:02:30,388 INFO [train.py:892] (2/4) Epoch 1, batch 500, loss[loss=0.7877, simple_loss=0.6636, pruned_loss=0.6368, over 19557.00 frames. ], tot_loss[loss=0.8294, simple_loss=0.6997, pruned_loss=0.7691, over 3630824.05 frames. ], batch size: 41, lr: 4.99e-02, grad_scale: 2.0
+2023-03-27 15:02:33,260 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 2.386e+02 3.363e+02 4.760e+02 1.006e+03, threshold=6.727e+02, percent-clipped=34.0
+2023-03-27 15:02:51,627 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=6.21 vs. limit=5.0
+2023-03-27 15:03:50,995 INFO [train.py:892] (2/4) Epoch 1, batch 550, loss[loss=0.6998, simple_loss=0.5875, pruned_loss=0.5553, over 19799.00 frames. ], tot_loss[loss=0.8061, simple_loss=0.6798, pruned_loss=0.7251, over 3702379.71 frames. ], batch size: 236, lr: 4.98e-02, grad_scale: 2.0
+2023-03-27 15:04:10,960 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=562.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:04:21,079 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=568.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:04:55,652 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=590.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:05:13,832 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=600.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:05:14,495 INFO [train.py:892] (2/4) Epoch 1, batch 600, loss[loss=0.615, simple_loss=0.5206, pruned_loss=0.4661, over 19843.00 frames. ], tot_loss[loss=0.7822, simple_loss=0.6605, pruned_loss=0.681, over 3757638.43 frames. ], batch size: 124, lr: 4.98e-02, grad_scale: 2.0
+2023-03-27 15:05:17,537 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.786e+02 4.093e+02 5.434e+02 6.548e+02 1.823e+03, threshold=1.087e+03, percent-clipped=20.0
+2023-03-27 15:05:49,362 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=623.0, num_to_drop=2, layers_to_drop={0, 3}
+2023-03-27 15:05:59,541 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=629.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 15:06:31,698 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=648.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:06:35,604 INFO [train.py:892] (2/4) Epoch 1, batch 650, loss[loss=0.5815, simple_loss=0.505, pruned_loss=0.404, over 19892.00 frames. ], tot_loss[loss=0.7619, simple_loss=0.6448, pruned_loss=0.6415, over 3797676.80 frames. ], batch size: 47, lr: 4.98e-02, grad_scale: 2.0
+2023-03-27 15:06:36,369 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=651.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:06:37,900 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=652.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:07:56,925 INFO [train.py:892] (2/4) Epoch 1, batch 700, loss[loss=0.6851, simple_loss=0.5835, pruned_loss=0.4907, over 19713.00 frames. ], tot_loss[loss=0.7385, simple_loss=0.6265, pruned_loss=0.6023, over 3831776.13 frames. ], batch size: 81, lr: 4.98e-02, grad_scale: 2.0
+2023-03-27 15:07:59,946 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.633e+02 4.666e+02 6.058e+02 9.217e+02 2.342e+03, threshold=1.212e+03, percent-clipped=17.0
+2023-03-27 15:08:32,776 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0
+2023-03-27 15:08:59,079 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=739.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 15:09:06,945 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=743.0, num_to_drop=2, layers_to_drop={1, 3}
+2023-03-27 15:09:20,458 INFO [train.py:892] (2/4) Epoch 1, batch 750, loss[loss=0.6094, simple_loss=0.5297, pruned_loss=0.4091, over 19744.00 frames. ], tot_loss[loss=0.7204, simple_loss=0.6124, pruned_loss=0.5702, over 3857956.41 frames. ], batch size: 71, lr: 4.97e-02, grad_scale: 2.0
+2023-03-27 15:09:45,595 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0
+2023-03-27 15:10:17,408 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=787.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:10:39,429 INFO [train.py:892] (2/4) Epoch 1, batch 800, loss[loss=0.6232, simple_loss=0.5346, pruned_loss=0.425, over 19859.00 frames. ], tot_loss[loss=0.6989, simple_loss=0.5963, pruned_loss=0.5366, over 3878454.22 frames. ], batch size: 106, lr: 4.97e-02, grad_scale: 4.0
+2023-03-27 15:10:42,236 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.256e+02 4.881e+02 6.070e+02 8.660e+02 1.858e+03, threshold=1.214e+03, percent-clipped=11.0
+2023-03-27 15:11:34,459 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=6.00 vs. limit=5.0
+2023-03-27 15:11:36,005 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.90 vs. limit=5.0
+2023-03-27 15:11:54,010 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=847.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:12:00,814 INFO [train.py:892] (2/4) Epoch 1, batch 850, loss[loss=0.6482, simple_loss=0.5559, pruned_loss=0.436, over 19738.00 frames. ], tot_loss[loss=0.6787, simple_loss=0.5809, pruned_loss=0.5064, over 3895337.65 frames. ], batch size: 219, lr: 4.96e-02, grad_scale: 4.0
+2023-03-27 15:13:46,293 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.7879, 2.1047, 2.2724, 1.9159, 2.0359, 2.3127, 2.5693, 1.8720],
+       device='cuda:2'), covar=tensor([0.5250, 0.3993, 0.3904, 0.4492, 0.4038, 0.3814, 0.3184, 0.6547],
+       device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0061, 0.0063, 0.0057, 0.0062, 0.0058, 0.0062, 0.0067],
+       device='cuda:2'), out_proj_covar=tensor([5.2895e-05, 5.3742e-05, 5.5397e-05, 5.2686e-05, 5.8033e-05, 5.2865e-05,
+        5.7972e-05, 6.1126e-05], device='cuda:2')
+2023-03-27 15:14:32,614 INFO [train.py:892] (2/4) Epoch 1, batch 900, loss[loss=0.5634, simple_loss=0.4905, pruned_loss=0.3629, over 19769.00 frames. ], tot_loss[loss=0.6625, simple_loss=0.5683, pruned_loss=0.4822, over 3907916.45 frames. ], batch size: 130, lr: 4.96e-02, grad_scale: 4.0
+2023-03-27 15:14:39,048 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.144e+02 5.792e+02 7.433e+02 9.367e+02 4.103e+03, threshold=1.487e+03, percent-clipped=16.0
+2023-03-27 15:14:57,725 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=908.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:15:27,146 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=918.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:15:45,040 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=924.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 15:17:00,759 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=946.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:17:13,666 INFO [train.py:892] (2/4) Epoch 1, batch 950, loss[loss=0.6514, simple_loss=0.5598, pruned_loss=0.4256, over 19760.00 frames. ], tot_loss[loss=0.6514, simple_loss=0.5607, pruned_loss=0.4619, over 3914229.49 frames. ], batch size: 233, lr: 4.96e-02, grad_scale: 4.0
+2023-03-27 15:17:17,288 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=952.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:19:15,021 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1000.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:19:15,780 INFO [train.py:892] (2/4) Epoch 1, batch 1000, loss[loss=0.5655, simple_loss=0.4955, pruned_loss=0.3525, over 19754.00 frames. ], tot_loss[loss=0.6385, simple_loss=0.5509, pruned_loss=0.4428, over 3923076.08 frames. ], batch size: 89, lr: 4.95e-02, grad_scale: 4.0
+2023-03-27 15:19:21,146 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.942e+02 5.404e+02 6.507e+02 8.567e+02 3.462e+03, threshold=1.301e+03, percent-clipped=3.0
+2023-03-27 15:20:01,496 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.02 vs. limit=5.0
+2023-03-27 15:21:24,941 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1043.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:21:43,191 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.98 vs. limit=2.0
+2023-03-27 15:21:50,537 INFO [train.py:892] (2/4) Epoch 1, batch 1050, loss[loss=0.6437, simple_loss=0.5526, pruned_loss=0.4123, over 19777.00 frames. ], tot_loss[loss=0.6286, simple_loss=0.5431, pruned_loss=0.4276, over 3929529.65 frames. ], batch size: 241, lr: 4.95e-02, grad_scale: 4.0
+2023-03-27 15:23:44,421 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1091.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:23:53,219 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-03-27 15:24:13,032 INFO [train.py:892] (2/4) Epoch 1, batch 1100, loss[loss=0.755, simple_loss=0.6455, pruned_loss=0.4823, over 19478.00 frames. ], tot_loss[loss=0.6124, simple_loss=0.5313, pruned_loss=0.4076, over 3935870.91 frames. ], batch size: 396, lr: 4.94e-02, grad_scale: 4.0
+2023-03-27 15:24:19,027 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 6.074e+02 7.964e+02 1.002e+03 2.431e+03, threshold=1.593e+03, percent-clipped=21.0
+2023-03-27 15:26:28,303 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4445, 4.8121, 4.8244, 4.7507, 4.8611, 4.6198, 4.5934, 4.3068],
+       device='cuda:2'), covar=tensor([0.0449, 0.0368, 0.0380, 0.0360, 0.0333, 0.0489, 0.0421, 0.0671],
+       device='cuda:2'), in_proj_covar=tensor([0.0049, 0.0043, 0.0045, 0.0045, 0.0041, 0.0046, 0.0046, 0.0054],
+       device='cuda:2'), out_proj_covar=tensor([4.9552e-05, 4.5946e-05, 4.7242e-05, 4.6942e-05, 4.5294e-05, 4.4030e-05,
+        4.6754e-05, 5.2424e-05], device='cuda:2')
+2023-03-27 15:26:44,135 INFO [train.py:892] (2/4) Epoch 1, batch 1150, loss[loss=0.559, simple_loss=0.4843, pruned_loss=0.3464, over 19868.00 frames. ], tot_loss[loss=0.6032, simple_loss=0.5246, pruned_loss=0.3942, over 3938893.69 frames. ], batch size: 165, lr: 4.94e-02, grad_scale: 4.0
+2023-03-27 15:26:48,824 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1153.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:28:22,995 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9407, 1.8666, 2.1406, 1.7499, 1.9838, 2.1317, 2.0316, 1.9826],
+       device='cuda:2'), covar=tensor([0.3156, 0.3127, 0.2329, 0.3384, 0.2253, 0.2707, 0.2353, 0.3628],
+       device='cuda:2'), in_proj_covar=tensor([0.0057, 0.0061, 0.0061, 0.0053, 0.0059, 0.0055, 0.0059, 0.0065],
+       device='cuda:2'), out_proj_covar=tensor([5.1511e-05, 5.4590e-05, 5.2745e-05, 4.6859e-05, 5.1972e-05, 4.8098e-05,
+        5.2584e-05, 5.8621e-05], device='cuda:2')
+2023-03-27 15:28:37,505 INFO [train.py:892] (2/4) Epoch 1, batch 1200, loss[loss=0.5859, simple_loss=0.5151, pruned_loss=0.352, over 19669.00 frames. ], tot_loss[loss=0.5942, simple_loss=0.5182, pruned_loss=0.3817, over 3942626.05 frames. ], batch size: 73, lr: 4.93e-02, grad_scale: 8.0
+2023-03-27 15:28:40,928 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.874e+02 6.275e+02 7.572e+02 9.981e+02 2.448e+03, threshold=1.514e+03, percent-clipped=4.0
+2023-03-27 15:28:41,667 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1203.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 15:28:55,332 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.27 vs. limit=5.0
+2023-03-27 15:29:13,026 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1214.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 15:29:24,473 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1218.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:29:45,971 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1224.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:30:51,398 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1246.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:31:03,305 INFO [train.py:892] (2/4) Epoch 1, batch 1250, loss[loss=0.536, simple_loss=0.4798, pruned_loss=0.3111, over 19783.00 frames. ], tot_loss[loss=0.5841, simple_loss=0.5112, pruned_loss=0.3689, over 3945047.84 frames. ], batch size: 94, lr: 4.92e-02, grad_scale: 8.0
+2023-03-27 15:31:48,029 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1266.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:32:00,210 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1272.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:32:47,423 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1294.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:32:57,692 INFO [train.py:892] (2/4) Epoch 1, batch 1300, loss[loss=0.4746, simple_loss=0.435, pruned_loss=0.2641, over 19836.00 frames. ], tot_loss[loss=0.578, simple_loss=0.5073, pruned_loss=0.3597, over 3944436.62 frames. ], batch size: 76, lr: 4.92e-02, grad_scale: 8.0
+2023-03-27 15:33:00,849 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.916e+02 5.972e+02 8.796e+02 1.171e+03 2.494e+03, threshold=1.759e+03, percent-clipped=14.0
+2023-03-27 15:33:56,670 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=2.12 vs. limit=2.0
+2023-03-27 15:34:24,481 INFO [train.py:892] (2/4) Epoch 1, batch 1350, loss[loss=0.5147, simple_loss=0.4691, pruned_loss=0.2883, over 19693.00 frames. ], tot_loss[loss=0.5664, simple_loss=0.4996, pruned_loss=0.3467, over 3946018.12 frames. ], batch size: 74, lr: 4.91e-02, grad_scale: 8.0
+2023-03-27 15:35:42,729 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.7711, 1.6632, 1.6035, 2.0519, 1.6430, 1.5894, 1.8253, 1.8065],
+       device='cuda:2'), covar=tensor([0.2067, 0.1610, 0.2136, 0.1295, 0.2167, 0.2334, 0.1578, 0.2331],
+       device='cuda:2'), in_proj_covar=tensor([0.0051, 0.0057, 0.0058, 0.0052, 0.0061, 0.0053, 0.0052, 0.0054],
+       device='cuda:2'), out_proj_covar=tensor([4.5703e-05, 4.8898e-05, 5.1537e-05, 4.5034e-05, 5.6359e-05, 4.7835e-05,
+        4.3462e-05, 4.7996e-05], device='cuda:2')
+2023-03-27 15:35:50,290 INFO [train.py:892] (2/4) Epoch 1, batch 1400, loss[loss=0.5057, simple_loss=0.4596, pruned_loss=0.2835, over 19872.00 frames. ], tot_loss[loss=0.5557, simple_loss=0.4921, pruned_loss=0.3354, over 3947799.47 frames. ], batch size: 157, lr: 4.91e-02, grad_scale: 8.0
+2023-03-27 15:35:53,773 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.724e+02 6.220e+02 8.188e+02 1.056e+03 1.766e+03, threshold=1.638e+03, percent-clipped=1.0
+2023-03-27 15:36:50,063 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1434.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:37:19,437 INFO [train.py:892] (2/4) Epoch 1, batch 1450, loss[loss=0.5268, simple_loss=0.4647, pruned_loss=0.3063, over 19866.00 frames. ], tot_loss[loss=0.5489, simple_loss=0.4875, pruned_loss=0.3272, over 3949308.36 frames. ], batch size: 129, lr: 4.90e-02, grad_scale: 8.0
+2023-03-27 15:38:19,683 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.94 vs. limit=5.0
+2023-03-27 15:38:24,197 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3184, 2.0653, 3.3550, 2.8958, 2.1236, 2.9953, 2.2634, 1.4265],
+       device='cuda:2'), covar=tensor([0.4278, 1.5615, 0.0899, 0.1362, 0.7691, 0.1901, 0.3470, 1.0110],
+       device='cuda:2'), in_proj_covar=tensor([0.0042, 0.0058, 0.0031, 0.0033, 0.0047, 0.0031, 0.0037, 0.0055],
+       device='cuda:2'), out_proj_covar=tensor([4.0599e-05, 5.6583e-05, 2.2657e-05, 2.4653e-05, 4.7102e-05, 2.2553e-05,
+        3.1984e-05, 5.1531e-05], device='cuda:2')
+2023-03-27 15:38:38,521 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1495.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:38:47,171 INFO [train.py:892] (2/4) Epoch 1, batch 1500, loss[loss=0.5211, simple_loss=0.4779, pruned_loss=0.287, over 19724.00 frames. ], tot_loss[loss=0.5442, simple_loss=0.485, pruned_loss=0.3206, over 3950132.16 frames. ], batch size: 71, lr: 4.89e-02, grad_scale: 8.0
+2023-03-27 15:38:51,712 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 6.710e+02 8.347e+02 1.087e+03 2.003e+03, threshold=1.669e+03, percent-clipped=5.0
+2023-03-27 15:38:52,700 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1503.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:39:02,624 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1509.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-27 15:40:15,424 INFO [train.py:892] (2/4) Epoch 1, batch 1550, loss[loss=0.5516, simple_loss=0.5059, pruned_loss=0.3032, over 19797.00 frames. ], tot_loss[loss=0.5389, simple_loss=0.4821, pruned_loss=0.3139, over 3950233.67 frames. ], batch size: 65, lr: 4.89e-02, grad_scale: 8.0
+2023-03-27 15:40:16,164 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1551.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:40:37,189 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5883, 2.7806, 2.6480, 2.9121, 2.2098, 1.9835, 2.6933, 2.0949],
+       device='cuda:2'), covar=tensor([0.0742, 0.0605, 0.0677, 0.0438, 0.1115, 0.1874, 0.0537, 0.0826],
+       device='cuda:2'), in_proj_covar=tensor([0.0052, 0.0046, 0.0049, 0.0043, 0.0050, 0.0051, 0.0046, 0.0046],
+       device='cuda:2'), out_proj_covar=tensor([4.2318e-05, 3.6280e-05, 3.9719e-05, 3.2963e-05, 4.0961e-05, 4.1182e-05,
+        3.8058e-05, 3.5720e-05], device='cuda:2')
+2023-03-27 15:40:45,091 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1566.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:41:04,929 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.44 vs. limit=2.0
+2023-03-27 15:41:45,855 INFO [train.py:892] (2/4) Epoch 1, batch 1600, loss[loss=0.4682, simple_loss=0.4284, pruned_loss=0.2575, over 19860.00 frames. ], tot_loss[loss=0.531, simple_loss=0.4774, pruned_loss=0.3057, over 3950101.34 frames. ], batch size: 157, lr: 4.88e-02, grad_scale: 8.0
+2023-03-27 15:41:50,279 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.877e+02 6.710e+02 8.151e+02 1.107e+03 2.056e+03, threshold=1.630e+03, percent-clipped=4.0
+2023-03-27 15:42:05,303 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1611.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:42:16,426 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.91 vs. limit=2.0
+2023-03-27 15:42:27,921 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.07 vs. limit=2.0
+2023-03-27 15:42:30,869 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1627.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 15:43:13,160 INFO [train.py:892] (2/4) Epoch 1, batch 1650, loss[loss=0.4302, simple_loss=0.4077, pruned_loss=0.2263, over 19819.00 frames. ], tot_loss[loss=0.5263, simple_loss=0.4743, pruned_loss=0.3005, over 3950101.39 frames. ], batch size: 82, lr: 4.87e-02, grad_scale: 8.0
+2023-03-27 15:43:50,754 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1672.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 15:44:39,839 INFO [train.py:892] (2/4) Epoch 1, batch 1700, loss[loss=0.4811, simple_loss=0.4413, pruned_loss=0.2628, over 19859.00 frames. ], tot_loss[loss=0.5178, simple_loss=0.4691, pruned_loss=0.2925, over 3949712.24 frames. ], batch size: 197, lr: 4.86e-02, grad_scale: 8.0
+2023-03-27 15:44:43,069 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.108e+02 7.327e+02 1.005e+03 2.757e+03, threshold=1.465e+03, percent-clipped=5.0
+2023-03-27 15:46:05,825 INFO [train.py:892] (2/4) Epoch 1, batch 1750, loss[loss=0.4665, simple_loss=0.4363, pruned_loss=0.2491, over 19775.00 frames. ], tot_loss[loss=0.5101, simple_loss=0.4641, pruned_loss=0.2856, over 3950719.76 frames. ], batch size: 87, lr: 4.86e-02, grad_scale: 8.0
+2023-03-27 15:46:27,355 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0891, 3.8465, 3.7917, 4.3471, 3.8831, 3.6033, 3.5381, 3.3307],
+       device='cuda:2'), covar=tensor([0.0373, 0.0416, 0.0545, 0.0302, 0.0417, 0.0582, 0.0567, 0.0778],
+       device='cuda:2'), in_proj_covar=tensor([0.0041, 0.0036, 0.0040, 0.0034, 0.0040, 0.0046, 0.0043, 0.0044],
+       device='cuda:2'), out_proj_covar=tensor([3.2476e-05, 3.1491e-05, 3.0646e-05, 2.6535e-05, 3.0041e-05, 3.6938e-05,
+        3.5523e-05, 3.5314e-05], device='cuda:2')
+2023-03-27 15:47:05,108 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1790.0, num_to_drop=1, layers_to_drop={2}
+2023-03-27 15:47:20,414 INFO [train.py:892] (2/4) Epoch 1, batch 1800, loss[loss=0.4457, simple_loss=0.4109, pruned_loss=0.2414, over 19739.00 frames. ], tot_loss[loss=0.5073, simple_loss=0.4624, pruned_loss=0.2823, over 3949182.71 frames. ], batch size: 140, lr: 4.85e-02, grad_scale: 8.0
+2023-03-27 15:47:23,280 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.834e+02 6.979e+02 1.011e+03 1.306e+03 2.784e+03, threshold=2.021e+03, percent-clipped=17.0
+2023-03-27 15:47:32,344 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1809.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:48:13,765 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7843, 4.3519, 4.5374, 4.2175, 4.5574, 4.5666, 4.3844, 4.7767],
+       device='cuda:2'), covar=tensor([0.0427, 0.0426, 0.0573, 0.0641, 0.0350, 0.0624, 0.0447, 0.0486],
+       device='cuda:2'), in_proj_covar=tensor([0.0042, 0.0048, 0.0047, 0.0048, 0.0042, 0.0044, 0.0046, 0.0038],
+       device='cuda:2'), out_proj_covar=tensor([3.7246e-05, 4.5605e-05, 4.8261e-05, 4.5475e-05, 3.8645e-05, 4.4653e-05,
+        4.1917e-05, 3.5053e-05], device='cuda:2')
+2023-03-27 15:48:13,796 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1838.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:48:31,004 INFO [train.py:892] (2/4) Epoch 1, batch 1850, loss[loss=0.5389, simple_loss=0.5158, pruned_loss=0.2806, over 19687.00 frames. ], tot_loss[loss=0.5045, simple_loss=0.4628, pruned_loss=0.2781, over 3948780.47 frames. ], batch size: 56, lr: 4.84e-02, grad_scale: 8.0
+2023-03-27 15:49:26,275 INFO [train.py:892] (2/4) Epoch 2, batch 0, loss[loss=0.4016, simple_loss=0.3832, pruned_loss=0.2098, over 19774.00 frames. ], tot_loss[loss=0.4016, simple_loss=0.3832, pruned_loss=0.2098, over 19774.00 frames. ], batch size: 116, lr: 4.75e-02, grad_scale: 8.0
+2023-03-27 15:49:26,276 INFO [train.py:917] (2/4) Computing validation loss
+2023-03-27 15:49:35,732 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1906, 2.5375, 2.2564, 2.9730, 2.1768, 2.3457, 2.2182, 2.2545],
+       device='cuda:2'), covar=tensor([0.1146, 0.0732, 0.3103, 0.0517, 0.1052, 0.0847, 0.0869, 0.0742],
+       device='cuda:2'), in_proj_covar=tensor([0.0033, 0.0030, 0.0031, 0.0031, 0.0034, 0.0036, 0.0039, 0.0035],
+       device='cuda:2'), out_proj_covar=tensor([2.6538e-05, 2.3124e-05, 2.6662e-05, 2.3965e-05, 2.5553e-05, 2.9377e-05,
+        3.2424e-05, 2.8234e-05], device='cuda:2')
+2023-03-27 15:49:49,211 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.4848, 0.7376, 1.4382, 0.9674, 1.2972, 1.4838, 1.4330, 1.3098],
+       device='cuda:2'), covar=tensor([0.5096, 1.5934, 0.4387, 1.3410, 0.5425, 0.3416, 0.2767, 0.3499],
+       device='cuda:2'), in_proj_covar=tensor([0.0026, 0.0049, 0.0025, 0.0052, 0.0028, 0.0032, 0.0027, 0.0027],
+       device='cuda:2'), out_proj_covar=tensor([1.9811e-05, 4.4370e-05, 1.9254e-05, 4.6820e-05, 2.2890e-05, 2.4864e-05,
+        2.0715e-05, 1.9702e-05], device='cuda:2')
+2023-03-27 15:49:52,688 INFO [train.py:926] (2/4) Epoch 2, validation: loss=0.3819, simple_loss=0.4085, pruned_loss=0.1743, over 2883724.00 frames.
+2023-03-27 15:49:52,689 INFO [train.py:927] (2/4) Maximum memory allocated so far is 18170MB
+2023-03-27 15:49:56,680 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1857.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:49:56,808 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1857.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 15:50:22,524 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1870.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 15:51:04,168 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.05 vs. limit=2.0
+2023-03-27 15:51:15,973 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1899.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:51:22,958 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.599e+02 7.082e+02 8.943e+02 1.129e+03 2.587e+03, threshold=1.789e+03, percent-clipped=3.0
+2023-03-27 15:51:29,922 INFO [train.py:892] (2/4) Epoch 2, batch 50, loss[loss=0.4686, simple_loss=0.4465, pruned_loss=0.2453, over 19760.00 frames. ], tot_loss[loss=0.4584, simple_loss=0.4336, pruned_loss=0.2417, over 891727.82 frames. ], batch size: 88, lr: 4.74e-02, grad_scale: 8.0
+2023-03-27 15:51:53,441 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1918.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 15:51:59,875 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1922.0, num_to_drop=1, layers_to_drop={3}
+2023-03-27 15:52:16,583 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1931.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:53:00,441 INFO [train.py:892] (2/4) Epoch 2, batch 100, loss[loss=0.499, simple_loss=0.454, pruned_loss=0.2724, over 19779.00 frames. ], tot_loss[loss=0.4652, simple_loss=0.4372, pruned_loss=0.2467, over 1570938.13 frames. ], batch size: 193, lr: 4.73e-02, grad_scale: 8.0
+2023-03-27 15:53:21,394 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1967.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:53:55,958 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5423, 3.2727, 3.4984, 3.1522, 3.3467, 2.9935, 3.2425, 3.3637],
+       device='cuda:2'), covar=tensor([0.0197, 0.0237, 0.0196, 0.0226, 0.0281, 0.0357, 0.0242, 0.0232],
+       device='cuda:2'), in_proj_covar=tensor([0.0032, 0.0031, 0.0035, 0.0037, 0.0035, 0.0038, 0.0034, 0.0031],
+       device='cuda:2'), out_proj_covar=tensor([2.9539e-05, 2.5591e-05, 3.1287e-05, 3.2381e-05, 3.2392e-05, 3.2293e-05,
+        2.8839e-05, 2.7101e-05], device='cuda:2')
+2023-03-27 15:54:30,516 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+02 7.981e+02 1.059e+03 1.451e+03 2.427e+03, threshold=2.118e+03, percent-clipped=7.0
+2023-03-27 15:54:36,143 INFO [train.py:892] (2/4) Epoch 2, batch 150, loss[loss=0.4258, simple_loss=0.4077, pruned_loss=0.2219, over 19745.00 frames. ], tot_loss[loss=0.4664, simple_loss=0.4379, pruned_loss=0.2475, over 2098051.80 frames. ], batch size: 140, lr: 4.72e-02, grad_scale: 8.0
+2023-03-27 15:54:40,407 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2000, 3.8077, 4.1051, 4.0296, 3.8436, 3.8077, 4.0383, 3.9965],
+       device='cuda:2'), covar=tensor([0.0147, 0.0199, 0.0107, 0.0112, 0.0147, 0.0321, 0.0108, 0.0152],
+       device='cuda:2'), in_proj_covar=tensor([0.0029, 0.0029, 0.0028, 0.0028, 0.0027, 0.0029, 0.0026, 0.0030],
+       device='cuda:2'), out_proj_covar=tensor([2.6756e-05, 2.5489e-05, 2.3833e-05, 2.3928e-05, 2.2931e-05, 2.6080e-05,
+        2.2552e-05, 2.7929e-05], device='cuda:2')
+2023-03-27 15:56:17,772 INFO [train.py:892] (2/4) Epoch 2, batch 200, loss[loss=0.4009, simple_loss=0.387, pruned_loss=0.2074, over 19884.00 frames. ], tot_loss[loss=0.4588, simple_loss=0.4352, pruned_loss=0.2413, over 2508098.63 frames. ], batch size: 134, lr: 4.72e-02, grad_scale: 16.0
+2023-03-27 15:57:23,830 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.29 vs. limit=5.0
+2023-03-27 15:57:25,402 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2090.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 15:57:51,092 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+02 6.247e+02 7.217e+02 9.235e+02 2.365e+03, threshold=1.443e+03, percent-clipped=1.0
+2023-03-27 15:57:56,590 INFO [train.py:892] (2/4) Epoch 2, batch 250, loss[loss=0.3999, simple_loss=0.3942, pruned_loss=0.2028, over 19752.00 frames. ], tot_loss[loss=0.4547, simple_loss=0.4334, pruned_loss=0.2381, over 2827756.96 frames. ], batch size: 97, lr: 4.71e-02, grad_scale: 16.0
+2023-03-27 15:59:00,539 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2138.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 15:59:32,885 INFO [train.py:892] (2/4) Epoch 2, batch 300, loss[loss=0.4274, simple_loss=0.4184, pruned_loss=0.2182, over 19781.00 frames. ], tot_loss[loss=0.4506, simple_loss=0.4314, pruned_loss=0.2349, over 3077364.65 frames. ], batch size: 131, lr: 4.70e-02, grad_scale: 16.0
+2023-03-27 15:59:51,706 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0
+2023-03-27 16:00:53,372 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2194.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:01:01,261 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0034, 4.1655, 4.4695, 4.5005, 3.8062, 2.3650, 4.2321, 3.6301],
+       device='cuda:2'), covar=tensor([0.0285, 0.0331, 0.0100, 0.0130, 0.0275, 0.0685, 0.0150, 0.0159],
+       device='cuda:2'), in_proj_covar=tensor([0.0040, 0.0036, 0.0036, 0.0032, 0.0039, 0.0038, 0.0034, 0.0033],
+       device='cuda:2'), out_proj_covar=tensor([3.2402e-05, 2.8162e-05, 2.8559e-05, 2.4303e-05, 3.0729e-05, 3.2262e-05,
+        2.7617e-05, 2.5699e-05], device='cuda:2')
+2023-03-27 16:01:11,029 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 6.231e+02 7.603e+02 9.238e+02 1.387e+03, threshold=1.521e+03, percent-clipped=0.0
+2023-03-27 16:01:18,837 INFO [train.py:892] (2/4) Epoch 2, batch 350, loss[loss=0.3729, simple_loss=0.3791, pruned_loss=0.1834, over 19845.00 frames. ], tot_loss[loss=0.4467, simple_loss=0.4297, pruned_loss=0.2319, over 3271017.02 frames. ], batch size: 109, lr: 4.69e-02, grad_scale: 16.0
+2023-03-27 16:01:25,542 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0803, 3.9388, 4.0130, 3.7227, 4.0325, 4.0862, 3.8107, 4.1841],
+       device='cuda:2'), covar=tensor([0.0419, 0.0347, 0.0455, 0.0446, 0.0268, 0.0399, 0.0370, 0.0369],
+       device='cuda:2'), in_proj_covar=tensor([0.0052, 0.0059, 0.0055, 0.0056, 0.0050, 0.0052, 0.0054, 0.0046],
+       device='cuda:2'), out_proj_covar=tensor([5.0268e-05, 5.9403e-05, 6.0317e-05, 5.6016e-05, 4.7241e-05, 5.7677e-05,
+        5.1879e-05, 4.5045e-05], device='cuda:2')
+2023-03-27 16:01:32,871 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2213.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:01:49,472 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2222.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 16:01:56,889 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2226.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:02:46,625 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6163, 3.1209, 3.5080, 3.5727, 3.2327, 3.2080, 3.3521, 2.5601],
+       device='cuda:2'), covar=tensor([0.0157, 0.0389, 0.0194, 0.0179, 0.0307, 0.0227, 0.0260, 0.0716],
+       device='cuda:2'), in_proj_covar=tensor([0.0027, 0.0030, 0.0030, 0.0024, 0.0029, 0.0032, 0.0031, 0.0032],
+       device='cuda:2'), out_proj_covar=tensor([2.0697e-05, 2.4723e-05, 2.2644e-05, 1.8208e-05, 2.2309e-05, 2.4541e-05,
+        2.4654e-05, 2.5346e-05], device='cuda:2')
+2023-03-27 16:02:54,452 INFO [train.py:892] (2/4) Epoch 2, batch 400, loss[loss=0.4531, simple_loss=0.434, pruned_loss=0.2362, over 19757.00 frames. ], tot_loss[loss=0.4408, simple_loss=0.4262, pruned_loss=0.2277, over 3421270.68 frames. ], batch size: 88, lr: 4.68e-02, grad_scale: 16.0
+2023-03-27 16:03:15,684 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2267.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:03:21,048 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2270.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:03:31,438 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.74 vs. limit=5.0
+2023-03-27 16:04:21,857 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0202, 3.9181, 4.8521, 4.2837, 4.2044, 4.7159, 4.4579, 4.6894],
+       device='cuda:2'), covar=tensor([0.0130, 0.0363, 0.0092, 0.0200, 0.0212, 0.0082, 0.0164, 0.0173],
+       device='cuda:2'), in_proj_covar=tensor([0.0027, 0.0029, 0.0027, 0.0027, 0.0026, 0.0025, 0.0026, 0.0030],
+       device='cuda:2'), out_proj_covar=tensor([2.6654e-05, 2.8449e-05, 2.5132e-05, 2.5663e-05, 2.3776e-05, 2.4173e-05,
+        2.2744e-05, 2.9674e-05], device='cuda:2')
+2023-03-27 16:04:26,668 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.159e+02 7.202e+02 8.951e+02 1.212e+03 2.125e+03, threshold=1.790e+03, percent-clipped=11.0
+2023-03-27 16:04:31,840 INFO [train.py:892] (2/4) Epoch 2, batch 450, loss[loss=0.3912, simple_loss=0.3908, pruned_loss=0.1958, over 19764.00 frames. ], tot_loss[loss=0.441, simple_loss=0.427, pruned_loss=0.2275, over 3538047.68 frames. ], batch size: 122, lr: 4.67e-02, grad_scale: 16.0
+2023-03-27 16:04:52,555 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2315.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:05:23,911 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4173, 3.5717, 4.1537, 3.9325, 3.7948, 4.1807, 4.1119, 4.1526],
+       device='cuda:2'), covar=tensor([0.0116, 0.0363, 0.0120, 0.0188, 0.0174, 0.0117, 0.0129, 0.0160],
+       device='cuda:2'), in_proj_covar=tensor([0.0026, 0.0028, 0.0026, 0.0026, 0.0025, 0.0024, 0.0025, 0.0028],
+       device='cuda:2'), out_proj_covar=tensor([2.5310e-05, 2.7640e-05, 2.4625e-05, 2.5436e-05, 2.2894e-05, 2.3430e-05,
+        2.2370e-05, 2.8286e-05], device='cuda:2')
+2023-03-27 16:05:40,882 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2340.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 16:06:13,199 INFO [train.py:892] (2/4) Epoch 2, batch 500, loss[loss=0.4022, simple_loss=0.4069, pruned_loss=0.1988, over 19898.00 frames. ], tot_loss[loss=0.4357, simple_loss=0.4239, pruned_loss=0.2237, over 3630729.61 frames. ], batch size: 91, lr: 4.66e-02, grad_scale: 16.0
+2023-03-27 16:07:43,945 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2401.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 16:07:46,784 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.849e+02 6.111e+02 7.665e+02 9.809e+02 1.525e+03, threshold=1.533e+03, percent-clipped=0.0
+2023-03-27 16:07:51,945 INFO [train.py:892] (2/4) Epoch 2, batch 550, loss[loss=0.4034, simple_loss=0.4074, pruned_loss=0.1997, over 19747.00 frames. ], tot_loss[loss=0.4325, simple_loss=0.4219, pruned_loss=0.2215, over 3701595.70 frames. ], batch size: 89, lr: 4.65e-02, grad_scale: 16.0
+2023-03-27 16:08:37,273 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0
+2023-03-27 16:09:31,325 INFO [train.py:892] (2/4) Epoch 2, batch 600, loss[loss=0.4268, simple_loss=0.4185, pruned_loss=0.2175, over 19781.00 frames. ], tot_loss[loss=0.4293, simple_loss=0.4205, pruned_loss=0.2191, over 3757439.66 frames. ], batch size: 193, lr: 4.64e-02, grad_scale: 16.0
+2023-03-27 16:09:45,319 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8494, 1.9870, 1.9085, 1.0017, 1.7301, 2.0066, 1.9285, 1.7991],
+       device='cuda:2'), covar=tensor([0.0264, 0.0357, 0.0305, 0.0882, 0.0455, 0.0360, 0.0325, 0.0480],
+       device='cuda:2'), in_proj_covar=tensor([0.0028, 0.0033, 0.0032, 0.0033, 0.0030, 0.0029, 0.0029, 0.0033],
+       device='cuda:2'), out_proj_covar=tensor([2.4933e-05, 2.9707e-05, 2.9131e-05, 3.1078e-05, 2.7884e-05, 2.7466e-05,
+        2.7776e-05, 3.2000e-05], device='cuda:2')
+2023-03-27 16:10:49,199 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2494.0, num_to_drop=1, layers_to_drop={2}
+2023-03-27 16:11:07,764 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.512e+02 7.276e+02 8.474e+02 1.052e+03 1.837e+03, threshold=1.695e+03, percent-clipped=3.0
+2023-03-27 16:11:13,425 INFO [train.py:892] (2/4) Epoch 2, batch 650, loss[loss=0.4387, simple_loss=0.4237, pruned_loss=0.2268, over 19843.00 frames. ], tot_loss[loss=0.4275, simple_loss=0.4198, pruned_loss=0.2176, over 3800359.19 frames. ], batch size: 75, lr: 4.64e-02, grad_scale: 16.0
+2023-03-27 16:11:27,349 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2513.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:11:48,098 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.75 vs. limit=2.0
+2023-03-27 16:11:49,598 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0
+2023-03-27 16:11:51,069 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2526.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:12:26,523 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2542.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:12:51,151 INFO [train.py:892] (2/4) Epoch 2, batch 700, loss[loss=0.4025, simple_loss=0.3959, pruned_loss=0.2045, over 19764.00 frames. ], tot_loss[loss=0.424, simple_loss=0.4178, pruned_loss=0.2151, over 3833886.53 frames. ], batch size: 209, lr: 4.63e-02, grad_scale: 16.0
+2023-03-27 16:13:01,060 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2561.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:13:27,671 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2574.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:14:23,666 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.137e+02 6.142e+02 8.213e+02 1.055e+03 2.684e+03, threshold=1.643e+03, percent-clipped=4.0
+2023-03-27 16:14:30,613 INFO [train.py:892] (2/4) Epoch 2, batch 750, loss[loss=0.3808, simple_loss=0.3852, pruned_loss=0.1882, over 19809.00 frames. ], tot_loss[loss=0.4226, simple_loss=0.4171, pruned_loss=0.2141, over 3858608.02 frames. ], batch size: 96, lr: 4.62e-02, grad_scale: 16.0
+2023-03-27 16:15:19,644 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6593, 2.4390, 1.7611, 2.9796, 2.4804, 1.5246, 2.6268, 2.4113],
+       device='cuda:2'), covar=tensor([0.0280, 0.0277, 0.1070, 0.0146, 0.0384, 0.1230, 0.0453, 0.0854],
+       device='cuda:2'), in_proj_covar=tensor([0.0029, 0.0031, 0.0034, 0.0030, 0.0033, 0.0033, 0.0028, 0.0032],
+       device='cuda:2'), out_proj_covar=tensor([2.8431e-05, 2.9374e-05, 3.3988e-05, 2.6175e-05, 3.3466e-05, 3.3156e-05,
+        2.6476e-05, 3.3686e-05], device='cuda:2')
+2023-03-27 16:15:32,890 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=2.04 vs. limit=2.0
+2023-03-27 16:16:13,080 INFO [train.py:892] (2/4) Epoch 2, batch 800, loss[loss=0.3444, simple_loss=0.3582, pruned_loss=0.1653, over 19686.00 frames. ], tot_loss[loss=0.4201, simple_loss=0.4156, pruned_loss=0.2123, over 3877832.42 frames. ], batch size: 75, lr: 4.61e-02, grad_scale: 16.0
+2023-03-27 16:16:50,986 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3793, 5.4655, 5.4581, 5.4942, 5.3050, 5.5390, 4.9591, 4.8741],
+       device='cuda:2'), covar=tensor([0.0283, 0.0306, 0.0379, 0.0269, 0.0279, 0.0335, 0.0392, 0.0583],
+       device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0071, 0.0084, 0.0077, 0.0081, 0.0068, 0.0088, 0.0108],
+       device='cuda:2'), out_proj_covar=tensor([8.4055e-05, 7.9173e-05, 9.1882e-05, 8.7324e-05, 8.8696e-05, 7.5095e-05,
+        9.5485e-05, 1.2416e-04], device='cuda:2')
+2023-03-27 16:17:11,698 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2685.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:17:33,316 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2696.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:17:45,223 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+02 7.441e+02 9.827e+02 1.217e+03 2.716e+03, threshold=1.965e+03, percent-clipped=6.0
+2023-03-27 16:17:51,044 INFO [train.py:892] (2/4) Epoch 2, batch 850, loss[loss=0.3688, simple_loss=0.3786, pruned_loss=0.1795, over 19789.00 frames. ], tot_loss[loss=0.4178, simple_loss=0.4143, pruned_loss=0.2106, over 3893514.48 frames. ], batch size: 94, lr: 4.60e-02, grad_scale: 16.0
+2023-03-27 16:18:03,154 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3851, 3.8244, 4.9059, 4.5487, 4.3546, 5.1196, 4.8682, 4.8330],
+       device='cuda:2'), covar=tensor([0.0080, 0.0382, 0.0085, 0.0220, 0.0128, 0.0054, 0.0086, 0.0162],
+       device='cuda:2'), in_proj_covar=tensor([0.0026, 0.0028, 0.0024, 0.0026, 0.0024, 0.0023, 0.0024, 0.0026],
+       device='cuda:2'), out_proj_covar=tensor([2.9251e-05, 3.3729e-05, 2.6195e-05, 2.8622e-05, 2.4616e-05, 2.6383e-05,
+        2.4786e-05, 3.0001e-05], device='cuda:2')
+2023-03-27 16:19:12,099 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2746.0, num_to_drop=2, layers_to_drop={1, 3}
+2023-03-27 16:19:32,076 INFO [train.py:892] (2/4) Epoch 2, batch 900, loss[loss=0.3745, simple_loss=0.3686, pruned_loss=0.1901, over 19839.00 frames. ], tot_loss[loss=0.4179, simple_loss=0.4149, pruned_loss=0.2104, over 3903003.96 frames. ], batch size: 143, lr: 4.59e-02, grad_scale: 16.0
+2023-03-27 16:21:04,236 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.961e+02 6.224e+02 7.989e+02 1.015e+03 2.345e+03, threshold=1.598e+03, percent-clipped=4.0
+2023-03-27 16:21:10,165 INFO [train.py:892] (2/4) Epoch 2, batch 950, loss[loss=0.4147, simple_loss=0.4023, pruned_loss=0.2135, over 19798.00 frames. ], tot_loss[loss=0.414, simple_loss=0.4123, pruned_loss=0.2079, over 3912984.93 frames. ], batch size: 149, lr: 4.58e-02, grad_scale: 16.0
+2023-03-27 16:21:45,564 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2824.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 16:22:47,349 INFO [train.py:892] (2/4) Epoch 2, batch 1000, loss[loss=0.3606, simple_loss=0.3666, pruned_loss=0.1773, over 19809.00 frames. ], tot_loss[loss=0.4095, simple_loss=0.4095, pruned_loss=0.2048, over 3920682.72 frames. ], batch size: 117, lr: 4.57e-02, grad_scale: 16.0
+2023-03-27 16:23:14,120 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. limit=2.0
+2023-03-27 16:23:47,334 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2885.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-27 16:24:23,437 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.209e+02 6.834e+02 8.062e+02 1.027e+03 2.036e+03, threshold=1.612e+03, percent-clipped=4.0
+2023-03-27 16:24:28,992 INFO [train.py:892] (2/4) Epoch 2, batch 1050, loss[loss=0.3821, simple_loss=0.3809, pruned_loss=0.1917, over 19762.00 frames. ], tot_loss[loss=0.4067, simple_loss=0.4073, pruned_loss=0.203, over 3928239.48 frames. ], batch size: 213, lr: 4.56e-02, grad_scale: 16.0
+2023-03-27 16:25:29,973 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2937.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:26:09,028 INFO [train.py:892] (2/4) Epoch 2, batch 1100, loss[loss=0.3886, simple_loss=0.3965, pruned_loss=0.1904, over 19753.00 frames. ], tot_loss[loss=0.4058, simple_loss=0.4073, pruned_loss=0.2022, over 3932159.05 frames. ], batch size: 179, lr: 4.55e-02, grad_scale: 16.0
+2023-03-27 16:27:28,951 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2996.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 16:27:32,694 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2998.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 16:27:41,287 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+02 7.115e+02 9.003e+02 1.112e+03 2.091e+03, threshold=1.801e+03, percent-clipped=6.0
+2023-03-27 16:27:46,917 INFO [train.py:892] (2/4) Epoch 2, batch 1150, loss[loss=0.4001, simple_loss=0.4091, pruned_loss=0.1956, over 19660.00 frames. ], tot_loss[loss=0.402, simple_loss=0.4046, pruned_loss=0.1997, over 3937691.55 frames. ], batch size: 50, lr: 4.54e-02, grad_scale: 16.0
+2023-03-27 16:28:17,768 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3019.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:29:00,247 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3041.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:29:05,919 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3044.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 16:29:30,590 INFO [train.py:892] (2/4) Epoch 2, batch 1200, loss[loss=0.3184, simple_loss=0.3372, pruned_loss=0.1498, over 19826.00 frames. ], tot_loss[loss=0.4012, simple_loss=0.404, pruned_loss=0.1992, over 3939863.22 frames. ], batch size: 96, lr: 4.53e-02, grad_scale: 16.0
+2023-03-27 16:30:17,953 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3080.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-27 16:30:38,882 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8700, 3.0559, 3.4139, 3.2932, 3.0568, 2.2403, 3.2770, 2.2425],
+       device='cuda:2'), covar=tensor([0.0746, 0.0318, 0.0315, 0.0171, 0.0411, 0.0613, 0.0246, 0.0387],
+       device='cuda:2'), in_proj_covar=tensor([0.0034, 0.0026, 0.0028, 0.0026, 0.0032, 0.0027, 0.0027, 0.0027],
+       device='cuda:2'), out_proj_covar=tensor([3.4197e-05, 2.5446e-05, 2.7601e-05, 2.4060e-05, 3.2379e-05, 2.8645e-05,
+        2.5711e-05, 2.6810e-05], device='cuda:2')
+2023-03-27 16:31:04,251 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.272e+02 6.636e+02 8.301e+02 1.092e+03 3.055e+03, threshold=1.660e+03, percent-clipped=1.0
+2023-03-27 16:31:10,196 INFO [train.py:892] (2/4) Epoch 2, batch 1250, loss[loss=0.3765, simple_loss=0.4005, pruned_loss=0.1762, over 19774.00 frames. ], tot_loss[loss=0.3975, simple_loss=0.4015, pruned_loss=0.1967, over 3944231.22 frames. ], batch size: 87, lr: 4.52e-02, grad_scale: 16.0
+2023-03-27 16:32:23,436 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8378, 2.1696, 2.9647, 2.6094, 4.1560, 3.4575, 3.9562, 3.9446],
+       device='cuda:2'), covar=tensor([0.0226, 0.1506, 0.0360, 0.1281, 0.0129, 0.0222, 0.0087, 0.0079],
+       device='cuda:2'), in_proj_covar=tensor([0.0052, 0.0100, 0.0051, 0.0103, 0.0039, 0.0054, 0.0040, 0.0043],
+       device='cuda:2'), out_proj_covar=tensor([4.5481e-05, 8.8527e-05, 4.4826e-05, 9.3724e-05, 3.6555e-05, 4.6940e-05,
+        3.4146e-05, 3.3598e-05], device='cuda:2')
+2023-03-27 16:32:49,631 INFO [train.py:892] (2/4) Epoch 2, batch 1300, loss[loss=0.376, simple_loss=0.3793, pruned_loss=0.1864, over 19763.00 frames. ], tot_loss[loss=0.3949, simple_loss=0.3998, pruned_loss=0.195, over 3945584.71 frames. ], batch size: 217, lr: 4.51e-02, grad_scale: 16.0
+2023-03-27 16:33:37,201 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3180.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:34:23,392 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+02 7.276e+02 8.986e+02 1.118e+03 2.110e+03, threshold=1.797e+03, percent-clipped=4.0
+2023-03-27 16:34:28,607 INFO [train.py:892] (2/4) Epoch 2, batch 1350, loss[loss=0.5939, simple_loss=0.5753, pruned_loss=0.3063, over 18013.00 frames. ], tot_loss[loss=0.3938, simple_loss=0.3994, pruned_loss=0.1941, over 3945117.88 frames. ], batch size: 633, lr: 4.50e-02, grad_scale: 16.0
+2023-03-27 16:36:05,217 INFO [train.py:892] (2/4) Epoch 2, batch 1400, loss[loss=0.3636, simple_loss=0.3811, pruned_loss=0.173, over 19734.00 frames. ], tot_loss[loss=0.3925, simple_loss=0.3981, pruned_loss=0.1935, over 3947630.35 frames. ], batch size: 77, lr: 4.49e-02, grad_scale: 16.0
+2023-03-27 16:36:31,907 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0
+2023-03-27 16:37:06,537 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8135, 3.9063, 4.1358, 4.4839, 2.9554, 4.1923, 4.0759, 2.3520],
+       device='cuda:2'), covar=tensor([0.0202, 0.0660, 0.0230, 0.0077, 0.1719, 0.0147, 0.0195, 0.2086],
+       device='cuda:2'), in_proj_covar=tensor([0.0052, 0.0059, 0.0060, 0.0047, 0.0116, 0.0050, 0.0060, 0.0123],
+       device='cuda:2'), out_proj_covar=tensor([4.2584e-05, 5.3268e-05, 5.0161e-05, 3.4100e-05, 9.9114e-05, 3.9301e-05,
+        5.1471e-05, 1.0485e-04], device='cuda:2')
+2023-03-27 16:37:09,924 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3288.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 16:37:18,966 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3293.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:37:38,107 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+02 7.473e+02 1.007e+03 1.301e+03 1.953e+03, threshold=2.013e+03, percent-clipped=1.0
+2023-03-27 16:37:43,377 INFO [train.py:892] (2/4) Epoch 2, batch 1450, loss[loss=0.3478, simple_loss=0.3704, pruned_loss=0.1626, over 19798.00 frames. ], tot_loss[loss=0.3872, simple_loss=0.3947, pruned_loss=0.1898, over 3949672.66 frames. ], batch size: 45, lr: 4.48e-02, grad_scale: 16.0
+2023-03-27 16:38:09,817 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0
+2023-03-27 16:38:55,628 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3341.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:39:13,626 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3349.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-27 16:39:26,372 INFO [train.py:892] (2/4) Epoch 2, batch 1500, loss[loss=0.4412, simple_loss=0.4355, pruned_loss=0.2234, over 19695.00 frames. ], tot_loss[loss=0.3862, simple_loss=0.3945, pruned_loss=0.1889, over 3948057.38 frames. ], batch size: 325, lr: 4.47e-02, grad_scale: 16.0
+2023-03-27 16:40:03,889 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3375.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:40:30,625 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3784, 3.9680, 4.0849, 3.5969, 4.0518, 4.0294, 3.8070, 4.2983],
+       device='cuda:2'), covar=tensor([0.2002, 0.0277, 0.0288, 0.0384, 0.0286, 0.0280, 0.0276, 0.0243],
+       device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0099, 0.0087, 0.0087, 0.0086, 0.0087, 0.0081, 0.0078],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:2')
+2023-03-27 16:40:34,098 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3389.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 16:40:36,172 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7258, 1.5695, 2.9308, 2.7332, 2.7789, 2.6237, 2.7475, 2.6459],
+       device='cuda:2'), covar=tensor([0.0354, 0.1678, 0.0224, 0.0224, 0.0299, 0.0231, 0.0312, 0.0713],
+       device='cuda:2'), in_proj_covar=tensor([0.0045, 0.0080, 0.0046, 0.0040, 0.0042, 0.0046, 0.0044, 0.0050],
+       device='cuda:2'), out_proj_covar=tensor([4.4320e-05, 8.7953e-05, 4.3174e-05, 3.9788e-05, 4.5153e-05, 4.6011e-05,
+        4.6984e-05, 5.6540e-05], device='cuda:2')
+2023-03-27 16:40:59,392 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.06 vs.
limit=5.0 +2023-03-27 16:40:59,889 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.824e+02 8.232e+02 9.819e+02 1.797e+03, threshold=1.646e+03, percent-clipped=0.0 +2023-03-27 16:41:05,676 INFO [train.py:892] (2/4) Epoch 2, batch 1550, loss[loss=0.4039, simple_loss=0.4043, pruned_loss=0.2018, over 19747.00 frames. ], tot_loss[loss=0.3858, simple_loss=0.3944, pruned_loss=0.1886, over 3947368.60 frames. ], batch size: 205, lr: 4.46e-02, grad_scale: 16.0 +2023-03-27 16:41:36,999 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3331, 3.3539, 3.6407, 3.7629, 2.3573, 3.2180, 3.3480, 2.0218], + device='cuda:2'), covar=tensor([0.0239, 0.0674, 0.0251, 0.0087, 0.1841, 0.0212, 0.0264, 0.1959], + device='cuda:2'), in_proj_covar=tensor([0.0055, 0.0065, 0.0061, 0.0047, 0.0121, 0.0054, 0.0063, 0.0127], + device='cuda:2'), out_proj_covar=tensor([4.5121e-05, 5.8654e-05, 5.1858e-05, 3.5141e-05, 1.0293e-04, 4.2487e-05, + 5.4504e-05, 1.0885e-04], device='cuda:2') +2023-03-27 16:42:46,606 INFO [train.py:892] (2/4) Epoch 2, batch 1600, loss[loss=0.4357, simple_loss=0.4297, pruned_loss=0.2209, over 19774.00 frames. ], tot_loss[loss=0.3855, simple_loss=0.3938, pruned_loss=0.1886, over 3948179.95 frames. ], batch size: 280, lr: 4.45e-02, grad_scale: 16.0 +2023-03-27 16:43:35,435 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3480.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:44:24,961 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 7.682e+02 9.382e+02 1.262e+03 3.177e+03, threshold=1.876e+03, percent-clipped=11.0 +2023-03-27 16:44:28,495 INFO [train.py:892] (2/4) Epoch 2, batch 1650, loss[loss=0.3741, simple_loss=0.3835, pruned_loss=0.1824, over 19804.00 frames. ], tot_loss[loss=0.3826, simple_loss=0.3917, pruned_loss=0.1867, over 3947694.85 frames. ], batch size: 72, lr: 4.44e-02, grad_scale: 8.0 +2023-03-27 16:44:42,842 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=2.05 vs. limit=2.0 +2023-03-27 16:45:11,961 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3528.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:45:32,036 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.12 vs. limit=2.0 +2023-03-27 16:45:50,257 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3546.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:46:07,494 INFO [train.py:892] (2/4) Epoch 2, batch 1700, loss[loss=0.3163, simple_loss=0.3467, pruned_loss=0.143, over 19821.00 frames. ], tot_loss[loss=0.3833, simple_loss=0.3923, pruned_loss=0.1872, over 3949090.92 frames. ], batch size: 103, lr: 4.43e-02, grad_scale: 8.0 +2023-03-27 16:47:17,459 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2381, 5.0381, 4.8541, 5.4161, 5.0449, 5.4649, 5.1277, 5.3703], + device='cuda:2'), covar=tensor([0.0418, 0.0220, 0.0384, 0.0158, 0.0285, 0.0081, 0.0210, 0.0492], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0084, 0.0091, 0.0075, 0.0085, 0.0065, 0.0090, 0.0095], + device='cuda:2'), out_proj_covar=tensor([8.9755e-05, 1.1735e-04, 1.2209e-04, 1.0025e-04, 1.2352e-04, 8.9999e-05, + 1.1190e-04, 1.2867e-04], device='cuda:2') +2023-03-27 16:47:22,572 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3593.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:47:33,901 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.77 vs. 
limit=2.0 +2023-03-27 16:47:33,911 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.18 vs. limit=5.0 +2023-03-27 16:47:35,342 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8589, 3.5102, 3.4494, 3.8983, 3.6077, 3.5987, 3.7396, 3.9746], + device='cuda:2'), covar=tensor([0.0334, 0.0316, 0.0451, 0.0180, 0.0346, 0.0486, 0.0266, 0.0388], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0086, 0.0093, 0.0076, 0.0087, 0.0066, 0.0092, 0.0097], + device='cuda:2'), out_proj_covar=tensor([9.1567e-05, 1.2045e-04, 1.2463e-04, 1.0211e-04, 1.2566e-04, 9.1335e-05, + 1.1458e-04, 1.3128e-04], device='cuda:2') +2023-03-27 16:47:42,005 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.530e+02 9.023e+02 1.103e+03 1.410e+03 2.321e+03, threshold=2.205e+03, percent-clipped=10.0 +2023-03-27 16:47:45,741 INFO [train.py:892] (2/4) Epoch 2, batch 1750, loss[loss=0.3338, simple_loss=0.3518, pruned_loss=0.1579, over 19792.00 frames. ], tot_loss[loss=0.3816, simple_loss=0.3916, pruned_loss=0.1858, over 3950710.30 frames. ], batch size: 111, lr: 4.42e-02, grad_scale: 8.0 +2023-03-27 16:47:48,504 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3607.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-27 16:48:50,058 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3641.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:48:55,203 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3644.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:49:10,265 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-03-27 16:49:15,997 INFO [train.py:892] (2/4) Epoch 2, batch 1800, loss[loss=0.3361, simple_loss=0.3523, pruned_loss=0.16, over 19796.00 frames. ], tot_loss[loss=0.3816, simple_loss=0.3925, pruned_loss=0.1854, over 3949438.75 frames. ], batch size: 126, lr: 4.41e-02, grad_scale: 8.0 +2023-03-27 16:49:49,869 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3675.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:49:51,552 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3676.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:50:04,591 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2894, 4.5657, 5.1456, 4.7540, 5.1827, 3.8504, 4.1596, 4.9231], + device='cuda:2'), covar=tensor([0.0112, 0.0177, 0.0100, 0.0130, 0.0115, 0.0388, 0.0739, 0.0148], + device='cuda:2'), in_proj_covar=tensor([0.0043, 0.0048, 0.0052, 0.0055, 0.0054, 0.0067, 0.0070, 0.0050], + device='cuda:2'), out_proj_covar=tensor([6.2015e-05, 7.7592e-05, 7.4756e-05, 8.1314e-05, 8.8961e-05, 1.0102e-04, + 1.1160e-04, 7.3237e-05], device='cuda:2') +2023-03-27 16:50:37,047 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+02 8.227e+02 1.028e+03 1.238e+03 2.427e+03, threshold=2.056e+03, percent-clipped=3.0 +2023-03-27 16:50:40,372 INFO [train.py:892] (2/4) Epoch 2, batch 1850, loss[loss=0.3726, simple_loss=0.3937, pruned_loss=0.1758, over 19833.00 frames. ], tot_loss[loss=0.3844, simple_loss=0.3958, pruned_loss=0.1865, over 3947158.16 frames. ], batch size: 57, lr: 4.39e-02, grad_scale: 8.0 +2023-03-27 16:51:36,796 INFO [train.py:892] (2/4) Epoch 3, batch 0, loss[loss=0.3751, simple_loss=0.3815, pruned_loss=0.1843, over 19764.00 frames. ], tot_loss[loss=0.3751, simple_loss=0.3815, pruned_loss=0.1843, over 19764.00 frames. 
], batch size: 226, lr: 4.17e-02, grad_scale: 8.0 +2023-03-27 16:51:36,797 INFO [train.py:917] (2/4) Computing validation loss +2023-03-27 16:51:46,330 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1973, 4.5278, 5.1855, 4.7658, 5.3359, 4.0949, 4.1077, 4.8761], + device='cuda:2'), covar=tensor([0.0141, 0.0217, 0.0109, 0.0142, 0.0102, 0.0453, 0.0976, 0.0178], + device='cuda:2'), in_proj_covar=tensor([0.0046, 0.0050, 0.0054, 0.0058, 0.0057, 0.0070, 0.0075, 0.0052], + device='cuda:2'), out_proj_covar=tensor([6.6477e-05, 8.2200e-05, 7.8669e-05, 8.6315e-05, 9.4812e-05, 1.0732e-04, + 1.2070e-04, 7.7048e-05], device='cuda:2') +2023-03-27 16:52:00,341 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3448, 1.7687, 2.4069, 2.3791, 2.5256, 2.6280, 2.4364, 2.4265], + device='cuda:2'), covar=tensor([0.2398, 0.9743, 0.1442, 0.1809, 0.2368, 0.1467, 0.2148, 0.2786], + device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0154, 0.0066, 0.0064, 0.0115, 0.0059, 0.0076, 0.0096], + device='cuda:2'), out_proj_covar=tensor([9.7965e-05, 1.6217e-04, 5.9158e-05, 5.4350e-05, 1.1389e-04, 5.7797e-05, + 7.1846e-05, 9.0060e-05], device='cuda:2') +2023-03-27 16:52:02,988 INFO [train.py:926] (2/4) Epoch 3, validation: loss=0.2594, simple_loss=0.3267, pruned_loss=0.09605, over 2883724.00 frames. +2023-03-27 16:52:02,989 INFO [train.py:927] (2/4) Maximum memory allocated so far is 21186MB +2023-03-27 16:52:30,238 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3723.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:52:57,973 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3737.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 16:53:24,777 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2244, 2.7888, 3.3685, 3.4320, 2.5276, 3.0078, 2.8639, 2.0838], + device='cuda:2'), covar=tensor([0.0246, 0.1414, 0.0323, 0.0121, 0.1776, 0.0277, 0.0471, 0.2031], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0123, 0.0081, 0.0059, 0.0161, 0.0071, 0.0085, 0.0161], + device='cuda:2'), out_proj_covar=tensor([6.0650e-05, 1.1381e-04, 7.1995e-05, 4.5630e-05, 1.3900e-04, 5.6885e-05, + 7.5310e-05, 1.3901e-04], device='cuda:2') +2023-03-27 16:53:47,348 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8994, 3.4996, 3.8630, 3.4683, 3.9081, 3.2337, 3.2771, 3.7352], + device='cuda:2'), covar=tensor([0.0155, 0.0203, 0.0123, 0.0196, 0.0153, 0.0409, 0.0716, 0.0202], + device='cuda:2'), in_proj_covar=tensor([0.0044, 0.0048, 0.0052, 0.0056, 0.0055, 0.0066, 0.0073, 0.0050], + device='cuda:2'), out_proj_covar=tensor([6.3975e-05, 7.9338e-05, 7.4855e-05, 8.4068e-05, 9.0885e-05, 1.0242e-04, + 1.1805e-04, 7.4657e-05], device='cuda:2') +2023-03-27 16:53:50,415 INFO [train.py:892] (2/4) Epoch 3, batch 50, loss[loss=0.3111, simple_loss=0.3342, pruned_loss=0.144, over 19807.00 frames. ], tot_loss[loss=0.366, simple_loss=0.3788, pruned_loss=0.1766, over 890279.78 frames. ], batch size: 114, lr: 4.16e-02, grad_scale: 8.0 +2023-03-27 16:55:16,585 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+02 7.628e+02 9.313e+02 1.232e+03 3.200e+03, threshold=1.863e+03, percent-clipped=4.0 +2023-03-27 16:55:31,509 INFO [train.py:892] (2/4) Epoch 3, batch 100, loss[loss=0.3177, simple_loss=0.342, pruned_loss=0.1467, over 19730.00 frames. ], tot_loss[loss=0.3656, simple_loss=0.3785, pruned_loss=0.1764, over 1568233.00 frames. 
], batch size: 63, lr: 4.15e-02, grad_scale: 8.0 +2023-03-27 16:55:43,537 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2012, 5.4352, 5.3849, 5.3430, 5.2257, 5.3884, 4.7251, 4.7378], + device='cuda:2'), covar=tensor([0.0336, 0.0220, 0.0431, 0.0328, 0.0420, 0.0408, 0.0432, 0.0782], + device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0086, 0.0117, 0.0101, 0.0104, 0.0084, 0.0113, 0.0147], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-27 16:57:14,244 INFO [train.py:892] (2/4) Epoch 3, batch 150, loss[loss=0.3431, simple_loss=0.3721, pruned_loss=0.157, over 19779.00 frames. ], tot_loss[loss=0.3604, simple_loss=0.3755, pruned_loss=0.1727, over 2096974.42 frames. ], batch size: 87, lr: 4.14e-02, grad_scale: 8.0 +2023-03-27 16:57:42,666 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7246, 3.1030, 3.4947, 3.4323, 4.0717, 3.4932, 3.7078, 3.8341], + device='cuda:2'), covar=tensor([0.0465, 0.1178, 0.0655, 0.1445, 0.0445, 0.0454, 0.0258, 0.0164], + device='cuda:2'), in_proj_covar=tensor([0.0051, 0.0085, 0.0051, 0.0092, 0.0038, 0.0055, 0.0039, 0.0041], + device='cuda:2'), out_proj_covar=tensor([4.8299e-05, 7.6407e-05, 4.6585e-05, 8.5799e-05, 3.6955e-05, 4.8912e-05, + 3.2703e-05, 3.3510e-05], device='cuda:2') +2023-03-27 16:58:41,988 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3902.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 16:58:44,807 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.184e+02 7.831e+02 9.821e+02 1.147e+03 2.106e+03, threshold=1.964e+03, percent-clipped=1.0 +2023-03-27 16:58:45,807 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3411, 5.5872, 5.5843, 5.6690, 5.4214, 5.6522, 4.9608, 5.0475], + device='cuda:2'), covar=tensor([0.0389, 0.0270, 0.0550, 0.0285, 0.0375, 0.0437, 0.0390, 0.0831], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0087, 0.0119, 0.0099, 0.0102, 0.0084, 0.0114, 0.0148], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-27 16:58:59,052 INFO [train.py:892] (2/4) Epoch 3, batch 200, loss[loss=0.4035, simple_loss=0.4059, pruned_loss=0.2005, over 19811.00 frames. ], tot_loss[loss=0.357, simple_loss=0.3738, pruned_loss=0.1701, over 2509590.77 frames. ], batch size: 147, lr: 4.13e-02, grad_scale: 8.0 +2023-03-27 16:59:30,178 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3926.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:59:43,761 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0 +2023-03-27 17:00:09,713 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3944.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 17:00:21,346 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.02 vs. limit=5.0 +2023-03-27 17:00:41,361 INFO [train.py:892] (2/4) Epoch 3, batch 250, loss[loss=0.3631, simple_loss=0.3669, pruned_loss=0.1796, over 19768.00 frames. ], tot_loss[loss=0.3565, simple_loss=0.3741, pruned_loss=0.1694, over 2829359.68 frames. ], batch size: 217, lr: 4.12e-02, grad_scale: 8.0 +2023-03-27 17:00:53,492 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.93 vs. limit=2.0 +2023-03-27 17:00:55,770 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.88 vs. 
limit=5.0 +2023-03-27 17:01:37,842 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3987.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:01:47,663 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3992.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 17:02:15,832 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.914e+02 7.402e+02 9.885e+02 1.286e+03 2.496e+03, threshold=1.977e+03, percent-clipped=5.0 +2023-03-27 17:02:31,809 INFO [train.py:892] (2/4) Epoch 3, batch 300, loss[loss=0.3208, simple_loss=0.3443, pruned_loss=0.1487, over 19699.00 frames. ], tot_loss[loss=0.3631, simple_loss=0.379, pruned_loss=0.1736, over 3075465.79 frames. ], batch size: 82, lr: 4.11e-02, grad_scale: 8.0 +2023-03-27 17:03:15,113 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4032.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 17:04:16,817 INFO [train.py:892] (2/4) Epoch 3, batch 350, loss[loss=0.3455, simple_loss=0.3778, pruned_loss=0.1566, over 19806.00 frames. ], tot_loss[loss=0.3612, simple_loss=0.3781, pruned_loss=0.1722, over 3268548.37 frames. ], batch size: 47, lr: 4.10e-02, grad_scale: 8.0 +2023-03-27 17:05:20,886 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0781, 4.2448, 4.2612, 4.3001, 4.1395, 4.2413, 3.7879, 3.8345], + device='cuda:2'), covar=tensor([0.0410, 0.0341, 0.0680, 0.0423, 0.0529, 0.0627, 0.0522, 0.0977], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0091, 0.0126, 0.0100, 0.0106, 0.0088, 0.0117, 0.0150], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-27 17:05:45,543 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.173e+02 7.083e+02 8.600e+02 1.034e+03 2.171e+03, threshold=1.720e+03, percent-clipped=1.0 +2023-03-27 17:05:59,497 INFO [train.py:892] (2/4) Epoch 3, batch 400, loss[loss=0.3244, simple_loss=0.3435, pruned_loss=0.1526, over 19738.00 frames. ], tot_loss[loss=0.3584, simple_loss=0.376, pruned_loss=0.1704, over 3420617.19 frames. ], batch size: 140, lr: 4.09e-02, grad_scale: 8.0 +2023-03-27 17:06:12,130 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-27 17:07:02,393 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4141.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:07:46,002 INFO [train.py:892] (2/4) Epoch 3, batch 450, loss[loss=0.3531, simple_loss=0.3919, pruned_loss=0.1571, over 19875.00 frames. ], tot_loss[loss=0.3588, simple_loss=0.3771, pruned_loss=0.1702, over 3535782.28 frames. ], batch size: 53, lr: 4.08e-02, grad_scale: 8.0 +2023-03-27 17:08:27,785 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-27 17:08:43,510 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.91 vs. 
limit=5.0 +2023-03-27 17:09:11,984 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4202.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 17:09:12,021 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4202.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:09:14,946 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.069e+02 7.856e+02 9.394e+02 1.124e+03 2.140e+03, threshold=1.879e+03, percent-clipped=3.0 +2023-03-27 17:09:28,188 INFO [train.py:892] (2/4) Epoch 3, batch 500, loss[loss=0.3264, simple_loss=0.346, pruned_loss=0.1534, over 19879.00 frames. ], tot_loss[loss=0.3555, simple_loss=0.3742, pruned_loss=0.1684, over 3627961.39 frames. ], batch size: 88, lr: 4.07e-02, grad_scale: 8.0 +2023-03-27 17:10:15,065 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7501, 3.2731, 2.5821, 3.8118, 3.7120, 1.6609, 3.5910, 2.9279], + device='cuda:2'), covar=tensor([0.0342, 0.0638, 0.1868, 0.0102, 0.0120, 0.2313, 0.0376, 0.0355], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0075, 0.0118, 0.0040, 0.0042, 0.0115, 0.0076, 0.0057], + device='cuda:2'), out_proj_covar=tensor([5.7687e-05, 7.4815e-05, 1.0998e-04, 3.7963e-05, 3.7623e-05, 1.0314e-04, + 7.0545e-05, 4.8589e-05], device='cuda:2') +2023-03-27 17:10:50,172 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4250.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 17:11:12,811 INFO [train.py:892] (2/4) Epoch 3, batch 550, loss[loss=0.3738, simple_loss=0.3837, pruned_loss=0.1819, over 19786.00 frames. ], tot_loss[loss=0.3564, simple_loss=0.3753, pruned_loss=0.1688, over 3698047.69 frames. ], batch size: 253, lr: 4.06e-02, grad_scale: 8.0 +2023-03-27 17:11:38,153 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-27 17:11:56,663 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4282.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:12:08,152 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4288.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:12:22,488 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.97 vs. limit=5.0 +2023-03-27 17:12:42,163 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+02 7.145e+02 8.933e+02 1.130e+03 1.909e+03, threshold=1.787e+03, percent-clipped=2.0 +2023-03-27 17:12:57,548 INFO [train.py:892] (2/4) Epoch 3, batch 600, loss[loss=0.339, simple_loss=0.3708, pruned_loss=0.1536, over 19594.00 frames. ], tot_loss[loss=0.3541, simple_loss=0.3738, pruned_loss=0.1672, over 3752814.87 frames. ], batch size: 53, lr: 4.05e-02, grad_scale: 8.0 +2023-03-27 17:13:41,290 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4332.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 17:14:16,475 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4349.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:14:38,645 INFO [train.py:892] (2/4) Epoch 3, batch 650, loss[loss=0.3294, simple_loss=0.3551, pruned_loss=0.1519, over 19747.00 frames. ], tot_loss[loss=0.3531, simple_loss=0.3727, pruned_loss=0.1668, over 3797339.44 frames. 
], batch size: 71, lr: 4.04e-02, grad_scale: 8.0 +2023-03-27 17:15:21,790 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4380.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 17:16:08,634 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+02 7.551e+02 9.171e+02 1.079e+03 2.038e+03, threshold=1.834e+03, percent-clipped=1.0 +2023-03-27 17:16:24,595 INFO [train.py:892] (2/4) Epoch 3, batch 700, loss[loss=0.3516, simple_loss=0.3719, pruned_loss=0.1657, over 19851.00 frames. ], tot_loss[loss=0.3546, simple_loss=0.3738, pruned_loss=0.1677, over 3831195.96 frames. ], batch size: 56, lr: 4.03e-02, grad_scale: 8.0 +2023-03-27 17:17:18,677 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.65 vs. limit=2.0 +2023-03-27 17:17:40,124 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-27 17:18:07,003 INFO [train.py:892] (2/4) Epoch 3, batch 750, loss[loss=0.3249, simple_loss=0.3494, pruned_loss=0.1502, over 19833.00 frames. ], tot_loss[loss=0.3544, simple_loss=0.3742, pruned_loss=0.1673, over 3857040.97 frames. ], batch size: 184, lr: 4.02e-02, grad_scale: 8.0 +2023-03-27 17:19:09,015 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8893, 3.1728, 4.0629, 4.2347, 2.8918, 3.9505, 3.8570, 2.3199], + device='cuda:2'), covar=tensor([0.0276, 0.2694, 0.0389, 0.0114, 0.1897, 0.0249, 0.0379, 0.2484], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0208, 0.0105, 0.0078, 0.0177, 0.0090, 0.0110, 0.0175], + device='cuda:2'), out_proj_covar=tensor([8.7867e-05, 1.9812e-04, 1.0056e-04, 6.7565e-05, 1.6167e-04, 8.1228e-05, + 1.0478e-04, 1.6045e-04], device='cuda:2') +2023-03-27 17:19:22,156 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4497.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:19:22,786 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-27 17:19:34,746 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+02 7.939e+02 9.251e+02 1.113e+03 1.728e+03, threshold=1.850e+03, percent-clipped=0.0 +2023-03-27 17:19:48,316 INFO [train.py:892] (2/4) Epoch 3, batch 800, loss[loss=0.3333, simple_loss=0.3544, pruned_loss=0.1561, over 19811.00 frames. ], tot_loss[loss=0.352, simple_loss=0.3728, pruned_loss=0.1656, over 3876226.63 frames. ], batch size: 117, lr: 4.01e-02, grad_scale: 8.0 +2023-03-27 17:21:33,147 INFO [train.py:892] (2/4) Epoch 3, batch 850, loss[loss=0.3598, simple_loss=0.3697, pruned_loss=0.175, over 19803.00 frames. ], tot_loss[loss=0.3498, simple_loss=0.3706, pruned_loss=0.1645, over 3893071.98 frames. ], batch size: 195, lr: 4.00e-02, grad_scale: 8.0 +2023-03-27 17:22:13,943 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4582.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:22:14,600 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-03-27 17:22:58,522 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+02 6.963e+02 8.935e+02 1.119e+03 2.174e+03, threshold=1.787e+03, percent-clipped=2.0 +2023-03-27 17:23:11,125 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-03-27 17:23:13,496 INFO [train.py:892] (2/4) Epoch 3, batch 900, loss[loss=0.3287, simple_loss=0.3649, pruned_loss=0.1462, over 19812.00 frames. 
], tot_loss[loss=0.349, simple_loss=0.3699, pruned_loss=0.1641, over 3906703.70 frames. ], batch size: 98, lr: 3.99e-02, grad_scale: 8.0 +2023-03-27 17:23:53,312 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4630.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:24:12,409 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2131, 2.1270, 2.9079, 2.7176, 3.5657, 2.8352, 2.9055, 3.0033], + device='cuda:2'), covar=tensor([0.0429, 0.1279, 0.0501, 0.1072, 0.0268, 0.0430, 0.0246, 0.0403], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0103, 0.0072, 0.0117, 0.0053, 0.0072, 0.0054, 0.0057], + device='cuda:2'), out_proj_covar=tensor([7.4917e-05, 1.0036e-04, 7.6196e-05, 1.1857e-04, 5.9481e-05, 7.2992e-05, + 5.2332e-05, 5.6579e-05], device='cuda:2') +2023-03-27 17:24:23,398 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4644.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:24:42,656 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3782, 2.7474, 3.0454, 2.5337, 2.6307, 2.0508, 2.5526, 3.0219], + device='cuda:2'), covar=tensor([0.0801, 0.0261, 0.0492, 0.0477, 0.0428, 0.0565, 0.0457, 0.0248], + device='cuda:2'), in_proj_covar=tensor([0.0041, 0.0033, 0.0035, 0.0044, 0.0038, 0.0032, 0.0030, 0.0028], + device='cuda:2'), out_proj_covar=tensor([6.1248e-05, 5.0892e-05, 5.2090e-05, 6.0653e-05, 5.6870e-05, 5.0982e-05, + 4.7325e-05, 4.3201e-05], device='cuda:2') +2023-03-27 17:24:56,909 INFO [train.py:892] (2/4) Epoch 3, batch 950, loss[loss=0.3645, simple_loss=0.3729, pruned_loss=0.178, over 19757.00 frames. ], tot_loss[loss=0.3509, simple_loss=0.3713, pruned_loss=0.1652, over 3914068.24 frames. ], batch size: 205, lr: 3.98e-02, grad_scale: 8.0 +2023-03-27 17:26:26,123 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 7.071e+02 8.023e+02 9.803e+02 2.669e+03, threshold=1.605e+03, percent-clipped=2.0 +2023-03-27 17:26:42,510 INFO [train.py:892] (2/4) Epoch 3, batch 1000, loss[loss=0.2937, simple_loss=0.3415, pruned_loss=0.123, over 19594.00 frames. ], tot_loss[loss=0.3477, simple_loss=0.3688, pruned_loss=0.1633, over 3921543.77 frames. ], batch size: 44, lr: 3.97e-02, grad_scale: 8.0 +2023-03-27 17:27:49,668 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7119, 4.3894, 4.5037, 4.1419, 4.3059, 4.5521, 4.2351, 4.8700], + device='cuda:2'), covar=tensor([0.1541, 0.0258, 0.0325, 0.0307, 0.0346, 0.0228, 0.0249, 0.0187], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0107, 0.0098, 0.0095, 0.0099, 0.0090, 0.0085, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-27 17:28:22,550 INFO [train.py:892] (2/4) Epoch 3, batch 1050, loss[loss=0.3522, simple_loss=0.361, pruned_loss=0.1717, over 19737.00 frames. ], tot_loss[loss=0.3464, simple_loss=0.3679, pruned_loss=0.1624, over 3926404.00 frames. 
], batch size: 51, lr: 3.96e-02, grad_scale: 8.0 +2023-03-27 17:29:37,220 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4797.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:29:50,538 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.737e+02 7.391e+02 9.077e+02 1.100e+03 2.003e+03, threshold=1.815e+03, percent-clipped=3.0 +2023-03-27 17:29:57,074 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.6378, 1.7671, 1.3599, 1.8427, 1.7309, 1.2655, 1.6910, 1.7147], + device='cuda:2'), covar=tensor([0.0344, 0.0387, 0.0572, 0.0201, 0.0345, 0.0698, 0.0436, 0.0572], + device='cuda:2'), in_proj_covar=tensor([0.0025, 0.0028, 0.0032, 0.0026, 0.0028, 0.0031, 0.0032, 0.0029], + device='cuda:2'), out_proj_covar=tensor([3.8569e-05, 4.0240e-05, 4.8103e-05, 3.7550e-05, 4.2509e-05, 4.6880e-05, + 4.7874e-05, 4.4354e-05], device='cuda:2') +2023-03-27 17:30:03,724 INFO [train.py:892] (2/4) Epoch 3, batch 1100, loss[loss=0.3444, simple_loss=0.3538, pruned_loss=0.1675, over 19877.00 frames. ], tot_loss[loss=0.345, simple_loss=0.3666, pruned_loss=0.1617, over 3932214.57 frames. ], batch size: 139, lr: 3.95e-02, grad_scale: 8.0 +2023-03-27 17:31:15,752 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4845.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:31:50,574 INFO [train.py:892] (2/4) Epoch 3, batch 1150, loss[loss=0.2955, simple_loss=0.3243, pruned_loss=0.1333, over 19880.00 frames. ], tot_loss[loss=0.3434, simple_loss=0.3657, pruned_loss=0.1605, over 3936264.57 frames. ], batch size: 95, lr: 3.95e-02, grad_scale: 8.0 +2023-03-27 17:33:20,480 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+02 7.167e+02 9.190e+02 1.175e+03 2.796e+03, threshold=1.838e+03, percent-clipped=7.0 +2023-03-27 17:33:33,647 INFO [train.py:892] (2/4) Epoch 3, batch 1200, loss[loss=0.3002, simple_loss=0.3438, pruned_loss=0.1283, over 19783.00 frames. ], tot_loss[loss=0.3419, simple_loss=0.3649, pruned_loss=0.1594, over 3938586.37 frames. ], batch size: 87, lr: 3.94e-02, grad_scale: 8.0 +2023-03-27 17:34:43,344 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4944.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:35:01,087 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-03-27 17:35:18,139 INFO [train.py:892] (2/4) Epoch 3, batch 1250, loss[loss=0.2915, simple_loss=0.3277, pruned_loss=0.1276, over 19654.00 frames. ], tot_loss[loss=0.3395, simple_loss=0.363, pruned_loss=0.158, over 3942883.82 frames. ], batch size: 47, lr: 3.93e-02, grad_scale: 8.0 +2023-03-27 17:36:20,317 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4992.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:36:47,196 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.843e+02 6.781e+02 8.883e+02 1.088e+03 2.699e+03, threshold=1.777e+03, percent-clipped=1.0 +2023-03-27 17:36:52,197 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-27 17:37:00,615 INFO [train.py:892] (2/4) Epoch 3, batch 1300, loss[loss=0.3468, simple_loss=0.37, pruned_loss=0.1618, over 19773.00 frames. ], tot_loss[loss=0.3372, simple_loss=0.3615, pruned_loss=0.1565, over 3943261.70 frames. 
], batch size: 130, lr: 3.92e-02, grad_scale: 8.0 +2023-03-27 17:38:43,017 INFO [train.py:892] (2/4) Epoch 3, batch 1350, loss[loss=0.3786, simple_loss=0.3889, pruned_loss=0.1842, over 19823.00 frames. ], tot_loss[loss=0.3377, simple_loss=0.3624, pruned_loss=0.1565, over 3943445.65 frames. ], batch size: 231, lr: 3.91e-02, grad_scale: 8.0 +2023-03-27 17:39:04,199 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. limit=2.0 +2023-03-27 17:39:36,143 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3668, 5.7825, 5.7765, 5.6667, 5.4911, 5.5856, 5.2184, 5.1503], + device='cuda:2'), covar=tensor([0.0395, 0.0228, 0.0568, 0.0400, 0.0462, 0.0629, 0.0380, 0.0993], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0099, 0.0145, 0.0113, 0.0116, 0.0094, 0.0127, 0.0162], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 17:40:10,987 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 6.737e+02 7.871e+02 9.316e+02 1.602e+03, threshold=1.574e+03, percent-clipped=0.0 +2023-03-27 17:40:26,684 INFO [train.py:892] (2/4) Epoch 3, batch 1400, loss[loss=0.5688, simple_loss=0.5376, pruned_loss=0.3, over 19409.00 frames. ], tot_loss[loss=0.3373, simple_loss=0.3618, pruned_loss=0.1564, over 3945122.23 frames. ], batch size: 431, lr: 3.90e-02, grad_scale: 8.0 +2023-03-27 17:42:06,540 INFO [train.py:892] (2/4) Epoch 3, batch 1450, loss[loss=0.402, simple_loss=0.4065, pruned_loss=0.1988, over 19699.00 frames. ], tot_loss[loss=0.3397, simple_loss=0.3636, pruned_loss=0.1579, over 3946078.51 frames. ], batch size: 310, lr: 3.89e-02, grad_scale: 8.0 +2023-03-27 17:42:34,355 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.94 vs. limit=5.0 +2023-03-27 17:43:31,570 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-27 17:43:35,887 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+02 7.354e+02 9.251e+02 1.233e+03 2.096e+03, threshold=1.850e+03, percent-clipped=6.0 +2023-03-27 17:43:49,237 INFO [train.py:892] (2/4) Epoch 3, batch 1500, loss[loss=0.3462, simple_loss=0.368, pruned_loss=0.1623, over 19766.00 frames. ], tot_loss[loss=0.3422, simple_loss=0.3652, pruned_loss=0.1596, over 3946503.45 frames. ], batch size: 226, lr: 3.88e-02, grad_scale: 8.0 +2023-03-27 17:43:53,743 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2857, 3.7646, 3.8800, 4.2623, 3.8462, 4.2296, 4.1668, 4.4649], + device='cuda:2'), covar=tensor([0.0420, 0.0326, 0.0394, 0.0215, 0.0453, 0.0225, 0.0253, 0.0291], + device='cuda:2'), in_proj_covar=tensor([0.0089, 0.0094, 0.0104, 0.0089, 0.0093, 0.0074, 0.0101, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-27 17:45:34,253 INFO [train.py:892] (2/4) Epoch 3, batch 1550, loss[loss=0.3581, simple_loss=0.3874, pruned_loss=0.1644, over 19784.00 frames. ], tot_loss[loss=0.3427, simple_loss=0.3659, pruned_loss=0.1597, over 3946533.12 frames. 
], batch size: 66, lr: 3.87e-02, grad_scale: 8.0 +2023-03-27 17:46:48,574 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3083, 2.1195, 1.6084, 2.4414, 2.3388, 2.2655, 2.4413, 2.0613], + device='cuda:2'), covar=tensor([0.0460, 0.0427, 0.1195, 0.0345, 0.0393, 0.0361, 0.0372, 0.0305], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0050, 0.0084, 0.0052, 0.0050, 0.0044, 0.0050, 0.0046], + device='cuda:2'), out_proj_covar=tensor([9.0410e-05, 7.9464e-05, 1.2850e-04, 8.3153e-05, 7.6619e-05, 7.1475e-05, + 8.1002e-05, 7.4862e-05], device='cuda:2') +2023-03-27 17:47:02,176 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.559e+02 6.524e+02 7.733e+02 9.539e+02 1.385e+03, threshold=1.547e+03, percent-clipped=0.0 +2023-03-27 17:47:15,548 INFO [train.py:892] (2/4) Epoch 3, batch 1600, loss[loss=0.2888, simple_loss=0.3259, pruned_loss=0.1259, over 19782.00 frames. ], tot_loss[loss=0.3414, simple_loss=0.3649, pruned_loss=0.159, over 3948502.30 frames. ], batch size: 91, lr: 3.86e-02, grad_scale: 8.0 +2023-03-27 17:48:02,547 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9415, 4.6618, 5.0239, 4.6723, 4.4395, 4.9072, 4.6132, 5.3114], + device='cuda:2'), covar=tensor([0.1690, 0.0285, 0.0291, 0.0263, 0.0341, 0.0214, 0.0261, 0.0174], + device='cuda:2'), in_proj_covar=tensor([0.0160, 0.0115, 0.0110, 0.0102, 0.0109, 0.0099, 0.0092, 0.0093], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:2') +2023-03-27 17:48:56,957 INFO [train.py:892] (2/4) Epoch 3, batch 1650, loss[loss=0.3029, simple_loss=0.3372, pruned_loss=0.1343, over 19950.00 frames. ], tot_loss[loss=0.3378, simple_loss=0.3625, pruned_loss=0.1565, over 3948590.78 frames. ], batch size: 46, lr: 3.85e-02, grad_scale: 8.0 +2023-03-27 17:49:44,076 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.5081, 5.7562, 5.7062, 5.6600, 5.5636, 5.6898, 5.1305, 5.0056], + device='cuda:2'), covar=tensor([0.0304, 0.0258, 0.0496, 0.0314, 0.0583, 0.0541, 0.0393, 0.0734], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0102, 0.0148, 0.0115, 0.0116, 0.0095, 0.0129, 0.0164], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 17:50:27,889 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.447e+02 7.036e+02 8.570e+02 1.037e+03 1.553e+03, threshold=1.714e+03, percent-clipped=1.0 +2023-03-27 17:50:41,760 INFO [train.py:892] (2/4) Epoch 3, batch 1700, loss[loss=0.3236, simple_loss=0.3393, pruned_loss=0.1539, over 19764.00 frames. ], tot_loss[loss=0.3368, simple_loss=0.3619, pruned_loss=0.1559, over 3947497.41 frames. ], batch size: 152, lr: 3.84e-02, grad_scale: 8.0 +2023-03-27 17:52:19,755 INFO [train.py:892] (2/4) Epoch 3, batch 1750, loss[loss=0.3326, simple_loss=0.3751, pruned_loss=0.1451, over 19875.00 frames. ], tot_loss[loss=0.3349, simple_loss=0.3603, pruned_loss=0.1547, over 3947881.59 frames. 
], batch size: 53, lr: 3.83e-02, grad_scale: 8.0 +2023-03-27 17:52:38,034 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1713, 1.8779, 4.1059, 3.8978, 3.8890, 3.8091, 4.0543, 3.5405], + device='cuda:2'), covar=tensor([0.0350, 0.2473, 0.0205, 0.0207, 0.0320, 0.0205, 0.0285, 0.0799], + device='cuda:2'), in_proj_covar=tensor([0.0063, 0.0130, 0.0067, 0.0061, 0.0055, 0.0063, 0.0056, 0.0065], + device='cuda:2'), out_proj_covar=tensor([8.8203e-05, 1.6203e-04, 8.8016e-05, 8.5560e-05, 8.3823e-05, 8.7010e-05, + 8.2609e-05, 9.6007e-05], device='cuda:2') +2023-03-27 17:52:58,281 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.07 vs. limit=2.0 +2023-03-27 17:53:20,261 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0357, 5.2123, 5.4468, 5.3321, 5.3921, 5.0443, 5.1286, 4.9925], + device='cuda:2'), covar=tensor([0.1135, 0.0724, 0.0951, 0.0610, 0.0661, 0.1007, 0.1653, 0.2490], + device='cuda:2'), in_proj_covar=tensor([0.0171, 0.0134, 0.0202, 0.0151, 0.0155, 0.0151, 0.0184, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-27 17:53:34,948 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.884e+02 6.493e+02 7.809e+02 9.948e+02 1.676e+03, threshold=1.562e+03, percent-clipped=0.0 +2023-03-27 17:53:43,835 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8607, 2.0125, 1.4055, 2.3025, 1.6856, 1.6862, 1.6886, 2.0881], + device='cuda:2'), covar=tensor([0.0327, 0.0410, 0.0632, 0.0341, 0.0476, 0.0667, 0.0650, 0.0630], + device='cuda:2'), in_proj_covar=tensor([0.0027, 0.0029, 0.0032, 0.0028, 0.0028, 0.0032, 0.0032, 0.0029], + device='cuda:2'), out_proj_covar=tensor([4.4681e-05, 4.7887e-05, 5.3362e-05, 4.5433e-05, 4.9251e-05, 5.2512e-05, + 5.3919e-05, 4.9116e-05], device='cuda:2') +2023-03-27 17:53:46,206 INFO [train.py:892] (2/4) Epoch 3, batch 1800, loss[loss=0.3244, simple_loss=0.3545, pruned_loss=0.1471, over 19889.00 frames. ], tot_loss[loss=0.3353, simple_loss=0.3612, pruned_loss=0.1547, over 3946949.72 frames. ], batch size: 92, lr: 3.82e-02, grad_scale: 16.0 +2023-03-27 17:55:08,435 INFO [train.py:892] (2/4) Epoch 3, batch 1850, loss[loss=0.3678, simple_loss=0.4036, pruned_loss=0.166, over 19844.00 frames. ], tot_loss[loss=0.3349, simple_loss=0.3623, pruned_loss=0.1537, over 3946187.11 frames. ], batch size: 57, lr: 3.81e-02, grad_scale: 16.0 +2023-03-27 17:56:05,365 INFO [train.py:892] (2/4) Epoch 4, batch 0, loss[loss=0.3424, simple_loss=0.3556, pruned_loss=0.1646, over 19795.00 frames. ], tot_loss[loss=0.3424, simple_loss=0.3556, pruned_loss=0.1646, over 19795.00 frames. ], batch size: 185, lr: 3.56e-02, grad_scale: 16.0 +2023-03-27 17:56:05,366 INFO [train.py:917] (2/4) Computing validation loss +2023-03-27 17:56:21,850 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5534, 3.6079, 4.4864, 4.9519, 3.5829, 4.0287, 4.0486, 3.1473], + device='cuda:2'), covar=tensor([0.0273, 0.2403, 0.0423, 0.0093, 0.1865, 0.0374, 0.0459, 0.1861], + device='cuda:2'), in_proj_covar=tensor([0.0110, 0.0263, 0.0133, 0.0088, 0.0189, 0.0103, 0.0128, 0.0184], + device='cuda:2'), out_proj_covar=tensor([1.1510e-04, 2.5885e-04, 1.3836e-04, 8.4456e-05, 1.8556e-04, 9.9827e-05, + 1.2950e-04, 1.8085e-04], device='cuda:2') +2023-03-27 17:56:31,644 INFO [train.py:926] (2/4) Epoch 4, validation: loss=0.2293, simple_loss=0.3025, pruned_loss=0.07807, over 2883724.00 frames. 
+2023-03-27 17:56:31,646 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22298MB +2023-03-27 17:57:55,092 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 7.146e+02 8.558e+02 9.901e+02 2.056e+03, threshold=1.712e+03, percent-clipped=2.0 +2023-03-27 17:58:21,180 INFO [train.py:892] (2/4) Epoch 4, batch 50, loss[loss=0.3074, simple_loss=0.3399, pruned_loss=0.1375, over 19768.00 frames. ], tot_loss[loss=0.3166, simple_loss=0.3441, pruned_loss=0.1445, over 891699.52 frames. ], batch size: 70, lr: 3.55e-02, grad_scale: 16.0 +2023-03-27 17:59:35,995 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5650.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 18:00:10,204 INFO [train.py:892] (2/4) Epoch 4, batch 100, loss[loss=0.3092, simple_loss=0.3473, pruned_loss=0.1356, over 19701.00 frames. ], tot_loss[loss=0.3173, simple_loss=0.3466, pruned_loss=0.144, over 1570507.60 frames. ], batch size: 59, lr: 3.54e-02, grad_scale: 16.0 +2023-03-27 18:00:29,136 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3679, 4.5239, 4.5700, 4.5033, 4.2614, 4.4626, 3.9337, 3.9853], + device='cuda:2'), covar=tensor([0.0415, 0.0320, 0.0568, 0.0408, 0.0559, 0.0571, 0.0637, 0.0944], + device='cuda:2'), in_proj_covar=tensor([0.0112, 0.0105, 0.0154, 0.0119, 0.0121, 0.0100, 0.0138, 0.0170], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 18:01:31,610 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+02 7.031e+02 8.200e+02 9.251e+02 1.434e+03, threshold=1.640e+03, percent-clipped=0.0 +2023-03-27 18:01:46,554 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5711.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:01:55,774 INFO [train.py:892] (2/4) Epoch 4, batch 150, loss[loss=0.4149, simple_loss=0.4148, pruned_loss=0.2075, over 19719.00 frames. ], tot_loss[loss=0.3248, simple_loss=0.3519, pruned_loss=0.1488, over 2098536.73 frames. ], batch size: 337, lr: 3.54e-02, grad_scale: 16.0 +2023-03-27 18:01:58,710 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8187, 3.4841, 3.4712, 3.7632, 3.4877, 3.6526, 3.7727, 3.9511], + device='cuda:2'), covar=tensor([0.0396, 0.0285, 0.0379, 0.0265, 0.0430, 0.0381, 0.0274, 0.0332], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0096, 0.0105, 0.0093, 0.0094, 0.0075, 0.0102, 0.0117], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-27 18:03:18,818 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5755.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:03:41,943 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3961, 4.7978, 5.2904, 4.8494, 5.3826, 3.6939, 4.2213, 4.7983], + device='cuda:2'), covar=tensor([0.0153, 0.0136, 0.0098, 0.0131, 0.0083, 0.0535, 0.0934, 0.0214], + device='cuda:2'), in_proj_covar=tensor([0.0053, 0.0059, 0.0062, 0.0069, 0.0063, 0.0085, 0.0098, 0.0070], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-27 18:03:42,970 INFO [train.py:892] (2/4) Epoch 4, batch 200, loss[loss=0.3062, simple_loss=0.3362, pruned_loss=0.1381, over 19779.00 frames. ], tot_loss[loss=0.3237, simple_loss=0.3516, pruned_loss=0.1478, over 2508503.97 frames. 
], batch size: 42, lr: 3.53e-02, grad_scale: 16.0 +2023-03-27 18:05:07,192 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.424e+02 6.665e+02 7.849e+02 9.625e+02 1.890e+03, threshold=1.570e+03, percent-clipped=1.0 +2023-03-27 18:05:31,202 INFO [train.py:892] (2/4) Epoch 4, batch 250, loss[loss=0.2952, simple_loss=0.3344, pruned_loss=0.128, over 19692.00 frames. ], tot_loss[loss=0.3247, simple_loss=0.3527, pruned_loss=0.1483, over 2828568.14 frames. ], batch size: 45, lr: 3.52e-02, grad_scale: 16.0 +2023-03-27 18:05:32,190 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5816.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:06:52,341 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([0.9977, 1.6656, 1.4074, 0.8294, 1.1833, 1.4047, 1.1334, 1.4065], + device='cuda:2'), covar=tensor([0.0307, 0.0260, 0.0231, 0.0701, 0.0627, 0.0273, 0.0270, 0.0316], + device='cuda:2'), in_proj_covar=tensor([0.0029, 0.0028, 0.0028, 0.0043, 0.0041, 0.0029, 0.0026, 0.0031], + device='cuda:2'), out_proj_covar=tensor([4.8579e-05, 4.7696e-05, 4.5986e-05, 7.2722e-05, 7.0841e-05, 4.8486e-05, + 4.6578e-05, 5.2196e-05], device='cuda:2') +2023-03-27 18:06:59,835 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5858.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:07:17,593 INFO [train.py:892] (2/4) Epoch 4, batch 300, loss[loss=0.2758, simple_loss=0.3044, pruned_loss=0.1235, over 19748.00 frames. ], tot_loss[loss=0.3268, simple_loss=0.3544, pruned_loss=0.1496, over 3077766.61 frames. ], batch size: 140, lr: 3.51e-02, grad_scale: 16.0 +2023-03-27 18:07:45,818 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-27 18:08:37,829 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.638e+02 6.552e+02 7.972e+02 9.789e+02 1.946e+03, threshold=1.594e+03, percent-clipped=3.0 +2023-03-27 18:09:05,482 INFO [train.py:892] (2/4) Epoch 4, batch 350, loss[loss=0.3186, simple_loss=0.3562, pruned_loss=0.1405, over 19718.00 frames. ], tot_loss[loss=0.328, simple_loss=0.3557, pruned_loss=0.1502, over 3270413.62 frames. ], batch size: 61, lr: 3.50e-02, grad_scale: 16.0 +2023-03-27 18:09:12,453 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5919.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:10:21,234 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8985, 5.2021, 5.2016, 5.1754, 4.9746, 5.1497, 4.6682, 4.5816], + device='cuda:2'), covar=tensor([0.0361, 0.0247, 0.0549, 0.0345, 0.0472, 0.0498, 0.0429, 0.0841], + device='cuda:2'), in_proj_covar=tensor([0.0116, 0.0106, 0.0158, 0.0121, 0.0123, 0.0103, 0.0138, 0.0173], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 18:10:45,662 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0751, 2.5271, 3.2068, 2.7030, 2.7391, 3.4633, 2.3926, 2.3025], + device='cuda:2'), covar=tensor([0.0505, 0.2405, 0.0307, 0.0529, 0.0923, 0.0233, 0.0695, 0.1288], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0240, 0.0111, 0.0120, 0.0195, 0.0096, 0.0126, 0.0146], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-27 18:10:47,953 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.97 vs. 
limit=2.0 +2023-03-27 18:10:48,434 INFO [train.py:892] (2/4) Epoch 4, batch 400, loss[loss=0.2912, simple_loss=0.3302, pruned_loss=0.1261, over 19734.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3542, pruned_loss=0.1484, over 3421193.99 frames. ], batch size: 80, lr: 3.49e-02, grad_scale: 16.0 +2023-03-27 18:11:41,735 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5990.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:12:12,455 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.258e+02 6.552e+02 7.732e+02 9.637e+02 1.932e+03, threshold=1.546e+03, percent-clipped=3.0 +2023-03-27 18:12:17,253 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6006.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:12:36,855 INFO [train.py:892] (2/4) Epoch 4, batch 450, loss[loss=0.2769, simple_loss=0.3162, pruned_loss=0.1188, over 19813.00 frames. ], tot_loss[loss=0.3257, simple_loss=0.3546, pruned_loss=0.1483, over 3538575.18 frames. ], batch size: 117, lr: 3.48e-02, grad_scale: 16.0 +2023-03-27 18:13:12,016 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8888, 3.0050, 3.5190, 3.8998, 2.5622, 3.5398, 3.1677, 2.4394], + device='cuda:2'), covar=tensor([0.0319, 0.2597, 0.0519, 0.0115, 0.1918, 0.0323, 0.0538, 0.1958], + device='cuda:2'), in_proj_covar=tensor([0.0114, 0.0269, 0.0137, 0.0088, 0.0191, 0.0105, 0.0133, 0.0181], + device='cuda:2'), out_proj_covar=tensor([1.2291e-04, 2.6752e-04, 1.4412e-04, 8.6094e-05, 1.9123e-04, 1.0521e-04, + 1.3686e-04, 1.8240e-04], device='cuda:2') +2023-03-27 18:13:33,623 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6043.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:13:52,146 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6051.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:14:02,804 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-03-27 18:14:22,251 INFO [train.py:892] (2/4) Epoch 4, batch 500, loss[loss=0.3293, simple_loss=0.3569, pruned_loss=0.1509, over 19767.00 frames. ], tot_loss[loss=0.3232, simple_loss=0.3529, pruned_loss=0.1468, over 3631014.42 frames. ], batch size: 155, lr: 3.47e-02, grad_scale: 16.0 +2023-03-27 18:15:43,599 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.065e+02 6.254e+02 7.910e+02 9.308e+02 1.450e+03, threshold=1.582e+03, percent-clipped=0.0 +2023-03-27 18:15:44,511 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6104.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:15:58,501 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6111.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:16:07,743 INFO [train.py:892] (2/4) Epoch 4, batch 550, loss[loss=0.2899, simple_loss=0.3232, pruned_loss=0.1283, over 19790.00 frames. ], tot_loss[loss=0.321, simple_loss=0.3511, pruned_loss=0.1455, over 3703047.89 frames. ], batch size: 79, lr: 3.47e-02, grad_scale: 16.0 +2023-03-27 18:16:42,604 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-27 18:16:51,724 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6135.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:17:55,484 INFO [train.py:892] (2/4) Epoch 4, batch 600, loss[loss=0.2986, simple_loss=0.335, pruned_loss=0.1311, over 19536.00 frames. 
], tot_loss[loss=0.3213, simple_loss=0.3516, pruned_loss=0.1455, over 3757710.06 frames. ], batch size: 46, lr: 3.46e-02, grad_scale: 16.0 +2023-03-27 18:18:03,642 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6170.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:18:56,431 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6196.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:15,224 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6203.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:16,016 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.339e+02 6.549e+02 8.017e+02 9.808e+02 1.883e+03, threshold=1.603e+03, percent-clipped=1.0 +2023-03-27 18:19:30,072 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6211.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:37,456 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6214.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:40,630 INFO [train.py:892] (2/4) Epoch 4, batch 650, loss[loss=0.3173, simple_loss=0.3453, pruned_loss=0.1446, over 19790.00 frames. ], tot_loss[loss=0.3227, simple_loss=0.3527, pruned_loss=0.1463, over 3798302.49 frames. ], batch size: 154, lr: 3.45e-02, grad_scale: 16.0 +2023-03-27 18:19:59,597 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0537, 2.7412, 3.4858, 3.5546, 4.3351, 3.8323, 4.3679, 4.6480], + device='cuda:2'), covar=tensor([0.0363, 0.1208, 0.0648, 0.1279, 0.0650, 0.0580, 0.0227, 0.0164], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0131, 0.0109, 0.0154, 0.0091, 0.0110, 0.0075, 0.0079], + device='cuda:2'), out_proj_covar=tensor([1.2149e-04, 1.4233e-04, 1.3328e-04, 1.7015e-04, 1.1292e-04, 1.2979e-04, + 8.8163e-05, 9.3247e-05], device='cuda:2') +2023-03-27 18:20:11,656 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6231.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:20:17,713 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3724, 1.9440, 3.9163, 3.5716, 3.8204, 4.1636, 4.1568, 3.6996], + device='cuda:2'), covar=tensor([0.0420, 0.2409, 0.0249, 0.0276, 0.0307, 0.0117, 0.0154, 0.0492], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0138, 0.0077, 0.0069, 0.0064, 0.0066, 0.0058, 0.0066], + device='cuda:2'), out_proj_covar=tensor([1.0723e-04, 1.8042e-04, 1.0787e-04, 1.0143e-04, 9.9344e-05, 9.4298e-05, + 8.9577e-05, 1.0287e-04], device='cuda:2') +2023-03-27 18:20:49,605 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-27 18:21:22,776 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6264.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:21:25,681 INFO [train.py:892] (2/4) Epoch 4, batch 700, loss[loss=0.3036, simple_loss=0.3296, pruned_loss=0.1388, over 19800.00 frames. ], tot_loss[loss=0.3215, simple_loss=0.3517, pruned_loss=0.1457, over 3832754.90 frames. 
], batch size: 172, lr: 3.44e-02, grad_scale: 16.0 +2023-03-27 18:21:26,795 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6266.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:21:41,920 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6272.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:22:47,951 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.869e+02 6.690e+02 8.159e+02 9.859e+02 1.679e+03, threshold=1.632e+03, percent-clipped=2.0 +2023-03-27 18:22:52,759 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6306.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:23:15,288 INFO [train.py:892] (2/4) Epoch 4, batch 750, loss[loss=0.2789, simple_loss=0.3236, pruned_loss=0.1171, over 19767.00 frames. ], tot_loss[loss=0.3224, simple_loss=0.3522, pruned_loss=0.1463, over 3856885.03 frames. ], batch size: 113, lr: 3.43e-02, grad_scale: 8.0 +2023-03-27 18:23:38,429 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6148, 4.7886, 4.8338, 4.8430, 4.6164, 4.7735, 4.2673, 4.3347], + device='cuda:2'), covar=tensor([0.0341, 0.0361, 0.0635, 0.0394, 0.0477, 0.0566, 0.0602, 0.1003], + device='cuda:2'), in_proj_covar=tensor([0.0116, 0.0107, 0.0159, 0.0120, 0.0124, 0.0106, 0.0142, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 18:23:38,600 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6327.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:24:16,280 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6346.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:24:34,070 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6354.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 18:24:42,951 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.5820, 1.8696, 1.6060, 1.0330, 1.6340, 1.8208, 1.8116, 1.7717], + device='cuda:2'), covar=tensor([0.0256, 0.0268, 0.0255, 0.0909, 0.0490, 0.0413, 0.0224, 0.0408], + device='cuda:2'), in_proj_covar=tensor([0.0029, 0.0029, 0.0027, 0.0043, 0.0040, 0.0029, 0.0025, 0.0029], + device='cuda:2'), out_proj_covar=tensor([5.1159e-05, 5.2131e-05, 4.7083e-05, 7.7395e-05, 7.1951e-05, 5.2055e-05, + 4.6560e-05, 5.2209e-05], device='cuda:2') +2023-03-27 18:24:57,112 INFO [train.py:892] (2/4) Epoch 4, batch 800, loss[loss=0.3509, simple_loss=0.3776, pruned_loss=0.1621, over 19948.00 frames. ], tot_loss[loss=0.3229, simple_loss=0.3524, pruned_loss=0.1466, over 3878427.88 frames. 
], batch size: 53, lr: 3.42e-02, grad_scale: 8.0 +2023-03-27 18:25:01,649 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3026, 2.9044, 2.9371, 3.3361, 3.0429, 2.8969, 3.1143, 3.5387], + device='cuda:2'), covar=tensor([0.0493, 0.0424, 0.0518, 0.0290, 0.0504, 0.0782, 0.0447, 0.0320], + device='cuda:2'), in_proj_covar=tensor([0.0091, 0.0096, 0.0109, 0.0096, 0.0096, 0.0078, 0.0105, 0.0112], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 18:25:21,485 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7523, 2.5170, 1.5230, 2.7758, 2.9684, 2.9062, 2.9937, 2.4657], + device='cuda:2'), covar=tensor([0.0635, 0.0520, 0.1747, 0.0823, 0.0633, 0.0583, 0.0657, 0.0522], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0060, 0.0090, 0.0062, 0.0060, 0.0050, 0.0058, 0.0056], + device='cuda:2'), out_proj_covar=tensor([1.2189e-04, 1.0361e-04, 1.5103e-04, 1.1112e-04, 1.0322e-04, 9.2691e-05, + 1.0564e-04, 1.0199e-04], device='cuda:2') +2023-03-27 18:26:05,917 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6399.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:26:17,456 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.240e+02 6.935e+02 8.296e+02 1.069e+03 2.251e+03, threshold=1.659e+03, percent-clipped=2.0 +2023-03-27 18:26:29,547 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6411.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:26:38,522 INFO [train.py:892] (2/4) Epoch 4, batch 850, loss[loss=0.3115, simple_loss=0.3317, pruned_loss=0.1456, over 19888.00 frames. ], tot_loss[loss=0.32, simple_loss=0.351, pruned_loss=0.1445, over 3894263.48 frames. ], batch size: 176, lr: 3.42e-02, grad_scale: 8.0 +2023-03-27 18:26:44,953 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2358, 3.3203, 4.7567, 3.6167, 3.9129, 4.6992, 3.2014, 3.0828], + device='cuda:2'), covar=tensor([0.0479, 0.3275, 0.0195, 0.0568, 0.1047, 0.0246, 0.0706, 0.1192], + device='cuda:2'), in_proj_covar=tensor([0.0169, 0.0251, 0.0120, 0.0128, 0.0207, 0.0106, 0.0135, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-27 18:28:08,186 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6459.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:28:14,592 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6462.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:28:23,859 INFO [train.py:892] (2/4) Epoch 4, batch 900, loss[loss=0.5395, simple_loss=0.5466, pruned_loss=0.2662, over 17907.00 frames. ], tot_loss[loss=0.3203, simple_loss=0.3513, pruned_loss=0.1446, over 3904857.57 frames. ], batch size: 633, lr: 3.41e-02, grad_scale: 8.0 +2023-03-27 18:28:26,685 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6467.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:28:43,299 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=2.00 vs. 
limit=2.0 +2023-03-27 18:29:15,018 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6491.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:29:44,755 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 6.152e+02 8.046e+02 9.426e+02 1.704e+03, threshold=1.609e+03, percent-clipped=1.0 +2023-03-27 18:30:02,704 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6514.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:30:05,644 INFO [train.py:892] (2/4) Epoch 4, batch 950, loss[loss=0.3043, simple_loss=0.3306, pruned_loss=0.139, over 19838.00 frames. ], tot_loss[loss=0.319, simple_loss=0.3501, pruned_loss=0.1439, over 3916372.78 frames. ], batch size: 144, lr: 3.40e-02, grad_scale: 8.0 +2023-03-27 18:30:19,458 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6523.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:30:24,572 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6526.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:30:28,561 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6528.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:31:32,898 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6559.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:31:38,664 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6562.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:31:46,898 INFO [train.py:892] (2/4) Epoch 4, batch 1000, loss[loss=0.3075, simple_loss=0.3364, pruned_loss=0.1393, over 19821.00 frames. ], tot_loss[loss=0.3188, simple_loss=0.3503, pruned_loss=0.1437, over 3921724.30 frames. ], batch size: 204, lr: 3.39e-02, grad_scale: 8.0 +2023-03-27 18:31:48,229 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-27 18:31:49,596 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6567.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:32:00,915 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9194, 4.1118, 4.0911, 4.0621, 3.9676, 4.0637, 3.5276, 3.6082], + device='cuda:2'), covar=tensor([0.0459, 0.0448, 0.0782, 0.0552, 0.0696, 0.0690, 0.0721, 0.1141], + device='cuda:2'), in_proj_covar=tensor([0.0117, 0.0112, 0.0165, 0.0125, 0.0128, 0.0109, 0.0146, 0.0183], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 18:33:08,468 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.155e+02 6.376e+02 7.596e+02 9.023e+02 1.357e+03, threshold=1.519e+03, percent-clipped=0.0 +2023-03-27 18:33:09,338 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4051, 2.5219, 2.8976, 2.3742, 2.7242, 1.8250, 2.7540, 3.2647], + device='cuda:2'), covar=tensor([0.0519, 0.0285, 0.0482, 0.0424, 0.0626, 0.0668, 0.0411, 0.0178], + device='cuda:2'), in_proj_covar=tensor([0.0041, 0.0034, 0.0037, 0.0051, 0.0037, 0.0033, 0.0032, 0.0029], + device='cuda:2'), out_proj_covar=tensor([8.2156e-05, 6.9956e-05, 7.4330e-05, 9.1067e-05, 7.3643e-05, 6.7716e-05, + 6.6841e-05, 5.9946e-05], device='cuda:2') +2023-03-27 18:33:31,601 INFO [train.py:892] (2/4) Epoch 4, batch 1050, loss[loss=0.3565, simple_loss=0.3771, pruned_loss=0.1679, over 19764.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3523, pruned_loss=0.1451, over 3927464.51 frames. 
], batch size: 263, lr: 3.38e-02, grad_scale: 8.0 +2023-03-27 18:33:45,138 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6622.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:33:59,006 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6629.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:34:35,659 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6646.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:35:14,542 INFO [train.py:892] (2/4) Epoch 4, batch 1100, loss[loss=0.319, simple_loss=0.37, pruned_loss=0.134, over 19821.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.3515, pruned_loss=0.144, over 3930800.15 frames. ], batch size: 57, lr: 3.37e-02, grad_scale: 8.0 +2023-03-27 18:36:06,456 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6690.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:36:14,098 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6694.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:36:23,779 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6699.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:36:34,437 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.080e+02 6.475e+02 7.910e+02 9.923e+02 2.054e+03, threshold=1.582e+03, percent-clipped=5.0 +2023-03-27 18:36:57,927 INFO [train.py:892] (2/4) Epoch 4, batch 1150, loss[loss=0.3673, simple_loss=0.3809, pruned_loss=0.1768, over 19737.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3505, pruned_loss=0.144, over 3935589.97 frames. ], batch size: 269, lr: 3.37e-02, grad_scale: 8.0 +2023-03-27 18:37:38,529 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.72 vs. limit=2.0 +2023-03-27 18:38:01,835 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6747.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:38:25,129 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2327, 2.2747, 2.2564, 2.1306, 2.3813, 1.9738, 2.4092, 2.4252], + device='cuda:2'), covar=tensor([0.0449, 0.0295, 0.0432, 0.0454, 0.0325, 0.0295, 0.0286, 0.0244], + device='cuda:2'), in_proj_covar=tensor([0.0043, 0.0036, 0.0039, 0.0053, 0.0038, 0.0033, 0.0034, 0.0032], + device='cuda:2'), out_proj_covar=tensor([8.8238e-05, 7.3813e-05, 7.8486e-05, 9.6072e-05, 7.7730e-05, 7.0169e-05, + 7.2527e-05, 6.5708e-05], device='cuda:2') +2023-03-27 18:38:41,083 INFO [train.py:892] (2/4) Epoch 4, batch 1200, loss[loss=0.2641, simple_loss=0.3093, pruned_loss=0.1095, over 19912.00 frames. ], tot_loss[loss=0.3198, simple_loss=0.351, pruned_loss=0.1443, over 3939067.30 frames. ], batch size: 45, lr: 3.36e-02, grad_scale: 8.0 +2023-03-27 18:39:32,833 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6791.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:39:59,924 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.559e+02 6.894e+02 8.217e+02 1.051e+03 2.121e+03, threshold=1.643e+03, percent-clipped=4.0
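The train.py:892 entries above report three loss values per line. The numbers are mutually consistent with the logged total being half the simple loss plus the pruned loss: at Epoch 4, batch 1200 above, 0.5 * 0.351 + 0.1443 = 0.3198, which is exactly the logged tot_loss. Below is a minimal sketch of that relationship, assuming a fixed 0.5 scale on the simple term; the actual recipe may also ramp these scales during warmup.

# Hypothetical reconstruction of how the logged `loss` relates to
# `simple_loss` and `pruned_loss` in the train.py:892 entries; checked
# numerically against the log, e.g. Epoch 4, batch 1200 above:
#   0.5 * 0.351 + 0.1443 = 0.3198 (the logged tot_loss).
SIMPLE_LOSS_SCALE = 0.5  # assumed constant here; the recipe may vary it

def combine_losses(simple_loss: float, pruned_loss: float) -> float:
    """Combine the two pruned-transducer loss terms into the logged total."""
    return SIMPLE_LOSS_SCALE * simple_loss + pruned_loss

assert abs(combine_losses(0.351, 0.1443) - 0.3198) < 5e-4

The "over N frames" fields further suggest that tot_loss is a frame-weighted running aggregate: the frame totals grow monotonically through an epoch (3930800 -> 3935589 -> 3939067 above) and reset when a new epoch begins.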
+2023-03-27 18:40:22,609 INFO [train.py:892] (2/4) Epoch 4, batch 1250, loss[loss=0.2849, simple_loss=0.3211, pruned_loss=0.1243, over 19788.00 frames. ], tot_loss[loss=0.3193, simple_loss=0.3503, pruned_loss=0.1442, over 3940504.44 frames. ], batch size: 172, lr: 3.35e-02, grad_scale: 8.0 +2023-03-27 18:40:26,976 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6818.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 18:40:36,488 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6823.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:40:42,239 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6826.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:41:05,518 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0342, 1.9649, 3.2795, 3.4857, 3.3849, 3.6262, 3.7757, 3.4245], + device='cuda:2'), covar=tensor([0.0446, 0.2076, 0.0308, 0.0220, 0.0372, 0.0158, 0.0160, 0.0361], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0142, 0.0081, 0.0074, 0.0067, 0.0066, 0.0060, 0.0066], + device='cuda:2'), out_proj_covar=tensor([1.1530e-04, 1.9171e-04, 1.1752e-04, 1.1222e-04, 1.0629e-04, 9.7048e-05, + 9.6756e-05, 1.0681e-04], device='cuda:2') +2023-03-27 18:41:10,816 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6839.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:41:50,073 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6859.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:42:04,703 INFO [train.py:892] (2/4) Epoch 4, batch 1300, loss[loss=0.3804, simple_loss=0.3968, pruned_loss=0.182, over 19762.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3523, pruned_loss=0.1457, over 3941713.48 frames. ], batch size: 321, lr: 3.34e-02, grad_scale: 8.0 +2023-03-27 18:42:07,330 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6867.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:42:21,483 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6874.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:42:29,191 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6878.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 18:42:49,235 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.62 vs. limit=5.0 +2023-03-27 18:43:18,198 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-27 18:43:24,640 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.185e+02 7.634e+02 1.003e+03 1.964e+03, threshold=1.527e+03, percent-clipped=1.0 +2023-03-27 18:43:29,137 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6907.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:43:34,582 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6909.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:43:45,439 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6915.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:43:46,681 INFO [train.py:892] (2/4) Epoch 4, batch 1350, loss[loss=0.3082, simple_loss=0.3448, pruned_loss=0.1358, over 19376.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3499, pruned_loss=0.1436, over 3942880.65 frames.
], batch size: 40, lr: 3.33e-02, grad_scale: 8.0 +2023-03-27 18:43:58,837 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6922.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:44:32,682 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6939.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 18:45:27,451 INFO [train.py:892] (2/4) Epoch 4, batch 1400, loss[loss=0.3477, simple_loss=0.3636, pruned_loss=0.1659, over 19746.00 frames. ], tot_loss[loss=0.317, simple_loss=0.3487, pruned_loss=0.1426, over 3945633.01 frames. ], batch size: 259, lr: 3.33e-02, grad_scale: 8.0 +2023-03-27 18:45:37,280 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:45:37,515 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:45:59,805 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.59 vs. limit=5.0 +2023-03-27 18:46:07,944 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-03-27 18:46:09,138 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6985.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:46:49,377 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.908e+02 6.198e+02 7.481e+02 9.100e+02 1.903e+03, threshold=1.496e+03, percent-clipped=2.0 +2023-03-27 18:47:13,277 INFO [train.py:892] (2/4) Epoch 4, batch 1450, loss[loss=0.2763, simple_loss=0.3213, pruned_loss=0.1157, over 19813.00 frames. ], tot_loss[loss=0.3167, simple_loss=0.349, pruned_loss=0.1422, over 3945638.05 frames. ], batch size: 98, lr: 3.32e-02, grad_scale: 8.0 +2023-03-27 18:47:43,614 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-27 18:48:17,436 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7048.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 18:48:28,172 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6256, 2.3853, 1.2260, 2.8970, 2.7816, 2.7552, 2.9415, 2.3633], + device='cuda:2'), covar=tensor([0.0728, 0.0620, 0.2083, 0.0701, 0.0648, 0.0471, 0.0626, 0.0668], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0067, 0.0100, 0.0073, 0.0067, 0.0057, 0.0066, 0.0070], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-27 18:48:54,060 INFO [train.py:892] (2/4) Epoch 4, batch 1500, loss[loss=0.446, simple_loss=0.4755, pruned_loss=0.2082, over 18699.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.3494, pruned_loss=0.1425, over 3945852.61 frames. ], batch size: 564, lr: 3.31e-02, grad_scale: 8.0 +2023-03-27 18:49:48,625 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-03-27 18:50:13,928 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 6.331e+02 7.635e+02 9.535e+02 1.700e+03, threshold=1.527e+03, percent-clipped=1.0 +2023-03-27 18:50:22,734 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7109.0, num_to_drop=1, layers_to_drop={0}
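The optim.py:368 entries report quartiles of recently observed whole-model gradient norms, a clipping threshold, and how many recent batches were clipped. In every such entry the threshold equals Clipping_scale times the middle quartile (just above: 2.0 * 7.481e+02 = 1.496e+03 and 2.0 * 7.635e+02 = 1.527e+03), so the clipper appears to scale the running median of gradient norms. A minimal sketch of a median-based clipper under that reading; the class name and window size are invented, and this is not the actual icefall optimizer code.

import torch

class MedianGradClipper:  # hypothetical name
    """Clip gradients to clipping_scale * median of recent gradient norms."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale
        self.window = window  # assumed history length
        self.norms = []       # recent total gradient norms

    def clip_(self, parameters) -> None:
        params = [p for p in parameters if p.grad is not None]
        if not params:
            return
        # Total gradient norm for this step.
        norm = torch.norm(
            torch.stack([p.grad.detach().norm() for p in params])).item()
        self.norms = (self.norms + [norm])[-self.window:]
        q = torch.quantile(torch.tensor(self.norms),
                           torch.tensor([0.0, 0.25, 0.50, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()  # scale * median
        if norm > threshold:
            # Shrink all gradients so the total norm equals the threshold.
            for p in params:
                p.grad.mul_(threshold / norm)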
+2023-03-27 18:50:36,734 INFO [train.py:892] (2/4) Epoch 4, batch 1550, loss[loss=0.3009, simple_loss=0.3398, pruned_loss=0.131, over 19739.00 frames. ], tot_loss[loss=0.3155, simple_loss=0.3484, pruned_loss=0.1413, over 3945662.06 frames. ], batch size: 95, lr: 3.30e-02, grad_scale: 8.0 +2023-03-27 18:50:41,194 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7118.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 18:50:51,476 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7123.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:52:15,349 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7163.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:52:19,903 INFO [train.py:892] (2/4) Epoch 4, batch 1600, loss[loss=0.3722, simple_loss=0.386, pruned_loss=0.1792, over 19533.00 frames. ], tot_loss[loss=0.3149, simple_loss=0.3479, pruned_loss=0.1409, over 3945442.89 frames. ], batch size: 46, lr: 3.30e-02, grad_scale: 8.0 +2023-03-27 18:52:20,691 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7166.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:52:31,424 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7171.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:52:32,432 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-27 18:53:38,642 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.71 vs. limit=5.0 +2023-03-27 18:53:39,224 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.151e+02 6.559e+02 7.813e+02 1.032e+03 1.739e+03, threshold=1.563e+03, percent-clipped=2.0 +2023-03-27 18:54:01,349 INFO [train.py:892] (2/4) Epoch 4, batch 1650, loss[loss=0.2735, simple_loss=0.3163, pruned_loss=0.1154, over 19756.00 frames. ], tot_loss[loss=0.3128, simple_loss=0.3462, pruned_loss=0.1397, over 3946616.84 frames. ], batch size: 84, lr: 3.29e-02, grad_scale: 8.0 +2023-03-27 18:54:18,815 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7224.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 18:54:26,871 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.92 vs. limit=5.0 +2023-03-27 18:54:39,107 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7234.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 18:55:42,217 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7265.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:55:43,211 INFO [train.py:892] (2/4) Epoch 4, batch 1700, loss[loss=0.3213, simple_loss=0.3479, pruned_loss=0.1473, over 19810.00 frames. ], tot_loss[loss=0.3126, simple_loss=0.3459, pruned_loss=0.1397, over 3948404.53 frames.
], batch size: 224, lr: 3.28e-02, grad_scale: 8.0 +2023-03-27 18:56:02,942 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7275.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:56:23,659 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7285.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:56:29,221 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6852, 4.5040, 4.9946, 4.8924, 4.9191, 4.4358, 4.6727, 4.5423], + device='cuda:2'), covar=tensor([0.1066, 0.0908, 0.0851, 0.0711, 0.0696, 0.0843, 0.1768, 0.2197], + device='cuda:2'), in_proj_covar=tensor([0.0185, 0.0151, 0.0219, 0.0173, 0.0172, 0.0162, 0.0203, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-27 18:56:29,281 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6175, 4.9425, 5.0021, 4.9332, 4.7309, 4.8926, 4.3295, 4.4621], + device='cuda:2'), covar=tensor([0.0375, 0.0298, 0.0573, 0.0378, 0.0537, 0.0586, 0.0506, 0.0945], + device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0113, 0.0170, 0.0127, 0.0126, 0.0115, 0.0146, 0.0178], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 18:57:00,731 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.185e+02 6.685e+02 7.833e+02 9.153e+02 1.678e+03, threshold=1.567e+03, percent-clipped=4.0 +2023-03-27 18:57:20,324 INFO [train.py:892] (2/4) Epoch 4, batch 1750, loss[loss=0.2738, simple_loss=0.3123, pruned_loss=0.1177, over 19883.00 frames. ], tot_loss[loss=0.3136, simple_loss=0.3466, pruned_loss=0.1402, over 3948092.76 frames. ], batch size: 97, lr: 3.27e-02, grad_scale: 8.0 +2023-03-27 18:57:44,652 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.01 vs. limit=5.0 +2023-03-27 18:57:51,839 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7333.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:57:57,500 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7336.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:58:49,003 INFO [train.py:892] (2/4) Epoch 4, batch 1800, loss[loss=0.2678, simple_loss=0.3093, pruned_loss=0.1131, over 19600.00 frames. ], tot_loss[loss=0.311, simple_loss=0.3445, pruned_loss=0.1388, over 3949298.22 frames. 
], batch size: 44, lr: 3.27e-02, grad_scale: 8.0 +2023-03-27 18:59:04,935 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1716, 2.1507, 1.8797, 1.6358, 1.3018, 1.3842, 1.8099, 1.8925], + device='cuda:2'), covar=tensor([0.0697, 0.0688, 0.0400, 0.0432, 0.0495, 0.0848, 0.0536, 0.1012], + device='cuda:2'), in_proj_covar=tensor([0.0027, 0.0028, 0.0029, 0.0025, 0.0027, 0.0030, 0.0035, 0.0027], + device='cuda:2'), out_proj_covar=tensor([5.4074e-05, 5.2964e-05, 5.5771e-05, 4.9021e-05, 5.4327e-05, 5.8112e-05, + 6.5894e-05, 5.2220e-05], device='cuda:2') +2023-03-27 18:59:52,480 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7404.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 18:59:53,631 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 6.051e+02 7.381e+02 9.131e+02 1.849e+03, threshold=1.476e+03, percent-clipped=3.0 +2023-03-27 18:59:57,442 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7407.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:00:11,536 INFO [train.py:892] (2/4) Epoch 4, batch 1850, loss[loss=0.2918, simple_loss=0.342, pruned_loss=0.1208, over 19527.00 frames. ], tot_loss[loss=0.3122, simple_loss=0.3465, pruned_loss=0.1389, over 3947784.86 frames. ], batch size: 54, lr: 3.26e-02, grad_scale: 8.0 +2023-03-27 19:01:08,058 INFO [train.py:892] (2/4) Epoch 5, batch 0, loss[loss=0.3114, simple_loss=0.3368, pruned_loss=0.143, over 19742.00 frames. ], tot_loss[loss=0.3114, simple_loss=0.3368, pruned_loss=0.143, over 19742.00 frames. ], batch size: 209, lr: 3.03e-02, grad_scale: 8.0 +2023-03-27 19:01:08,059 INFO [train.py:917] (2/4) Computing validation loss +2023-03-27 19:01:34,467 INFO [train.py:926] (2/4) Epoch 5, validation: loss=0.2154, simple_loss=0.2917, pruned_loss=0.06955, over 2883724.00 frames. +2023-03-27 19:01:34,468 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22298MB +2023-03-27 19:01:52,195 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8359, 1.6482, 1.6355, 1.4938, 1.2058, 1.2687, 1.4806, 1.8577], + device='cuda:2'), covar=tensor([0.0297, 0.0245, 0.0321, 0.0294, 0.0347, 0.0482, 0.0490, 0.0339], + device='cuda:2'), in_proj_covar=tensor([0.0028, 0.0029, 0.0030, 0.0026, 0.0028, 0.0031, 0.0037, 0.0028], + device='cuda:2'), out_proj_covar=tensor([5.5745e-05, 5.4754e-05, 5.7701e-05, 5.1694e-05, 5.6072e-05, 5.9783e-05, + 7.1001e-05, 5.5956e-05], device='cuda:2') +2023-03-27 19:02:10,363 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-27 19:02:43,659 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-27 19:02:51,535 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.7205, 1.6157, 1.7112, 1.4343, 1.2183, 1.0861, 1.4796, 1.8685], + device='cuda:2'), covar=tensor([0.0420, 0.0346, 0.0353, 0.0332, 0.0331, 0.0867, 0.0525, 0.0380], + device='cuda:2'), in_proj_covar=tensor([0.0029, 0.0029, 0.0031, 0.0027, 0.0028, 0.0032, 0.0037, 0.0028], + device='cuda:2'), out_proj_covar=tensor([5.7041e-05, 5.5758e-05, 5.8393e-05, 5.2227e-05, 5.6201e-05, 6.1950e-05, + 7.1280e-05, 5.6255e-05], device='cuda:2') +2023-03-27 19:03:19,087 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7468.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:03:24,020 INFO [train.py:892] (2/4) Epoch 5, batch 50, loss[loss=0.2875, simple_loss=0.3189, pruned_loss=0.128, over 19789.00 frames. 
], tot_loss[loss=0.2977, simple_loss=0.3324, pruned_loss=0.1315, over 891733.61 frames. ], batch size: 191, lr: 3.03e-02, grad_scale: 8.0 +2023-03-27 19:04:36,697 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+02 6.285e+02 7.498e+02 8.995e+02 1.568e+03, threshold=1.500e+03, percent-clipped=1.0 +2023-03-27 19:04:38,312 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0 +2023-03-27 19:05:06,278 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7519.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 19:05:09,578 INFO [train.py:892] (2/4) Epoch 5, batch 100, loss[loss=0.2715, simple_loss=0.3294, pruned_loss=0.1068, over 19871.00 frames. ], tot_loss[loss=0.3012, simple_loss=0.3368, pruned_loss=0.1328, over 1568163.25 frames. ], batch size: 99, lr: 3.02e-02, grad_scale: 8.0 +2023-03-27 19:05:37,886 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7534.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 19:06:43,591 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7565.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:06:53,841 INFO [train.py:892] (2/4) Epoch 5, batch 150, loss[loss=0.3298, simple_loss=0.3756, pruned_loss=0.142, over 19865.00 frames. ], tot_loss[loss=0.3048, simple_loss=0.3405, pruned_loss=0.1346, over 2096113.13 frames. ], batch size: 48, lr: 3.01e-02, grad_scale: 8.0 +2023-03-27 19:07:19,295 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7582.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 19:08:06,879 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 5.876e+02 7.241e+02 9.334e+02 1.719e+03, threshold=1.448e+03, percent-clipped=1.0 +2023-03-27 19:08:24,631 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7613.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:08:42,172 INFO [train.py:892] (2/4) Epoch 5, batch 200, loss[loss=0.2903, simple_loss=0.3283, pruned_loss=0.1262, over 19715.00 frames. ], tot_loss[loss=0.3038, simple_loss=0.3395, pruned_loss=0.1341, over 2507659.32 frames. ], batch size: 109, lr: 3.01e-02, grad_scale: 8.0 +2023-03-27 19:08:49,139 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7624.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:09:02,491 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7631.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:09:32,944 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.69 vs. limit=2.0 +2023-03-27 19:09:43,922 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-27 19:10:17,921 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4175, 4.2305, 4.7130, 4.4909, 4.6714, 4.1231, 4.4760, 4.3469], + device='cuda:2'), covar=tensor([0.1218, 0.1146, 0.1094, 0.0880, 0.0892, 0.1043, 0.1603, 0.2137], + device='cuda:2'), in_proj_covar=tensor([0.0194, 0.0164, 0.0235, 0.0186, 0.0184, 0.0172, 0.0216, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2')
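The zipformer.py:625 entries track per-stack layer dropping: each encoder stack has its own warmup window (warmup_begin/warmup_end, measured in batches), and on a given batch some of its layers may be skipped (num_to_drop, layers_to_drop). Drops are only occasional at this stage (mostly num_to_drop=0 around batch_count 7500-7800 above), consistent with a per-layer skip probability that is higher inside the warmup window and decays to a small residual rate afterwards. A rough sketch under that assumption; the probabilities and the linear decay are invented rather than taken from zipformer.py.

import random

def layer_drop_prob(batch_count: float, warmup_begin: float,
                    warmup_end: float,
                    initial_prob: float = 0.5,          # assumed starting rate
                    final_prob: float = 0.05) -> float:  # assumed residual rate
    """Per-layer skip probability at this point in the stack's schedule."""
    if batch_count <= warmup_begin:
        return initial_prob
    if batch_count >= warmup_end:
        return final_prob
    frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
    return initial_prob + frac * (final_prob - initial_prob)

def pick_layers_to_drop(num_layers: int, p: float) -> set:
    """Independently skip each layer with probability p."""
    return {i for i in range(num_layers) if random.random() < p}

p = layer_drop_prob(batch_count=7519.0, warmup_begin=1333.3, warmup_end=2000.0)
dropped = pick_layers_to_drop(num_layers=4, p=p)
print(f"num_to_drop={len(dropped)}, layers_to_drop={dropped or set()}")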
+2023-03-27 19:10:28,530 INFO [train.py:892] (2/4) Epoch 5, batch 250, loss[loss=0.2661, simple_loss=0.3113, pruned_loss=0.1104, over 19686.00 frames. ], tot_loss[loss=0.3026, simple_loss=0.339, pruned_loss=0.1331, over 2826710.37 frames. ], batch size: 74, lr: 3.00e-02, grad_scale: 8.0 +2023-03-27 19:10:55,859 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7685.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:11:37,205 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7704.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 19:11:38,138 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.842e+02 5.939e+02 7.915e+02 9.500e+02 2.384e+03, threshold=1.583e+03, percent-clipped=4.0 +2023-03-27 19:12:10,435 INFO [train.py:892] (2/4) Epoch 5, batch 300, loss[loss=0.2519, simple_loss=0.2978, pruned_loss=0.103, over 19844.00 frames. ], tot_loss[loss=0.3021, simple_loss=0.3383, pruned_loss=0.1329, over 3077000.25 frames. ], batch size: 137, lr: 2.99e-02, grad_scale: 8.0 +2023-03-27 19:13:00,745 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7743.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:13:18,448 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7752.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 19:13:32,232 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7545, 5.1018, 5.1573, 5.0745, 4.8540, 5.1125, 4.4579, 4.6289], + device='cuda:2'), covar=tensor([0.0405, 0.0348, 0.0534, 0.0353, 0.0539, 0.0532, 0.0553, 0.0814], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0119, 0.0176, 0.0133, 0.0132, 0.0121, 0.0151, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 19:13:42,634 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7763.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:13:58,905 INFO [train.py:892] (2/4) Epoch 5, batch 350, loss[loss=0.2439, simple_loss=0.2922, pruned_loss=0.09776, over 19735.00 frames. ], tot_loss[loss=0.3006, simple_loss=0.3371, pruned_loss=0.132, over 3272225.55 frames. ], batch size: 118, lr: 2.98e-02, grad_scale: 8.0 +2023-03-27 19:14:08,079 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-27 19:14:48,573 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-27 19:15:04,483 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7802.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:15:08,620 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7804.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:15:09,453 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+02 6.339e+02 7.549e+02 9.099e+02 1.830e+03, threshold=1.510e+03, percent-clipped=2.0 +2023-03-27 19:15:38,983 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7819.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 19:15:42,011 INFO [train.py:892] (2/4) Epoch 5, batch 400, loss[loss=0.2776, simple_loss=0.3256, pruned_loss=0.1148, over 19927.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3367, pruned_loss=0.1314, over 3423515.68 frames.
], batch size: 49, lr: 2.98e-02, grad_scale: 8.0 +2023-03-27 19:16:56,058 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3262, 3.4115, 4.0403, 4.8361, 2.9616, 3.8924, 3.2001, 2.4407], + device='cuda:2'), covar=tensor([0.0312, 0.3038, 0.0612, 0.0112, 0.2228, 0.0405, 0.0809, 0.2103], + device='cuda:2'), in_proj_covar=tensor([0.0132, 0.0299, 0.0166, 0.0098, 0.0210, 0.0123, 0.0151, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 19:17:11,388 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7863.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:17:19,248 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7867.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:17:26,093 INFO [train.py:892] (2/4) Epoch 5, batch 450, loss[loss=0.3195, simple_loss=0.3515, pruned_loss=0.1437, over 19827.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3362, pruned_loss=0.1311, over 3540901.39 frames. ], batch size: 204, lr: 2.97e-02, grad_scale: 8.0 +2023-03-27 19:18:37,148 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.709e+02 6.480e+02 7.693e+02 9.079e+02 2.029e+03, threshold=1.539e+03, percent-clipped=2.0 +2023-03-27 19:18:53,732 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2091, 4.1508, 2.7270, 4.7448, 5.0657, 1.9422, 4.0288, 3.9162], + device='cuda:2'), covar=tensor([0.0486, 0.0630, 0.2358, 0.0265, 0.0073, 0.3048, 0.0671, 0.0428], + device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0141, 0.0171, 0.0091, 0.0070, 0.0171, 0.0162, 0.0104], + device='cuda:2'), out_proj_covar=tensor([1.4134e-04, 1.6169e-04, 1.8589e-04, 1.1135e-04, 8.2185e-05, 1.8150e-04, + 1.8114e-04, 1.1538e-04], device='cuda:2') +2023-03-27 19:19:00,867 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.2254, 1.7671, 1.4158, 0.8245, 1.4527, 1.5201, 1.3734, 1.5178], + device='cuda:2'), covar=tensor([0.0364, 0.0225, 0.0280, 0.0699, 0.0491, 0.0261, 0.0194, 0.0303], + device='cuda:2'), in_proj_covar=tensor([0.0036, 0.0031, 0.0033, 0.0048, 0.0049, 0.0032, 0.0027, 0.0031], + device='cuda:2'), out_proj_covar=tensor([7.2004e-05, 6.4017e-05, 6.5153e-05, 9.6307e-05, 9.8466e-05, 6.5400e-05, + 5.4452e-05, 6.2336e-05], device='cuda:2') +2023-03-27 19:19:12,234 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.66 vs. limit=2.0 +2023-03-27 19:19:12,464 INFO [train.py:892] (2/4) Epoch 5, batch 500, loss[loss=0.3372, simple_loss=0.3861, pruned_loss=0.1442, over 19661.00 frames. ], tot_loss[loss=0.2979, simple_loss=0.3349, pruned_loss=0.1304, over 3632211.96 frames. ], batch size: 55, lr: 2.96e-02, grad_scale: 8.0 +2023-03-27 19:19:34,547 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7931.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:20:57,021 INFO [train.py:892] (2/4) Epoch 5, batch 550, loss[loss=0.2957, simple_loss=0.3383, pruned_loss=0.1265, over 19872.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3367, pruned_loss=0.1313, over 3702689.44 frames. 
], batch size: 108, lr: 2.96e-02, grad_scale: 8.0 +2023-03-27 19:21:13,919 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7979.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:21:15,774 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7980.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:22:12,798 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.154e+02 6.076e+02 7.580e+02 9.097e+02 2.073e+03, threshold=1.516e+03, percent-clipped=3.0 +2023-03-27 19:22:19,664 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5250, 1.8597, 2.9842, 3.1241, 3.3384, 3.3776, 3.6362, 3.5335], + device='cuda:2'), covar=tensor([0.0656, 0.2346, 0.0501, 0.0407, 0.0329, 0.0222, 0.0182, 0.0267], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0147, 0.0088, 0.0081, 0.0070, 0.0071, 0.0065, 0.0072], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-27 19:22:44,343 INFO [train.py:892] (2/4) Epoch 5, batch 600, loss[loss=0.2451, simple_loss=0.2926, pruned_loss=0.09879, over 19836.00 frames. ], tot_loss[loss=0.2984, simple_loss=0.3357, pruned_loss=0.1306, over 3758823.27 frames. ], batch size: 90, lr: 2.95e-02, grad_scale: 8.0 +2023-03-27 19:24:12,795 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8063.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:24:28,880 INFO [train.py:892] (2/4) Epoch 5, batch 650, loss[loss=0.3088, simple_loss=0.3592, pruned_loss=0.1292, over 19720.00 frames. ], tot_loss[loss=0.3009, simple_loss=0.3382, pruned_loss=0.1318, over 3797740.15 frames. ], batch size: 50, lr: 2.94e-02, grad_scale: 8.0 +2023-03-27 19:24:51,546 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3882, 2.5144, 3.0793, 2.2150, 3.3012, 2.4399, 2.3605, 3.4688], + device='cuda:2'), covar=tensor([0.0605, 0.0428, 0.0430, 0.0532, 0.0317, 0.0383, 0.0493, 0.0231], + device='cuda:2'), in_proj_covar=tensor([0.0043, 0.0039, 0.0042, 0.0061, 0.0041, 0.0035, 0.0037, 0.0034], + device='cuda:2'), out_proj_covar=tensor([9.8905e-05, 9.2361e-05, 9.4923e-05, 1.2476e-04, 9.2852e-05, 8.2342e-05, + 8.7205e-05, 7.8339e-05], device='cuda:2') +2023-03-27 19:25:31,111 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8099.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:25:43,547 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.014e+02 6.583e+02 7.852e+02 9.012e+02 1.490e+03, threshold=1.570e+03, percent-clipped=0.0 +2023-03-27 19:25:56,285 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8111.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:26:16,753 INFO [train.py:892] (2/4) Epoch 5, batch 700, loss[loss=0.2673, simple_loss=0.3059, pruned_loss=0.1144, over 19706.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3382, pruned_loss=0.1319, over 3831833.11 frames. ], batch size: 101, lr: 2.94e-02, grad_scale: 8.0 +2023-03-27 19:27:15,399 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.44 vs. limit=5.0 +2023-03-27 19:27:38,094 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8158.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:28:05,562 INFO [train.py:892] (2/4) Epoch 5, batch 750, loss[loss=0.3588, simple_loss=0.4034, pruned_loss=0.1571, over 19670.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3395, pruned_loss=0.1327, over 3856262.94 frames. 
], batch size: 55, lr: 2.93e-02, grad_scale: 8.0 +2023-03-27 19:29:16,504 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.022e+02 6.531e+02 7.679e+02 9.153e+02 1.674e+03, threshold=1.536e+03, percent-clipped=1.0 +2023-03-27 19:29:50,721 INFO [train.py:892] (2/4) Epoch 5, batch 800, loss[loss=0.2862, simple_loss=0.3267, pruned_loss=0.1228, over 19904.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3392, pruned_loss=0.1323, over 3876112.40 frames. ], batch size: 71, lr: 2.92e-02, grad_scale: 8.0 +2023-03-27 19:30:41,048 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9879, 2.4036, 2.8782, 3.3454, 3.6054, 3.2880, 4.1058, 4.0485], + device='cuda:2'), covar=tensor([0.0447, 0.1484, 0.0880, 0.1294, 0.0739, 0.0953, 0.0178, 0.0248], + device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0166, 0.0157, 0.0185, 0.0158, 0.0156, 0.0103, 0.0104], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:2') +2023-03-27 19:31:25,535 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8265.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:31:36,255 INFO [train.py:892] (2/4) Epoch 5, batch 850, loss[loss=0.3111, simple_loss=0.3499, pruned_loss=0.1362, over 19780.00 frames. ], tot_loss[loss=0.3005, simple_loss=0.338, pruned_loss=0.1315, over 3893721.01 frames. ], batch size: 53, lr: 2.92e-02, grad_scale: 8.0 +2023-03-27 19:31:57,887 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8280.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:32:20,834 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9104, 3.4930, 3.4726, 3.9002, 3.4576, 3.7827, 3.8319, 4.0778], + device='cuda:2'), covar=tensor([0.0543, 0.0405, 0.0521, 0.0280, 0.0648, 0.0354, 0.0373, 0.0302], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0110, 0.0128, 0.0110, 0.0108, 0.0087, 0.0111, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 19:32:49,194 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.707e+02 5.455e+02 6.919e+02 8.386e+02 1.759e+03, threshold=1.384e+03, percent-clipped=2.0 +2023-03-27 19:33:22,218 INFO [train.py:892] (2/4) Epoch 5, batch 900, loss[loss=0.3049, simple_loss=0.3338, pruned_loss=0.138, over 19788.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3388, pruned_loss=0.1322, over 3905008.57 frames. ], batch size: 73, lr: 2.91e-02, grad_scale: 16.0 +2023-03-27 19:33:34,322 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8326.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:33:37,987 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8328.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:34:54,024 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-27 19:35:10,577 INFO [train.py:892] (2/4) Epoch 5, batch 950, loss[loss=0.33, simple_loss=0.3535, pruned_loss=0.1533, over 19837.00 frames. ], tot_loss[loss=0.3007, simple_loss=0.3382, pruned_loss=0.1316, over 3916166.91 frames. 
], batch size: 239, lr: 2.91e-02, grad_scale: 16.0 +2023-03-27 19:35:46,568 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8976, 3.8370, 3.9528, 3.7398, 4.0154, 3.1126, 3.1788, 3.2749], + device='cuda:2'), covar=tensor([0.0397, 0.0206, 0.0264, 0.0248, 0.0255, 0.0842, 0.1078, 0.0576], + device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0071, 0.0069, 0.0079, 0.0071, 0.0094, 0.0107, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 19:36:10,149 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8399.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:36:10,244 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.7474, 2.0284, 1.6685, 1.2858, 1.8907, 2.1585, 2.0376, 1.9288], + device='cuda:2'), covar=tensor([0.0267, 0.0275, 0.0216, 0.0624, 0.0349, 0.0306, 0.0138, 0.0352], + device='cuda:2'), in_proj_covar=tensor([0.0035, 0.0032, 0.0034, 0.0049, 0.0049, 0.0032, 0.0026, 0.0031], + device='cuda:2'), out_proj_covar=tensor([7.0640e-05, 6.8238e-05, 6.8223e-05, 1.0171e-04, 9.9611e-05, 6.6474e-05, + 5.4802e-05, 6.4954e-05], device='cuda:2') +2023-03-27 19:36:23,189 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 6.186e+02 7.459e+02 8.919e+02 2.032e+03, threshold=1.492e+03, percent-clipped=3.0 +2023-03-27 19:36:43,531 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0866, 3.9191, 4.0473, 3.8039, 4.1699, 3.1562, 3.3115, 3.2886], + device='cuda:2'), covar=tensor([0.0343, 0.0182, 0.0177, 0.0183, 0.0143, 0.0722, 0.0970, 0.0563], + device='cuda:2'), in_proj_covar=tensor([0.0059, 0.0072, 0.0069, 0.0079, 0.0070, 0.0094, 0.0106, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 19:36:55,722 INFO [train.py:892] (2/4) Epoch 5, batch 1000, loss[loss=0.2702, simple_loss=0.3224, pruned_loss=0.109, over 19833.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3369, pruned_loss=0.1306, over 3925023.94 frames. ], batch size: 59, lr: 2.90e-02, grad_scale: 16.0 +2023-03-27 19:37:08,308 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6937, 4.5816, 5.0514, 4.8981, 4.9591, 4.5004, 4.7310, 4.7809], + device='cuda:2'), covar=tensor([0.1206, 0.1003, 0.0965, 0.0760, 0.0731, 0.0937, 0.1822, 0.1854], + device='cuda:2'), in_proj_covar=tensor([0.0197, 0.0165, 0.0236, 0.0189, 0.0180, 0.0175, 0.0219, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-27 19:37:53,241 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8447.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:38:15,155 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8458.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:38:21,259 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.67 vs. limit=5.0 +2023-03-27 19:38:41,299 INFO [train.py:892] (2/4) Epoch 5, batch 1050, loss[loss=0.3097, simple_loss=0.3391, pruned_loss=0.1401, over 19776.00 frames. ], tot_loss[loss=0.2975, simple_loss=0.3353, pruned_loss=0.1298, over 3929719.98 frames. 
], batch size: 191, lr: 2.89e-02, grad_scale: 16.0 +2023-03-27 19:39:20,349 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9225, 2.1071, 1.5340, 1.2908, 1.7986, 2.2569, 2.0583, 2.3088], + device='cuda:2'), covar=tensor([0.0231, 0.0234, 0.0281, 0.0649, 0.0496, 0.0159, 0.0133, 0.0166], + device='cuda:2'), in_proj_covar=tensor([0.0036, 0.0033, 0.0035, 0.0051, 0.0051, 0.0033, 0.0026, 0.0031], + device='cuda:2'), out_proj_covar=tensor([7.3389e-05, 7.0813e-05, 7.1231e-05, 1.0462e-04, 1.0397e-04, 6.8624e-05, + 5.6274e-05, 6.3830e-05], device='cuda:2') +2023-03-27 19:39:52,153 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.605e+02 6.353e+02 7.329e+02 8.982e+02 1.532e+03, threshold=1.466e+03, percent-clipped=3.0 +2023-03-27 19:39:54,938 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8506.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:40:18,600 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3153, 4.7371, 4.9686, 4.5336, 4.2807, 4.8644, 4.7932, 5.2304], + device='cuda:2'), covar=tensor([0.1338, 0.0247, 0.0321, 0.0313, 0.0479, 0.0271, 0.0269, 0.0212], + device='cuda:2'), in_proj_covar=tensor([0.0194, 0.0137, 0.0134, 0.0135, 0.0140, 0.0129, 0.0116, 0.0119], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 19:40:27,774 INFO [train.py:892] (2/4) Epoch 5, batch 1100, loss[loss=0.2928, simple_loss=0.3259, pruned_loss=0.1299, over 19826.00 frames. ], tot_loss[loss=0.2953, simple_loss=0.3339, pruned_loss=0.1283, over 3934564.35 frames. ], batch size: 208, lr: 2.89e-02, grad_scale: 16.0 +2023-03-27 19:42:10,851 INFO [train.py:892] (2/4) Epoch 5, batch 1150, loss[loss=0.4568, simple_loss=0.4546, pruned_loss=0.2295, over 19403.00 frames. ], tot_loss[loss=0.2936, simple_loss=0.3322, pruned_loss=0.1275, over 3938405.45 frames. ], batch size: 412, lr: 2.88e-02, grad_scale: 16.0 +2023-03-27 19:42:34,069 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7883, 3.4089, 3.4097, 3.8402, 3.5472, 3.6890, 3.7678, 3.9798], + device='cuda:2'), covar=tensor([0.0546, 0.0360, 0.0482, 0.0252, 0.0397, 0.0390, 0.0387, 0.0296], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0109, 0.0126, 0.0108, 0.0105, 0.0086, 0.0111, 0.0124], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 19:43:21,775 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 6.114e+02 7.456e+02 8.704e+02 1.962e+03, threshold=1.491e+03, percent-clipped=1.0 +2023-03-27 19:43:56,806 INFO [train.py:892] (2/4) Epoch 5, batch 1200, loss[loss=0.2714, simple_loss=0.3041, pruned_loss=0.1194, over 19739.00 frames. ], tot_loss[loss=0.2945, simple_loss=0.3326, pruned_loss=0.1282, over 3942426.01 frames. ], batch size: 134, lr: 2.87e-02, grad_scale: 16.0 +2023-03-27 19:43:57,679 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8621.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:44:25,417 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-27 19:45:41,167 INFO [train.py:892] (2/4) Epoch 5, batch 1250, loss[loss=0.2982, simple_loss=0.3269, pruned_loss=0.1347, over 19802.00 frames. ], tot_loss[loss=0.2942, simple_loss=0.3325, pruned_loss=0.128, over 3944183.22 frames. 
], batch size: 117, lr: 2.87e-02, grad_scale: 16.0
+2023-03-27 19:46:34,036 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7262, 3.0441, 3.4174, 3.7701, 2.4022, 3.3224, 2.4393, 2.3468],
+ device='cuda:2'), covar=tensor([0.0354, 0.2678, 0.0718, 0.0125, 0.2354, 0.0431, 0.1086, 0.2066],
+ device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0298, 0.0172, 0.0094, 0.0208, 0.0127, 0.0156, 0.0189],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 19:46:53,148 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.783e+02 6.822e+02 8.338e+02 1.055e+03 2.150e+03, threshold=1.668e+03, percent-clipped=7.0
+2023-03-27 19:47:24,982 INFO [train.py:892] (2/4) Epoch 5, batch 1300, loss[loss=0.2855, simple_loss=0.3268, pruned_loss=0.1221, over 19679.00 frames. ], tot_loss[loss=0.2928, simple_loss=0.3312, pruned_loss=0.1272, over 3945373.85 frames. ], batch size: 82, lr: 2.86e-02, grad_scale: 16.0
+2023-03-27 19:47:34,306 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2555, 2.4089, 2.9701, 2.0918, 2.5466, 2.2264, 2.5298, 2.5731],
+ device='cuda:2'), covar=tensor([0.0815, 0.0377, 0.0361, 0.0619, 0.0385, 0.0389, 0.0445, 0.0350],
+ device='cuda:2'), in_proj_covar=tensor([0.0044, 0.0041, 0.0041, 0.0061, 0.0042, 0.0036, 0.0038, 0.0035],
+ device='cuda:2'), out_proj_covar=tensor([1.0456e-04, 9.9063e-05, 9.7049e-05, 1.2957e-04, 9.8208e-05, 8.8959e-05,
+ 9.4153e-05, 8.5052e-05], device='cuda:2')
+2023-03-27 19:48:16,569 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1502, 4.6457, 4.5992, 5.2040, 4.8056, 5.3321, 5.0578, 5.4178],
+ device='cuda:2'), covar=tensor([0.0518, 0.0291, 0.0413, 0.0195, 0.0410, 0.0141, 0.0319, 0.0269],
+ device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0117, 0.0138, 0.0120, 0.0114, 0.0093, 0.0122, 0.0136],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 19:48:48,202 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8760.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:49:09,894 INFO [train.py:892] (2/4) Epoch 5, batch 1350, loss[loss=0.2421, simple_loss=0.294, pruned_loss=0.09508, over 19682.00 frames. ], tot_loss[loss=0.2958, simple_loss=0.3338, pruned_loss=0.1289, over 3945441.69 frames. ], batch size: 45, lr: 2.86e-02, grad_scale: 16.0
+2023-03-27 19:50:22,593 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.065e+02 5.700e+02 7.084e+02 8.473e+02 1.458e+03, threshold=1.417e+03, percent-clipped=0.0
+2023-03-27 19:50:39,815 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4675, 3.0876, 3.1981, 3.5436, 3.2953, 3.2650, 3.5547, 3.6817],
+ device='cuda:2'), covar=tensor([0.0517, 0.0406, 0.0478, 0.0290, 0.0427, 0.0751, 0.0348, 0.0344],
+ device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0111, 0.0131, 0.0113, 0.0106, 0.0086, 0.0113, 0.0129],
+ device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 19:50:58,493 INFO [train.py:892] (2/4) Epoch 5, batch 1400, loss[loss=0.2908, simple_loss=0.3403, pruned_loss=0.1206, over 19715.00 frames. ], tot_loss[loss=0.298, simple_loss=0.3355, pruned_loss=0.1302, over 3945516.87 frames. ], batch size: 54, lr: 2.85e-02, grad_scale: 16.0
+2023-03-27 19:50:59,390 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8821.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:51:59,797 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7161, 4.3069, 4.3830, 4.8256, 4.3655, 4.9078, 4.8093, 4.9676],
+ device='cuda:2'), covar=tensor([0.0571, 0.0310, 0.0392, 0.0228, 0.0416, 0.0176, 0.0347, 0.0302],
+ device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0115, 0.0135, 0.0117, 0.0110, 0.0089, 0.0118, 0.0133],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 19:52:39,444 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6657, 2.6792, 3.4876, 2.9733, 3.3556, 3.5252, 3.3043, 3.1813],
+ device='cuda:2'), covar=tensor([0.0140, 0.0447, 0.0105, 0.0967, 0.0117, 0.0175, 0.0196, 0.0151],
+ device='cuda:2'), in_proj_covar=tensor([0.0056, 0.0064, 0.0056, 0.0131, 0.0050, 0.0056, 0.0056, 0.0049],
+ device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0003, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-27 19:52:40,280 INFO [train.py:892] (2/4) Epoch 5, batch 1450, loss[loss=0.2928, simple_loss=0.3313, pruned_loss=0.1271, over 19780.00 frames. ], tot_loss[loss=0.2967, simple_loss=0.3348, pruned_loss=0.1293, over 3948336.14 frames. ], batch size: 215, lr: 2.84e-02, grad_scale: 16.0
+2023-03-27 19:53:52,854 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+02 6.588e+02 7.767e+02 1.033e+03 1.538e+03, threshold=1.553e+03, percent-clipped=3.0
+2023-03-27 19:54:26,574 INFO [train.py:892] (2/4) Epoch 5, batch 1500, loss[loss=0.2669, simple_loss=0.3067, pruned_loss=0.1136, over 19795.00 frames. ], tot_loss[loss=0.2948, simple_loss=0.333, pruned_loss=0.1283, over 3949496.43 frames. ], batch size: 120, lr: 2.84e-02, grad_scale: 16.0
+2023-03-27 19:54:27,658 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8921.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:54:40,853 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8927.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:54:44,264 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0238, 3.9372, 4.3988, 4.2459, 4.3895, 3.7722, 4.0385, 4.0792],
+ device='cuda:2'), covar=tensor([0.1333, 0.1054, 0.0932, 0.0820, 0.0764, 0.1136, 0.1828, 0.1924],
+ device='cuda:2'), in_proj_covar=tensor([0.0199, 0.0168, 0.0232, 0.0185, 0.0184, 0.0177, 0.0218, 0.0268],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-27 19:55:38,547 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.63 vs. limit=2.0
+2023-03-27 19:56:01,655 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0
+2023-03-27 19:56:10,554 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8969.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:56:13,846 INFO [train.py:892] (2/4) Epoch 5, batch 1550, loss[loss=0.2612, simple_loss=0.3107, pruned_loss=0.1059, over 19866.00 frames. ], tot_loss[loss=0.2961, simple_loss=0.3344, pruned_loss=0.1289, over 3947587.00 frames. ], batch size: 46, lr: 2.83e-02, grad_scale: 16.0
+2023-03-27 19:56:28,310 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5408, 1.6749, 3.0100, 3.0682, 3.4461, 3.5270, 3.6876, 3.6253],
+ device='cuda:2'), covar=tensor([0.0748, 0.2350, 0.0462, 0.0449, 0.0268, 0.0154, 0.0193, 0.0242],
+ device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0158, 0.0099, 0.0095, 0.0077, 0.0076, 0.0071, 0.0076],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:2')
+2023-03-27 19:56:45,257 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8986.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:56:49,270 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0
+2023-03-27 19:56:50,835 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8988.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:57:26,090 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.861e+02 6.933e+02 8.857e+02 2.450e+03, threshold=1.387e+03, percent-clipped=2.0
+2023-03-27 19:57:42,832 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6132, 1.6882, 2.8811, 3.0981, 3.4657, 3.5426, 3.6361, 3.6668],
+ device='cuda:2'), covar=tensor([0.0671, 0.2091, 0.0466, 0.0362, 0.0223, 0.0127, 0.0157, 0.0160],
+ device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0152, 0.0097, 0.0092, 0.0075, 0.0074, 0.0069, 0.0074],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:2')
+2023-03-27 19:57:42,859 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9013.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:57:57,841 INFO [train.py:892] (2/4) Epoch 5, batch 1600, loss[loss=0.2696, simple_loss=0.3166, pruned_loss=0.1113, over 19857.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3341, pruned_loss=0.1286, over 3947973.50 frames. ], batch size: 78, lr: 2.83e-02, grad_scale: 16.0
+2023-03-27 19:58:11,810 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4178, 2.5347, 3.7103, 2.9533, 3.2174, 3.7851, 2.2857, 2.2308],
+ device='cuda:2'), covar=tensor([0.0611, 0.2610, 0.0303, 0.0460, 0.0891, 0.0285, 0.0987, 0.1630],
+ device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0275, 0.0163, 0.0157, 0.0251, 0.0142, 0.0178, 0.0189],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 19:58:19,833 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7782, 2.4549, 1.2095, 3.1446, 2.9948, 3.0628, 3.1226, 2.6566],
+ device='cuda:2'), covar=tensor([0.0514, 0.0470, 0.1901, 0.0488, 0.0429, 0.0378, 0.0526, 0.0505],
+ device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0079, 0.0111, 0.0089, 0.0076, 0.0068, 0.0077, 0.0090],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 19:58:52,929 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9047.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:59:40,271 INFO [train.py:892] (2/4) Epoch 5, batch 1650, loss[loss=0.29, simple_loss=0.3373, pruned_loss=0.1214, over 19847.00 frames. ], tot_loss[loss=0.2939, simple_loss=0.3325, pruned_loss=0.1277, over 3948569.08 frames. ], batch size: 49, lr: 2.82e-02, grad_scale: 16.0
+2023-03-27 19:59:48,596 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9074.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:00:51,663 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.078e+02 6.548e+02 7.778e+02 9.936e+02 1.704e+03, threshold=1.556e+03, percent-clipped=1.0
+2023-03-27 20:01:16,362 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9116.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:01:25,585 INFO [train.py:892] (2/4) Epoch 5, batch 1700, loss[loss=0.2801, simple_loss=0.3253, pruned_loss=0.1174, over 19689.00 frames. ], tot_loss[loss=0.2946, simple_loss=0.3335, pruned_loss=0.1279, over 3948718.77 frames. ], batch size: 74, lr: 2.81e-02, grad_scale: 16.0
+2023-03-27 20:02:22,471 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8467, 2.2843, 3.0535, 2.5488, 2.4576, 3.1437, 1.8885, 2.0619],
+ device='cuda:2'), covar=tensor([0.0581, 0.1787, 0.0355, 0.0505, 0.1036, 0.0287, 0.0938, 0.1340],
+ device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0279, 0.0165, 0.0160, 0.0253, 0.0145, 0.0179, 0.0189],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 20:02:55,613 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0838, 3.1904, 3.6934, 4.4148, 2.7541, 3.5056, 2.8971, 2.2929],
+ device='cuda:2'), covar=tensor([0.0336, 0.2698, 0.0628, 0.0093, 0.1882, 0.0411, 0.0778, 0.1957],
+ device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0303, 0.0176, 0.0099, 0.0213, 0.0131, 0.0159, 0.0190],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 20:03:04,911 INFO [train.py:892] (2/4) Epoch 5, batch 1750, loss[loss=0.256, simple_loss=0.3076, pruned_loss=0.1023, over 19800.00 frames. ], tot_loss[loss=0.294, simple_loss=0.3332, pruned_loss=0.1274, over 3948809.13 frames. ], batch size: 83, lr: 2.81e-02, grad_scale: 16.0
+2023-03-27 20:03:54,901 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4772, 3.9639, 4.0237, 4.4591, 4.1428, 4.4868, 4.5086, 4.6823],
+ device='cuda:2'), covar=tensor([0.0542, 0.0345, 0.0446, 0.0291, 0.0467, 0.0229, 0.0374, 0.0330],
+ device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0115, 0.0136, 0.0119, 0.0112, 0.0091, 0.0122, 0.0132],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 20:04:05,087 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+02 6.292e+02 7.596e+02 9.035e+02 1.874e+03, threshold=1.519e+03, percent-clipped=3.0
+2023-03-27 20:04:23,784 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2511, 2.2268, 3.4281, 3.6698, 3.8796, 4.2546, 4.2620, 4.1767],
+ device='cuda:2'), covar=tensor([0.0470, 0.1967, 0.0463, 0.0399, 0.0270, 0.0126, 0.0151, 0.0200],
+ device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0148, 0.0094, 0.0090, 0.0075, 0.0074, 0.0068, 0.0074],
+ device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:2')
+2023-03-27 20:04:33,319 INFO [train.py:892] (2/4) Epoch 5, batch 1800, loss[loss=0.4279, simple_loss=0.4207, pruned_loss=0.2175, over 19643.00 frames. ], tot_loss[loss=0.2928, simple_loss=0.3319, pruned_loss=0.1269, over 3949820.07 frames. ], batch size: 351, lr: 2.80e-02, grad_scale: 16.0
+2023-03-27 20:06:00,353 INFO [train.py:892] (2/4) Epoch 5, batch 1850, loss[loss=0.2912, simple_loss=0.3475, pruned_loss=0.1175, over 19665.00 frames. ], tot_loss[loss=0.293, simple_loss=0.3329, pruned_loss=0.1266, over 3950027.27 frames. ], batch size: 55, lr: 2.80e-02, grad_scale: 16.0
+2023-03-27 20:06:59,473 INFO [train.py:892] (2/4) Epoch 6, batch 0, loss[loss=0.2694, simple_loss=0.3133, pruned_loss=0.1127, over 19946.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3133, pruned_loss=0.1127, over 19946.00 frames. ], batch size: 46, lr: 2.61e-02, grad_scale: 16.0
+2023-03-27 20:06:59,473 INFO [train.py:917] (2/4) Computing validation loss
+2023-03-27 20:07:11,720 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4984, 4.2688, 4.2868, 4.1158, 4.5206, 3.2610, 3.5582, 3.5638],
+ device='cuda:2'), covar=tensor([0.0151, 0.0151, 0.0153, 0.0159, 0.0116, 0.0712, 0.0938, 0.0442],
+ device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0075, 0.0075, 0.0083, 0.0075, 0.0098, 0.0111, 0.0093],
+ device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 20:07:25,896 INFO [train.py:926] (2/4) Epoch 6, validation: loss=0.2048, simple_loss=0.2829, pruned_loss=0.06328, over 2883724.00 frames.
+2023-03-27 20:07:25,897 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22298MB
+2023-03-27 20:07:45,226 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9283.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:08:30,071 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 6.201e+02 7.498e+02 9.060e+02 1.792e+03, threshold=1.500e+03, percent-clipped=2.0
+2023-03-27 20:08:37,011 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4368, 3.5195, 3.8660, 4.9722, 2.8244, 3.6852, 3.0887, 2.3423],
+ device='cuda:2'), covar=tensor([0.0324, 0.3143, 0.0761, 0.0094, 0.2345, 0.0568, 0.1002, 0.2231],
+ device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0310, 0.0181, 0.0101, 0.0214, 0.0134, 0.0162, 0.0193],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 20:09:14,656 INFO [train.py:892] (2/4) Epoch 6, batch 50, loss[loss=0.2646, simple_loss=0.3135, pruned_loss=0.1079, over 19853.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3217, pruned_loss=0.1195, over 891900.43 frames. ], batch size: 124, lr: 2.60e-02, grad_scale: 16.0
+2023-03-27 20:09:44,690 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.6678, 1.5970, 1.7673, 1.7811, 1.4007, 1.5875, 1.5179, 1.8428],
+ device='cuda:2'), covar=tensor([0.0281, 0.0360, 0.0273, 0.0175, 0.0388, 0.0397, 0.0408, 0.0309],
+ device='cuda:2'), in_proj_covar=tensor([0.0031, 0.0034, 0.0033, 0.0027, 0.0033, 0.0034, 0.0040, 0.0031],
+ device='cuda:2'), out_proj_covar=tensor([6.6222e-05, 7.1161e-05, 6.8362e-05, 5.6418e-05, 7.0215e-05, 7.0947e-05,
+ 8.3508e-05, 6.7005e-05], device='cuda:2')
+2023-03-27 20:09:48,472 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9342.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:10:46,092 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9369.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:10:59,008 INFO [train.py:892] (2/4) Epoch 6, batch 100, loss[loss=0.2555, simple_loss=0.2968, pruned_loss=0.1071, over 19756.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3233, pruned_loss=0.1186, over 1568835.90 frames. ], batch size: 155, lr: 2.60e-02, grad_scale: 16.0
+2023-03-27 20:11:57,392 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0
+2023-03-27 20:12:00,063 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.626e+02 6.778e+02 8.697e+02 1.693e+03, threshold=1.356e+03, percent-clipped=2.0
+2023-03-27 20:12:20,516 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9414.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:12:24,261 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9416.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:12:43,767 INFO [train.py:892] (2/4) Epoch 6, batch 150, loss[loss=0.374, simple_loss=0.3897, pruned_loss=0.1791, over 19706.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3262, pruned_loss=0.1211, over 2097776.11 frames. ], batch size: 337, lr: 2.59e-02, grad_scale: 16.0
+2023-03-27 20:13:29,729 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8979, 2.3503, 2.9228, 3.0037, 3.4301, 3.3125, 3.7700, 4.0077],
+ device='cuda:2'), covar=tensor([0.0397, 0.1523, 0.0995, 0.1401, 0.1112, 0.0903, 0.0262, 0.0307],
+ device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0177, 0.0172, 0.0199, 0.0186, 0.0175, 0.0114, 0.0117],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001],
+ device='cuda:2')
+2023-03-27 20:14:08,397 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9464.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:14:32,759 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9475.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:14:33,656 INFO [train.py:892] (2/4) Epoch 6, batch 200, loss[loss=0.2653, simple_loss=0.3239, pruned_loss=0.1033, over 19674.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3286, pruned_loss=0.1218, over 2508163.40 frames. ], batch size: 56, lr: 2.59e-02, grad_scale: 16.0
+2023-03-27 20:15:34,371 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.804e+02 5.747e+02 7.100e+02 9.043e+02 1.475e+03, threshold=1.420e+03, percent-clipped=5.0
+2023-03-27 20:16:16,359 INFO [train.py:892] (2/4) Epoch 6, batch 250, loss[loss=0.2422, simple_loss=0.2974, pruned_loss=0.09355, over 19770.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3278, pruned_loss=0.1216, over 2828698.65 frames. ], batch size: 46, lr: 2.58e-02, grad_scale: 16.0
+2023-03-27 20:18:00,757 INFO [train.py:892] (2/4) Epoch 6, batch 300, loss[loss=0.2832, simple_loss=0.3325, pruned_loss=0.117, over 19667.00 frames. ], tot_loss[loss=0.2843, simple_loss=0.3272, pruned_loss=0.1207, over 3077529.29 frames. ], batch size: 67, lr: 2.58e-02, grad_scale: 16.0
+2023-03-27 20:18:08,288 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6677, 3.5157, 2.3949, 3.8495, 3.9268, 1.7795, 3.2418, 3.2968],
+ device='cuda:2'), covar=tensor([0.0479, 0.0692, 0.2193, 0.0396, 0.0153, 0.2889, 0.0833, 0.0464],
+ device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0157, 0.0181, 0.0111, 0.0082, 0.0181, 0.0180, 0.0114],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-27 20:18:19,787 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9583.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:18:21,693 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9584.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:18:41,720 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0
+2023-03-27 20:19:02,341 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.913e+02 5.609e+02 7.067e+02 9.111e+02 1.524e+03, threshold=1.413e+03, percent-clipped=2.0
+2023-03-27 20:19:49,281 INFO [train.py:892] (2/4) Epoch 6, batch 350, loss[loss=0.2815, simple_loss=0.3162, pruned_loss=0.1234, over 19872.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3276, pruned_loss=0.1216, over 3271521.83 frames. ], batch size: 138, lr: 2.57e-02, grad_scale: 16.0
+2023-03-27 20:20:00,201 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9631.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:20:22,932 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9642.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:20:29,084 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9645.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:20:57,578 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9658.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:21:18,773 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9669.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:21:32,070 INFO [train.py:892] (2/4) Epoch 6, batch 400, loss[loss=0.2956, simple_loss=0.3348, pruned_loss=0.1282, over 19764.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3252, pruned_loss=0.1197, over 3422839.74 frames. ], batch size: 244, lr: 2.57e-02, grad_scale: 16.0
+2023-03-27 20:22:03,527 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9690.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:22:24,508 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7099, 2.4068, 2.8412, 2.0893, 3.0137, 2.1520, 2.7182, 2.8916],
+ device='cuda:2'), covar=tensor([0.0364, 0.0304, 0.0545, 0.0523, 0.0215, 0.0419, 0.0251, 0.0185],
+ device='cuda:2'), in_proj_covar=tensor([0.0045, 0.0040, 0.0044, 0.0062, 0.0043, 0.0038, 0.0039, 0.0035],
+ device='cuda:2'), out_proj_covar=tensor([1.1065e-04, 1.0190e-04, 1.0986e-04, 1.4253e-04, 1.0554e-04, 9.5501e-05,
+ 1.0098e-04, 8.6450e-05], device='cuda:2')
+2023-03-27 20:22:32,708 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 5.768e+02 7.171e+02 9.130e+02 1.746e+03, threshold=1.434e+03, percent-clipped=4.0
+2023-03-27 20:22:58,074 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9717.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:23:02,341 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9719.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:23:16,550 INFO [train.py:892] (2/4) Epoch 6, batch 450, loss[loss=0.2539, simple_loss=0.2992, pruned_loss=0.1043, over 19800.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3262, pruned_loss=0.1205, over 3540537.45 frames. ], batch size: 150, lr: 2.56e-02, grad_scale: 16.0
+2023-03-27 20:23:45,220 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0
+2023-03-27 20:23:54,106 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9743.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:24:09,475 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7219, 2.9604, 3.4352, 3.9529, 2.3933, 3.0105, 2.6592, 2.1207],
+ device='cuda:2'), covar=tensor([0.0348, 0.2438, 0.0639, 0.0122, 0.2215, 0.0507, 0.0938, 0.2026],
+ device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0306, 0.0183, 0.0100, 0.0216, 0.0134, 0.0168, 0.0194],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 20:24:27,852 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.29 vs. limit=5.0
+2023-03-27 20:24:35,684 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0
+2023-03-27 20:24:50,382 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9770.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:25:02,464 INFO [train.py:892] (2/4) Epoch 6, batch 500, loss[loss=0.3139, simple_loss=0.3613, pruned_loss=0.1333, over 19843.00 frames. ], tot_loss[loss=0.2837, simple_loss=0.3261, pruned_loss=0.1207, over 3631980.64 frames. ], batch size: 58, lr: 2.56e-02, grad_scale: 16.0
+2023-03-27 20:26:01,096 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9804.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:26:01,919 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.739e+02 6.410e+02 7.467e+02 9.319e+02 1.836e+03, threshold=1.493e+03, percent-clipped=5.0
+2023-03-27 20:26:22,107 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.90 vs. limit=5.0
+2023-03-27 20:26:44,447 INFO [train.py:892] (2/4) Epoch 6, batch 550, loss[loss=0.268, simple_loss=0.326, pruned_loss=0.105, over 19789.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3239, pruned_loss=0.1188, over 3703721.50 frames. ], batch size: 91, lr: 2.55e-02, grad_scale: 16.0
+2023-03-27 20:28:22,149 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0
+2023-03-27 20:28:30,660 INFO [train.py:892] (2/4) Epoch 6, batch 600, loss[loss=0.2693, simple_loss=0.3068, pruned_loss=0.1159, over 19828.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3225, pruned_loss=0.1175, over 3757806.08 frames. ], batch size: 208, lr: 2.54e-02, grad_scale: 16.0
+2023-03-27 20:28:37,948 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8408, 2.5890, 3.2947, 2.0522, 3.1273, 2.4798, 2.4954, 3.4137],
+ device='cuda:2'), covar=tensor([0.0521, 0.0315, 0.0367, 0.0625, 0.0273, 0.0302, 0.0606, 0.0134],
+ device='cuda:2'), in_proj_covar=tensor([0.0044, 0.0040, 0.0044, 0.0064, 0.0044, 0.0037, 0.0039, 0.0035],
+ device='cuda:2'), out_proj_covar=tensor([1.1038e-04, 1.0194e-04, 1.1012e-04, 1.4610e-04, 1.0777e-04, 9.4935e-05,
+ 1.0211e-04, 8.8023e-05], device='cuda:2')
+2023-03-27 20:29:32,379 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.880e+02 5.938e+02 7.019e+02 8.775e+02 1.647e+03, threshold=1.404e+03, percent-clipped=2.0
+2023-03-27 20:30:16,204 INFO [train.py:892] (2/4) Epoch 6, batch 650, loss[loss=0.2894, simple_loss=0.3286, pruned_loss=0.1251, over 19894.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3246, pruned_loss=0.1196, over 3800530.62 frames. ], batch size: 63, lr: 2.54e-02, grad_scale: 16.0
+2023-03-27 20:30:38,609 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0
+2023-03-27 20:30:45,578 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9940.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:32:00,348 INFO [train.py:892] (2/4) Epoch 6, batch 700, loss[loss=0.2668, simple_loss=0.3128, pruned_loss=0.1104, over 19844.00 frames. ], tot_loss[loss=0.2835, simple_loss=0.3261, pruned_loss=0.1205, over 3832155.27 frames. ], batch size: 145, lr: 2.53e-02, grad_scale: 16.0
+2023-03-27 20:32:13,361 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8311, 4.5036, 4.7797, 4.4940, 4.7827, 3.3641, 3.8156, 3.2478],
+ device='cuda:2'), covar=tensor([0.0190, 0.0149, 0.0115, 0.0128, 0.0148, 0.0659, 0.1005, 0.0786],
+ device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0080, 0.0079, 0.0088, 0.0080, 0.0102, 0.0114, 0.0096],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-27 20:32:56,251 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7589, 3.5636, 3.6360, 3.4428, 3.7381, 2.8386, 3.0719, 2.3287],
+ device='cuda:2'), covar=tensor([0.0168, 0.0202, 0.0163, 0.0172, 0.0147, 0.0781, 0.0777, 0.0897],
+ device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0081, 0.0080, 0.0089, 0.0080, 0.0102, 0.0114, 0.0097],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-27 20:33:05,035 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.264e+02 6.107e+02 7.493e+02 9.243e+02 1.709e+03, threshold=1.499e+03, percent-clipped=3.0
+2023-03-27 20:33:24,828 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10014.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:33:25,133 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4632, 3.6861, 4.0612, 5.0474, 3.1592, 3.9284, 3.3031, 2.6302],
+ device='cuda:2'), covar=tensor([0.0322, 0.2394, 0.0704, 0.0085, 0.2098, 0.0486, 0.0883, 0.1929],
+ device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0313, 0.0186, 0.0104, 0.0215, 0.0133, 0.0168, 0.0194],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 20:33:49,829 INFO [train.py:892] (2/4) Epoch 6, batch 750, loss[loss=0.2594, simple_loss=0.299, pruned_loss=0.1099, over 19853.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3263, pruned_loss=0.1207, over 3858484.32 frames. ], batch size: 137, lr: 2.53e-02, grad_scale: 16.0
+2023-03-27 20:34:17,110 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10039.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:35:04,892 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10062.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 20:35:23,411 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10070.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:35:34,021 INFO [train.py:892] (2/4) Epoch 6, batch 800, loss[loss=0.2886, simple_loss=0.3366, pruned_loss=0.1203, over 19773.00 frames. ], tot_loss[loss=0.2816, simple_loss=0.3243, pruned_loss=0.1195, over 3880026.52 frames. ], batch size: 70, lr: 2.52e-02, grad_scale: 16.0
+2023-03-27 20:36:08,345 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8383, 4.0541, 4.3864, 4.0356, 4.0060, 4.2895, 4.1074, 4.5381],
+ device='cuda:2'), covar=tensor([0.1166, 0.0271, 0.0328, 0.0344, 0.0540, 0.0323, 0.0264, 0.0237],
+ device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0153, 0.0149, 0.0149, 0.0153, 0.0146, 0.0133, 0.0131],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 20:36:21,632 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10099.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:36:25,368 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10100.0, num_to_drop=1, layers_to_drop={3}
+2023-03-27 20:36:35,376 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 5.825e+02 7.423e+02 9.383e+02 2.108e+03, threshold=1.485e+03, percent-clipped=8.0
+2023-03-27 20:36:38,839 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-03-27 20:37:01,062 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10118.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:37:12,993 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10123.0, num_to_drop=1, layers_to_drop={3}
+2023-03-27 20:37:17,765 INFO [train.py:892] (2/4) Epoch 6, batch 850, loss[loss=0.3052, simple_loss=0.3289, pruned_loss=0.1408, over 19819.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.325, pruned_loss=0.1198, over 3894570.23 frames. ], batch size: 184, lr: 2.52e-02, grad_scale: 16.0
+2023-03-27 20:37:53,451 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0
+2023-03-27 20:38:08,856 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9465, 2.3465, 2.8805, 3.0552, 3.3635, 3.0857, 3.8336, 3.8912],
+ device='cuda:2'), covar=tensor([0.0472, 0.1606, 0.1085, 0.1468, 0.1253, 0.1111, 0.0350, 0.0385],
+ device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0181, 0.0176, 0.0199, 0.0195, 0.0178, 0.0119, 0.0118],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001],
+ device='cuda:2')
+2023-03-27 20:39:04,575 INFO [train.py:892] (2/4) Epoch 6, batch 900, loss[loss=0.2868, simple_loss=0.3366, pruned_loss=0.1184, over 19762.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3254, pruned_loss=0.1194, over 3904798.51 frames. ], batch size: 88, lr: 2.51e-02, grad_scale: 16.0
+2023-03-27 20:40:03,582 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 5.709e+02 6.647e+02 8.458e+02 1.607e+03, threshold=1.329e+03, percent-clipped=2.0
+2023-03-27 20:40:48,895 INFO [train.py:892] (2/4) Epoch 6, batch 950, loss[loss=0.2875, simple_loss=0.3324, pruned_loss=0.1213, over 19773.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3266, pruned_loss=0.1206, over 3915670.75 frames. ], batch size: 42, lr: 2.51e-02, grad_scale: 16.0
+2023-03-27 20:41:18,865 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10240.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:41:26,173 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.65 vs. limit=2.0
+2023-03-27 20:42:14,180 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2992, 3.3992, 3.7552, 4.7579, 2.8918, 3.5341, 2.9352, 2.6022],
+ device='cuda:2'), covar=tensor([0.0336, 0.2870, 0.0778, 0.0098, 0.2179, 0.0523, 0.0989, 0.1917],
+ device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0311, 0.0190, 0.0106, 0.0216, 0.0137, 0.0170, 0.0195],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 20:42:33,717 INFO [train.py:892] (2/4) Epoch 6, batch 1000, loss[loss=0.2672, simple_loss=0.3137, pruned_loss=0.1103, over 19648.00 frames. ], tot_loss[loss=0.2854, simple_loss=0.3279, pruned_loss=0.1214, over 3922457.87 frames. ], batch size: 47, lr: 2.50e-02, grad_scale: 16.0
+2023-03-27 20:43:00,909 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10288.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:43:22,111 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0203, 4.4985, 4.6080, 4.4762, 4.8302, 3.4326, 4.0035, 3.1863],
+ device='cuda:2'), covar=tensor([0.0126, 0.0162, 0.0143, 0.0160, 0.0117, 0.0650, 0.0831, 0.0751],
+ device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0082, 0.0080, 0.0090, 0.0080, 0.0104, 0.0113, 0.0098],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-27 20:43:34,970 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.462e+02 6.368e+02 7.709e+02 9.244e+02 1.500e+03, threshold=1.542e+03, percent-clipped=2.0
+2023-03-27 20:43:55,016 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10314.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:44:20,093 INFO [train.py:892] (2/4) Epoch 6, batch 1050, loss[loss=0.2642, simple_loss=0.2993, pruned_loss=0.1145, over 19773.00 frames. ], tot_loss[loss=0.2875, simple_loss=0.3297, pruned_loss=0.1226, over 3927273.71 frames. ], batch size: 130, lr: 2.50e-02, grad_scale: 16.0
+2023-03-27 20:44:42,110 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-03-27 20:45:36,759 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10362.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:45:54,166 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8885, 3.2697, 3.4090, 3.9031, 3.5667, 3.7234, 3.9063, 4.1209],
+ device='cuda:2'), covar=tensor([0.0549, 0.0470, 0.0520, 0.0331, 0.0562, 0.0393, 0.0424, 0.0311],
+ device='cuda:2'), in_proj_covar=tensor([0.0108, 0.0120, 0.0139, 0.0120, 0.0112, 0.0095, 0.0119, 0.0134],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 20:46:05,045 INFO [train.py:892] (2/4) Epoch 6, batch 1100, loss[loss=0.2599, simple_loss=0.2929, pruned_loss=0.1135, over 19875.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3282, pruned_loss=0.1217, over 3932923.57 frames. ], batch size: 125, lr: 2.49e-02, grad_scale: 16.0
+2023-03-27 20:46:47,205 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10395.0, num_to_drop=1, layers_to_drop={3}
+2023-03-27 20:46:55,593 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10399.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:47:09,515 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.844e+02 6.468e+02 7.471e+02 8.887e+02 1.453e+03, threshold=1.494e+03, percent-clipped=0.0
+2023-03-27 20:47:16,104 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10409.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:47:35,391 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10418.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 20:47:51,063 INFO [train.py:892] (2/4) Epoch 6, batch 1150, loss[loss=0.3029, simple_loss=0.3417, pruned_loss=0.1321, over 19844.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3262, pruned_loss=0.1203, over 3934444.04 frames. ], batch size: 109, lr: 2.49e-02, grad_scale: 16.0
+2023-03-27 20:48:36,940 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10447.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:48:51,842 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10454.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:49:24,733 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10470.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:49:35,660 INFO [train.py:892] (2/4) Epoch 6, batch 1200, loss[loss=0.3221, simple_loss=0.3512, pruned_loss=0.1465, over 19718.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.3259, pruned_loss=0.12, over 3936503.28 frames. ], batch size: 295, lr: 2.49e-02, grad_scale: 8.0
+2023-03-27 20:50:41,659 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 5.589e+02 6.626e+02 8.703e+02 1.747e+03, threshold=1.325e+03, percent-clipped=3.0
+2023-03-27 20:50:58,486 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10515.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 20:51:05,565 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1753, 2.9602, 4.1453, 3.5354, 3.6608, 4.1290, 3.7258, 3.9551],
+ device='cuda:2'), covar=tensor([0.0137, 0.0535, 0.0101, 0.1071, 0.0125, 0.0131, 0.0208, 0.0101],
+ device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0072, 0.0060, 0.0134, 0.0052, 0.0061, 0.0060, 0.0051],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-27 20:51:09,917 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0
+2023-03-27 20:51:21,746 INFO [train.py:892] (2/4) Epoch 6, batch 1250, loss[loss=0.2534, simple_loss=0.2909, pruned_loss=0.1079, over 19875.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.3231, pruned_loss=0.1173, over 3938673.47 frames. ], batch size: 159, lr: 2.48e-02, grad_scale: 8.0
+2023-03-27 20:53:08,388 INFO [train.py:892] (2/4) Epoch 6, batch 1300, loss[loss=0.2756, simple_loss=0.3152, pruned_loss=0.1181, over 19779.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.322, pruned_loss=0.1166, over 3940199.78 frames. ], batch size: 241, lr: 2.48e-02, grad_scale: 8.0
+2023-03-27 20:54:06,420 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7667, 4.3715, 4.5284, 4.3135, 4.6755, 3.4528, 3.8622, 3.2116],
+ device='cuda:2'), covar=tensor([0.0154, 0.0152, 0.0115, 0.0131, 0.0110, 0.0551, 0.0765, 0.0775],
+ device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0085, 0.0082, 0.0090, 0.0081, 0.0103, 0.0115, 0.0099],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-27 20:54:11,550 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.477e+02 5.973e+02 6.933e+02 8.304e+02 2.000e+03, threshold=1.387e+03, percent-clipped=2.0
+2023-03-27 20:54:25,551 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0
+2023-03-27 20:54:51,940 INFO [train.py:892] (2/4) Epoch 6, batch 1350, loss[loss=0.2603, simple_loss=0.3111, pruned_loss=0.1048, over 19602.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3217, pruned_loss=0.1161, over 3942205.80 frames. ], batch size: 50, lr: 2.47e-02, grad_scale: 8.0
+2023-03-27 20:56:19,415 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-03-27 20:56:39,324 INFO [train.py:892] (2/4) Epoch 6, batch 1400, loss[loss=0.2887, simple_loss=0.3277, pruned_loss=0.1249, over 19758.00 frames. ], tot_loss[loss=0.2793, simple_loss=0.3235, pruned_loss=0.1176, over 3942147.14 frames. ], batch size: 188, lr: 2.47e-02, grad_scale: 8.0
+2023-03-27 20:57:21,017 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10695.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:57:46,680 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.801e+02 6.022e+02 7.479e+02 9.663e+02 1.752e+03, threshold=1.496e+03, percent-clipped=3.0
+2023-03-27 20:58:11,819 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10718.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 20:58:26,787 INFO [train.py:892] (2/4) Epoch 6, batch 1450, loss[loss=0.2524, simple_loss=0.3181, pruned_loss=0.0934, over 19640.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3227, pruned_loss=0.1169, over 3943788.35 frames. ], batch size: 57, lr: 2.46e-02, grad_scale: 8.0
+2023-03-27 20:59:03,303 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10743.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:59:06,554 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10744.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:59:50,198 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:59:52,110 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10766.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 20:59:54,159 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4973, 3.4082, 3.5080, 3.2065, 3.5915, 2.7928, 2.7848, 2.1308],
+ device='cuda:2'), covar=tensor([0.0283, 0.0247, 0.0191, 0.0252, 0.0220, 0.0906, 0.1035, 0.1249],
+ device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0082, 0.0079, 0.0089, 0.0079, 0.0103, 0.0112, 0.0098],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-27 21:00:09,445 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10774.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:00:12,517 INFO [train.py:892] (2/4) Epoch 6, batch 1500, loss[loss=0.2672, simple_loss=0.3169, pruned_loss=0.1087, over 19734.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.322, pruned_loss=0.1163, over 3945401.20 frames. ], batch size: 99, lr: 2.46e-02, grad_scale: 8.0
+2023-03-27 21:01:14,022 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10805.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:01:16,851 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 5.698e+02 7.148e+02 8.817e+02 1.909e+03, threshold=1.430e+03, percent-clipped=1.0
+2023-03-27 21:01:20,409 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0
+2023-03-27 21:01:25,502 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10810.0, num_to_drop=1, layers_to_drop={2}
+2023-03-27 21:01:57,110 INFO [train.py:892] (2/4) Epoch 6, batch 1550, loss[loss=0.2702, simple_loss=0.3223, pruned_loss=0.1091, over 19739.00 frames. ], tot_loss[loss=0.2749, simple_loss=0.3201, pruned_loss=0.1149, over 3946933.48 frames. ], batch size: 77, lr: 2.45e-02, grad_scale: 8.0
+2023-03-27 21:02:08,525 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0
+2023-03-27 21:02:16,580 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10835.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:03:41,428 INFO [train.py:892] (2/4) Epoch 6, batch 1600, loss[loss=0.2791, simple_loss=0.3285, pruned_loss=0.1149, over 19877.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.3199, pruned_loss=0.1144, over 3947676.96 frames. ], batch size: 77, lr: 2.45e-02, grad_scale: 8.0
+2023-03-27 21:04:01,520 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10885.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 21:04:44,770 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.321e+02 5.550e+02 6.735e+02 8.191e+02 1.432e+03, threshold=1.347e+03, percent-clipped=1.0
+2023-03-27 21:05:04,516 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-03-27 21:05:12,065 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0
+2023-03-27 21:05:25,967 INFO [train.py:892] (2/4) Epoch 6, batch 1650, loss[loss=0.3609, simple_loss=0.3931, pruned_loss=0.1644, over 19703.00 frames. ], tot_loss[loss=0.2742, simple_loss=0.3196, pruned_loss=0.1145, over 3948874.15 frames. ], batch size: 305, lr: 2.44e-02, grad_scale: 8.0
+2023-03-27 21:06:09,442 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10946.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 21:07:11,137 INFO [train.py:892] (2/4) Epoch 6, batch 1700, loss[loss=0.238, simple_loss=0.2869, pruned_loss=0.09456, over 19695.00 frames. ], tot_loss[loss=0.2764, simple_loss=0.3213, pruned_loss=0.1158, over 3948408.01 frames. ], batch size: 46, lr: 2.44e-02, grad_scale: 8.0
+2023-03-27 21:08:07,573 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3318, 4.2126, 2.7088, 4.7666, 5.0132, 1.9386, 3.9723, 3.8742],
+ device='cuda:2'), covar=tensor([0.0541, 0.0709, 0.2425, 0.0393, 0.0145, 0.3429, 0.0908, 0.0511],
+ device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0177, 0.0193, 0.0133, 0.0094, 0.0195, 0.0194, 0.0126],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-27 21:08:17,552 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.223e+02 5.843e+02 7.049e+02 9.611e+02 2.273e+03, threshold=1.410e+03, percent-clipped=6.0
+2023-03-27 21:08:24,385 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0
+2023-03-27 21:08:51,967 INFO [train.py:892] (2/4) Epoch 6, batch 1750, loss[loss=0.3422, simple_loss=0.3703, pruned_loss=0.1571, over 19751.00 frames. ], tot_loss[loss=0.2745, simple_loss=0.3197, pruned_loss=0.1146, over 3948461.93 frames. ], batch size: 321, lr: 2.43e-02, grad_scale: 8.0
+2023-03-27 21:10:02,978 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11065.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:10:21,304 INFO [train.py:892] (2/4) Epoch 6, batch 1800, loss[loss=0.2474, simple_loss=0.2892, pruned_loss=0.1028, over 19752.00 frames. ], tot_loss[loss=0.2761, simple_loss=0.3214, pruned_loss=0.1154, over 3947278.05 frames. ], batch size: 110, lr: 2.43e-02, grad_scale: 8.0
+2023-03-27 21:10:32,999 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0
+2023-03-27 21:11:02,753 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11100.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:11:13,680 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.857e+02 5.952e+02 7.096e+02 9.277e+02 1.982e+03, threshold=1.419e+03, percent-clipped=4.0
+2023-03-27 21:11:19,009 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11110.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 21:11:23,873 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11113.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:11:44,929 INFO [train.py:892] (2/4) Epoch 6, batch 1850, loss[loss=0.2503, simple_loss=0.3155, pruned_loss=0.09251, over 19827.00 frames. ], tot_loss[loss=0.2749, simple_loss=0.3212, pruned_loss=0.1143, over 3948172.52 frames. ], batch size: 57, lr: 2.42e-02, grad_scale: 8.0
+2023-03-27 21:12:41,536 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11130.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:12:42,780 INFO [train.py:892] (2/4) Epoch 7, batch 0, loss[loss=0.2433, simple_loss=0.29, pruned_loss=0.09829, over 19848.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.29, pruned_loss=0.09829, over 19848.00 frames. ], batch size: 115, lr: 2.27e-02, grad_scale: 8.0
+2023-03-27 21:12:42,780 INFO [train.py:917] (2/4) Computing validation loss
+2023-03-27 21:13:10,591 INFO [train.py:926] (2/4) Epoch 7, validation: loss=0.1961, simple_loss=0.2755, pruned_loss=0.05831, over 2883724.00 frames.
+2023-03-27 21:13:10,592 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22298MB
+2023-03-27 21:13:59,041 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6641, 4.2428, 4.5330, 4.2641, 4.6395, 3.2182, 3.7258, 3.3880],
+ device='cuda:2'), covar=tensor([0.0160, 0.0177, 0.0110, 0.0156, 0.0120, 0.0678, 0.0853, 0.0702],
+ device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0087, 0.0084, 0.0093, 0.0083, 0.0105, 0.0119, 0.0102],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-27 21:14:09,092 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11158.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:14:42,620 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11174.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:14:56,461 INFO [train.py:892] (2/4) Epoch 7, batch 50, loss[loss=0.2385, simple_loss=0.2873, pruned_loss=0.0948, over 19784.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3114, pruned_loss=0.109, over 890411.42 frames. ], batch size: 94, lr: 2.27e-02, grad_scale: 8.0
+2023-03-27 21:15:51,177 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.005e+02 5.596e+02 6.713e+02 8.168e+02 2.476e+03, threshold=1.343e+03, percent-clipped=3.0
+2023-03-27 21:16:43,168 INFO [train.py:892] (2/4) Epoch 7, batch 100, loss[loss=0.2412, simple_loss=0.3, pruned_loss=0.09123, over 19939.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3143, pruned_loss=0.1111, over 1569074.24 frames. ], batch size: 52, lr: 2.26e-02, grad_scale: 8.0
+2023-03-27 21:16:52,192 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11235.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:17:05,897 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11241.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 21:17:28,704 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.92 vs. limit=2.0
+2023-03-27 21:17:42,475 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6422, 3.6633, 2.1625, 4.0346, 4.2152, 1.6910, 3.2988, 3.2711],
+ device='cuda:2'), covar=tensor([0.0620, 0.0815, 0.2691, 0.0426, 0.0199, 0.3257, 0.1066, 0.0515],
+ device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0180, 0.0194, 0.0135, 0.0096, 0.0193, 0.0194, 0.0127],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-27 21:18:30,178 INFO [train.py:892] (2/4) Epoch 7, batch 150, loss[loss=0.436, simple_loss=0.4433, pruned_loss=0.2143, over 19384.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.3182, pruned_loss=0.1132, over 2093766.44 frames. ], batch size: 431, lr: 2.26e-02, grad_scale: 8.0
+2023-03-27 21:19:15,670 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5267, 1.9168, 2.5396, 2.2296, 1.9517, 2.8181, 1.5780, 1.7342],
+ device='cuda:2'), covar=tensor([0.0597, 0.1592, 0.0490, 0.0551, 0.1158, 0.0422, 0.1254, 0.1441],
+ device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0295, 0.0195, 0.0174, 0.0275, 0.0176, 0.0211, 0.0212],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 21:19:26,067 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 5.370e+02 6.417e+02 7.584e+02 2.097e+03, threshold=1.283e+03, percent-clipped=1.0
+2023-03-27 21:20:18,361 INFO [train.py:892] (2/4) Epoch 7, batch 200, loss[loss=0.2367, simple_loss=0.2915, pruned_loss=0.09088, over 19770.00 frames. ], tot_loss[loss=0.2753, simple_loss=0.3212, pruned_loss=0.1147, over 2504139.27 frames. ], batch size: 46, lr: 2.26e-02, grad_scale: 8.0
+2023-03-27 21:21:01,539 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0526, 2.5884, 2.9837, 3.1453, 3.3164, 3.3197, 3.9884, 3.8691],
+ device='cuda:2'), covar=tensor([0.0413, 0.1450, 0.1118, 0.1445, 0.1485, 0.1105, 0.0267, 0.0385],
+ device='cuda:2'), in_proj_covar=tensor([0.0171, 0.0189, 0.0194, 0.0210, 0.0217, 0.0197, 0.0129, 0.0133],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 21:22:03,989 INFO [train.py:892] (2/4) Epoch 7, batch 250, loss[loss=0.2467, simple_loss=0.2921, pruned_loss=0.1006, over 19875.00 frames. ], tot_loss[loss=0.2746, simple_loss=0.3209, pruned_loss=0.1141, over 2822895.90 frames. ], batch size: 125, lr: 2.25e-02, grad_scale: 8.0
+2023-03-27 21:22:44,556 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11400.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:22:51,909 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3522, 3.6821, 3.6725, 3.5943, 3.4313, 3.6192, 3.0982, 3.1594],
+ device='cuda:2'), covar=tensor([0.0556, 0.0528, 0.0679, 0.0606, 0.0671, 0.0719, 0.0795, 0.1165],
+ device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0143, 0.0198, 0.0155, 0.0151, 0.0141, 0.0174, 0.0210],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-27 21:22:57,027 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.370e+02 5.501e+02 6.755e+02 8.647e+02 2.452e+03, threshold=1.351e+03, percent-clipped=4.0
+2023-03-27 21:23:45,444 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11430.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:23:46,580 INFO [train.py:892] (2/4) Epoch 7, batch 300, loss[loss=0.2673, simple_loss=0.3115, pruned_loss=0.1115, over 19831.00 frames. ], tot_loss[loss=0.2734, simple_loss=0.3199, pruned_loss=0.1134, over 3073250.95 frames. ], batch size: 146, lr: 2.25e-02, grad_scale: 8.0
+2023-03-27 21:24:25,381 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11448.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:25:28,292 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11478.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:25:34,573 INFO [train.py:892] (2/4) Epoch 7, batch 350, loss[loss=0.2846, simple_loss=0.33, pruned_loss=0.1195, over 19830.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3193, pruned_loss=0.1132, over 3267261.41 frames. ], batch size: 76, lr: 2.24e-02, grad_scale: 8.0
+2023-03-27 21:25:58,045 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8713, 4.8997, 5.3115, 5.1696, 5.0886, 4.7918, 4.9714, 4.9015],
+ device='cuda:2'), covar=tensor([0.1044, 0.1009, 0.0858, 0.0794, 0.0681, 0.0816, 0.1735, 0.1834],
+ device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0191, 0.0261, 0.0207, 0.0193, 0.0189, 0.0247, 0.0288],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-27 21:26:12,211 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1453, 4.3482, 2.6211, 4.7709, 5.1491, 1.9501, 4.1241, 3.8540],
+ device='cuda:2'), covar=tensor([0.0502, 0.0544, 0.2234, 0.0474, 0.0104, 0.2958, 0.0834, 0.0507],
+ device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0181, 0.0196, 0.0138, 0.0099, 0.0195, 0.0198, 0.0130],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-27 21:26:29,991 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.848e+02 5.697e+02 6.987e+02 8.639e+02 1.327e+03, threshold=1.397e+03, percent-clipped=1.0
+2023-03-27 21:26:33,257 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.93 vs. limit=2.0
+2023-03-27 21:27:01,456 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6547, 4.5411, 4.9668, 4.8243, 4.8237, 4.2647, 4.6610, 4.5317],
+ device='cuda:2'), covar=tensor([0.1102, 0.1097, 0.0893, 0.0881, 0.0719, 0.0967, 0.1853, 0.2008],
+ device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0193, 0.0262, 0.0208, 0.0194, 0.0189, 0.0250, 0.0289],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-27 21:27:05,383 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7534, 2.0739, 2.6579, 3.2154, 3.3852, 3.6266, 3.6172, 3.6578],
+ device='cuda:2'), covar=tensor([0.0660, 0.1961, 0.0724, 0.0379, 0.0293, 0.0155, 0.0247, 0.0245],
+ device='cuda:2'), in_proj_covar=tensor([0.0108, 0.0160, 0.0112, 0.0103, 0.0084, 0.0082, 0.0079, 0.0080],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:2')
+2023-03-27 21:27:18,086 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11530.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:27:19,362 INFO [train.py:892] (2/4) Epoch 7, batch 400, loss[loss=0.2958, simple_loss=0.3469, pruned_loss=0.1223, over 19772.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.3186, pruned_loss=0.1131, over 3418328.50 frames. ], batch size: 70, lr: 2.24e-02, grad_scale: 8.0
+2023-03-27 21:27:22,552 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11532.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:27:23,335 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.72 vs. limit=2.0
+2023-03-27 21:27:28,365 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6240, 2.6695, 2.9336, 2.0813, 2.9410, 2.1999, 2.5772, 2.9766],
+ device='cuda:2'), covar=tensor([0.0781, 0.0358, 0.0490, 0.0739, 0.0370, 0.0456, 0.0436, 0.0225],
+ device='cuda:2'), in_proj_covar=tensor([0.0048, 0.0047, 0.0051, 0.0073, 0.0047, 0.0044, 0.0043, 0.0040],
+ device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:2')
+2023-03-27 21:27:41,083 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11541.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 21:29:06,951 INFO [train.py:892] (2/4) Epoch 7, batch 450, loss[loss=0.248, simple_loss=0.294, pruned_loss=0.101, over 19736.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3183, pruned_loss=0.1122, over 3535087.78 frames. ], batch size: 140, lr: 2.23e-02, grad_scale: 8.0
+2023-03-27 21:29:23,363 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11589.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 21:29:31,240 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11593.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:30:00,683 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 5.488e+02 6.753e+02 8.459e+02 1.411e+03, threshold=1.351e+03, percent-clipped=1.0
+2023-03-27 21:30:52,139 INFO [train.py:892] (2/4) Epoch 7, batch 500, loss[loss=0.2412, simple_loss=0.2905, pruned_loss=0.09596, over 19667.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.3171, pruned_loss=0.1116, over 3628490.98 frames. ], batch size: 43, lr: 2.23e-02, grad_scale: 8.0
+2023-03-27 21:30:53,539 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-03-27 21:31:00,977 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11635.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:32:37,360 INFO [train.py:892] (2/4) Epoch 7, batch 550, loss[loss=0.2354, simple_loss=0.2873, pruned_loss=0.09169, over 19789.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.319, pruned_loss=0.1129, over 3699844.96 frames. ], batch size: 83, lr: 2.23e-02, grad_scale: 8.0
+2023-03-27 21:32:44,538 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-03-27 21:33:09,736 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11696.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:33:15,370 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11699.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:33:15,469 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7320, 2.5262, 2.8529, 2.8572, 3.0675, 3.0108, 3.4251, 3.6728],
+ device='cuda:2'), covar=tensor([0.0463, 0.1322, 0.1031, 0.1372, 0.1390, 0.1070, 0.0316, 0.0313],
+ device='cuda:2'), in_proj_covar=tensor([0.0172, 0.0191, 0.0192, 0.0210, 0.0219, 0.0196, 0.0134, 0.0134],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 21:33:24,750 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.21 vs. limit=5.0
+2023-03-27 21:33:31,005 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.036e+02 6.098e+02 7.200e+02 8.839e+02 1.494e+03, threshold=1.440e+03, percent-clipped=2.0
+2023-03-27 21:33:48,849 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0
+2023-03-27 21:34:22,374 INFO [train.py:892] (2/4) Epoch 7, batch 600, loss[loss=0.255, simple_loss=0.3086, pruned_loss=0.1007, over 19673.00 frames. ], tot_loss[loss=0.2711, simple_loss=0.318, pruned_loss=0.112, over 3756833.34 frames. ], batch size: 73, lr: 2.22e-02, grad_scale: 8.0
+2023-03-27 21:35:16,903 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0
+2023-03-27 21:35:24,220 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11760.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:36:07,572 INFO [train.py:892] (2/4) Epoch 7, batch 650, loss[loss=0.2392, simple_loss=0.294, pruned_loss=0.09217, over 19685.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3165, pruned_loss=0.1109, over 3799637.08 frames. ], batch size: 75, lr: 2.22e-02, grad_scale: 8.0
+2023-03-27 21:36:34,666 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8990, 2.5765, 1.4839, 3.4288, 3.2423, 3.2918, 3.4345, 2.8533],
+ device='cuda:2'), covar=tensor([0.0546, 0.0562, 0.1912, 0.0513, 0.0442, 0.0406, 0.0441, 0.0614],
+ device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0094, 0.0118, 0.0099, 0.0086, 0.0079, 0.0093, 0.0106],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 21:37:02,907 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.631e+02 5.681e+02 6.679e+02 8.355e+02 2.061e+03, threshold=1.336e+03, percent-clipped=4.0
+2023-03-27 21:37:23,921 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11816.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:37:52,420 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11830.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:37:53,590 INFO [train.py:892] (2/4) Epoch 7, batch 700, loss[loss=0.2451, simple_loss=0.2994, pruned_loss=0.09545, over 19782.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3172, pruned_loss=0.1108, over 3831008.37 frames. ], batch size: 91, lr: 2.21e-02, grad_scale: 8.0
+2023-03-27 21:38:25,360 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11846.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:39:32,218 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11877.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:39:34,206 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11878.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:39:39,316 INFO [train.py:892] (2/4) Epoch 7, batch 750, loss[loss=0.2545, simple_loss=0.3114, pruned_loss=0.0988, over 19714.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.3171, pruned_loss=0.1107, over 3856822.02 frames. ], batch size: 85, lr: 2.21e-02, grad_scale: 8.0
+2023-03-27 21:39:53,455 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11888.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:40:34,149 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.808e+02 5.639e+02 6.770e+02 8.403e+02 1.981e+03, threshold=1.354e+03, percent-clipped=4.0
+2023-03-27 21:40:35,225 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11907.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:41:02,231 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11920.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:41:22,937 INFO [train.py:892] (2/4) Epoch 7, batch 800, loss[loss=0.2421, simple_loss=0.2938, pruned_loss=0.09514, over 19780.00 frames. ], tot_loss[loss=0.2686, simple_loss=0.3163, pruned_loss=0.1105, over 3878068.84 frames. ], batch size: 94, lr: 2.21e-02, grad_scale: 8.0
+2023-03-27 21:41:59,554 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11948.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:43:08,369 INFO [train.py:892] (2/4) Epoch 7, batch 850, loss[loss=0.3606, simple_loss=0.3764, pruned_loss=0.1724, over 19709.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3166, pruned_loss=0.1108, over 3894387.33 frames. ], batch size: 337, lr: 2.20e-02, grad_scale: 8.0
+2023-03-27 21:43:09,289 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11981.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:43:14,896 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11984.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:43:24,954 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.69 vs. limit=2.0
+2023-03-27 21:43:29,711 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11991.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:44:07,073 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 6.093e+02 7.463e+02 9.130e+02 1.696e+03, threshold=1.493e+03, percent-clipped=3.0
+2023-03-27 21:44:11,980 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12009.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:44:56,860 INFO [train.py:892] (2/4) Epoch 7, batch 900, loss[loss=0.2451, simple_loss=0.3078, pruned_loss=0.09121, over 19578.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3146, pruned_loss=0.1095, over 3906595.81 frames. ], batch size: 53, lr: 2.20e-02, grad_scale: 8.0
+2023-03-27 21:45:27,440 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12045.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:45:48,827 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12055.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:45:53,298 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9409, 2.7494, 1.6962, 3.4198, 3.4026, 3.2545, 3.5540, 2.8882],
+ device='cuda:2'), covar=tensor([0.0500, 0.0540, 0.1500, 0.0470, 0.0328, 0.0568, 0.0497, 0.0638],
+ device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0093, 0.0118, 0.0100, 0.0084, 0.0080, 0.0093, 0.0104],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 21:46:32,742 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12076.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:46:44,680 INFO [train.py:892] (2/4) Epoch 7, batch 950, loss[loss=0.2542, simple_loss=0.3189, pruned_loss=0.09471, over 19958.00 frames. ], tot_loss[loss=0.2677, simple_loss=0.3156, pruned_loss=0.1099, over 3915991.56 frames. ], batch size: 53, lr: 2.19e-02, grad_scale: 8.0
+2023-03-27 21:47:23,660 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8198, 2.1131, 3.0169, 3.3216, 3.7352, 4.0560, 4.1828, 4.0938],
+ device='cuda:2'), covar=tensor([0.0730, 0.2121, 0.0762, 0.0432, 0.0288, 0.0161, 0.0178, 0.0380],
+ device='cuda:2'), in_proj_covar=tensor([0.0114, 0.0161, 0.0120, 0.0110, 0.0088, 0.0087, 0.0079, 0.0083],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001],
+ device='cuda:2')
+2023-03-27 21:47:39,038 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.928e+02 5.727e+02 6.871e+02 8.158e+02 1.418e+03, threshold=1.374e+03, percent-clipped=0.0
+2023-03-27 21:47:39,990 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9755, 3.9828, 4.0252, 3.8158, 4.1235, 3.0657, 3.2548, 2.4245],
+ device='cuda:2'), covar=tensor([0.0334, 0.0197, 0.0195, 0.0205, 0.0194, 0.0768, 0.1094, 0.1263],
+ device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0091, 0.0084, 0.0095, 0.0085, 0.0106, 0.0121, 0.0104],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-27 21:48:09,557 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4218, 3.4042, 4.7918, 3.5418, 4.2263, 4.9334, 2.7420, 2.9716],
+ device='cuda:2'), covar=tensor([0.0447, 0.2452, 0.0242, 0.0530, 0.0878, 0.0242, 0.1222, 0.1491],
+ device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0298, 0.0206, 0.0177, 0.0279, 0.0182, 0.0219, 0.0217],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-27 21:48:29,674 INFO [train.py:892] (2/4) Epoch 7, batch 1000, loss[loss=0.3146, simple_loss=0.3455, pruned_loss=0.1418, over 19758.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3141, pruned_loss=0.1092, over 3924494.37 frames. 
], batch size: 259, lr: 2.19e-02, grad_scale: 8.0 +2023-03-27 21:48:43,814 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12137.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:49:55,675 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12172.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:50:13,709 INFO [train.py:892] (2/4) Epoch 7, batch 1050, loss[loss=0.2352, simple_loss=0.2826, pruned_loss=0.09387, over 19760.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.314, pruned_loss=0.1094, over 3930472.69 frames. ], batch size: 155, lr: 2.19e-02, grad_scale: 8.0 +2023-03-27 21:50:31,426 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12188.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:50:59,982 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12202.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:51:09,329 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.917e+02 5.376e+02 7.198e+02 8.445e+02 1.495e+03, threshold=1.440e+03, percent-clipped=1.0 +2023-03-27 21:52:00,182 INFO [train.py:892] (2/4) Epoch 7, batch 1100, loss[loss=0.2614, simple_loss=0.2908, pruned_loss=0.116, over 19817.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3147, pruned_loss=0.1095, over 3933614.77 frames. ], batch size: 184, lr: 2.18e-02, grad_scale: 8.0 +2023-03-27 21:52:12,267 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12236.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:52:29,310 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8963, 5.1891, 5.2532, 5.2161, 4.9489, 5.1586, 4.5965, 4.7530], + device='cuda:2'), covar=tensor([0.0385, 0.0354, 0.0483, 0.0426, 0.0466, 0.0502, 0.0601, 0.1010], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0156, 0.0206, 0.0167, 0.0157, 0.0149, 0.0183, 0.0217], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-27 21:52:35,429 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0842, 2.3523, 3.2281, 2.5859, 2.6973, 3.3938, 1.9932, 2.0351], + device='cuda:2'), covar=tensor([0.0602, 0.2289, 0.0401, 0.0576, 0.1053, 0.0444, 0.1204, 0.1676], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0298, 0.0205, 0.0177, 0.0282, 0.0184, 0.0219, 0.0217], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 21:53:35,987 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12276.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:53:45,139 INFO [train.py:892] (2/4) Epoch 7, batch 1150, loss[loss=0.2158, simple_loss=0.2718, pruned_loss=0.07993, over 19772.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3135, pruned_loss=0.1088, over 3937297.03 frames. ], batch size: 116, lr: 2.18e-02, grad_scale: 8.0 +2023-03-27 21:54:06,710 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12291.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:54:20,416 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.12 vs. 
limit=5.0 +2023-03-27 21:54:34,596 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12304.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:54:41,302 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.471e+02 5.763e+02 6.995e+02 9.054e+02 1.717e+03, threshold=1.399e+03, percent-clipped=2.0 +2023-03-27 21:55:31,485 INFO [train.py:892] (2/4) Epoch 7, batch 1200, loss[loss=0.2383, simple_loss=0.2948, pruned_loss=0.09084, over 19685.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3133, pruned_loss=0.1088, over 3940601.15 frames. ], batch size: 45, lr: 2.18e-02, grad_scale: 8.0 +2023-03-27 21:55:48,755 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12339.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:55:50,659 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12340.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:56:21,327 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12355.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:56:38,394 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7353, 5.0178, 5.0683, 5.0444, 4.7311, 4.9802, 4.4100, 4.6306], + device='cuda:2'), covar=tensor([0.0421, 0.0417, 0.0624, 0.0417, 0.0619, 0.0596, 0.0594, 0.0967], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0159, 0.0209, 0.0171, 0.0159, 0.0151, 0.0188, 0.0221], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-27 21:57:17,149 INFO [train.py:892] (2/4) Epoch 7, batch 1250, loss[loss=0.2553, simple_loss=0.3028, pruned_loss=0.1039, over 19879.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3125, pruned_loss=0.1085, over 3943353.59 frames. ], batch size: 88, lr: 2.17e-02, grad_scale: 8.0 +2023-03-27 21:57:34,953 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9881, 5.0268, 5.4231, 5.2405, 5.1899, 4.5944, 5.1066, 5.0435], + device='cuda:2'), covar=tensor([0.1147, 0.0937, 0.0816, 0.0885, 0.0744, 0.0960, 0.1920, 0.1916], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0197, 0.0263, 0.0207, 0.0194, 0.0192, 0.0256, 0.0292], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-27 21:57:51,379 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4240, 3.4321, 1.9481, 3.6731, 3.8390, 1.6163, 3.1570, 3.0126], + device='cuda:2'), covar=tensor([0.0614, 0.0877, 0.2981, 0.0553, 0.0233, 0.3129, 0.1017, 0.0625], + device='cuda:2'), in_proj_covar=tensor([0.0160, 0.0185, 0.0200, 0.0147, 0.0102, 0.0193, 0.0202, 0.0133], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 21:58:02,721 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12403.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:58:09,909 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.119e+02 6.346e+02 7.494e+02 8.891e+02 1.193e+03, threshold=1.499e+03, percent-clipped=0.0 +2023-03-27 21:59:01,224 INFO [train.py:892] (2/4) Epoch 7, batch 1300, loss[loss=0.2298, simple_loss=0.2897, pruned_loss=0.08489, over 19681.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3136, pruned_loss=0.109, over 3942911.68 frames. 
], batch size: 82, lr: 2.17e-02, grad_scale: 8.0 +2023-03-27 21:59:03,851 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12432.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:59:33,345 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12446.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:59:33,965 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.60 vs. limit=5.0 +2023-03-27 22:00:27,469 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12472.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:00:45,678 INFO [train.py:892] (2/4) Epoch 7, batch 1350, loss[loss=0.2568, simple_loss=0.2981, pruned_loss=0.1077, over 19710.00 frames. ], tot_loss[loss=0.2652, simple_loss=0.3132, pruned_loss=0.1086, over 3943130.91 frames. ], batch size: 85, lr: 2.16e-02, grad_scale: 16.0 +2023-03-27 22:01:29,009 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12502.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:01:39,458 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.642e+02 6.155e+02 7.029e+02 9.046e+02 1.832e+03, threshold=1.406e+03, percent-clipped=3.0 +2023-03-27 22:01:40,582 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12507.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:02:07,089 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12520.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:02:29,803 INFO [train.py:892] (2/4) Epoch 7, batch 1400, loss[loss=0.2807, simple_loss=0.3383, pruned_loss=0.1116, over 19580.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3127, pruned_loss=0.1087, over 3944542.45 frames. ], batch size: 49, lr: 2.16e-02, grad_scale: 16.0 +2023-03-27 22:03:08,822 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12550.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:04:00,459 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-03-27 22:04:02,203 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6140, 3.5848, 2.1696, 3.9657, 3.9167, 1.7214, 3.2080, 3.0372], + device='cuda:2'), covar=tensor([0.0585, 0.0759, 0.2638, 0.0437, 0.0232, 0.3029, 0.1031, 0.0583], + device='cuda:2'), in_proj_covar=tensor([0.0160, 0.0184, 0.0200, 0.0149, 0.0102, 0.0192, 0.0201, 0.0133], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 22:04:04,018 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12576.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:04:14,059 INFO [train.py:892] (2/4) Epoch 7, batch 1450, loss[loss=0.3958, simple_loss=0.4192, pruned_loss=0.1862, over 19402.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3132, pruned_loss=0.1088, over 3945614.03 frames. 
], batch size: 412, lr: 2.16e-02, grad_scale: 16.0 +2023-03-27 22:05:02,532 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12604.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:05:07,595 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.016e+02 5.644e+02 6.580e+02 8.199e+02 1.502e+03, threshold=1.316e+03, percent-clipped=1.0 +2023-03-27 22:05:39,833 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0050, 2.5117, 3.0141, 3.0936, 3.3259, 3.3232, 3.7823, 3.8447], + device='cuda:2'), covar=tensor([0.0454, 0.1646, 0.1167, 0.1451, 0.1307, 0.1084, 0.0359, 0.0393], + device='cuda:2'), in_proj_covar=tensor([0.0173, 0.0194, 0.0199, 0.0212, 0.0226, 0.0201, 0.0139, 0.0140], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 22:05:43,403 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12624.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:05:57,454 INFO [train.py:892] (2/4) Epoch 7, batch 1500, loss[loss=0.3053, simple_loss=0.3456, pruned_loss=0.1325, over 19733.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3127, pruned_loss=0.1083, over 3946191.77 frames. ], batch size: 99, lr: 2.15e-02, grad_scale: 16.0 +2023-03-27 22:06:16,839 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12640.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:06:41,611 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12652.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:06:45,803 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-27 22:07:41,419 INFO [train.py:892] (2/4) Epoch 7, batch 1550, loss[loss=0.2876, simple_loss=0.321, pruned_loss=0.1271, over 19800.00 frames. ], tot_loss[loss=0.2671, simple_loss=0.3152, pruned_loss=0.1095, over 3944351.44 frames. 
], batch size: 200, lr: 2.15e-02, grad_scale: 16.0 +2023-03-27 22:07:51,893 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8838, 2.6897, 3.2866, 2.1996, 3.0560, 2.3747, 2.5120, 3.1635], + device='cuda:2'), covar=tensor([0.0575, 0.0387, 0.0384, 0.0677, 0.0336, 0.0342, 0.0545, 0.0233], + device='cuda:2'), in_proj_covar=tensor([0.0047, 0.0048, 0.0052, 0.0074, 0.0047, 0.0043, 0.0043, 0.0039], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-27 22:07:56,845 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12688.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:08:04,729 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4318, 3.3526, 4.7685, 3.7950, 4.1793, 4.7949, 2.5060, 2.7799], + device='cuda:2'), covar=tensor([0.0530, 0.2535, 0.0280, 0.0506, 0.1107, 0.0344, 0.1316, 0.1766], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0302, 0.0208, 0.0179, 0.0283, 0.0188, 0.0224, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 22:08:35,758 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.812e+02 6.854e+02 8.280e+02 1.889e+03, threshold=1.371e+03, percent-clipped=7.0 +2023-03-27 22:08:36,809 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2355, 2.7074, 3.2205, 2.9765, 3.3752, 3.4157, 4.0065, 4.2870], + device='cuda:2'), covar=tensor([0.0420, 0.1610, 0.1168, 0.2021, 0.1547, 0.1383, 0.0367, 0.0335], + device='cuda:2'), in_proj_covar=tensor([0.0172, 0.0194, 0.0198, 0.0212, 0.0225, 0.0200, 0.0138, 0.0142], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 22:09:26,215 INFO [train.py:892] (2/4) Epoch 7, batch 1600, loss[loss=0.2483, simple_loss=0.2957, pruned_loss=0.1004, over 19787.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3139, pruned_loss=0.108, over 3946247.17 frames. ], batch size: 236, lr: 2.15e-02, grad_scale: 16.0 +2023-03-27 22:09:28,758 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12732.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:10:33,005 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12763.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:11:03,580 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.98 vs. limit=2.0 +2023-03-27 22:11:06,775 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12780.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:11:08,042 INFO [train.py:892] (2/4) Epoch 7, batch 1650, loss[loss=0.2896, simple_loss=0.3475, pruned_loss=0.1158, over 19724.00 frames. ], tot_loss[loss=0.2618, simple_loss=0.3112, pruned_loss=0.1062, over 3946913.89 frames. 
], batch size: 54, lr: 2.14e-02, grad_scale: 16.0 +2023-03-27 22:11:30,280 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2669, 5.6134, 5.6204, 5.5153, 5.2280, 5.5435, 4.8110, 4.9704], + device='cuda:2'), covar=tensor([0.0330, 0.0348, 0.0543, 0.0412, 0.0528, 0.0510, 0.0681, 0.0897], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0154, 0.0203, 0.0170, 0.0160, 0.0152, 0.0184, 0.0217], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-27 22:11:54,388 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12802.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:12:04,854 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.717e+02 5.657e+02 6.905e+02 8.764e+02 1.774e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-03-27 22:12:22,103 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8550, 3.6659, 4.1495, 3.9838, 4.1620, 3.3497, 3.8700, 3.7311], + device='cuda:2'), covar=tensor([0.1370, 0.1474, 0.1118, 0.1140, 0.1116, 0.1585, 0.2039, 0.2645], + device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0204, 0.0269, 0.0212, 0.0201, 0.0200, 0.0257, 0.0299], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-27 22:12:43,826 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12824.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 22:12:56,375 INFO [train.py:892] (2/4) Epoch 7, batch 1700, loss[loss=0.2767, simple_loss=0.3154, pruned_loss=0.119, over 19839.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3134, pruned_loss=0.1078, over 3946501.88 frames. ], batch size: 90, lr: 2.14e-02, grad_scale: 16.0 +2023-03-27 22:14:36,909 INFO [train.py:892] (2/4) Epoch 7, batch 1750, loss[loss=0.2167, simple_loss=0.2759, pruned_loss=0.07872, over 19798.00 frames. ], tot_loss[loss=0.2636, simple_loss=0.3128, pruned_loss=0.1073, over 3946079.25 frames. ], batch size: 86, lr: 2.14e-02, grad_scale: 16.0 +2023-03-27 22:15:23,463 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.556e+02 6.737e+02 8.077e+02 1.296e+03, threshold=1.347e+03, percent-clipped=0.0 +2023-03-27 22:16:05,768 INFO [train.py:892] (2/4) Epoch 7, batch 1800, loss[loss=0.2601, simple_loss=0.3211, pruned_loss=0.09957, over 19715.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3121, pruned_loss=0.1069, over 3946150.13 frames. ], batch size: 81, lr: 2.13e-02, grad_scale: 16.0 +2023-03-27 22:16:06,524 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0925, 2.8429, 1.4901, 3.6892, 3.3777, 3.5699, 3.6154, 2.8515], + device='cuda:2'), covar=tensor([0.0478, 0.0502, 0.1616, 0.0388, 0.0387, 0.0309, 0.0465, 0.0604], + device='cuda:2'), in_proj_covar=tensor([0.0110, 0.0098, 0.0123, 0.0106, 0.0089, 0.0083, 0.0095, 0.0108], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 22:17:29,668 INFO [train.py:892] (2/4) Epoch 7, batch 1850, loss[loss=0.2845, simple_loss=0.3397, pruned_loss=0.1146, over 19672.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3116, pruned_loss=0.1056, over 3947501.46 frames. ], batch size: 55, lr: 2.13e-02, grad_scale: 16.0 +2023-03-27 22:18:27,966 INFO [train.py:892] (2/4) Epoch 8, batch 0, loss[loss=0.2485, simple_loss=0.2967, pruned_loss=0.1002, over 19816.00 frames. 
], tot_loss[loss=0.2485, simple_loss=0.2967, pruned_loss=0.1002, over 19816.00 frames. ], batch size: 72, lr: 2.00e-02, grad_scale: 16.0 +2023-03-27 22:18:27,967 INFO [train.py:917] (2/4) Computing validation loss +2023-03-27 22:18:55,072 INFO [train.py:926] (2/4) Epoch 8, validation: loss=0.189, simple_loss=0.2688, pruned_loss=0.05453, over 2883724.00 frames. +2023-03-27 22:18:55,073 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22298MB +2023-03-27 22:19:33,641 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5866, 1.9517, 2.5271, 3.1247, 3.4623, 3.6676, 3.6790, 3.7234], + device='cuda:2'), covar=tensor([0.0797, 0.2098, 0.1008, 0.0433, 0.0270, 0.0142, 0.0219, 0.0267], + device='cuda:2'), in_proj_covar=tensor([0.0120, 0.0166, 0.0130, 0.0116, 0.0091, 0.0092, 0.0085, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 22:19:40,815 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.963e+02 5.840e+02 6.949e+02 8.045e+02 1.580e+03, threshold=1.390e+03, percent-clipped=1.0 +2023-03-27 22:20:44,099 INFO [train.py:892] (2/4) Epoch 8, batch 50, loss[loss=0.2382, simple_loss=0.2936, pruned_loss=0.09144, over 19744.00 frames. ], tot_loss[loss=0.26, simple_loss=0.308, pruned_loss=0.106, over 890838.13 frames. ], batch size: 129, lr: 2.00e-02, grad_scale: 16.0 +2023-03-27 22:21:39,641 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.74 vs. limit=5.0 +2023-03-27 22:22:29,891 INFO [train.py:892] (2/4) Epoch 8, batch 100, loss[loss=0.2339, simple_loss=0.3052, pruned_loss=0.08135, over 19693.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3089, pruned_loss=0.1052, over 1567229.12 frames. ], batch size: 56, lr: 2.00e-02, grad_scale: 16.0 +2023-03-27 22:23:02,868 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13102.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:23:13,340 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.875e+02 5.582e+02 6.747e+02 8.374e+02 1.388e+03, threshold=1.349e+03, percent-clipped=0.0 +2023-03-27 22:23:39,229 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=13119.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 22:23:51,493 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6405, 1.7845, 2.4667, 3.1248, 3.3241, 3.3761, 3.5101, 3.5234], + device='cuda:2'), covar=tensor([0.0656, 0.2232, 0.0926, 0.0399, 0.0279, 0.0181, 0.0178, 0.0238], + device='cuda:2'), in_proj_covar=tensor([0.0116, 0.0164, 0.0127, 0.0113, 0.0089, 0.0090, 0.0083, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-03-27 22:24:13,103 INFO [train.py:892] (2/4) Epoch 8, batch 150, loss[loss=0.2511, simple_loss=0.3087, pruned_loss=0.09672, over 19759.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3101, pruned_loss=0.1059, over 2096029.69 frames. ], batch size: 119, lr: 1.99e-02, grad_scale: 16.0 +2023-03-27 22:24:43,916 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13150.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:25:58,989 INFO [train.py:892] (2/4) Epoch 8, batch 200, loss[loss=0.2315, simple_loss=0.29, pruned_loss=0.08647, over 19851.00 frames. ], tot_loss[loss=0.2609, simple_loss=0.3103, pruned_loss=0.1057, over 2507997.76 frames. 
], batch size: 78, lr: 1.99e-02, grad_scale: 16.0 +2023-03-27 22:26:43,153 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 5.713e+02 7.016e+02 8.259e+02 1.478e+03, threshold=1.403e+03, percent-clipped=2.0 +2023-03-27 22:27:43,922 INFO [train.py:892] (2/4) Epoch 8, batch 250, loss[loss=0.2622, simple_loss=0.3, pruned_loss=0.1122, over 19750.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.3079, pruned_loss=0.1044, over 2827850.39 frames. ], batch size: 139, lr: 1.99e-02, grad_scale: 16.0 +2023-03-27 22:28:36,305 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4894, 4.8454, 4.7909, 4.7790, 4.5351, 4.8034, 4.2341, 4.2778], + device='cuda:2'), covar=tensor([0.0443, 0.0375, 0.0606, 0.0427, 0.0580, 0.0574, 0.0694, 0.0891], + device='cuda:2'), in_proj_covar=tensor([0.0168, 0.0162, 0.0215, 0.0173, 0.0161, 0.0157, 0.0188, 0.0225], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-27 22:29:19,598 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.66 vs. limit=5.0 +2023-03-27 22:29:31,208 INFO [train.py:892] (2/4) Epoch 8, batch 300, loss[loss=0.2373, simple_loss=0.2802, pruned_loss=0.09719, over 19881.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3098, pruned_loss=0.1047, over 3073637.19 frames. ], batch size: 134, lr: 1.98e-02, grad_scale: 16.0 +2023-03-27 22:30:13,181 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.556e+02 6.827e+02 8.454e+02 1.465e+03, threshold=1.365e+03, percent-clipped=2.0 +2023-03-27 22:31:14,170 INFO [train.py:892] (2/4) Epoch 8, batch 350, loss[loss=0.2618, simple_loss=0.3195, pruned_loss=0.1021, over 19788.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.31, pruned_loss=0.1045, over 3267690.24 frames. ], batch size: 94, lr: 1.98e-02, grad_scale: 16.0 +2023-03-27 22:31:43,913 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1364, 3.9739, 2.3467, 4.5943, 4.6214, 1.8414, 3.7684, 3.4206], + device='cuda:2'), covar=tensor([0.0512, 0.0803, 0.2521, 0.0459, 0.0233, 0.2994, 0.0851, 0.0584], + device='cuda:2'), in_proj_covar=tensor([0.0167, 0.0194, 0.0206, 0.0156, 0.0111, 0.0194, 0.0207, 0.0138], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 22:31:49,809 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1420, 3.2119, 1.4641, 4.0298, 3.4967, 4.0272, 3.8718, 2.9253], + device='cuda:2'), covar=tensor([0.0518, 0.0483, 0.1889, 0.0409, 0.0390, 0.0264, 0.0604, 0.0688], + device='cuda:2'), in_proj_covar=tensor([0.0112, 0.0100, 0.0123, 0.0107, 0.0090, 0.0085, 0.0098, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 22:32:58,887 INFO [train.py:892] (2/4) Epoch 8, batch 400, loss[loss=0.2275, simple_loss=0.2801, pruned_loss=0.0874, over 19824.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3105, pruned_loss=0.105, over 3419597.56 frames. 
], batch size: 128, lr: 1.98e-02, grad_scale: 16.0 +2023-03-27 22:33:41,529 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.749e+02 7.183e+02 8.844e+02 1.624e+03, threshold=1.437e+03, percent-clipped=4.0 +2023-03-27 22:34:07,789 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13419.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:34:44,929 INFO [train.py:892] (2/4) Epoch 8, batch 450, loss[loss=0.2473, simple_loss=0.2826, pruned_loss=0.106, over 19875.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3126, pruned_loss=0.1062, over 3535842.78 frames. ], batch size: 159, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:35:49,876 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13467.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:36:27,701 INFO [train.py:892] (2/4) Epoch 8, batch 500, loss[loss=0.267, simple_loss=0.312, pruned_loss=0.111, over 19777.00 frames. ], tot_loss[loss=0.2602, simple_loss=0.3104, pruned_loss=0.105, over 3627689.13 frames. ], batch size: 193, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:36:46,669 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0315, 1.9515, 1.9953, 2.0204, 1.7102, 1.7834, 1.7955, 2.1252], + device='cuda:2'), covar=tensor([0.0180, 0.0224, 0.0244, 0.0196, 0.0301, 0.0293, 0.0328, 0.0251], + device='cuda:2'), in_proj_covar=tensor([0.0037, 0.0038, 0.0040, 0.0034, 0.0042, 0.0040, 0.0054, 0.0037], + device='cuda:2'), out_proj_covar=tensor([8.1830e-05, 8.3639e-05, 8.8358e-05, 7.6569e-05, 9.2664e-05, 8.9906e-05, + 1.1587e-04, 8.2671e-05], device='cuda:2') +2023-03-27 22:37:11,213 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.922e+02 5.157e+02 6.466e+02 8.211e+02 1.475e+03, threshold=1.293e+03, percent-clipped=2.0 +2023-03-27 22:37:49,133 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. limit=2.0 +2023-03-27 22:38:12,610 INFO [train.py:892] (2/4) Epoch 8, batch 550, loss[loss=0.238, simple_loss=0.3034, pruned_loss=0.0863, over 19549.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3095, pruned_loss=0.1036, over 3697887.19 frames. ], batch size: 41, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:39:58,919 INFO [train.py:892] (2/4) Epoch 8, batch 600, loss[loss=0.2659, simple_loss=0.3028, pruned_loss=0.1145, over 19821.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3098, pruned_loss=0.1041, over 3754211.23 frames. 
], batch size: 195, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:40:03,906 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9201, 2.4981, 3.0921, 2.9896, 3.1959, 3.3163, 3.8284, 4.0423], + device='cuda:2'), covar=tensor([0.0454, 0.1552, 0.1126, 0.1481, 0.1310, 0.1040, 0.0279, 0.0380], + device='cuda:2'), in_proj_covar=tensor([0.0179, 0.0201, 0.0205, 0.0216, 0.0230, 0.0208, 0.0145, 0.0148], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 22:40:41,841 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.580e+02 5.730e+02 6.817e+02 8.110e+02 1.556e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-03-27 22:41:35,572 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.8270, 6.1612, 6.1132, 6.0552, 5.7855, 6.0831, 5.4368, 5.4346], + device='cuda:2'), covar=tensor([0.0290, 0.0261, 0.0446, 0.0352, 0.0458, 0.0455, 0.0577, 0.0875], + device='cuda:2'), in_proj_covar=tensor([0.0170, 0.0165, 0.0215, 0.0173, 0.0163, 0.0159, 0.0189, 0.0226], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-27 22:41:42,657 INFO [train.py:892] (2/4) Epoch 8, batch 650, loss[loss=0.2345, simple_loss=0.2834, pruned_loss=0.09281, over 19815.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.3083, pruned_loss=0.1037, over 3797834.33 frames. ], batch size: 167, lr: 1.96e-02, grad_scale: 16.0 +2023-03-27 22:42:42,047 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-27 22:42:54,737 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3267, 3.4621, 3.5611, 4.5458, 2.8239, 3.4033, 2.7225, 2.4990], + device='cuda:2'), covar=tensor([0.0386, 0.2672, 0.0859, 0.0151, 0.2300, 0.0590, 0.1124, 0.1834], + device='cuda:2'), in_proj_covar=tensor([0.0168, 0.0322, 0.0212, 0.0123, 0.0229, 0.0151, 0.0183, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 22:43:26,143 INFO [train.py:892] (2/4) Epoch 8, batch 700, loss[loss=0.2452, simple_loss=0.2971, pruned_loss=0.09664, over 19824.00 frames. ], tot_loss[loss=0.2565, simple_loss=0.3072, pruned_loss=0.1029, over 3832688.97 frames. ], batch size: 75, lr: 1.96e-02, grad_scale: 16.0 +2023-03-27 22:43:51,755 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.81 vs. limit=5.0 +2023-03-27 22:44:11,779 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.892e+02 5.950e+02 6.978e+02 8.185e+02 2.283e+03, threshold=1.396e+03, percent-clipped=2.0 +2023-03-27 22:45:13,753 INFO [train.py:892] (2/4) Epoch 8, batch 750, loss[loss=0.2559, simple_loss=0.296, pruned_loss=0.1079, over 19764.00 frames. ], tot_loss[loss=0.2571, simple_loss=0.3075, pruned_loss=0.1033, over 3857280.83 frames. ], batch size: 217, lr: 1.96e-02, grad_scale: 16.0 +2023-03-27 22:45:16,661 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13737.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:46:57,379 INFO [train.py:892] (2/4) Epoch 8, batch 800, loss[loss=0.329, simple_loss=0.3603, pruned_loss=0.1488, over 19749.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3077, pruned_loss=0.1034, over 3878320.83 frames. 
], batch size: 291, lr: 1.95e-02, grad_scale: 16.0 +2023-03-27 22:47:22,167 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13798.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 22:47:40,986 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 5.653e+02 6.887e+02 8.342e+02 1.511e+03, threshold=1.377e+03, percent-clipped=1.0 +2023-03-27 22:48:41,450 INFO [train.py:892] (2/4) Epoch 8, batch 850, loss[loss=0.246, simple_loss=0.2948, pruned_loss=0.0986, over 19723.00 frames. ], tot_loss[loss=0.2571, simple_loss=0.3074, pruned_loss=0.1034, over 3894173.62 frames. ], batch size: 104, lr: 1.95e-02, grad_scale: 16.0 +2023-03-27 22:48:50,077 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0758, 5.4919, 5.4637, 5.3931, 5.0418, 5.3777, 4.7140, 4.8337], + device='cuda:2'), covar=tensor([0.0383, 0.0343, 0.0541, 0.0425, 0.0600, 0.0605, 0.0662, 0.0907], + device='cuda:2'), in_proj_covar=tensor([0.0170, 0.0161, 0.0213, 0.0173, 0.0165, 0.0160, 0.0191, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-27 22:49:22,954 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-03-27 22:50:25,539 INFO [train.py:892] (2/4) Epoch 8, batch 900, loss[loss=0.2423, simple_loss=0.2992, pruned_loss=0.09277, over 19738.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3062, pruned_loss=0.1023, over 3907131.41 frames. ], batch size: 80, lr: 1.95e-02, grad_scale: 16.0 +2023-03-27 22:51:07,623 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.950e+02 5.711e+02 6.914e+02 8.522e+02 2.139e+03, threshold=1.383e+03, percent-clipped=3.0 +2023-03-27 22:52:07,732 INFO [train.py:892] (2/4) Epoch 8, batch 950, loss[loss=0.2453, simple_loss=0.2949, pruned_loss=0.09785, over 19756.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3073, pruned_loss=0.1027, over 3915584.04 frames. ], batch size: 188, lr: 1.94e-02, grad_scale: 16.0 +2023-03-27 22:52:36,309 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.09 vs. limit=5.0 +2023-03-27 22:53:53,085 INFO [train.py:892] (2/4) Epoch 8, batch 1000, loss[loss=0.2098, simple_loss=0.2778, pruned_loss=0.07087, over 19604.00 frames. ], tot_loss[loss=0.2567, simple_loss=0.3076, pruned_loss=0.1029, over 3923362.46 frames. ], batch size: 44, lr: 1.94e-02, grad_scale: 16.0 +2023-03-27 22:54:38,053 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8871, 2.7237, 3.2552, 2.4556, 3.2102, 2.4799, 2.7670, 3.3514], + device='cuda:2'), covar=tensor([0.0397, 0.0407, 0.0447, 0.0703, 0.0344, 0.0368, 0.0437, 0.0205], + device='cuda:2'), in_proj_covar=tensor([0.0049, 0.0051, 0.0055, 0.0078, 0.0051, 0.0046, 0.0045, 0.0042], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-27 22:54:42,899 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.428e+02 6.416e+02 7.650e+02 1.405e+03, threshold=1.283e+03, percent-clipped=1.0 +2023-03-27 22:55:44,627 INFO [train.py:892] (2/4) Epoch 8, batch 1050, loss[loss=0.2571, simple_loss=0.3142, pruned_loss=0.09994, over 19839.00 frames. ], tot_loss[loss=0.2571, simple_loss=0.3083, pruned_loss=0.1029, over 3930021.89 frames. 
], batch size: 43, lr: 1.94e-02, grad_scale: 16.0 +2023-03-27 22:57:00,403 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14072.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:57:29,674 INFO [train.py:892] (2/4) Epoch 8, batch 1100, loss[loss=0.2263, simple_loss=0.27, pruned_loss=0.0913, over 19777.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3071, pruned_loss=0.1017, over 3934214.80 frames. ], batch size: 168, lr: 1.93e-02, grad_scale: 16.0 +2023-03-27 22:57:43,879 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14093.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 22:58:04,916 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14103.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:58:13,330 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.555e+02 6.523e+02 7.495e+02 1.242e+03, threshold=1.305e+03, percent-clipped=0.0 +2023-03-27 22:58:40,414 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14120.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 22:59:06,751 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14133.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:59:11,548 INFO [train.py:892] (2/4) Epoch 8, batch 1150, loss[loss=0.2704, simple_loss=0.3163, pruned_loss=0.1123, over 19795.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3065, pruned_loss=0.1015, over 3938655.33 frames. ], batch size: 185, lr: 1.93e-02, grad_scale: 16.0 +2023-03-27 22:59:33,948 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-27 23:00:12,803 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14164.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:00:29,107 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-27 23:00:49,208 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14181.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 23:00:58,059 INFO [train.py:892] (2/4) Epoch 8, batch 1200, loss[loss=0.2601, simple_loss=0.3115, pruned_loss=0.1043, over 19810.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3079, pruned_loss=0.1023, over 3940207.30 frames. ], batch size: 167, lr: 1.93e-02, grad_scale: 16.0 +2023-03-27 23:01:41,588 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 5.724e+02 6.994e+02 8.657e+02 1.468e+03, threshold=1.399e+03, percent-clipped=2.0 +2023-03-27 23:02:44,268 INFO [train.py:892] (2/4) Epoch 8, batch 1250, loss[loss=0.2433, simple_loss=0.2992, pruned_loss=0.0937, over 19804.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3067, pruned_loss=0.1017, over 3942702.99 frames. ], batch size: 195, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:03:47,497 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.41 vs. limit=5.0 +2023-03-27 23:04:27,624 INFO [train.py:892] (2/4) Epoch 8, batch 1300, loss[loss=0.2427, simple_loss=0.3138, pruned_loss=0.08584, over 19846.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3076, pruned_loss=0.1017, over 3943022.33 frames. 
], batch size: 56, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:05:14,651 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.149e+02 5.379e+02 6.577e+02 8.584e+02 1.412e+03, threshold=1.315e+03, percent-clipped=1.0 +2023-03-27 23:06:13,947 INFO [train.py:892] (2/4) Epoch 8, batch 1350, loss[loss=0.2738, simple_loss=0.3158, pruned_loss=0.1159, over 19788.00 frames. ], tot_loss[loss=0.2574, simple_loss=0.3088, pruned_loss=0.103, over 3943840.90 frames. ], batch size: 211, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:06:15,147 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3689, 2.3381, 1.2140, 2.6901, 2.4760, 2.6055, 2.8112, 2.2288], + device='cuda:2'), covar=tensor([0.0516, 0.0517, 0.1548, 0.0401, 0.0417, 0.0322, 0.0316, 0.0639], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0100, 0.0120, 0.0104, 0.0090, 0.0084, 0.0097, 0.0110], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:07:58,323 INFO [train.py:892] (2/4) Epoch 8, batch 1400, loss[loss=0.2275, simple_loss=0.2822, pruned_loss=0.08642, over 19818.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3086, pruned_loss=0.1027, over 3944267.89 frames. ], batch size: 123, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:08:13,087 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14393.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:08:40,811 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.328e+02 6.639e+02 8.222e+02 1.941e+03, threshold=1.328e+03, percent-clipped=1.0 +2023-03-27 23:08:52,729 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.77 vs. limit=5.0 +2023-03-27 23:08:56,010 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14413.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:09:24,615 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14428.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:09:31,913 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6843, 3.2087, 4.7004, 3.9350, 4.1673, 4.5028, 4.4823, 4.0565], + device='cuda:2'), covar=tensor([0.0105, 0.0518, 0.0068, 0.0871, 0.0096, 0.0142, 0.0106, 0.0109], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0080, 0.0062, 0.0134, 0.0056, 0.0070, 0.0066, 0.0056], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:09:41,894 INFO [train.py:892] (2/4) Epoch 8, batch 1450, loss[loss=0.2513, simple_loss=0.3082, pruned_loss=0.09718, over 19886.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3066, pruned_loss=0.1015, over 3946352.88 frames. 
], batch size: 77, lr: 1.91e-02, grad_scale: 16.0 +2023-03-27 23:09:52,307 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14441.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:10:23,914 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6424, 3.6882, 2.2691, 4.0247, 3.9543, 1.7045, 3.3075, 3.3787], + device='cuda:2'), covar=tensor([0.0583, 0.0710, 0.2409, 0.0485, 0.0242, 0.3068, 0.0969, 0.0475], + device='cuda:2'), in_proj_covar=tensor([0.0171, 0.0194, 0.0202, 0.0163, 0.0118, 0.0192, 0.0205, 0.0139], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:10:29,186 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14459.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:11:00,031 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14474.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:11:03,491 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14476.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 23:11:24,883 INFO [train.py:892] (2/4) Epoch 8, batch 1500, loss[loss=0.2643, simple_loss=0.3172, pruned_loss=0.1058, over 19856.00 frames. ], tot_loss[loss=0.2575, simple_loss=0.3085, pruned_loss=0.1033, over 3946809.62 frames. ], batch size: 85, lr: 1.91e-02, grad_scale: 32.0 +2023-03-27 23:11:27,487 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14487.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:12:08,353 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 5.447e+02 6.433e+02 7.944e+02 1.401e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-03-27 23:12:10,931 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8856, 4.6509, 4.5298, 5.1040, 4.9593, 5.4748, 5.0065, 5.1702], + device='cuda:2'), covar=tensor([0.0932, 0.0376, 0.0540, 0.0260, 0.0382, 0.0187, 0.0512, 0.0505], + device='cuda:2'), in_proj_covar=tensor([0.0117, 0.0133, 0.0158, 0.0132, 0.0129, 0.0111, 0.0125, 0.0146], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:12:14,672 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.62 vs. limit=5.0 +2023-03-27 23:13:06,129 INFO [train.py:892] (2/4) Epoch 8, batch 1550, loss[loss=0.2183, simple_loss=0.271, pruned_loss=0.08275, over 19777.00 frames. ], tot_loss[loss=0.2573, simple_loss=0.3082, pruned_loss=0.1032, over 3948143.85 frames. ], batch size: 154, lr: 1.91e-02, grad_scale: 16.0 +2023-03-27 23:13:33,927 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14548.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:14:04,044 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.67 vs. limit=2.0 +2023-03-27 23:14:54,498 INFO [train.py:892] (2/4) Epoch 8, batch 1600, loss[loss=0.2387, simple_loss=0.2916, pruned_loss=0.09293, over 19734.00 frames. ], tot_loss[loss=0.2574, simple_loss=0.3085, pruned_loss=0.1031, over 3948387.13 frames. 
], batch size: 63, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:15:42,283 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 5.480e+02 6.486e+02 8.210e+02 1.658e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-03-27 23:16:47,974 INFO [train.py:892] (2/4) Epoch 8, batch 1650, loss[loss=0.2186, simple_loss=0.2792, pruned_loss=0.07901, over 19928.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3063, pruned_loss=0.1019, over 3950159.20 frames. ], batch size: 45, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:18:38,583 INFO [train.py:892] (2/4) Epoch 8, batch 1700, loss[loss=0.2588, simple_loss=0.3124, pruned_loss=0.1026, over 19759.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3085, pruned_loss=0.1037, over 3949281.16 frames. ], batch size: 119, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:18:50,157 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2728, 2.3974, 2.6039, 1.8888, 2.4870, 2.1727, 2.2443, 2.6340], + device='cuda:2'), covar=tensor([0.0481, 0.0304, 0.0352, 0.0755, 0.0383, 0.0367, 0.0436, 0.0208], + device='cuda:2'), in_proj_covar=tensor([0.0050, 0.0051, 0.0053, 0.0079, 0.0051, 0.0045, 0.0046, 0.0041], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-27 23:19:20,506 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4921, 4.0293, 4.0860, 4.5514, 4.1922, 4.6513, 4.6067, 4.7847], + device='cuda:2'), covar=tensor([0.0485, 0.0330, 0.0419, 0.0242, 0.0623, 0.0256, 0.0362, 0.0268], + device='cuda:2'), in_proj_covar=tensor([0.0114, 0.0133, 0.0155, 0.0131, 0.0130, 0.0111, 0.0125, 0.0147], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:19:22,944 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.52 vs. limit=5.0 +2023-03-27 23:19:29,028 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.107e+02 6.150e+02 7.747e+02 1.488e+03, threshold=1.230e+03, percent-clipped=2.0 +2023-03-27 23:20:10,912 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14728.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:20:25,100 INFO [train.py:892] (2/4) Epoch 8, batch 1750, loss[loss=0.2148, simple_loss=0.2623, pruned_loss=0.08367, over 19800.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3066, pruned_loss=0.1025, over 3950061.30 frames. ], batch size: 151, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:20:32,940 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14740.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:20:44,143 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. 
limit=5.0 +2023-03-27 23:21:11,631 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14759.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:29,685 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14769.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:41,282 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14776.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:41,358 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14776.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 23:22:00,370 INFO [train.py:892] (2/4) Epoch 8, batch 1800, loss[loss=0.2445, simple_loss=0.3016, pruned_loss=0.0937, over 19735.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3064, pruned_loss=0.1024, over 3949672.10 frames. ], batch size: 76, lr: 1.89e-02, grad_scale: 16.0 +2023-03-27 23:22:28,464 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14801.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:22:39,841 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14807.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:22:41,054 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.298e+02 5.336e+02 6.514e+02 7.931e+02 1.246e+03, threshold=1.303e+03, percent-clipped=1.0 +2023-03-27 23:23:05,286 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5780, 4.6911, 5.1003, 4.7580, 4.1779, 4.8778, 4.7072, 5.3237], + device='cuda:2'), covar=tensor([0.1111, 0.0327, 0.0329, 0.0331, 0.0556, 0.0332, 0.0350, 0.0230], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0174, 0.0164, 0.0168, 0.0167, 0.0169, 0.0161, 0.0152], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:23:10,737 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14824.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 23:23:30,840 INFO [train.py:892] (2/4) Epoch 8, batch 1850, loss[loss=0.2699, simple_loss=0.3298, pruned_loss=0.105, over 19541.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3071, pruned_loss=0.1014, over 3948297.55 frames. ], batch size: 54, lr: 1.89e-02, grad_scale: 16.0 +2023-03-27 23:24:36,975 INFO [train.py:892] (2/4) Epoch 9, batch 0, loss[loss=0.2267, simple_loss=0.2804, pruned_loss=0.08654, over 19878.00 frames. ], tot_loss[loss=0.2267, simple_loss=0.2804, pruned_loss=0.08654, over 19878.00 frames. ], batch size: 77, lr: 1.79e-02, grad_scale: 16.0 +2023-03-27 23:24:36,975 INFO [train.py:917] (2/4) Computing validation loss +2023-03-27 23:25:11,156 INFO [train.py:926] (2/4) Epoch 9, validation: loss=0.1843, simple_loss=0.2646, pruned_loss=0.05198, over 2883724.00 frames. +2023-03-27 23:25:11,157 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-27 23:25:16,199 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14843.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:26:55,680 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0 +2023-03-27 23:27:06,403 INFO [train.py:892] (2/4) Epoch 9, batch 50, loss[loss=0.2286, simple_loss=0.285, pruned_loss=0.08607, over 19818.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3015, pruned_loss=0.0992, over 888957.88 frames. 
], batch size: 96, lr: 1.79e-02, grad_scale: 16.0 +2023-03-27 23:27:44,393 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.418e+02 5.041e+02 5.916e+02 7.576e+02 1.365e+03, threshold=1.183e+03, percent-clipped=1.0 +2023-03-27 23:29:00,086 INFO [train.py:892] (2/4) Epoch 9, batch 100, loss[loss=0.2273, simple_loss=0.2779, pruned_loss=0.08838, over 19792.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3025, pruned_loss=0.09918, over 1566414.76 frames. ], batch size: 195, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:30:48,416 INFO [train.py:892] (2/4) Epoch 9, batch 150, loss[loss=0.2207, simple_loss=0.2733, pruned_loss=0.08402, over 19804.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.3026, pruned_loss=0.09826, over 2094531.56 frames. ], batch size: 114, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:31:28,933 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 4.885e+02 5.781e+02 7.549e+02 1.487e+03, threshold=1.156e+03, percent-clipped=3.0 +2023-03-27 23:32:44,994 INFO [train.py:892] (2/4) Epoch 9, batch 200, loss[loss=0.2144, simple_loss=0.2734, pruned_loss=0.07765, over 19792.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3017, pruned_loss=0.0968, over 2504943.88 frames. ], batch size: 79, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:33:10,907 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6606, 3.0016, 4.7117, 3.9232, 4.3979, 4.5445, 4.5729, 4.1764], + device='cuda:2'), covar=tensor([0.0123, 0.0619, 0.0073, 0.0953, 0.0096, 0.0163, 0.0108, 0.0126], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0081, 0.0064, 0.0139, 0.0059, 0.0072, 0.0069, 0.0058], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:33:47,995 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15069.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:34:38,155 INFO [train.py:892] (2/4) Epoch 9, batch 250, loss[loss=0.2322, simple_loss=0.292, pruned_loss=0.08622, over 19852.00 frames. ], tot_loss[loss=0.2498, simple_loss=0.3031, pruned_loss=0.09823, over 2824432.03 frames. 
], batch size: 60, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:34:43,007 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2835, 2.2327, 3.3172, 3.6641, 4.0984, 4.5950, 4.3840, 4.4353], + device='cuda:2'), covar=tensor([0.0728, 0.2320, 0.0978, 0.0411, 0.0289, 0.0111, 0.0221, 0.0331], + device='cuda:2'), in_proj_covar=tensor([0.0126, 0.0166, 0.0140, 0.0117, 0.0101, 0.0093, 0.0088, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:34:48,917 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15096.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:35:13,648 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.417e+02 6.236e+02 8.058e+02 1.427e+03, threshold=1.247e+03, percent-clipped=5.0 +2023-03-27 23:35:33,485 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2653, 2.2315, 2.7769, 2.3757, 2.1639, 2.1545, 2.1419, 2.6663], + device='cuda:2'), covar=tensor([0.0218, 0.0301, 0.0195, 0.0243, 0.0329, 0.0368, 0.0451, 0.0215], + device='cuda:2'), in_proj_covar=tensor([0.0041, 0.0041, 0.0042, 0.0036, 0.0044, 0.0043, 0.0057, 0.0040], + device='cuda:2'), out_proj_covar=tensor([9.0622e-05, 9.0960e-05, 9.3036e-05, 8.1312e-05, 9.8177e-05, 9.4917e-05, + 1.2471e-04, 9.1709e-05], device='cuda:2') +2023-03-27 23:35:35,313 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15117.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:36:26,564 INFO [train.py:892] (2/4) Epoch 9, batch 300, loss[loss=0.2504, simple_loss=0.2931, pruned_loss=0.1039, over 19854.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3028, pruned_loss=0.09745, over 3074563.65 frames. ], batch size: 165, lr: 1.77e-02, grad_scale: 16.0 +2023-03-27 23:36:33,448 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15143.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:37:23,366 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6908, 2.7092, 3.1382, 2.4530, 2.9563, 2.3732, 2.6609, 3.0301], + device='cuda:2'), covar=tensor([0.0492, 0.0369, 0.0322, 0.0609, 0.0291, 0.0307, 0.0412, 0.0226], + device='cuda:2'), in_proj_covar=tensor([0.0052, 0.0052, 0.0055, 0.0083, 0.0053, 0.0046, 0.0047, 0.0044], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-27 23:37:37,077 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-27 23:37:54,387 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4302, 4.1744, 4.2982, 4.0265, 4.4645, 3.1698, 3.6895, 2.3498], + device='cuda:2'), covar=tensor([0.0190, 0.0180, 0.0123, 0.0158, 0.0111, 0.0796, 0.0788, 0.1250], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0102, 0.0088, 0.0102, 0.0090, 0.0114, 0.0123, 0.0107], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-27 23:38:17,850 INFO [train.py:892] (2/4) Epoch 9, batch 350, loss[loss=0.2186, simple_loss=0.2896, pruned_loss=0.0738, over 19928.00 frames. ], tot_loss[loss=0.2496, simple_loss=0.303, pruned_loss=0.0981, over 3267783.52 frames. 
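The attn_weights_entropy tensors dumped by zipformer.py:1454 are diagnostics on the attention distributions, with higher values meaning flatter attention, up to the log of the number of source positions. A sketch of how such a per-head statistic can be computed; the exact reduction icefall applies may differ:

import torch

def attn_weights_entropy(attn_weights):
    # attn_weights: (num_heads, tgt_len, src_len), each row a softmax
    # distribution. Returns mean entropy per head; a collapsed (one-hot)
    # head reports ~0, a uniform head reports ~log(src_len).
    eps = 1.0e-20
    entropy = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return entropy.mean(dim=-1)

attn = torch.softmax(torch.randn(8, 10, 50), dim=-1)
print(attn_weights_entropy(attn))   # 8 values, bounded by log(50) ~ 3.9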
], batch size: 49, lr: 1.77e-02, grad_scale: 16.0 +2023-03-27 23:38:18,758 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15191.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:38:27,496 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15195.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:38:52,906 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.594e+02 5.622e+02 6.774e+02 8.741e+02 1.605e+03, threshold=1.355e+03, percent-clipped=2.0 +2023-03-27 23:40:06,026 INFO [train.py:892] (2/4) Epoch 9, batch 400, loss[loss=0.2244, simple_loss=0.2905, pruned_loss=0.0791, over 19673.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3015, pruned_loss=0.09685, over 3419983.71 frames. ], batch size: 52, lr: 1.77e-02, grad_scale: 16.0 +2023-03-27 23:40:33,335 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8631, 2.3154, 1.9112, 1.2385, 1.9105, 2.2588, 2.0491, 2.1458], + device='cuda:2'), covar=tensor([0.0196, 0.0161, 0.0206, 0.0584, 0.0349, 0.0175, 0.0134, 0.0142], + device='cuda:2'), in_proj_covar=tensor([0.0051, 0.0048, 0.0054, 0.0068, 0.0067, 0.0046, 0.0041, 0.0044], + device='cuda:2'), out_proj_covar=tensor([1.1853e-04, 1.1122e-04, 1.2201e-04, 1.5961e-04, 1.5480e-04, 1.0818e-04, + 9.8685e-05, 1.0070e-04], device='cuda:2') +2023-03-27 23:40:40,898 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1343, 3.8519, 4.0099, 3.7524, 4.1430, 3.0377, 3.4000, 2.1404], + device='cuda:2'), covar=tensor([0.0192, 0.0216, 0.0135, 0.0155, 0.0118, 0.0747, 0.0717, 0.1263], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0102, 0.0087, 0.0102, 0.0091, 0.0114, 0.0123, 0.0108], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-27 23:40:40,974 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15256.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 23:40:41,123 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.66 vs. limit=5.0 +2023-03-27 23:41:58,923 INFO [train.py:892] (2/4) Epoch 9, batch 450, loss[loss=0.3331, simple_loss=0.3623, pruned_loss=0.1519, over 19808.00 frames. ], tot_loss[loss=0.2491, simple_loss=0.3026, pruned_loss=0.09776, over 3536799.63 frames. 
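In the batch summaries above, loss[...] covers the current batch while tot_loss[...] is a frame-weighted running average. The frame counts make the averaging rule visible: with roughly 20k subsampled frames per batch, tot_loss's "over ~3.9M frames" matches an exponentially decayed sum whose decay is tied to reset_interval=200 from the parameter dump (20k x 200 ~ 4M at steady state). A sketch under that assumption; the exact update rule in icefall's MetricsTracker may differ in detail:

class DecayingFrameAverage:
    # tot_loss-style aggregate: decay both the weighted loss sum and the
    # frame count by (1 - 1/reset_interval) each batch, then add the new batch.
    def __init__(self, reset_interval=200):
        self.decay = 1.0 - 1.0 / reset_interval
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss, batch_frames):
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frames = self.decay * self.frames + batch_frames

    @property
    def value(self):
        return self.loss_sum / max(self.frames, 1.0)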
], batch size: 288, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:42:37,302 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.167e+02 6.128e+02 7.422e+02 1.689e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-03-27 23:42:45,050 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8114, 2.1846, 3.0106, 3.3067, 3.7907, 4.4281, 4.0852, 4.4088], + device='cuda:2'), covar=tensor([0.0984, 0.2101, 0.1139, 0.0510, 0.0364, 0.0121, 0.0239, 0.0228], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0166, 0.0142, 0.0119, 0.0101, 0.0094, 0.0090, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:43:00,155 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0439, 3.3186, 3.3805, 4.2751, 2.6563, 3.2056, 2.6765, 2.2952], + device='cuda:2'), covar=tensor([0.0501, 0.3124, 0.1017, 0.0232, 0.2450, 0.0671, 0.1319, 0.2351], + device='cuda:2'), in_proj_covar=tensor([0.0179, 0.0324, 0.0211, 0.0135, 0.0229, 0.0158, 0.0188, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0001, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:43:54,452 INFO [train.py:892] (2/4) Epoch 9, batch 500, loss[loss=0.347, simple_loss=0.4112, pruned_loss=0.1414, over 18704.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3016, pruned_loss=0.09693, over 3626371.75 frames. ], batch size: 564, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:45:12,624 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8761, 4.8265, 5.3305, 5.1205, 5.0089, 4.6578, 4.9530, 4.8090], + device='cuda:2'), covar=tensor([0.1245, 0.1172, 0.0790, 0.0854, 0.0738, 0.0887, 0.1630, 0.1901], + device='cuda:2'), in_proj_covar=tensor([0.0234, 0.0221, 0.0277, 0.0212, 0.0209, 0.0204, 0.0274, 0.0305], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-27 23:45:44,795 INFO [train.py:892] (2/4) Epoch 9, batch 550, loss[loss=0.2041, simple_loss=0.2747, pruned_loss=0.06676, over 19949.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3012, pruned_loss=0.09691, over 3698575.56 frames. ], batch size: 46, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:45:56,297 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15396.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:46:26,490 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 5.578e+02 6.357e+02 7.832e+02 1.667e+03, threshold=1.271e+03, percent-clipped=4.0 +2023-03-27 23:47:38,395 INFO [train.py:892] (2/4) Epoch 9, batch 600, loss[loss=0.2968, simple_loss=0.3429, pruned_loss=0.1253, over 19748.00 frames. ], tot_loss[loss=0.2481, simple_loss=0.3019, pruned_loss=0.09719, over 3754418.83 frames. ], batch size: 321, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:47:47,258 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15444.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:48:49,434 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-27 23:49:30,484 INFO [train.py:892] (2/4) Epoch 9, batch 650, loss[loss=0.3199, simple_loss=0.3506, pruned_loss=0.1446, over 19805.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3022, pruned_loss=0.09741, over 3795691.64 frames. 
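The lr column follows icefall's Eden schedule, which decays with both batch index and epoch count using the lr_batches=5000 and lr_epochs=3.5 settings from the parameter dump; plugging in base_lr=0.05 reproduces the values logged here (1.79e-02 near batch 14800 of epoch 9, 1.61e-02 near batch 16700 of epoch 10) when `epoch` counts completed epochs:

def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
    # Eden: two power-law decay factors, one in batches and one in epochs.
    return (base_lr
            * ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
            * ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25)

print(f"{eden_lr(0.05, batch=14800, epoch=8):.3g}")   # ~0.0179, as logged
print(f"{eden_lr(0.05, batch=16700, epoch=9):.3g}")   # ~0.0161, as logged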
], batch size: 288, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:50:08,029 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.095e+02 6.402e+02 7.692e+02 1.403e+03, threshold=1.280e+03, percent-clipped=2.0 +2023-03-27 23:50:34,700 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-27 23:50:53,376 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-27 23:51:23,096 INFO [train.py:892] (2/4) Epoch 9, batch 700, loss[loss=0.2252, simple_loss=0.2896, pruned_loss=0.08046, over 19766.00 frames. ], tot_loss[loss=0.2486, simple_loss=0.3024, pruned_loss=0.09742, over 3826643.70 frames. ], batch size: 49, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:51:45,066 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15551.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 23:52:44,733 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7805, 3.1340, 1.9679, 2.0634, 2.4737, 3.1578, 2.7598, 3.0216], + device='cuda:2'), covar=tensor([0.0179, 0.0246, 0.0272, 0.0464, 0.0284, 0.0218, 0.0206, 0.0200], + device='cuda:2'), in_proj_covar=tensor([0.0053, 0.0049, 0.0056, 0.0069, 0.0069, 0.0047, 0.0042, 0.0045], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-27 23:53:15,946 INFO [train.py:892] (2/4) Epoch 9, batch 750, loss[loss=0.2345, simple_loss=0.2859, pruned_loss=0.09152, over 19737.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.3012, pruned_loss=0.0969, over 3854627.73 frames. ], batch size: 106, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:53:54,114 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 5.119e+02 6.405e+02 7.720e+02 1.158e+03, threshold=1.281e+03, percent-clipped=0.0 +2023-03-27 23:55:10,135 INFO [train.py:892] (2/4) Epoch 9, batch 800, loss[loss=0.2031, simple_loss=0.2658, pruned_loss=0.07015, over 19735.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3001, pruned_loss=0.09602, over 3875892.79 frames. ], batch size: 95, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:56:08,629 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1842, 2.8845, 4.2608, 3.5697, 4.0126, 4.0826, 4.0597, 3.8769], + device='cuda:2'), covar=tensor([0.0149, 0.0569, 0.0078, 0.0960, 0.0095, 0.0196, 0.0122, 0.0103], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0081, 0.0064, 0.0137, 0.0059, 0.0071, 0.0068, 0.0058], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:57:05,116 INFO [train.py:892] (2/4) Epoch 9, batch 850, loss[loss=0.2628, simple_loss=0.3083, pruned_loss=0.1086, over 19768.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3006, pruned_loss=0.09619, over 3891100.48 frames. 
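The scaling.py:679 lines come from the Whiten activation constraint: per-group feature covariances are tested against a whiteness limit (2.0 for the grouped checks, 5.0 for the whole-dimension ones), and a corrective penalty kicks in when the metric exceeds it. One plausible form of such a metric, shown as a sketch since the exact formula in scaling.py may differ:

import torch

def whitening_metric(x, num_groups):
    # Ratio of the mean squared eigenvalue of each group's covariance to the
    # squared mean eigenvalue: exactly 1.0 for white (isotropic) features and
    # growing as the spectrum concentrates, matching readings like
    # "metric=1.99 vs. limit=2.0".
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    cov = x.transpose(1, 2) @ x / n          # (num_groups, cpg, cpg)
    eigs = torch.linalg.eigvalsh(cov)        # symmetric, real eigenvalues
    return ((eigs ** 2).mean() / eigs.mean() ** 2).item()

print(whitening_metric(torch.randn(4000, 192), num_groups=8))  # ~1 for noise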
], batch size: 226, lr: 1.74e-02, grad_scale: 16.0 +2023-03-27 23:57:44,963 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.938e+02 5.306e+02 6.456e+02 8.498e+02 2.233e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-03-27 23:58:12,520 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3394, 1.7471, 2.0547, 1.7053, 2.2826, 2.6025, 2.1300, 2.5304], + device='cuda:2'), covar=tensor([0.0309, 0.0721, 0.0120, 0.0463, 0.0108, 0.0190, 0.0182, 0.0136], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0082, 0.0064, 0.0136, 0.0059, 0.0071, 0.0068, 0.0058], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-27 23:58:58,985 INFO [train.py:892] (2/4) Epoch 9, batch 900, loss[loss=0.2629, simple_loss=0.3083, pruned_loss=0.1088, over 19807.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3002, pruned_loss=0.09615, over 3902517.02 frames. ], batch size: 126, lr: 1.74e-02, grad_scale: 16.0 +2023-03-28 00:00:51,421 INFO [train.py:892] (2/4) Epoch 9, batch 950, loss[loss=0.2503, simple_loss=0.3102, pruned_loss=0.09514, over 19778.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3002, pruned_loss=0.09578, over 3912410.65 frames. ], batch size: 70, lr: 1.74e-02, grad_scale: 16.0 +2023-03-28 00:01:23,024 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4678, 3.3232, 4.8789, 3.8370, 4.3119, 4.6276, 2.5476, 2.7403], + device='cuda:2'), covar=tensor([0.0520, 0.2322, 0.0288, 0.0477, 0.0922, 0.0405, 0.1360, 0.1727], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0317, 0.0236, 0.0199, 0.0306, 0.0221, 0.0251, 0.0241], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 00:01:31,054 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.952e+02 6.049e+02 8.217e+02 1.611e+03, threshold=1.210e+03, percent-clipped=3.0 +2023-03-28 00:02:27,263 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-28 00:02:47,428 INFO [train.py:892] (2/4) Epoch 9, batch 1000, loss[loss=0.2453, simple_loss=0.3189, pruned_loss=0.08586, over 19687.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.2994, pruned_loss=0.09542, over 3920771.19 frames. ], batch size: 56, lr: 1.74e-02, grad_scale: 16.0 +2023-03-28 00:03:10,967 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15851.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:04:17,390 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9567, 4.2672, 4.2580, 4.3637, 3.9739, 4.1601, 3.8655, 3.5681], + device='cuda:2'), covar=tensor([0.1057, 0.0955, 0.1180, 0.0773, 0.1104, 0.1125, 0.1295, 0.2459], + device='cuda:2'), in_proj_covar=tensor([0.0179, 0.0171, 0.0220, 0.0179, 0.0175, 0.0164, 0.0192, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 00:04:24,316 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15883.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:04:41,514 INFO [train.py:892] (2/4) Epoch 9, batch 1050, loss[loss=0.2353, simple_loss=0.2984, pruned_loss=0.08606, over 19692.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3006, pruned_loss=0.09622, over 3926954.70 frames. 
], batch size: 59, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:04:58,485 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15899.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:05:18,402 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.416e+02 5.707e+02 6.582e+02 7.521e+02 1.130e+03, threshold=1.316e+03, percent-clipped=0.0 +2023-03-28 00:06:16,486 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8559, 2.7497, 1.2630, 3.5194, 3.1856, 3.3652, 3.5973, 2.6294], + device='cuda:2'), covar=tensor([0.0543, 0.0517, 0.1991, 0.0395, 0.0371, 0.0332, 0.0376, 0.0742], + device='cuda:2'), in_proj_covar=tensor([0.0112, 0.0107, 0.0126, 0.0112, 0.0096, 0.0090, 0.0105, 0.0114], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 00:06:34,644 INFO [train.py:892] (2/4) Epoch 9, batch 1100, loss[loss=0.2557, simple_loss=0.3016, pruned_loss=0.1049, over 19784.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3009, pruned_loss=0.09656, over 3930797.19 frames. ], batch size: 211, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:06:42,176 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15944.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:08:26,893 INFO [train.py:892] (2/4) Epoch 9, batch 1150, loss[loss=0.2058, simple_loss=0.262, pruned_loss=0.07481, over 19848.00 frames. ], tot_loss[loss=0.245, simple_loss=0.299, pruned_loss=0.09544, over 3936963.89 frames. ], batch size: 112, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:08:54,411 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16001.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:09:09,154 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.244e+02 6.208e+02 7.678e+02 1.349e+03, threshold=1.242e+03, percent-clipped=1.0 +2023-03-28 00:10:22,682 INFO [train.py:892] (2/4) Epoch 9, batch 1200, loss[loss=0.2253, simple_loss=0.2858, pruned_loss=0.08234, over 19887.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.2995, pruned_loss=0.09553, over 3940856.78 frames. ], batch size: 88, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:11:12,439 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16062.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:12:03,786 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16085.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:12:15,718 INFO [train.py:892] (2/4) Epoch 9, batch 1250, loss[loss=0.21, simple_loss=0.2711, pruned_loss=0.07447, over 19854.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.2983, pruned_loss=0.09505, over 3943483.80 frames. ], batch size: 104, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:12:28,334 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-28 00:12:49,934 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.594e+02 5.905e+02 6.787e+02 8.144e+02 1.550e+03, threshold=1.357e+03, percent-clipped=6.0 +2023-03-28 00:14:05,686 INFO [train.py:892] (2/4) Epoch 9, batch 1300, loss[loss=0.2426, simple_loss=0.3017, pruned_loss=0.09178, over 19439.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.2988, pruned_loss=0.09517, over 3944869.37 frames. 
], batch size: 40, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:14:17,760 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16146.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:14:25,347 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16149.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:15:42,906 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3611, 2.6157, 2.7573, 2.3233, 2.0816, 2.3154, 2.3083, 2.6630], + device='cuda:2'), covar=tensor([0.0327, 0.0276, 0.0246, 0.0282, 0.0367, 0.0297, 0.0328, 0.0280], + device='cuda:2'), in_proj_covar=tensor([0.0042, 0.0043, 0.0045, 0.0036, 0.0047, 0.0043, 0.0057, 0.0041], + device='cuda:2'), out_proj_covar=tensor([9.4518e-05, 9.5849e-05, 9.8585e-05, 8.1042e-05, 1.0552e-04, 9.6496e-05, + 1.2585e-04, 9.4678e-05], device='cuda:2') +2023-03-28 00:15:57,820 INFO [train.py:892] (2/4) Epoch 9, batch 1350, loss[loss=0.2374, simple_loss=0.2871, pruned_loss=0.09386, over 19779.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2983, pruned_loss=0.09476, over 3946015.28 frames. ], batch size: 217, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:16:36,009 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.606e+02 5.207e+02 6.312e+02 7.771e+02 1.214e+03, threshold=1.262e+03, percent-clipped=0.0 +2023-03-28 00:16:41,332 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16210.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:17:20,325 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6385, 2.7358, 3.8931, 3.1260, 3.3309, 3.6762, 2.0681, 2.2957], + device='cuda:2'), covar=tensor([0.0676, 0.2594, 0.0441, 0.0547, 0.1029, 0.0595, 0.1519, 0.2033], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0323, 0.0245, 0.0202, 0.0313, 0.0232, 0.0258, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 00:17:50,391 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16239.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:17:53,135 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-28 00:17:53,582 INFO [train.py:892] (2/4) Epoch 9, batch 1400, loss[loss=0.2422, simple_loss=0.2962, pruned_loss=0.09411, over 19872.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.2993, pruned_loss=0.09593, over 3947367.69 frames. ], batch size: 138, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:18:22,877 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. 
limit=2.0 +2023-03-28 00:18:48,205 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9078, 3.0285, 4.4002, 3.2308, 3.7333, 4.0699, 2.3399, 2.4430], + device='cuda:2'), covar=tensor([0.0593, 0.2458, 0.0324, 0.0613, 0.1007, 0.0469, 0.1424, 0.1966], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0323, 0.0245, 0.0203, 0.0312, 0.0232, 0.0259, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 00:19:40,145 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9769, 3.5700, 3.6288, 4.0366, 3.7771, 3.9781, 4.0988, 4.1799], + device='cuda:2'), covar=tensor([0.0615, 0.0432, 0.0554, 0.0312, 0.0569, 0.0393, 0.0390, 0.0309], + device='cuda:2'), in_proj_covar=tensor([0.0121, 0.0141, 0.0165, 0.0137, 0.0136, 0.0118, 0.0127, 0.0156], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 00:19:46,050 INFO [train.py:892] (2/4) Epoch 9, batch 1450, loss[loss=0.2184, simple_loss=0.2825, pruned_loss=0.07719, over 19943.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.2985, pruned_loss=0.09499, over 3948820.86 frames. ], batch size: 46, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:20:26,639 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.895e+02 4.802e+02 5.731e+02 6.907e+02 1.143e+03, threshold=1.146e+03, percent-clipped=0.0 +2023-03-28 00:20:35,633 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0353, 2.1140, 2.1536, 2.0544, 1.9465, 2.1062, 1.9091, 2.1386], + device='cuda:2'), covar=tensor([0.0232, 0.0276, 0.0323, 0.0257, 0.0299, 0.0272, 0.0411, 0.0274], + device='cuda:2'), in_proj_covar=tensor([0.0043, 0.0043, 0.0046, 0.0037, 0.0048, 0.0045, 0.0059, 0.0042], + device='cuda:2'), out_proj_covar=tensor([9.6353e-05, 9.7103e-05, 1.0147e-04, 8.3115e-05, 1.0716e-04, 1.0028e-04, + 1.2925e-04, 9.5685e-05], device='cuda:2') +2023-03-28 00:21:40,549 INFO [train.py:892] (2/4) Epoch 9, batch 1500, loss[loss=0.2268, simple_loss=0.2797, pruned_loss=0.08691, over 19753.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.2987, pruned_loss=0.09506, over 3950146.86 frames. ], batch size: 134, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:22:17,042 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16357.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:22:45,776 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16371.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:23:19,727 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1147, 2.3462, 3.5370, 2.6809, 3.0574, 3.2912, 1.9600, 2.0078], + device='cuda:2'), covar=tensor([0.0921, 0.2967, 0.0498, 0.0705, 0.1289, 0.0737, 0.1589, 0.2263], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0325, 0.0247, 0.0202, 0.0314, 0.0233, 0.0261, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 00:23:27,930 INFO [train.py:892] (2/4) Epoch 9, batch 1550, loss[loss=0.2342, simple_loss=0.2773, pruned_loss=0.09555, over 19817.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.2978, pruned_loss=0.0945, over 3949408.94 frames. 
], batch size: 133, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:24:05,932 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.814e+02 5.273e+02 6.072e+02 7.301e+02 1.702e+03, threshold=1.214e+03, percent-clipped=5.0 +2023-03-28 00:24:28,326 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-28 00:25:00,980 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16432.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:25:19,729 INFO [train.py:892] (2/4) Epoch 9, batch 1600, loss[loss=0.2414, simple_loss=0.3067, pruned_loss=0.08802, over 19588.00 frames. ], tot_loss[loss=0.2448, simple_loss=0.2991, pruned_loss=0.09522, over 3949692.50 frames. ], batch size: 53, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:25:20,724 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16441.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:27:15,080 INFO [train.py:892] (2/4) Epoch 9, batch 1650, loss[loss=0.1948, simple_loss=0.2665, pruned_loss=0.06153, over 19687.00 frames. ], tot_loss[loss=0.2439, simple_loss=0.2983, pruned_loss=0.09475, over 3950261.33 frames. ], batch size: 55, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:27:29,040 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7600, 2.2277, 2.9053, 2.5843, 2.5700, 3.0176, 1.7589, 1.9531], + device='cuda:2'), covar=tensor([0.0754, 0.1919, 0.0483, 0.0542, 0.1186, 0.0561, 0.1468, 0.1792], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0319, 0.0244, 0.0198, 0.0309, 0.0229, 0.0256, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 00:27:46,425 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16505.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:27:52,676 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.351e+02 5.128e+02 6.235e+02 7.643e+02 1.695e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-03-28 00:29:04,241 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16539.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:29:07,566 INFO [train.py:892] (2/4) Epoch 9, batch 1700, loss[loss=0.2532, simple_loss=0.3064, pruned_loss=0.09997, over 19834.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.2988, pruned_loss=0.09521, over 3951471.13 frames. ], batch size: 43, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:30:27,041 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0411, 3.1425, 3.4304, 3.1596, 2.9213, 3.3918, 3.2249, 3.5184], + device='cuda:2'), covar=tensor([0.1167, 0.0380, 0.0418, 0.0398, 0.1444, 0.0507, 0.0375, 0.0354], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0182, 0.0173, 0.0180, 0.0173, 0.0180, 0.0172, 0.0164], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 00:30:46,928 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16587.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:30:53,431 INFO [train.py:892] (2/4) Epoch 9, batch 1750, loss[loss=0.2248, simple_loss=0.292, pruned_loss=0.07878, over 19896.00 frames. ], tot_loss[loss=0.244, simple_loss=0.2985, pruned_loss=0.09477, over 3949851.46 frames. 
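grad_scale in the batch summaries is the AMP loss scale (use_fp16=True in the parameter dump): it sits at 16.0 for long stretches and doubles to 32.0 at batch 1700 above, the signature of dynamic loss scaling that halves on overflow and doubles after a fixed run of finite steps. A sketch of those semantics; PyTorch's GradScaler implements this, and the growth_interval value here is its documented default, assumed to be in effect:

class DynamicLossScale:
    # Halve on overflow/NaN, double after `growth_interval` consecutive good
    # steps; mirrors the grad_scale trajectory in this log (2.0 early, then
    # settling into long runs at 16.0 / 32.0).
    def __init__(self, scale=2.0, growth_interval=2000):
        self.scale = scale
        self.growth_interval = growth_interval
        self._good_steps = 0

    def update(self, found_inf: bool):
        if found_inf:
            self.scale /= 2.0
            self._good_steps = 0
        else:
            self._good_steps += 1
            if self._good_steps == self.growth_interval:
                self.scale *= 2.0
                self._good_steps = 0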
], batch size: 62, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:31:28,583 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.990e+02 5.355e+02 6.431e+02 7.521e+02 1.438e+03, threshold=1.286e+03, percent-clipped=3.0 +2023-03-28 00:32:29,082 INFO [train.py:892] (2/4) Epoch 9, batch 1800, loss[loss=0.2297, simple_loss=0.2918, pruned_loss=0.08382, over 19879.00 frames. ], tot_loss[loss=0.2432, simple_loss=0.2979, pruned_loss=0.09427, over 3950300.75 frames. ], batch size: 77, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:32:57,805 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16657.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:33:25,105 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.7831, 1.9599, 2.1524, 1.8287, 1.8454, 1.7987, 1.8935, 1.9616], + device='cuda:2'), covar=tensor([0.0313, 0.0205, 0.0202, 0.0218, 0.0287, 0.0249, 0.0341, 0.0267], + device='cuda:2'), in_proj_covar=tensor([0.0043, 0.0043, 0.0045, 0.0038, 0.0048, 0.0046, 0.0059, 0.0043], + device='cuda:2'), out_proj_covar=tensor([9.6190e-05, 9.6223e-05, 1.0005e-04, 8.5636e-05, 1.0855e-04, 1.0217e-04, + 1.2917e-04, 9.6836e-05], device='cuda:2') +2023-03-28 00:33:58,385 INFO [train.py:892] (2/4) Epoch 9, batch 1850, loss[loss=0.2725, simple_loss=0.3335, pruned_loss=0.1058, over 19853.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2979, pruned_loss=0.09338, over 3950542.33 frames. ], batch size: 58, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:35:08,033 INFO [train.py:892] (2/4) Epoch 10, batch 0, loss[loss=0.2026, simple_loss=0.2633, pruned_loss=0.07094, over 19809.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2633, pruned_loss=0.07094, over 19809.00 frames. ], batch size: 114, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:35:08,034 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 00:35:42,738 INFO [train.py:926] (2/4) Epoch 10, validation: loss=0.1801, simple_loss=0.2601, pruned_loss=0.05003, over 2883724.00 frames. +2023-03-28 00:35:42,739 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 00:35:46,327 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3118, 4.3757, 4.8006, 4.3401, 4.0775, 4.6649, 4.3852, 4.9503], + device='cuda:2'), covar=tensor([0.0977, 0.0311, 0.0332, 0.0302, 0.0712, 0.0338, 0.0361, 0.0247], + device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0188, 0.0178, 0.0187, 0.0178, 0.0184, 0.0175, 0.0169], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 00:36:04,276 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16705.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:36:11,175 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.977e+02 5.093e+02 5.971e+02 7.550e+02 1.362e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-03-28 00:36:55,597 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16727.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:37:31,408 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16741.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:37:43,761 INFO [train.py:892] (2/4) Epoch 10, batch 50, loss[loss=0.2139, simple_loss=0.2762, pruned_loss=0.07577, over 19791.00 frames. ], tot_loss[loss=0.236, simple_loss=0.291, pruned_loss=0.09046, over 891882.11 frames. 
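The train.py:917/926/927 triplet above marks the start-of-epoch validation pass: the dev set is fixed, which is why every validation line reports the same 2883724.00 frames, and peak device memory is reported alongside. A sketch of that loop, where compute_loss is a hypothetical stand-in for the recipe's loss helper:

import torch

@torch.no_grad()
def run_validation(model, valid_dl, compute_loss, device):
    # Frame-weighted average over the whole dev set, then the running peak of
    # CUDA memory, as in "Maximum memory allocated so far is 22334MB".
    model.eval()
    loss_sum, frames = 0.0, 0.0
    for batch in valid_dl:
        loss, num_frames = compute_loss(model, batch)   # assumed helper
        loss_sum += loss.item() * num_frames
        frames += num_frames
    model.train()
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={loss_sum / frames:.4g}; "
          f"Maximum memory allocated so far is {peak_mb}MB")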
], batch size: 120, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:38:46,003 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2803, 4.6833, 4.9837, 4.6633, 5.1142, 3.0994, 4.0567, 2.4812], + device='cuda:2'), covar=tensor([0.0139, 0.0171, 0.0124, 0.0148, 0.0155, 0.0841, 0.1004, 0.1356], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0106, 0.0093, 0.0105, 0.0095, 0.0113, 0.0128, 0.0109], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 00:39:23,326 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16789.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:39:38,187 INFO [train.py:892] (2/4) Epoch 10, batch 100, loss[loss=0.2504, simple_loss=0.2926, pruned_loss=0.1041, over 19872.00 frames. ], tot_loss[loss=0.238, simple_loss=0.2936, pruned_loss=0.09115, over 1569830.28 frames. ], batch size: 158, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:39:59,336 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16805.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:40:04,287 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.348e+02 6.393e+02 7.632e+02 1.172e+03, threshold=1.279e+03, percent-clipped=0.0 +2023-03-28 00:41:31,410 INFO [train.py:892] (2/4) Epoch 10, batch 150, loss[loss=0.2345, simple_loss=0.3074, pruned_loss=0.08083, over 19901.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.2943, pruned_loss=0.0913, over 2098846.90 frames. ], batch size: 50, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:41:49,412 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16853.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:42:22,969 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16868.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 00:43:24,899 INFO [train.py:892] (2/4) Epoch 10, batch 200, loss[loss=0.2197, simple_loss=0.2763, pruned_loss=0.0816, over 19800.00 frames. ], tot_loss[loss=0.2411, simple_loss=0.2969, pruned_loss=0.09259, over 2508949.12 frames. ], batch size: 67, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:43:47,979 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:43:53,277 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.558e+02 5.040e+02 6.126e+02 7.288e+02 1.628e+03, threshold=1.225e+03, percent-clipped=1.0 +2023-03-28 00:44:01,035 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8618, 3.6672, 3.7886, 3.5436, 3.8517, 2.8725, 3.1554, 1.9642], + device='cuda:2'), covar=tensor([0.0220, 0.0227, 0.0142, 0.0170, 0.0135, 0.0833, 0.0856, 0.1405], + device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0107, 0.0093, 0.0107, 0.0096, 0.0115, 0.0130, 0.0110], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 00:44:01,773 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-28 00:44:41,636 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16929.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 00:45:17,956 INFO [train.py:892] (2/4) Epoch 10, batch 250, loss[loss=0.2196, simple_loss=0.2749, pruned_loss=0.0821, over 19872.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.2958, pruned_loss=0.09177, over 2829078.05 frames. 
], batch size: 138, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:46:06,401 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16967.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 00:47:10,490 INFO [train.py:892] (2/4) Epoch 10, batch 300, loss[loss=0.2071, simple_loss=0.2768, pruned_loss=0.06864, over 19792.00 frames. ], tot_loss[loss=0.24, simple_loss=0.2959, pruned_loss=0.09208, over 3078411.50 frames. ], batch size: 83, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:47:41,233 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 5.102e+02 6.617e+02 8.376e+02 1.348e+03, threshold=1.323e+03, percent-clipped=5.0 +2023-03-28 00:48:08,787 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4419, 3.5486, 3.8474, 3.5304, 3.3893, 3.7619, 3.5373, 3.9448], + device='cuda:2'), covar=tensor([0.1166, 0.0367, 0.0397, 0.0396, 0.1076, 0.0486, 0.0410, 0.0334], + device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0181, 0.0176, 0.0180, 0.0175, 0.0179, 0.0171, 0.0163], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 00:48:21,894 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17027.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:48:45,930 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6536, 2.6758, 1.3371, 3.3103, 2.8954, 3.1803, 3.2674, 2.6573], + device='cuda:2'), covar=tensor([0.0587, 0.0559, 0.1715, 0.0349, 0.0439, 0.0345, 0.0463, 0.0693], + device='cuda:2'), in_proj_covar=tensor([0.0118, 0.0112, 0.0128, 0.0115, 0.0101, 0.0095, 0.0110, 0.0120], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 00:49:09,357 INFO [train.py:892] (2/4) Epoch 10, batch 350, loss[loss=0.3667, simple_loss=0.3997, pruned_loss=0.1668, over 19418.00 frames. ], tot_loss[loss=0.2424, simple_loss=0.2983, pruned_loss=0.09322, over 3268625.38 frames. ], batch size: 412, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:50:14,171 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17075.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:50:59,663 INFO [train.py:892] (2/4) Epoch 10, batch 400, loss[loss=0.2471, simple_loss=0.3049, pruned_loss=0.09466, over 19886.00 frames. ], tot_loss[loss=0.2418, simple_loss=0.2979, pruned_loss=0.09287, over 3419912.42 frames. ], batch size: 84, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:51:28,618 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.864e+02 6.028e+02 7.083e+02 1.226e+03, threshold=1.206e+03, percent-clipped=0.0 +2023-03-28 00:52:51,306 INFO [train.py:892] (2/4) Epoch 10, batch 450, loss[loss=0.2922, simple_loss=0.3426, pruned_loss=0.1209, over 19662.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.2969, pruned_loss=0.09234, over 3537485.13 frames. 
], batch size: 330, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:53:49,241 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2645, 3.5864, 3.5108, 4.3993, 2.8387, 3.4244, 2.8099, 2.3900], + device='cuda:2'), covar=tensor([0.0355, 0.2134, 0.0807, 0.0155, 0.1884, 0.0547, 0.1028, 0.1740], + device='cuda:2'), in_proj_covar=tensor([0.0187, 0.0329, 0.0218, 0.0142, 0.0233, 0.0163, 0.0193, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 00:54:31,034 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17189.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:54:47,720 INFO [train.py:892] (2/4) Epoch 10, batch 500, loss[loss=0.2262, simple_loss=0.2837, pruned_loss=0.08434, over 19828.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.2954, pruned_loss=0.09203, over 3629820.11 frames. ], batch size: 190, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:55:03,550 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.57 vs. limit=5.0 +2023-03-28 00:55:18,930 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.795e+02 5.158e+02 6.137e+02 7.953e+02 1.330e+03, threshold=1.227e+03, percent-clipped=1.0 +2023-03-28 00:55:50,558 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17224.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 00:56:37,092 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-28 00:56:39,660 INFO [train.py:892] (2/4) Epoch 10, batch 550, loss[loss=0.248, simple_loss=0.2949, pruned_loss=0.1006, over 19735.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2964, pruned_loss=0.09314, over 3700698.37 frames. ], batch size: 219, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:56:49,555 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17250.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:57:16,244 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17262.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 00:58:18,395 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4729, 4.4353, 2.7079, 4.7841, 5.0095, 2.0306, 4.1847, 3.4113], + device='cuda:2'), covar=tensor([0.0463, 0.0640, 0.2236, 0.0636, 0.0353, 0.2627, 0.0851, 0.0682], + device='cuda:2'), in_proj_covar=tensor([0.0178, 0.0200, 0.0202, 0.0179, 0.0136, 0.0192, 0.0210, 0.0146], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 00:58:30,090 INFO [train.py:892] (2/4) Epoch 10, batch 600, loss[loss=0.249, simple_loss=0.2919, pruned_loss=0.103, over 19799.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.2963, pruned_loss=0.09246, over 3754072.86 frames. 
], batch size: 174, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:59:00,057 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.688e+02 5.075e+02 6.633e+02 7.997e+02 1.370e+03, threshold=1.327e+03, percent-clipped=3.0 +2023-03-28 00:59:16,943 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8869, 3.3124, 2.1582, 2.1163, 2.5733, 2.9852, 2.9282, 2.9402], + device='cuda:2'), covar=tensor([0.0197, 0.0153, 0.0234, 0.0441, 0.0289, 0.0166, 0.0130, 0.0137], + device='cuda:2'), in_proj_covar=tensor([0.0056, 0.0051, 0.0059, 0.0072, 0.0071, 0.0050, 0.0043, 0.0047], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 01:00:24,292 INFO [train.py:892] (2/4) Epoch 10, batch 650, loss[loss=0.2436, simple_loss=0.2956, pruned_loss=0.09576, over 19852.00 frames. ], tot_loss[loss=0.2403, simple_loss=0.2959, pruned_loss=0.09232, over 3797849.94 frames. ], batch size: 85, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:01:00,996 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17362.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:01:50,424 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17383.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:02:19,153 INFO [train.py:892] (2/4) Epoch 10, batch 700, loss[loss=0.2106, simple_loss=0.2775, pruned_loss=0.07182, over 19735.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.2968, pruned_loss=0.09219, over 3829712.10 frames. ], batch size: 47, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:02:48,229 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.516e+02 5.181e+02 6.486e+02 7.594e+02 1.327e+03, threshold=1.297e+03, percent-clipped=1.0 +2023-03-28 01:03:20,573 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17423.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:04:08,324 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17444.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:04:11,122 INFO [train.py:892] (2/4) Epoch 10, batch 750, loss[loss=0.2216, simple_loss=0.2754, pruned_loss=0.0839, over 19832.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.2964, pruned_loss=0.09168, over 3854435.91 frames. ], batch size: 143, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:05:46,688 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-03-28 01:06:02,434 INFO [train.py:892] (2/4) Epoch 10, batch 800, loss[loss=0.2426, simple_loss=0.2982, pruned_loss=0.09349, over 19856.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.2969, pruned_loss=0.09222, over 3876255.58 frames. ], batch size: 78, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:06:32,362 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 5.370e+02 6.448e+02 7.965e+02 2.022e+03, threshold=1.290e+03, percent-clipped=3.0 +2023-03-28 01:07:07,223 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17524.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:07:53,874 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17545.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:07:54,961 INFO [train.py:892] (2/4) Epoch 10, batch 850, loss[loss=0.2289, simple_loss=0.284, pruned_loss=0.0869, over 19767.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.2969, pruned_loss=0.09219, over 3890010.84 frames. 
], batch size: 130, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:08:34,055 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17562.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:08:53,871 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17572.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:09:27,129 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2026, 2.4790, 2.6562, 2.2682, 2.2091, 2.1896, 2.0621, 2.2708], + device='cuda:2'), covar=tensor([0.0284, 0.0236, 0.0223, 0.0294, 0.0314, 0.0269, 0.0413, 0.0306], + device='cuda:2'), in_proj_covar=tensor([0.0044, 0.0045, 0.0046, 0.0039, 0.0049, 0.0048, 0.0061, 0.0043], + device='cuda:2'), out_proj_covar=tensor([9.7931e-05, 9.9545e-05, 1.0180e-04, 8.8300e-05, 1.1030e-04, 1.0606e-04, + 1.3444e-04, 9.8259e-05], device='cuda:2') +2023-03-28 01:09:47,743 INFO [train.py:892] (2/4) Epoch 10, batch 900, loss[loss=0.2411, simple_loss=0.2928, pruned_loss=0.09477, over 19837.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.2954, pruned_loss=0.0914, over 3904764.07 frames. ], batch size: 128, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:10:17,487 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.484e+02 6.655e+02 8.099e+02 1.732e+03, threshold=1.331e+03, percent-clipped=3.0 +2023-03-28 01:10:20,842 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17610.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:11:28,107 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0603, 2.2267, 2.5361, 2.2710, 2.1267, 2.2140, 2.0373, 2.1779], + device='cuda:2'), covar=tensor([0.0238, 0.0278, 0.0202, 0.0211, 0.0304, 0.0254, 0.0406, 0.0360], + device='cuda:2'), in_proj_covar=tensor([0.0043, 0.0044, 0.0045, 0.0038, 0.0048, 0.0047, 0.0060, 0.0042], + device='cuda:2'), out_proj_covar=tensor([9.6152e-05, 9.7825e-05, 9.9786e-05, 8.6730e-05, 1.0846e-04, 1.0406e-04, + 1.3146e-04, 9.7066e-05], device='cuda:2') +2023-03-28 01:11:39,738 INFO [train.py:892] (2/4) Epoch 10, batch 950, loss[loss=0.2557, simple_loss=0.3078, pruned_loss=0.1019, over 19757.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.295, pruned_loss=0.091, over 3914836.35 frames. ], batch size: 253, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:13:32,345 INFO [train.py:892] (2/4) Epoch 10, batch 1000, loss[loss=0.2429, simple_loss=0.3002, pruned_loss=0.09275, over 19741.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.2947, pruned_loss=0.09125, over 3923100.82 frames. ], batch size: 89, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:13:41,725 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17700.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:14:02,555 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.177e+02 6.007e+02 7.029e+02 1.681e+03, threshold=1.201e+03, percent-clipped=2.0 +2023-03-28 01:14:04,424 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. 
limit=2.0 +2023-03-28 01:14:21,024 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17718.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:14:47,938 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17730.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:15:09,062 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17739.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 01:15:26,011 INFO [train.py:892] (2/4) Epoch 10, batch 1050, loss[loss=0.2376, simple_loss=0.2879, pruned_loss=0.09363, over 19764.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.2954, pruned_loss=0.09138, over 3926352.97 frames. ], batch size: 244, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:15:52,285 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-28 01:15:57,222 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17761.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:16:37,561 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4128, 4.1948, 4.2893, 4.1233, 4.3994, 3.2609, 3.6760, 2.3733], + device='cuda:2'), covar=tensor([0.0214, 0.0178, 0.0118, 0.0145, 0.0116, 0.0692, 0.0717, 0.1160], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0109, 0.0094, 0.0107, 0.0097, 0.0117, 0.0129, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 01:17:06,178 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17791.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:17:10,554 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8859, 3.2196, 4.8205, 4.0249, 4.4210, 4.6701, 4.7161, 4.3015], + device='cuda:2'), covar=tensor([0.0129, 0.0603, 0.0086, 0.1034, 0.0127, 0.0171, 0.0115, 0.0104], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0085, 0.0067, 0.0140, 0.0061, 0.0073, 0.0069, 0.0061], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 01:17:16,098 INFO [train.py:892] (2/4) Epoch 10, batch 1100, loss[loss=0.1853, simple_loss=0.2452, pruned_loss=0.06265, over 19612.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.294, pruned_loss=0.09025, over 3931914.60 frames. ], batch size: 46, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:17:45,686 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.952e+02 5.448e+02 6.504e+02 7.690e+02 1.433e+03, threshold=1.301e+03, percent-clipped=1.0 +2023-03-28 01:19:10,689 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17845.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:19:11,946 INFO [train.py:892] (2/4) Epoch 10, batch 1150, loss[loss=0.2423, simple_loss=0.296, pruned_loss=0.09429, over 19776.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.2959, pruned_loss=0.09198, over 3935185.18 frames. ], batch size: 241, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:19:42,852 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.25 vs. 
limit=5.0 +2023-03-28 01:20:48,246 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9222, 2.9553, 3.9667, 3.5746, 3.7347, 3.9927, 3.8941, 3.7973], + device='cuda:2'), covar=tensor([0.0157, 0.0513, 0.0082, 0.0589, 0.0099, 0.0151, 0.0121, 0.0097], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0086, 0.0067, 0.0140, 0.0061, 0.0073, 0.0070, 0.0061], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 01:20:58,765 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17893.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:20:59,455 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-28 01:21:06,372 INFO [train.py:892] (2/4) Epoch 10, batch 1200, loss[loss=0.2561, simple_loss=0.3161, pruned_loss=0.09808, over 19538.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.2967, pruned_loss=0.09229, over 3937614.56 frames. ], batch size: 54, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:21:34,296 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.688e+02 5.021e+02 6.012e+02 7.162e+02 1.373e+03, threshold=1.202e+03, percent-clipped=1.0 +2023-03-28 01:22:26,210 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7312, 3.4577, 3.5337, 3.8231, 3.6319, 3.7213, 3.8739, 4.0404], + device='cuda:2'), covar=tensor([0.0722, 0.0426, 0.0481, 0.0299, 0.0550, 0.0501, 0.0433, 0.0309], + device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0142, 0.0165, 0.0134, 0.0137, 0.0118, 0.0126, 0.0156], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 01:22:49,084 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9040, 2.7773, 4.0054, 3.5929, 3.8643, 3.9087, 3.9770, 3.7846], + device='cuda:2'), covar=tensor([0.0172, 0.0625, 0.0101, 0.0563, 0.0117, 0.0195, 0.0126, 0.0111], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0085, 0.0067, 0.0140, 0.0061, 0.0073, 0.0070, 0.0061], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 01:22:56,640 INFO [train.py:892] (2/4) Epoch 10, batch 1250, loss[loss=0.1943, simple_loss=0.2553, pruned_loss=0.06667, over 19651.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.2952, pruned_loss=0.09148, over 3941443.44 frames. ], batch size: 43, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:23:11,341 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5677, 2.5584, 1.4543, 3.0800, 2.8001, 2.9593, 3.1022, 2.3671], + device='cuda:2'), covar=tensor([0.0614, 0.0559, 0.1562, 0.0359, 0.0442, 0.0383, 0.0395, 0.0721], + device='cuda:2'), in_proj_covar=tensor([0.0120, 0.0113, 0.0129, 0.0116, 0.0102, 0.0095, 0.0110, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 01:23:30,696 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17960.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:24:49,117 INFO [train.py:892] (2/4) Epoch 10, batch 1300, loss[loss=0.223, simple_loss=0.2776, pruned_loss=0.0842, over 19756.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.2967, pruned_loss=0.09245, over 3943160.29 frames. 
], batch size: 179, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:25:24,405 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.596e+02 5.072e+02 5.807e+02 7.353e+02 1.557e+03, threshold=1.161e+03, percent-clipped=4.0 +2023-03-28 01:25:31,559 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-28 01:25:44,812 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18018.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:25:50,547 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18021.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:26:34,166 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18039.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:26:47,392 INFO [train.py:892] (2/4) Epoch 10, batch 1350, loss[loss=0.2242, simple_loss=0.2826, pruned_loss=0.08288, over 19901.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.2946, pruned_loss=0.0909, over 3945952.80 frames. ], batch size: 91, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:27:11,958 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18056.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:27:12,096 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5295, 1.7010, 2.0544, 2.7070, 3.0831, 3.2582, 3.2037, 3.3214], + device='cuda:2'), covar=tensor([0.0756, 0.1971, 0.1236, 0.0512, 0.0378, 0.0196, 0.0229, 0.0220], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0167, 0.0151, 0.0122, 0.0104, 0.0098, 0.0091, 0.0094], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 01:27:33,200 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18066.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:28:05,051 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0213, 2.9849, 4.1387, 3.5595, 3.7684, 4.1926, 4.1422, 3.7868], + device='cuda:2'), covar=tensor([0.0184, 0.0602, 0.0109, 0.0890, 0.0131, 0.0164, 0.0118, 0.0124], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0086, 0.0069, 0.0142, 0.0062, 0.0075, 0.0071, 0.0062], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 01:28:19,614 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18086.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:28:21,747 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18087.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:28:41,061 INFO [train.py:892] (2/4) Epoch 10, batch 1400, loss[loss=0.1894, simple_loss=0.2528, pruned_loss=0.06301, over 19781.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.294, pruned_loss=0.09062, over 3946701.34 frames. ], batch size: 87, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:29:09,601 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.393e+02 4.780e+02 5.656e+02 7.029e+02 1.296e+03, threshold=1.131e+03, percent-clipped=2.0 +2023-03-28 01:30:37,310 INFO [train.py:892] (2/4) Epoch 10, batch 1450, loss[loss=0.2074, simple_loss=0.2659, pruned_loss=0.07442, over 19857.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.2931, pruned_loss=0.08974, over 3948725.06 frames. 
], batch size: 104, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:31:45,151 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18174.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:31:47,363 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18175.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:32:31,627 INFO [train.py:892] (2/4) Epoch 10, batch 1500, loss[loss=0.2081, simple_loss=0.2703, pruned_loss=0.07297, over 19804.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.2924, pruned_loss=0.08957, over 3949214.17 frames. ], batch size: 117, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:33:02,749 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 5.080e+02 6.023e+02 7.472e+02 1.411e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-03-28 01:34:02,771 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18235.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:34:04,882 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18236.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:34:27,169 INFO [train.py:892] (2/4) Epoch 10, batch 1550, loss[loss=0.2441, simple_loss=0.3101, pruned_loss=0.08904, over 19726.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.2926, pruned_loss=0.08955, over 3948643.54 frames. ], batch size: 50, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:34:49,804 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6620, 4.3217, 4.4176, 4.2742, 4.6002, 3.1625, 3.8323, 2.3049], + device='cuda:2'), covar=tensor([0.0164, 0.0189, 0.0137, 0.0151, 0.0129, 0.0760, 0.0764, 0.1319], + device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0109, 0.0095, 0.0107, 0.0096, 0.0118, 0.0129, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 01:35:20,842 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-03-28 01:36:22,688 INFO [train.py:892] (2/4) Epoch 10, batch 1600, loss[loss=0.2316, simple_loss=0.2884, pruned_loss=0.08738, over 19754.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2919, pruned_loss=0.08929, over 3949619.62 frames. ], batch size: 256, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:36:49,424 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.277e+02 5.238e+02 6.120e+02 7.230e+02 1.238e+03, threshold=1.224e+03, percent-clipped=2.0 +2023-03-28 01:37:03,893 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18316.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:38:12,209 INFO [train.py:892] (2/4) Epoch 10, batch 1650, loss[loss=0.2262, simple_loss=0.2885, pruned_loss=0.08197, over 19654.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.2902, pruned_loss=0.08811, over 3951552.33 frames. ], batch size: 79, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:38:36,367 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18356.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:39:43,878 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18386.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:40:06,513 INFO [train.py:892] (2/4) Epoch 10, batch 1700, loss[loss=0.213, simple_loss=0.2804, pruned_loss=0.07278, over 19553.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.2908, pruned_loss=0.08853, over 3951322.42 frames. 
], batch size: 60, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:40:24,865 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18404.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:40:35,305 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 5.263e+02 6.960e+02 8.795e+02 1.250e+03, threshold=1.392e+03, percent-clipped=4.0 +2023-03-28 01:41:30,293 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18434.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:41:54,344 INFO [train.py:892] (2/4) Epoch 10, batch 1750, loss[loss=0.2734, simple_loss=0.3241, pruned_loss=0.1113, over 19697.00 frames. ], tot_loss[loss=0.234, simple_loss=0.291, pruned_loss=0.08851, over 3950496.86 frames. ], batch size: 283, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:43:20,413 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18490.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:43:26,831 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-28 01:43:30,609 INFO [train.py:892] (2/4) Epoch 10, batch 1800, loss[loss=0.2321, simple_loss=0.2904, pruned_loss=0.08689, over 19648.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2918, pruned_loss=0.08952, over 3950145.70 frames. ], batch size: 69, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:43:31,315 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3599, 4.8382, 5.1109, 4.9100, 5.3122, 3.4484, 4.1442, 3.1076], + device='cuda:2'), covar=tensor([0.0162, 0.0151, 0.0112, 0.0132, 0.0112, 0.0688, 0.0928, 0.1058], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0108, 0.0095, 0.0106, 0.0095, 0.0116, 0.0128, 0.0110], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 01:43:52,065 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18508.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:43:52,988 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.190e+02 4.885e+02 5.842e+02 7.130e+02 1.277e+03, threshold=1.168e+03, percent-clipped=0.0 +2023-03-28 01:43:54,909 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. 
limit=2.0 +2023-03-28 01:44:17,279 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2453, 5.2915, 5.7362, 5.5687, 5.3675, 5.1384, 5.2602, 5.3932], + device='cuda:2'), covar=tensor([0.1288, 0.1262, 0.0936, 0.0897, 0.0728, 0.0771, 0.2000, 0.1888], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0233, 0.0298, 0.0227, 0.0221, 0.0215, 0.0284, 0.0319], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 01:44:27,592 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4046, 5.5275, 5.4533, 5.7122, 5.2715, 5.6056, 4.9051, 4.5573], + device='cuda:2'), covar=tensor([0.0693, 0.1002, 0.1229, 0.0792, 0.1001, 0.0971, 0.1435, 0.2387], + device='cuda:2'), in_proj_covar=tensor([0.0190, 0.0190, 0.0230, 0.0193, 0.0183, 0.0176, 0.0207, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 01:44:31,063 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18530.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:44:32,817 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18531.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:44:58,715 INFO [train.py:892] (2/4) Epoch 10, batch 1850, loss[loss=0.2718, simple_loss=0.3264, pruned_loss=0.1086, over 19663.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.2933, pruned_loss=0.08896, over 3950059.70 frames. ], batch size: 55, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:46:02,463 INFO [train.py:892] (2/4) Epoch 11, batch 0, loss[loss=0.2142, simple_loss=0.282, pruned_loss=0.07319, over 19726.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.282, pruned_loss=0.07319, over 19726.00 frames. ], batch size: 51, lr: 1.47e-02, grad_scale: 16.0 +2023-03-28 01:46:02,463 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 01:46:36,863 INFO [train.py:926] (2/4) Epoch 11, validation: loss=0.1783, simple_loss=0.2585, pruned_loss=0.04909, over 2883724.00 frames. +2023-03-28 01:46:36,864 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 01:46:37,930 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18551.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:47:18,721 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18569.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 01:48:31,656 INFO [train.py:892] (2/4) Epoch 11, batch 50, loss[loss=0.1993, simple_loss=0.259, pruned_loss=0.06981, over 19705.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2789, pruned_loss=0.08208, over 891094.49 frames. 
], batch size: 85, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:48:46,911 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6257, 3.8063, 4.0306, 4.8950, 2.9244, 3.4953, 2.8465, 2.6772], + device='cuda:2'), covar=tensor([0.0369, 0.2256, 0.0782, 0.0173, 0.2248, 0.0836, 0.1243, 0.1842], + device='cuda:2'), in_proj_covar=tensor([0.0192, 0.0332, 0.0221, 0.0142, 0.0237, 0.0170, 0.0195, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 01:48:47,841 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 5.161e+02 6.219e+02 7.148e+02 1.502e+03, threshold=1.244e+03, percent-clipped=1.0 +2023-03-28 01:49:05,524 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18616.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:49:16,584 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0 +2023-03-28 01:50:21,264 INFO [train.py:892] (2/4) Epoch 11, batch 100, loss[loss=0.2294, simple_loss=0.2817, pruned_loss=0.08853, over 19776.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.284, pruned_loss=0.08411, over 1569027.23 frames. ], batch size: 191, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:50:23,209 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-28 01:50:49,850 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18664.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:51:16,028 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18674.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:52:13,645 INFO [train.py:892] (2/4) Epoch 11, batch 150, loss[loss=0.3214, simple_loss=0.3645, pruned_loss=0.1392, over 19622.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.2882, pruned_loss=0.08674, over 2094808.45 frames. ], batch size: 367, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:52:35,339 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 5.324e+02 6.674e+02 8.055e+02 1.622e+03, threshold=1.335e+03, percent-clipped=4.0 +2023-03-28 01:53:31,408 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18735.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:54:09,406 INFO [train.py:892] (2/4) Epoch 11, batch 200, loss[loss=0.2349, simple_loss=0.2802, pruned_loss=0.09478, over 19847.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.2873, pruned_loss=0.08574, over 2507569.19 frames. ], batch size: 144, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:56:02,244 INFO [train.py:892] (2/4) Epoch 11, batch 250, loss[loss=0.2088, simple_loss=0.2759, pruned_loss=0.0709, over 19749.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.2864, pruned_loss=0.08504, over 2827990.23 frames. ], batch size: 97, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:56:18,950 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 4.641e+02 5.456e+02 6.612e+02 1.459e+03, threshold=1.091e+03, percent-clipped=1.0 +2023-03-28 01:56:20,411 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
limit=2.0 +2023-03-28 01:56:35,220 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2477, 2.4887, 3.1756, 2.9866, 3.3696, 3.3286, 4.0190, 4.4616], + device='cuda:2'), covar=tensor([0.0444, 0.1730, 0.1281, 0.1821, 0.1373, 0.1235, 0.0404, 0.0350], + device='cuda:2'), in_proj_covar=tensor([0.0198, 0.0210, 0.0229, 0.0228, 0.0253, 0.0222, 0.0171, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 01:57:08,043 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18830.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:57:10,156 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18831.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:57:44,366 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18846.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:57:56,248 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0 +2023-03-28 01:57:56,661 INFO [train.py:892] (2/4) Epoch 11, batch 300, loss[loss=0.2044, simple_loss=0.2622, pruned_loss=0.07329, over 19857.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.2864, pruned_loss=0.08456, over 3078145.17 frames. ], batch size: 85, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:58:25,344 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18864.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 01:58:56,601 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18878.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:58:58,533 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18879.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:59:48,635 INFO [train.py:892] (2/4) Epoch 11, batch 350, loss[loss=0.2048, simple_loss=0.2631, pruned_loss=0.07328, over 19757.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.2873, pruned_loss=0.0852, over 3270205.37 frames. ], batch size: 205, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:00:05,184 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 5.242e+02 6.180e+02 7.064e+02 1.171e+03, threshold=1.236e+03, percent-clipped=1.0 +2023-03-28 02:00:54,834 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18930.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:01:37,336 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9387, 3.9282, 2.4091, 4.2333, 4.3394, 1.9420, 3.6182, 3.3230], + device='cuda:2'), covar=tensor([0.0543, 0.0765, 0.2435, 0.0605, 0.0310, 0.3015, 0.1019, 0.0676], + device='cuda:2'), in_proj_covar=tensor([0.0189, 0.0213, 0.0208, 0.0192, 0.0150, 0.0198, 0.0220, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 02:01:38,286 INFO [train.py:892] (2/4) Epoch 11, batch 400, loss[loss=0.2071, simple_loss=0.2706, pruned_loss=0.07175, over 19840.00 frames. ], tot_loss[loss=0.2285, simple_loss=0.2869, pruned_loss=0.08503, over 3421163.91 frames. ], batch size: 101, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:03:08,403 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18991.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:03:30,246 INFO [train.py:892] (2/4) Epoch 11, batch 450, loss[loss=0.2027, simple_loss=0.2666, pruned_loss=0.06946, over 19874.00 frames. 
], tot_loss[loss=0.2302, simple_loss=0.2883, pruned_loss=0.08607, over 3538814.99 frames. ], batch size: 84, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:03:47,203 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 4.905e+02 5.923e+02 7.329e+02 1.154e+03, threshold=1.185e+03, percent-clipped=0.0 +2023-03-28 02:04:35,166 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19030.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:05:18,617 INFO [train.py:892] (2/4) Epoch 11, batch 500, loss[loss=0.2444, simple_loss=0.2981, pruned_loss=0.09535, over 19772.00 frames. ], tot_loss[loss=0.231, simple_loss=0.2889, pruned_loss=0.0866, over 3630293.83 frames. ], batch size: 217, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:05:47,662 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-28 02:07:03,777 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.6942, 1.8745, 1.6068, 0.9622, 1.6457, 1.8198, 1.6475, 1.8048], + device='cuda:2'), covar=tensor([0.0219, 0.0169, 0.0187, 0.0464, 0.0307, 0.0168, 0.0167, 0.0147], + device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0056, 0.0064, 0.0076, 0.0077, 0.0052, 0.0048, 0.0050], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 02:07:11,286 INFO [train.py:892] (2/4) Epoch 11, batch 550, loss[loss=0.2024, simple_loss=0.268, pruned_loss=0.0684, over 19585.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.2889, pruned_loss=0.08639, over 3700723.24 frames. ], batch size: 44, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:07:27,683 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 4.889e+02 6.159e+02 7.664e+02 1.397e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-03-28 02:08:16,557 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19129.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 02:08:51,767 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19146.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:09:03,305 INFO [train.py:892] (2/4) Epoch 11, batch 600, loss[loss=0.2644, simple_loss=0.3372, pruned_loss=0.09578, over 19539.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.2896, pruned_loss=0.08667, over 3755786.94 frames. ], batch size: 54, lr: 1.44e-02, grad_scale: 32.0 +2023-03-28 02:09:34,396 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19164.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 02:10:33,482 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19190.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:10:33,569 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1715, 4.6097, 2.4094, 5.0681, 5.1814, 2.1568, 4.1786, 3.3266], + device='cuda:2'), covar=tensor([0.0653, 0.0468, 0.2714, 0.0436, 0.0270, 0.2828, 0.0952, 0.0796], + device='cuda:2'), in_proj_covar=tensor([0.0187, 0.0210, 0.0208, 0.0193, 0.0151, 0.0195, 0.0220, 0.0154], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 02:10:41,233 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19194.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:10:55,570 INFO [train.py:892] (2/4) Epoch 11, batch 650, loss[loss=0.2482, simple_loss=0.3073, pruned_loss=0.09451, over 19705.00 frames. 
], tot_loss[loss=0.231, simple_loss=0.2892, pruned_loss=0.08644, over 3799011.49 frames. ], batch size: 283, lr: 1.44e-02, grad_scale: 32.0 +2023-03-28 02:11:13,813 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.235e+02 5.164e+02 6.078e+02 7.650e+02 1.184e+03, threshold=1.216e+03, percent-clipped=0.0 +2023-03-28 02:11:21,847 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19212.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 02:12:30,573 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4458, 4.0042, 4.0469, 4.4377, 4.0588, 4.4801, 4.4973, 4.6885], + device='cuda:2'), covar=tensor([0.0624, 0.0348, 0.0420, 0.0261, 0.0706, 0.0316, 0.0345, 0.0286], + device='cuda:2'), in_proj_covar=tensor([0.0125, 0.0142, 0.0165, 0.0136, 0.0140, 0.0120, 0.0128, 0.0157], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 02:12:45,825 INFO [train.py:892] (2/4) Epoch 11, batch 700, loss[loss=0.2112, simple_loss=0.2734, pruned_loss=0.07452, over 19775.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2885, pruned_loss=0.08563, over 3833489.63 frames. ], batch size: 108, lr: 1.44e-02, grad_scale: 32.0 +2023-03-28 02:14:07,848 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19286.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:14:41,778 INFO [train.py:892] (2/4) Epoch 11, batch 750, loss[loss=0.2208, simple_loss=0.2743, pruned_loss=0.08362, over 19795.00 frames. ], tot_loss[loss=0.2294, simple_loss=0.2882, pruned_loss=0.08534, over 3859081.25 frames. ], batch size: 185, lr: 1.44e-02, grad_scale: 16.0 +2023-03-28 02:15:01,324 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.572e+02 4.867e+02 5.905e+02 6.947e+02 1.334e+03, threshold=1.181e+03, percent-clipped=1.0 +2023-03-28 02:15:06,597 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2697, 4.3319, 4.7874, 4.3967, 4.0023, 4.5500, 4.3272, 4.8559], + device='cuda:2'), covar=tensor([0.1027, 0.0303, 0.0316, 0.0334, 0.0728, 0.0366, 0.0368, 0.0276], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0183, 0.0179, 0.0188, 0.0178, 0.0185, 0.0179, 0.0171], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 02:15:47,225 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19330.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:16:12,103 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8707, 4.7879, 5.2861, 5.0597, 5.1051, 4.6477, 4.9130, 4.7981], + device='cuda:2'), covar=tensor([0.1516, 0.1238, 0.0948, 0.1228, 0.0811, 0.0934, 0.2061, 0.2186], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0240, 0.0298, 0.0229, 0.0224, 0.0214, 0.0283, 0.0314], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 02:16:18,486 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.5132, 5.0103, 5.2495, 5.0442, 5.4664, 3.5932, 4.3878, 2.9395], + device='cuda:2'), covar=tensor([0.0202, 0.0147, 0.0119, 0.0128, 0.0121, 0.0635, 0.0827, 0.1217], + device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0110, 0.0096, 0.0108, 0.0097, 0.0116, 0.0126, 0.0110], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 02:16:33,245 INFO [train.py:892] (2/4) Epoch 11, batch 
800, loss[loss=0.2266, simple_loss=0.2872, pruned_loss=0.08306, over 19852.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.288, pruned_loss=0.08518, over 3879420.60 frames. ], batch size: 190, lr: 1.44e-02, grad_scale: 16.0 +2023-03-28 02:16:51,720 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8537, 3.0398, 2.7020, 2.1537, 2.8584, 3.2069, 3.0028, 2.8448], + device='cuda:2'), covar=tensor([0.0224, 0.0352, 0.0203, 0.0550, 0.0267, 0.0201, 0.0156, 0.0205], + device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0056, 0.0065, 0.0076, 0.0075, 0.0052, 0.0047, 0.0050], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 02:17:37,520 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19378.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:18:12,958 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19394.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:18:27,074 INFO [train.py:892] (2/4) Epoch 11, batch 850, loss[loss=0.2313, simple_loss=0.2861, pruned_loss=0.08822, over 19836.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.2875, pruned_loss=0.08492, over 3895608.11 frames. ], batch size: 204, lr: 1.44e-02, grad_scale: 16.0 +2023-03-28 02:18:49,365 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.769e+02 4.779e+02 5.853e+02 6.766e+02 1.621e+03, threshold=1.171e+03, percent-clipped=3.0 +2023-03-28 02:20:21,647 INFO [train.py:892] (2/4) Epoch 11, batch 900, loss[loss=0.2229, simple_loss=0.2846, pruned_loss=0.08062, over 19888.00 frames. ], tot_loss[loss=0.2286, simple_loss=0.2876, pruned_loss=0.08477, over 3906648.57 frames. ], batch size: 88, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:20:26,496 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2055, 4.2012, 4.6038, 4.4110, 4.4531, 3.9184, 4.2345, 4.1693], + device='cuda:2'), covar=tensor([0.1409, 0.1469, 0.0938, 0.1134, 0.0962, 0.1160, 0.2125, 0.2222], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0240, 0.0299, 0.0229, 0.0225, 0.0215, 0.0284, 0.0318], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 02:20:30,501 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19455.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:21:39,552 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19485.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:22:13,035 INFO [train.py:892] (2/4) Epoch 11, batch 950, loss[loss=0.2102, simple_loss=0.2644, pruned_loss=0.07798, over 19867.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2875, pruned_loss=0.08511, over 3917659.14 frames. 
], batch size: 136, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:22:30,343 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.330e+02 4.753e+02 5.819e+02 7.309e+02 1.447e+03, threshold=1.164e+03, percent-clipped=3.0 +2023-03-28 02:23:29,371 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3439, 2.4423, 2.5694, 2.4805, 2.2492, 2.2648, 2.1809, 2.4663], + device='cuda:2'), covar=tensor([0.0202, 0.0266, 0.0267, 0.0185, 0.0275, 0.0316, 0.0419, 0.0322], + device='cuda:2'), in_proj_covar=tensor([0.0047, 0.0046, 0.0049, 0.0041, 0.0051, 0.0048, 0.0063, 0.0044], + device='cuda:2'), out_proj_covar=tensor([1.0520e-04, 1.0273e-04, 1.0825e-04, 9.2668e-05, 1.1387e-04, 1.0912e-04, + 1.3923e-04, 1.0004e-04], device='cuda:2') +2023-03-28 02:24:02,913 INFO [train.py:892] (2/4) Epoch 11, batch 1000, loss[loss=0.1962, simple_loss=0.26, pruned_loss=0.0662, over 19856.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2877, pruned_loss=0.08504, over 3923940.05 frames. ], batch size: 112, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:24:27,151 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4665, 2.5278, 3.6970, 2.9310, 3.2337, 3.3785, 1.9994, 2.0817], + device='cuda:2'), covar=tensor([0.0776, 0.2778, 0.0438, 0.0627, 0.1268, 0.0764, 0.1741, 0.2310], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0336, 0.0266, 0.0219, 0.0325, 0.0261, 0.0283, 0.0265], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 02:25:24,872 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19586.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:25:55,158 INFO [train.py:892] (2/4) Epoch 11, batch 1050, loss[loss=0.3982, simple_loss=0.4217, pruned_loss=0.1873, over 19405.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2887, pruned_loss=0.0856, over 3930409.00 frames. ], batch size: 431, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:26:15,116 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.901e+02 5.121e+02 6.164e+02 7.352e+02 1.446e+03, threshold=1.233e+03, percent-clipped=2.0 +2023-03-28 02:26:35,352 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19618.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:27:03,018 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.60 vs. limit=5.0 +2023-03-28 02:27:13,328 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19634.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:27:49,149 INFO [train.py:892] (2/4) Epoch 11, batch 1100, loss[loss=0.2086, simple_loss=0.2738, pruned_loss=0.0717, over 19719.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2886, pruned_loss=0.08538, over 3935367.90 frames. ], batch size: 54, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:28:36,948 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-28 02:28:53,936 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19679.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:29:42,843 INFO [train.py:892] (2/4) Epoch 11, batch 1150, loss[loss=0.1952, simple_loss=0.2618, pruned_loss=0.06432, over 19803.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2887, pruned_loss=0.08577, over 3937921.31 frames. 
], batch size: 86, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:30:05,102 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.048e+02 4.779e+02 5.602e+02 6.982e+02 1.239e+03, threshold=1.120e+03, percent-clipped=1.0 +2023-03-28 02:31:36,153 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19750.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:31:37,392 INFO [train.py:892] (2/4) Epoch 11, batch 1200, loss[loss=0.227, simple_loss=0.2858, pruned_loss=0.08406, over 19800.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2881, pruned_loss=0.08526, over 3940642.82 frames. ], batch size: 200, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:32:55,555 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19785.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 02:33:30,055 INFO [train.py:892] (2/4) Epoch 11, batch 1250, loss[loss=0.1829, simple_loss=0.2476, pruned_loss=0.05906, over 19799.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.2865, pruned_loss=0.08436, over 3943410.15 frames. ], batch size: 107, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:33:49,267 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.235e+02 4.789e+02 5.822e+02 7.202e+02 1.423e+03, threshold=1.164e+03, percent-clipped=5.0 +2023-03-28 02:34:05,115 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.86 vs. limit=5.0 +2023-03-28 02:34:19,018 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5716, 1.8106, 2.5102, 2.8627, 3.3273, 3.4734, 3.4506, 3.5988], + device='cuda:2'), covar=tensor([0.0792, 0.1793, 0.1077, 0.0530, 0.0316, 0.0206, 0.0235, 0.0236], + device='cuda:2'), in_proj_covar=tensor([0.0132, 0.0166, 0.0152, 0.0126, 0.0106, 0.0101, 0.0094, 0.0094], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 02:34:44,025 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19833.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:35:24,443 INFO [train.py:892] (2/4) Epoch 11, batch 1300, loss[loss=0.2062, simple_loss=0.2692, pruned_loss=0.07167, over 19775.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.288, pruned_loss=0.08552, over 3943317.89 frames. ], batch size: 70, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:36:23,485 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8744, 4.9374, 5.3685, 5.1138, 5.1634, 4.6932, 4.9922, 4.8847], + device='cuda:2'), covar=tensor([0.1309, 0.1132, 0.0847, 0.1234, 0.0738, 0.0920, 0.1788, 0.2241], + device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0246, 0.0304, 0.0234, 0.0230, 0.0216, 0.0289, 0.0327], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 02:37:16,640 INFO [train.py:892] (2/4) Epoch 11, batch 1350, loss[loss=0.1999, simple_loss=0.2629, pruned_loss=0.06846, over 19885.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.2868, pruned_loss=0.0845, over 3944664.90 frames. ], batch size: 95, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:37:38,385 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.602e+02 5.196e+02 5.941e+02 7.329e+02 1.053e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-03-28 02:38:13,097 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. 
limit=2.0 +2023-03-28 02:39:10,157 INFO [train.py:892] (2/4) Epoch 11, batch 1400, loss[loss=0.2153, simple_loss=0.2797, pruned_loss=0.0754, over 19842.00 frames. ], tot_loss[loss=0.227, simple_loss=0.2864, pruned_loss=0.08384, over 3945265.63 frames. ], batch size: 59, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:40:05,704 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19974.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:40:08,188 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4663, 4.1558, 4.2537, 4.5718, 4.2035, 4.7182, 4.6051, 4.7808], + device='cuda:2'), covar=tensor([0.0625, 0.0300, 0.0411, 0.0212, 0.0478, 0.0239, 0.0332, 0.0237], + device='cuda:2'), in_proj_covar=tensor([0.0127, 0.0145, 0.0171, 0.0141, 0.0142, 0.0123, 0.0132, 0.0163], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 02:41:09,346 INFO [train.py:892] (2/4) Epoch 11, batch 1450, loss[loss=0.2216, simple_loss=0.2992, pruned_loss=0.07201, over 19674.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.2878, pruned_loss=0.08478, over 3946822.01 frames. ], batch size: 51, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:41:30,118 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.362e+02 4.773e+02 5.638e+02 7.039e+02 1.448e+03, threshold=1.128e+03, percent-clipped=1.0 +2023-03-28 02:41:54,624 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20020.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:42:12,437 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.6050, 5.9441, 5.9458, 5.8810, 5.8235, 5.9191, 5.3433, 5.3618], + device='cuda:2'), covar=tensor([0.0309, 0.0335, 0.0486, 0.0360, 0.0452, 0.0467, 0.0533, 0.0765], + device='cuda:2'), in_proj_covar=tensor([0.0194, 0.0199, 0.0233, 0.0199, 0.0186, 0.0182, 0.0211, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 02:43:00,543 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20050.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:43:01,860 INFO [train.py:892] (2/4) Epoch 11, batch 1500, loss[loss=0.2167, simple_loss=0.2707, pruned_loss=0.08137, over 19834.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.2888, pruned_loss=0.08538, over 3946409.60 frames. ], batch size: 144, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:44:11,227 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20081.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:44:30,170 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20089.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:44:50,401 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20098.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:44:55,658 INFO [train.py:892] (2/4) Epoch 11, batch 1550, loss[loss=0.2419, simple_loss=0.2946, pruned_loss=0.09459, over 19763.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2887, pruned_loss=0.08538, over 3946824.51 frames. 
], batch size: 226, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:45:16,070 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.591e+02 5.026e+02 5.881e+02 6.796e+02 1.345e+03, threshold=1.176e+03, percent-clipped=3.0 +2023-03-28 02:45:21,231 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20112.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:45:36,935 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9807, 5.3724, 5.4146, 5.3779, 5.1597, 5.3890, 4.8010, 4.8849], + device='cuda:2'), covar=tensor([0.0455, 0.0475, 0.0559, 0.0418, 0.0561, 0.0566, 0.0763, 0.0897], + device='cuda:2'), in_proj_covar=tensor([0.0197, 0.0203, 0.0237, 0.0202, 0.0191, 0.0186, 0.0213, 0.0255], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 02:46:25,293 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-03-28 02:46:41,322 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20150.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:46:42,456 INFO [train.py:892] (2/4) Epoch 11, batch 1600, loss[loss=0.2341, simple_loss=0.2872, pruned_loss=0.09056, over 19750.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.2875, pruned_loss=0.08454, over 3948466.00 frames. ], batch size: 250, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:47:01,339 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7304, 4.2984, 4.4225, 4.7444, 4.5077, 4.9034, 4.8688, 5.0478], + device='cuda:2'), covar=tensor([0.0724, 0.0392, 0.0455, 0.0298, 0.0521, 0.0295, 0.0346, 0.0287], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0151, 0.0178, 0.0146, 0.0146, 0.0127, 0.0135, 0.0167], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 02:47:33,472 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20173.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:48:38,435 INFO [train.py:892] (2/4) Epoch 11, batch 1650, loss[loss=0.2103, simple_loss=0.2668, pruned_loss=0.0769, over 19748.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.2892, pruned_loss=0.08573, over 3948528.13 frames. ], batch size: 134, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:48:45,308 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20204.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:48:56,034 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.919e+02 4.977e+02 5.709e+02 7.210e+02 1.480e+03, threshold=1.142e+03, percent-clipped=1.0 +2023-03-28 02:49:32,487 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4291, 2.6406, 3.4784, 2.9702, 3.1539, 3.4254, 2.0205, 2.1943], + device='cuda:2'), covar=tensor([0.0708, 0.2214, 0.0440, 0.0624, 0.1150, 0.0729, 0.1671, 0.1932], + device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0331, 0.0266, 0.0218, 0.0323, 0.0258, 0.0282, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 02:50:27,689 INFO [train.py:892] (2/4) Epoch 11, batch 1700, loss[loss=0.1916, simple_loss=0.258, pruned_loss=0.06258, over 19885.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.2913, pruned_loss=0.08715, over 3948964.60 frames. 
], batch size: 97, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:50:58,720 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20265.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:51:19,711 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20274.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:51:35,552 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7391, 4.6848, 5.1641, 4.9511, 5.0375, 4.3799, 4.7939, 4.7260], + device='cuda:2'), covar=tensor([0.1267, 0.1144, 0.0920, 0.1025, 0.0686, 0.0990, 0.2077, 0.1798], + device='cuda:2'), in_proj_covar=tensor([0.0246, 0.0245, 0.0303, 0.0235, 0.0228, 0.0216, 0.0292, 0.0322], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 02:52:12,821 INFO [train.py:892] (2/4) Epoch 11, batch 1750, loss[loss=0.2494, simple_loss=0.3158, pruned_loss=0.09145, over 19530.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2903, pruned_loss=0.08598, over 3949604.06 frames. ], batch size: 54, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:52:29,858 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.025e+02 5.710e+02 7.275e+02 1.664e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-03-28 02:52:40,312 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.3226, 1.3675, 1.3156, 0.7294, 1.2567, 1.3800, 1.3331, 1.2835], + device='cuda:2'), covar=tensor([0.0236, 0.0199, 0.0238, 0.0486, 0.0409, 0.0187, 0.0186, 0.0174], + device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0057, 0.0065, 0.0075, 0.0076, 0.0052, 0.0047, 0.0049], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 02:52:53,448 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20322.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:53:48,061 INFO [train.py:892] (2/4) Epoch 11, batch 1800, loss[loss=0.1978, simple_loss=0.2537, pruned_loss=0.07096, over 19767.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.2912, pruned_loss=0.08678, over 3947128.56 frames. ], batch size: 102, lr: 1.40e-02, grad_scale: 16.0 +2023-03-28 02:54:13,189 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9459, 3.0752, 4.3377, 3.0456, 3.6582, 3.8940, 2.1176, 2.4318], + device='cuda:2'), covar=tensor([0.0675, 0.2386, 0.0407, 0.0729, 0.1266, 0.0666, 0.1761, 0.2053], + device='cuda:2'), in_proj_covar=tensor([0.0300, 0.0334, 0.0267, 0.0220, 0.0326, 0.0260, 0.0284, 0.0262], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 02:54:35,766 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20376.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:54:57,990 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20388.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:55:18,953 INFO [train.py:892] (2/4) Epoch 11, batch 1850, loss[loss=0.2343, simple_loss=0.3065, pruned_loss=0.08104, over 19835.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.2926, pruned_loss=0.08694, over 3948427.13 frames. ], batch size: 57, lr: 1.40e-02, grad_scale: 16.0 +2023-03-28 02:56:26,118 INFO [train.py:892] (2/4) Epoch 12, batch 0, loss[loss=0.1992, simple_loss=0.2688, pruned_loss=0.06481, over 19727.00 frames. 
], tot_loss[loss=0.1992, simple_loss=0.2688, pruned_loss=0.06481, over 19727.00 frames. ], batch size: 54, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 02:56:26,119 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 02:56:56,308 INFO [train.py:926] (2/4) Epoch 12, validation: loss=0.1761, simple_loss=0.2565, pruned_loss=0.0478, over 2883724.00 frames. +2023-03-28 02:56:56,309 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 02:57:06,914 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.231e+02 6.194e+02 7.302e+02 1.843e+03, threshold=1.239e+03, percent-clipped=4.0 +2023-03-28 02:57:28,133 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20418.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:58:29,437 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20445.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:58:39,849 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20449.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:58:54,028 INFO [train.py:892] (2/4) Epoch 12, batch 50, loss[loss=0.2217, simple_loss=0.2761, pruned_loss=0.08362, over 19900.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2824, pruned_loss=0.08252, over 890831.34 frames. ], batch size: 94, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 02:59:15,985 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1063, 3.1501, 3.5407, 3.2032, 3.0970, 3.4754, 3.3483, 3.5761], + device='cuda:2'), covar=tensor([0.1162, 0.0392, 0.0384, 0.0387, 0.1278, 0.0522, 0.0388, 0.0374], + device='cuda:2'), in_proj_covar=tensor([0.0243, 0.0185, 0.0179, 0.0188, 0.0178, 0.0189, 0.0182, 0.0171], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 02:59:19,976 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20468.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:59:47,802 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20479.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:00:46,540 INFO [train.py:892] (2/4) Epoch 12, batch 100, loss[loss=0.2145, simple_loss=0.2861, pruned_loss=0.07142, over 19671.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2804, pruned_loss=0.07995, over 1569984.51 frames. 
], batch size: 52, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:00:55,044 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.431e+02 4.892e+02 6.215e+02 7.181e+02 1.654e+03, threshold=1.243e+03, percent-clipped=3.0 +2023-03-28 03:02:01,243 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8987, 5.0150, 5.3370, 5.1112, 5.0682, 4.7346, 4.9543, 4.9108], + device='cuda:2'), covar=tensor([0.1387, 0.1073, 0.0865, 0.1050, 0.0814, 0.0866, 0.1952, 0.2008], + device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0247, 0.0305, 0.0236, 0.0228, 0.0217, 0.0294, 0.0326], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 03:02:24,137 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7643, 2.8642, 3.1753, 2.4182, 3.2652, 2.5959, 2.7696, 3.3879], + device='cuda:2'), covar=tensor([0.0504, 0.0383, 0.0439, 0.0706, 0.0232, 0.0308, 0.0359, 0.0183], + device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0060, 0.0063, 0.0091, 0.0057, 0.0056, 0.0054, 0.0049], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 03:02:39,174 INFO [train.py:892] (2/4) Epoch 12, batch 150, loss[loss=0.2196, simple_loss=0.2697, pruned_loss=0.08472, over 19848.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2825, pruned_loss=0.08135, over 2097648.00 frames. ], batch size: 165, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:02:51,618 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20560.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:03:15,637 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4337, 1.7463, 2.2980, 2.7749, 3.1915, 3.2874, 3.2153, 3.3538], + device='cuda:2'), covar=tensor([0.0974, 0.2231, 0.1463, 0.0598, 0.0411, 0.0234, 0.0277, 0.0336], + device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0173, 0.0160, 0.0130, 0.0112, 0.0105, 0.0099, 0.0096], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 03:03:17,585 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5736, 4.9696, 4.9504, 4.9568, 4.6970, 4.9548, 4.4200, 4.4848], + device='cuda:2'), covar=tensor([0.0497, 0.0420, 0.0590, 0.0394, 0.0609, 0.0579, 0.0684, 0.0926], + device='cuda:2'), in_proj_covar=tensor([0.0195, 0.0196, 0.0235, 0.0199, 0.0187, 0.0181, 0.0210, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 03:03:56,767 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20588.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:04:35,671 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2574, 3.1979, 3.3069, 2.5424, 3.6970, 2.9649, 2.8780, 3.7233], + device='cuda:2'), covar=tensor([0.0575, 0.0313, 0.0808, 0.0720, 0.0304, 0.0326, 0.0430, 0.0425], + device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0061, 0.0064, 0.0091, 0.0057, 0.0057, 0.0054, 0.0050], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 03:04:38,602 INFO [train.py:892] (2/4) Epoch 12, batch 200, loss[loss=0.1991, simple_loss=0.2553, pruned_loss=0.0715, over 19814.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2833, pruned_loss=0.08108, over 2508740.26 frames. 
], batch size: 167, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:04:47,253 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 4.571e+02 5.228e+02 6.546e+02 1.569e+03, threshold=1.046e+03, percent-clipped=1.0 +2023-03-28 03:06:25,771 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20649.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:06:39,392 INFO [train.py:892] (2/4) Epoch 12, batch 250, loss[loss=0.2459, simple_loss=0.3158, pruned_loss=0.08795, over 19828.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2842, pruned_loss=0.08146, over 2829609.27 frames. ], batch size: 57, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:06:54,477 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4589, 3.4745, 3.6234, 4.5013, 2.8597, 3.3359, 2.9524, 2.5605], + device='cuda:2'), covar=tensor([0.0415, 0.2989, 0.0960, 0.0238, 0.2337, 0.0815, 0.1191, 0.2134], + device='cuda:2'), in_proj_covar=tensor([0.0198, 0.0331, 0.0223, 0.0150, 0.0238, 0.0174, 0.0194, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 03:07:24,947 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20676.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:08:09,799 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5504, 1.8140, 2.7224, 1.9211, 2.5583, 2.7836, 2.5236, 2.7210], + device='cuda:2'), covar=tensor([0.0342, 0.0871, 0.0154, 0.0569, 0.0115, 0.0239, 0.0219, 0.0158], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0089, 0.0070, 0.0142, 0.0063, 0.0079, 0.0072, 0.0063], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 03:08:16,296 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8529, 2.8768, 1.5582, 3.4928, 3.3112, 3.5298, 3.4936, 2.6494], + device='cuda:2'), covar=tensor([0.0603, 0.0523, 0.1602, 0.0412, 0.0397, 0.0284, 0.0468, 0.0662], + device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0118, 0.0131, 0.0122, 0.0106, 0.0099, 0.0117, 0.0124], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 03:08:30,769 INFO [train.py:892] (2/4) Epoch 12, batch 300, loss[loss=0.2118, simple_loss=0.2814, pruned_loss=0.07109, over 19663.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2847, pruned_loss=0.08149, over 3075348.91 frames. ], batch size: 58, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:08:41,177 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.455e+02 4.985e+02 6.229e+02 8.000e+02 1.241e+03, threshold=1.246e+03, percent-clipped=1.0 +2023-03-28 03:09:12,850 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20724.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:09:57,927 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20744.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:10:01,809 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20745.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:10:24,870 INFO [train.py:892] (2/4) Epoch 12, batch 350, loss[loss=0.2141, simple_loss=0.273, pruned_loss=0.07757, over 19773.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2836, pruned_loss=0.0808, over 3269899.26 frames. 
], batch size: 182, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:10:52,502 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20768.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:11:06,487 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20774.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:11:52,622 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20793.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:12:20,231 INFO [train.py:892] (2/4) Epoch 12, batch 400, loss[loss=0.2201, simple_loss=0.2797, pruned_loss=0.0803, over 19764.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2825, pruned_loss=0.08017, over 3421907.73 frames. ], batch size: 49, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:12:28,476 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.077e+02 4.871e+02 5.546e+02 6.425e+02 9.713e+02, threshold=1.109e+03, percent-clipped=0.0 +2023-03-28 03:12:42,785 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20816.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:14:09,999 INFO [train.py:892] (2/4) Epoch 12, batch 450, loss[loss=0.2072, simple_loss=0.2548, pruned_loss=0.07978, over 19872.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2828, pruned_loss=0.08059, over 3540216.63 frames. ], batch size: 134, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:14:21,492 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20860.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:14:26,029 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1704, 2.1820, 2.2705, 2.1919, 1.9949, 2.1972, 2.1199, 2.3846], + device='cuda:2'), covar=tensor([0.0160, 0.0242, 0.0195, 0.0180, 0.0259, 0.0200, 0.0314, 0.0214], + device='cuda:2'), in_proj_covar=tensor([0.0047, 0.0048, 0.0048, 0.0043, 0.0053, 0.0049, 0.0064, 0.0044], + device='cuda:2'), out_proj_covar=tensor([1.0595e-04, 1.0668e-04, 1.0846e-04, 9.6875e-05, 1.1901e-04, 1.1003e-04, + 1.4071e-04, 1.0102e-04], device='cuda:2') +2023-03-28 03:14:28,100 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8168, 2.0561, 1.8273, 1.1857, 1.9216, 1.9992, 1.9717, 1.9896], + device='cuda:2'), covar=tensor([0.0243, 0.0175, 0.0206, 0.0482, 0.0325, 0.0158, 0.0157, 0.0162], + device='cuda:2'), in_proj_covar=tensor([0.0063, 0.0059, 0.0067, 0.0077, 0.0078, 0.0052, 0.0048, 0.0051], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 03:16:03,411 INFO [train.py:892] (2/4) Epoch 12, batch 500, loss[loss=0.3144, simple_loss=0.3902, pruned_loss=0.1193, over 18725.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2848, pruned_loss=0.0821, over 3629724.82 frames. 
], batch size: 564, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:16:08,876 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20908.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:16:11,906 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 4.795e+02 5.889e+02 7.056e+02 1.371e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-03-28 03:17:15,941 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3040, 2.1861, 3.5064, 3.0456, 3.4357, 3.5073, 3.4866, 3.4659], + device='cuda:2'), covar=tensor([0.0224, 0.0768, 0.0086, 0.0489, 0.0095, 0.0178, 0.0130, 0.0117], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0089, 0.0070, 0.0141, 0.0064, 0.0078, 0.0073, 0.0065], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 03:17:30,609 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20944.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:17:57,133 INFO [train.py:892] (2/4) Epoch 12, batch 550, loss[loss=0.1962, simple_loss=0.2547, pruned_loss=0.06891, over 19821.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.2856, pruned_loss=0.0828, over 3701506.85 frames. ], batch size: 123, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:18:36,023 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2850, 5.6312, 5.8955, 5.6854, 5.4428, 5.3607, 5.5440, 5.4111], + device='cuda:2'), covar=tensor([0.1159, 0.0925, 0.0821, 0.0959, 0.0595, 0.0662, 0.1928, 0.1816], + device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0248, 0.0302, 0.0236, 0.0223, 0.0218, 0.0295, 0.0329], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 03:19:51,745 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0 +2023-03-28 03:19:54,072 INFO [train.py:892] (2/4) Epoch 12, batch 600, loss[loss=0.2412, simple_loss=0.2963, pruned_loss=0.09303, over 19794.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2853, pruned_loss=0.08217, over 3755814.84 frames. ], batch size: 247, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:20:01,789 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.833e+02 4.713e+02 5.422e+02 6.814e+02 1.308e+03, threshold=1.084e+03, percent-clipped=1.0 +2023-03-28 03:21:20,923 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21044.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:21:31,417 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6307, 2.5365, 1.6982, 2.5852, 2.7683, 1.1941, 2.3471, 2.0503], + device='cuda:2'), covar=tensor([0.0877, 0.0709, 0.2424, 0.0634, 0.0405, 0.2401, 0.0957, 0.0858], + device='cuda:2'), in_proj_covar=tensor([0.0195, 0.0217, 0.0209, 0.0202, 0.0159, 0.0198, 0.0221, 0.0158], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 03:21:37,348 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21052.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:21:45,739 INFO [train.py:892] (2/4) Epoch 12, batch 650, loss[loss=0.2254, simple_loss=0.2885, pruned_loss=0.08117, over 19723.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2855, pruned_loss=0.08277, over 3797112.98 frames. 
], batch size: 61, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:22:27,948 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21074.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:23:09,948 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21092.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:23:29,225 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.36 vs. limit=5.0 +2023-03-28 03:23:39,587 INFO [train.py:892] (2/4) Epoch 12, batch 700, loss[loss=0.2517, simple_loss=0.2983, pruned_loss=0.1026, over 19843.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.2865, pruned_loss=0.08304, over 3829405.84 frames. ], batch size: 208, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:23:49,579 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.058e+02 5.405e+02 6.727e+02 8.130e+02 1.465e+03, threshold=1.345e+03, percent-clipped=5.0 +2023-03-28 03:23:56,874 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21113.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:24:07,605 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21118.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:24:18,045 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21122.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:25:33,701 INFO [train.py:892] (2/4) Epoch 12, batch 750, loss[loss=0.2059, simple_loss=0.263, pruned_loss=0.07441, over 19780.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.2863, pruned_loss=0.08316, over 3856875.44 frames. ], batch size: 168, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:26:29,244 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21179.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:27:28,536 INFO [train.py:892] (2/4) Epoch 12, batch 800, loss[loss=0.2133, simple_loss=0.2849, pruned_loss=0.07081, over 19875.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.2875, pruned_loss=0.08369, over 3876875.95 frames. ], batch size: 53, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:27:36,365 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0726, 4.1222, 2.3046, 4.5288, 4.5778, 1.8708, 3.7712, 3.3794], + device='cuda:2'), covar=tensor([0.0595, 0.0694, 0.2759, 0.0514, 0.0281, 0.3126, 0.0961, 0.0673], + device='cuda:2'), in_proj_covar=tensor([0.0191, 0.0213, 0.0208, 0.0200, 0.0158, 0.0193, 0.0218, 0.0157], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 03:27:37,185 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 4.727e+02 5.705e+02 6.809e+02 1.528e+03, threshold=1.141e+03, percent-clipped=1.0 +2023-03-28 03:28:41,424 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-28 03:28:41,608 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-28 03:28:55,910 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21244.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:29:23,762 INFO [train.py:892] (2/4) Epoch 12, batch 850, loss[loss=0.2134, simple_loss=0.2608, pruned_loss=0.08303, over 19820.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.2869, pruned_loss=0.08288, over 3891185.09 frames. 
], batch size: 147, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:29:44,589 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-28 03:30:32,673 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5571, 3.5889, 2.1133, 3.9516, 3.9913, 1.6697, 3.2071, 2.9878], + device='cuda:2'), covar=tensor([0.0722, 0.0981, 0.2740, 0.0700, 0.0386, 0.3000, 0.1173, 0.0769], + device='cuda:2'), in_proj_covar=tensor([0.0191, 0.0212, 0.0208, 0.0200, 0.0159, 0.0193, 0.0219, 0.0156], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 03:30:45,257 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21292.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:31:14,740 INFO [train.py:892] (2/4) Epoch 12, batch 900, loss[loss=0.2228, simple_loss=0.2885, pruned_loss=0.07851, over 19793.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2849, pruned_loss=0.08142, over 3903010.25 frames. ], batch size: 79, lr: 1.32e-02, grad_scale: 32.0 +2023-03-28 03:31:22,631 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.874e+02 4.590e+02 5.772e+02 7.040e+02 1.378e+03, threshold=1.154e+03, percent-clipped=2.0 +2023-03-28 03:31:27,852 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21312.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:32:07,120 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21328.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:33:10,619 INFO [train.py:892] (2/4) Epoch 12, batch 950, loss[loss=0.2282, simple_loss=0.2781, pruned_loss=0.08916, over 19721.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.2858, pruned_loss=0.08202, over 3911846.65 frames. ], batch size: 219, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:33:48,279 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21373.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:34:26,565 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21389.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:35:02,719 INFO [train.py:892] (2/4) Epoch 12, batch 1000, loss[loss=0.1994, simple_loss=0.258, pruned_loss=0.07043, over 19828.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2838, pruned_loss=0.08107, over 3921246.57 frames. ], batch size: 127, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:35:08,962 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21408.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:35:14,101 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.713e+02 5.169e+02 6.246e+02 8.325e+02 1.505e+03, threshold=1.249e+03, percent-clipped=4.0 +2023-03-28 03:35:33,479 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0291, 2.0821, 2.2280, 2.1793, 2.0231, 2.0113, 2.0322, 2.2124], + device='cuda:2'), covar=tensor([0.0234, 0.0232, 0.0230, 0.0202, 0.0326, 0.0291, 0.0345, 0.0315], + device='cuda:2'), in_proj_covar=tensor([0.0049, 0.0048, 0.0049, 0.0043, 0.0054, 0.0050, 0.0065, 0.0046], + device='cuda:2'), out_proj_covar=tensor([1.0862e-04, 1.0666e-04, 1.0982e-04, 9.7197e-05, 1.2173e-04, 1.1301e-04, + 1.4312e-04, 1.0377e-04], device='cuda:2') +2023-03-28 03:36:53,860 INFO [train.py:892] (2/4) Epoch 12, batch 1050, loss[loss=0.2216, simple_loss=0.2917, pruned_loss=0.07578, over 19799.00 frames. 
], tot_loss[loss=0.2215, simple_loss=0.2831, pruned_loss=0.07999, over 3926527.69 frames. ], batch size: 67, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:37:04,382 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.64 vs. limit=5.0 +2023-03-28 03:37:32,708 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21474.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:37:37,081 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7593, 2.9648, 2.7278, 2.1023, 2.6313, 2.8430, 2.8782, 2.8375], + device='cuda:2'), covar=tensor([0.0206, 0.0275, 0.0214, 0.0508, 0.0324, 0.0215, 0.0153, 0.0160], + device='cuda:2'), in_proj_covar=tensor([0.0063, 0.0059, 0.0068, 0.0078, 0.0079, 0.0054, 0.0049, 0.0052], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 03:38:44,216 INFO [train.py:892] (2/4) Epoch 12, batch 1100, loss[loss=0.2209, simple_loss=0.2871, pruned_loss=0.07734, over 19671.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2833, pruned_loss=0.08005, over 3931147.30 frames. ], batch size: 73, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:38:56,789 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.523e+02 4.626e+02 5.286e+02 6.822e+02 1.206e+03, threshold=1.057e+03, percent-clipped=0.0 +2023-03-28 03:39:51,178 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5969, 2.5162, 2.7658, 2.6816, 3.0557, 3.0267, 3.3752, 3.7706], + device='cuda:2'), covar=tensor([0.0583, 0.1584, 0.1610, 0.1754, 0.1541, 0.1337, 0.0535, 0.0444], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0220, 0.0241, 0.0236, 0.0265, 0.0231, 0.0184, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 03:40:01,900 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5656, 2.8560, 2.4491, 1.8749, 2.5456, 2.6966, 2.6267, 2.7328], + device='cuda:2'), covar=tensor([0.0192, 0.0204, 0.0214, 0.0521, 0.0308, 0.0207, 0.0188, 0.0165], + device='cuda:2'), in_proj_covar=tensor([0.0063, 0.0059, 0.0068, 0.0077, 0.0078, 0.0054, 0.0049, 0.0052], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 03:40:37,755 INFO [train.py:892] (2/4) Epoch 12, batch 1150, loss[loss=0.2016, simple_loss=0.2644, pruned_loss=0.06942, over 19774.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2818, pruned_loss=0.07943, over 3936138.42 frames. ], batch size: 46, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:42:06,819 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21595.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:42:32,682 INFO [train.py:892] (2/4) Epoch 12, batch 1200, loss[loss=0.2618, simple_loss=0.3177, pruned_loss=0.1029, over 19782.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2831, pruned_loss=0.0798, over 3939239.80 frames. 
], batch size: 321, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:42:43,490 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.726e+02 4.687e+02 5.841e+02 7.196e+02 1.649e+03, threshold=1.168e+03, percent-clipped=4.0 +2023-03-28 03:42:48,504 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21613.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:44:28,820 INFO [train.py:892] (2/4) Epoch 12, batch 1250, loss[loss=0.3015, simple_loss=0.3527, pruned_loss=0.1251, over 19603.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2833, pruned_loss=0.08042, over 3941621.57 frames. ], batch size: 387, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:44:29,860 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21656.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:44:54,445 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21668.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:45:08,147 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21674.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:45:32,118 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21684.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:45:41,040 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5431, 3.4502, 4.9401, 3.6813, 4.2049, 4.3291, 2.4694, 2.8494], + device='cuda:2'), covar=tensor([0.0557, 0.2304, 0.0317, 0.0752, 0.1173, 0.0735, 0.1746, 0.1921], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0335, 0.0270, 0.0225, 0.0329, 0.0270, 0.0289, 0.0268], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 03:46:19,253 INFO [train.py:892] (2/4) Epoch 12, batch 1300, loss[loss=0.2116, simple_loss=0.2771, pruned_loss=0.07305, over 19807.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2833, pruned_loss=0.08057, over 3944291.81 frames. ], batch size: 117, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:46:24,444 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21708.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:46:31,047 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 4.739e+02 5.550e+02 6.963e+02 1.418e+03, threshold=1.110e+03, percent-clipped=2.0 +2023-03-28 03:48:12,966 INFO [train.py:892] (2/4) Epoch 12, batch 1350, loss[loss=0.2157, simple_loss=0.2802, pruned_loss=0.07558, over 19815.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2824, pruned_loss=0.07999, over 3947312.22 frames. 
], batch size: 50, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:48:13,697 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21756.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:48:27,745 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21763.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:48:35,796 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8067, 3.8061, 2.2756, 4.1898, 4.2797, 1.7472, 3.3783, 3.1535], + device='cuda:2'), covar=tensor([0.0670, 0.0934, 0.2656, 0.0656, 0.0427, 0.3076, 0.1059, 0.0746], + device='cuda:2'), in_proj_covar=tensor([0.0194, 0.0218, 0.0211, 0.0206, 0.0166, 0.0196, 0.0222, 0.0160], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 03:48:52,728 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21774.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:50:04,164 INFO [train.py:892] (2/4) Epoch 12, batch 1400, loss[loss=0.3121, simple_loss=0.3551, pruned_loss=0.1346, over 19625.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2809, pruned_loss=0.07903, over 3948386.47 frames. ], batch size: 359, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:50:16,316 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 4.274e+02 5.050e+02 6.143e+02 1.134e+03, threshold=1.010e+03, percent-clipped=1.0 +2023-03-28 03:50:40,270 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21822.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:50:47,644 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21824.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:51:44,409 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8748, 5.1758, 5.1823, 5.3208, 4.7485, 5.1813, 4.6860, 4.3082], + device='cuda:2'), covar=tensor([0.0792, 0.1104, 0.1015, 0.0753, 0.1091, 0.1097, 0.1469, 0.2336], + device='cuda:2'), in_proj_covar=tensor([0.0199, 0.0205, 0.0243, 0.0206, 0.0194, 0.0191, 0.0220, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 03:51:44,525 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21850.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:51:55,130 INFO [train.py:892] (2/4) Epoch 12, batch 1450, loss[loss=0.2321, simple_loss=0.2868, pruned_loss=0.0887, over 19667.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2832, pruned_loss=0.08028, over 3947412.05 frames. ], batch size: 73, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:52:43,123 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1391, 3.7512, 3.7910, 4.1415, 3.7314, 4.1124, 4.2580, 4.3943], + device='cuda:2'), covar=tensor([0.0625, 0.0380, 0.0534, 0.0289, 0.0788, 0.0416, 0.0390, 0.0278], + device='cuda:2'), in_proj_covar=tensor([0.0127, 0.0146, 0.0171, 0.0143, 0.0146, 0.0129, 0.0131, 0.0165], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 03:53:47,749 INFO [train.py:892] (2/4) Epoch 12, batch 1500, loss[loss=0.2072, simple_loss=0.2705, pruned_loss=0.07191, over 19782.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2835, pruned_loss=0.08044, over 3946948.46 frames. 
], batch size: 66, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:53:58,747 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.501e+02 4.832e+02 5.724e+02 6.820e+02 1.583e+03, threshold=1.145e+03, percent-clipped=5.0 +2023-03-28 03:53:59,764 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21911.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:55:31,517 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9044, 4.2547, 4.2652, 4.2009, 3.9731, 4.2335, 3.7875, 3.8372], + device='cuda:2'), covar=tensor([0.0509, 0.0504, 0.0590, 0.0469, 0.0635, 0.0588, 0.0631, 0.0991], + device='cuda:2'), in_proj_covar=tensor([0.0196, 0.0204, 0.0238, 0.0204, 0.0192, 0.0187, 0.0216, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 03:55:31,522 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21951.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:55:42,230 INFO [train.py:892] (2/4) Epoch 12, batch 1550, loss[loss=0.1975, simple_loss=0.2714, pruned_loss=0.06182, over 19856.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2837, pruned_loss=0.08065, over 3948524.80 frames. ], batch size: 56, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:56:10,770 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21968.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:56:12,893 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21969.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:56:15,226 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21970.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:56:17,924 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9963, 5.3786, 5.4316, 5.3388, 5.0875, 5.3772, 4.8022, 4.8755], + device='cuda:2'), covar=tensor([0.0369, 0.0439, 0.0450, 0.0359, 0.0524, 0.0538, 0.0590, 0.0794], + device='cuda:2'), in_proj_covar=tensor([0.0196, 0.0205, 0.0239, 0.0204, 0.0193, 0.0189, 0.0216, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 03:56:49,419 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21984.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:57:20,856 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1369, 2.6627, 4.3155, 3.7022, 4.1035, 4.2402, 4.2047, 4.1653], + device='cuda:2'), covar=tensor([0.0168, 0.0712, 0.0099, 0.0876, 0.0106, 0.0178, 0.0123, 0.0094], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0090, 0.0072, 0.0141, 0.0065, 0.0079, 0.0073, 0.0064], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 03:57:43,201 INFO [train.py:892] (2/4) Epoch 12, batch 1600, loss[loss=0.2034, simple_loss=0.2667, pruned_loss=0.07003, over 19793.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2822, pruned_loss=0.07948, over 3950035.29 frames. 
], batch size: 168, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:57:54,067 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.885e+02 4.412e+02 5.565e+02 6.803e+02 1.208e+03, threshold=1.113e+03, percent-clipped=1.0 +2023-03-28 03:58:08,641 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22016.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:58:40,469 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22031.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:58:41,953 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22032.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:59:24,247 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.23 vs. limit=5.0 +2023-03-28 03:59:37,625 INFO [train.py:892] (2/4) Epoch 12, batch 1650, loss[loss=0.2177, simple_loss=0.2716, pruned_loss=0.08187, over 19738.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2825, pruned_loss=0.07973, over 3948667.32 frames. ], batch size: 179, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:01:33,559 INFO [train.py:892] (2/4) Epoch 12, batch 1700, loss[loss=0.222, simple_loss=0.2821, pruned_loss=0.08097, over 19854.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2828, pruned_loss=0.08026, over 3949259.15 frames. ], batch size: 104, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:01:45,050 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.459e+02 5.514e+02 6.707e+02 1.132e+03, threshold=1.103e+03, percent-clipped=1.0 +2023-03-28 04:01:52,227 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2603, 2.2709, 2.4474, 2.2728, 2.2485, 2.2729, 2.3299, 2.5304], + device='cuda:2'), covar=tensor([0.0206, 0.0244, 0.0262, 0.0259, 0.0243, 0.0243, 0.0282, 0.0287], + device='cuda:2'), in_proj_covar=tensor([0.0047, 0.0048, 0.0049, 0.0043, 0.0054, 0.0050, 0.0065, 0.0044], + device='cuda:2'), out_proj_covar=tensor([1.0540e-04, 1.0638e-04, 1.0968e-04, 9.8965e-05, 1.2123e-04, 1.1187e-04, + 1.4411e-04, 1.0026e-04], device='cuda:2') +2023-03-28 04:02:01,847 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22119.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:02:07,651 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-28 04:02:26,671 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22130.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:03:17,832 INFO [train.py:892] (2/4) Epoch 12, batch 1750, loss[loss=0.2414, simple_loss=0.296, pruned_loss=0.09343, over 19772.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.2825, pruned_loss=0.08007, over 3948443.10 frames. ], batch size: 198, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:03:56,011 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22176.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:04:22,215 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22191.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:04:47,345 INFO [train.py:892] (2/4) Epoch 12, batch 1800, loss[loss=0.2144, simple_loss=0.2719, pruned_loss=0.07847, over 19779.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2835, pruned_loss=0.0802, over 3948851.13 frames. 
], batch size: 163, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:04:47,865 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22206.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:04:55,762 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.946e+02 4.603e+02 5.856e+02 6.792e+02 1.518e+03, threshold=1.171e+03, percent-clipped=5.0 +2023-03-28 04:05:03,066 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22215.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:05:37,906 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9939, 2.7004, 3.0996, 2.8976, 3.2486, 3.1868, 3.8511, 4.1436], + device='cuda:2'), covar=tensor([0.0537, 0.1641, 0.1470, 0.1998, 0.1579, 0.1358, 0.0481, 0.0482], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0221, 0.0241, 0.0240, 0.0267, 0.0233, 0.0185, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 04:05:41,092 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:05:41,149 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:06:04,135 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22251.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:06:11,636 INFO [train.py:892] (2/4) Epoch 12, batch 1850, loss[loss=0.2367, simple_loss=0.3049, pruned_loss=0.08421, over 19833.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2843, pruned_loss=0.07944, over 3948369.27 frames. ], batch size: 57, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:07:10,135 INFO [train.py:892] (2/4) Epoch 13, batch 0, loss[loss=0.2057, simple_loss=0.2722, pruned_loss=0.06959, over 19945.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2722, pruned_loss=0.06959, over 19945.00 frames. ], batch size: 46, lr: 1.24e-02, grad_scale: 16.0 +2023-03-28 04:07:10,136 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 04:07:38,663 INFO [train.py:926] (2/4) Epoch 13, validation: loss=0.1745, simple_loss=0.2543, pruned_loss=0.04732, over 2883724.00 frames. 
+2023-03-28 04:07:38,664 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 04:07:56,664 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22269.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:07:59,081 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0990, 5.4691, 5.4943, 5.4117, 5.1362, 5.3886, 4.8529, 4.8824], + device='cuda:2'), covar=tensor([0.0405, 0.0456, 0.0509, 0.0402, 0.0658, 0.0600, 0.0655, 0.0860], + device='cuda:2'), in_proj_covar=tensor([0.0202, 0.0209, 0.0244, 0.0209, 0.0194, 0.0192, 0.0219, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 04:08:13,802 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22276.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:09:03,381 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22298.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:09:05,476 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22299.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:09:32,911 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.888e+02 4.546e+02 5.479e+02 6.348e+02 1.428e+03, threshold=1.096e+03, percent-clipped=1.0 +2023-03-28 04:09:32,935 INFO [train.py:892] (2/4) Epoch 13, batch 50, loss[loss=0.1847, simple_loss=0.2468, pruned_loss=0.06127, over 19842.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2787, pruned_loss=0.07749, over 889430.77 frames. ], batch size: 109, lr: 1.24e-02, grad_scale: 16.0 +2023-03-28 04:09:46,031 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22317.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:10:02,992 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8939, 2.1962, 2.8313, 3.2950, 3.7864, 4.0181, 4.0800, 4.1853], + device='cuda:2'), covar=tensor([0.0810, 0.1855, 0.1203, 0.0477, 0.0317, 0.0182, 0.0268, 0.0332], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0166, 0.0158, 0.0129, 0.0111, 0.0105, 0.0097, 0.0094], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 04:10:08,860 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22326.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:11:26,808 INFO [train.py:892] (2/4) Epoch 13, batch 100, loss[loss=0.263, simple_loss=0.3184, pruned_loss=0.1038, over 19700.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2806, pruned_loss=0.07758, over 1567955.39 frames. ], batch size: 315, lr: 1.24e-02, grad_scale: 16.0 +2023-03-28 04:12:24,551 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22385.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:13:22,675 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.479e+02 4.716e+02 5.729e+02 6.753e+02 1.100e+03, threshold=1.146e+03, percent-clipped=1.0 +2023-03-28 04:13:22,701 INFO [train.py:892] (2/4) Epoch 13, batch 150, loss[loss=0.2468, simple_loss=0.3034, pruned_loss=0.09509, over 19707.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2773, pruned_loss=0.07571, over 2096553.01 frames. 
], batch size: 265, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:13:38,262 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6105, 3.8173, 3.7873, 4.9811, 2.9387, 3.5360, 3.0118, 2.7719], + device='cuda:2'), covar=tensor([0.0395, 0.2283, 0.0896, 0.0186, 0.2179, 0.0807, 0.1059, 0.1808], + device='cuda:2'), in_proj_covar=tensor([0.0204, 0.0336, 0.0231, 0.0158, 0.0241, 0.0178, 0.0199, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 04:13:40,128 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22419.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:14:42,100 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22446.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:15:13,593 INFO [train.py:892] (2/4) Epoch 13, batch 200, loss[loss=0.2542, simple_loss=0.3138, pruned_loss=0.09729, over 19696.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2796, pruned_loss=0.07666, over 2508254.12 frames. ], batch size: 305, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:15:25,045 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2634, 2.9116, 4.5300, 3.9136, 4.3333, 4.4082, 4.3323, 4.2605], + device='cuda:2'), covar=tensor([0.0245, 0.0725, 0.0100, 0.0780, 0.0110, 0.0218, 0.0139, 0.0110], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0088, 0.0071, 0.0139, 0.0065, 0.0079, 0.0074, 0.0064], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 04:15:26,972 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22467.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:16:09,409 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22486.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:16:14,755 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-28 04:16:56,081 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22506.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:17:04,713 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.872e+02 4.579e+02 5.437e+02 6.895e+02 1.289e+03, threshold=1.087e+03, percent-clipped=2.0 +2023-03-28 04:17:04,746 INFO [train.py:892] (2/4) Epoch 13, batch 250, loss[loss=0.2015, simple_loss=0.2554, pruned_loss=0.07385, over 19795.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2792, pruned_loss=0.07683, over 2828037.05 frames. 
], batch size: 151, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:17:55,799 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22532.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:18:46,252 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22554.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:18:58,038 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6180, 2.5772, 2.8638, 2.6087, 2.4041, 2.5541, 2.3926, 2.9701], + device='cuda:2'), covar=tensor([0.0232, 0.0253, 0.0205, 0.0219, 0.0232, 0.0253, 0.0325, 0.0178], + device='cuda:2'), in_proj_covar=tensor([0.0049, 0.0048, 0.0049, 0.0045, 0.0055, 0.0050, 0.0067, 0.0044], + device='cuda:2'), out_proj_covar=tensor([1.0902e-04, 1.0665e-04, 1.1036e-04, 1.0167e-04, 1.2326e-04, 1.1305e-04, + 1.4761e-04, 9.9505e-05], device='cuda:2') +2023-03-28 04:19:01,076 INFO [train.py:892] (2/4) Epoch 13, batch 300, loss[loss=0.1882, simple_loss=0.2507, pruned_loss=0.06281, over 19836.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2798, pruned_loss=0.07701, over 3076002.95 frames. ], batch size: 171, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:19:15,607 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-28 04:19:22,905 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22571.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:20:04,976 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6424, 4.6974, 5.1807, 4.7105, 4.1680, 4.9192, 4.8470, 5.3050], + device='cuda:2'), covar=tensor([0.0963, 0.0316, 0.0303, 0.0315, 0.0775, 0.0371, 0.0344, 0.0242], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0189, 0.0185, 0.0197, 0.0184, 0.0194, 0.0190, 0.0176], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 04:20:14,540 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22593.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:20:52,589 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 4.855e+02 5.580e+02 6.636e+02 1.112e+03, threshold=1.116e+03, percent-clipped=2.0 +2023-03-28 04:20:52,610 INFO [train.py:892] (2/4) Epoch 13, batch 350, loss[loss=0.1863, simple_loss=0.2576, pruned_loss=0.05751, over 19586.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2785, pruned_loss=0.07654, over 3270325.98 frames. ], batch size: 44, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:21:24,695 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22626.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:21:52,260 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22639.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:22:35,980 INFO [train.py:892] (2/4) Epoch 13, batch 400, loss[loss=0.2132, simple_loss=0.2803, pruned_loss=0.07309, over 19884.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2793, pruned_loss=0.07681, over 3420527.62 frames. 
], batch size: 62, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:23:05,957 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22674.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:24:00,952 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22700.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 04:24:25,242 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.862e+02 4.701e+02 5.537e+02 6.698e+02 1.146e+03, threshold=1.107e+03, percent-clipped=1.0 +2023-03-28 04:24:25,265 INFO [train.py:892] (2/4) Epoch 13, batch 450, loss[loss=0.1817, simple_loss=0.2393, pruned_loss=0.062, over 19704.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2794, pruned_loss=0.0771, over 3536293.23 frames. ], batch size: 46, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:25:36,264 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22741.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:26:04,564 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-28 04:26:18,711 INFO [train.py:892] (2/4) Epoch 13, batch 500, loss[loss=0.279, simple_loss=0.3305, pruned_loss=0.1138, over 19650.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.279, pruned_loss=0.07709, over 3628692.29 frames. ], batch size: 330, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:27:17,119 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22786.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:27:33,403 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22794.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:28:05,127 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1350, 5.3308, 5.6808, 5.3707, 5.3276, 4.8789, 5.2705, 5.0830], + device='cuda:2'), covar=tensor([0.1291, 0.1056, 0.0865, 0.1145, 0.0743, 0.0928, 0.2180, 0.2114], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0251, 0.0309, 0.0237, 0.0227, 0.0223, 0.0297, 0.0327], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 04:28:12,208 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 4.675e+02 5.713e+02 7.210e+02 1.341e+03, threshold=1.143e+03, percent-clipped=1.0 +2023-03-28 04:28:12,240 INFO [train.py:892] (2/4) Epoch 13, batch 550, loss[loss=0.1861, simple_loss=0.2545, pruned_loss=0.05885, over 19863.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2781, pruned_loss=0.07697, over 3701405.40 frames. ], batch size: 46, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:28:45,144 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22825.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:29:01,520 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22832.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:29:06,126 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22834.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:29:54,855 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22855.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:30:05,687 INFO [train.py:892] (2/4) Epoch 13, batch 600, loss[loss=0.1938, simple_loss=0.2611, pruned_loss=0.06327, over 19794.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2788, pruned_loss=0.07769, over 3757373.79 frames. 
], batch size: 73, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:30:27,405 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22871.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:30:37,887 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.7794, 5.9999, 5.9285, 5.9868, 5.6716, 5.9498, 5.2641, 4.9601], + device='cuda:2'), covar=tensor([0.0536, 0.0639, 0.0733, 0.0589, 0.0745, 0.0727, 0.1245, 0.2023], + device='cuda:2'), in_proj_covar=tensor([0.0206, 0.0209, 0.0245, 0.0208, 0.0200, 0.0192, 0.0221, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 04:30:47,647 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22880.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:31:02,393 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22886.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:31:18,104 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22893.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:31:56,321 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-28 04:31:58,586 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.701e+02 5.475e+02 6.591e+02 1.011e+03, threshold=1.095e+03, percent-clipped=0.0 +2023-03-28 04:31:58,614 INFO [train.py:892] (2/4) Epoch 13, batch 650, loss[loss=0.2354, simple_loss=0.3012, pruned_loss=0.08474, over 19769.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2801, pruned_loss=0.07851, over 3798823.47 frames. ], batch size: 113, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:32:16,546 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22919.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:33:06,211 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22941.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:33:50,892 INFO [train.py:892] (2/4) Epoch 13, batch 700, loss[loss=0.2835, simple_loss=0.338, pruned_loss=0.1145, over 19722.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2791, pruned_loss=0.07799, over 3833761.62 frames. ], batch size: 291, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:35:06,080 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22995.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 04:35:40,964 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 4.675e+02 5.708e+02 7.114e+02 1.590e+03, threshold=1.142e+03, percent-clipped=5.0 +2023-03-28 04:35:40,998 INFO [train.py:892] (2/4) Epoch 13, batch 750, loss[loss=0.1995, simple_loss=0.2571, pruned_loss=0.07098, over 19850.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2785, pruned_loss=0.07794, over 3860495.45 frames. 
], batch size: 137, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:36:03,826 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9590, 5.2970, 5.2943, 5.2622, 4.9808, 5.2181, 4.7820, 4.8339], + device='cuda:2'), covar=tensor([0.0388, 0.0385, 0.0483, 0.0382, 0.0557, 0.0538, 0.0643, 0.0845], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0210, 0.0245, 0.0209, 0.0202, 0.0193, 0.0223, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 04:36:49,838 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23041.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:37:34,778 INFO [train.py:892] (2/4) Epoch 13, batch 800, loss[loss=0.2517, simple_loss=0.3138, pruned_loss=0.0948, over 19742.00 frames. ], tot_loss[loss=0.2176, simple_loss=0.2794, pruned_loss=0.07792, over 3879904.33 frames. ], batch size: 221, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:38:38,897 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23089.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:39:23,179 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23108.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:39:28,913 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.768e+02 5.306e+02 6.241e+02 1.399e+03, threshold=1.061e+03, percent-clipped=1.0 +2023-03-28 04:39:28,942 INFO [train.py:892] (2/4) Epoch 13, batch 850, loss[loss=0.2084, simple_loss=0.2662, pruned_loss=0.07524, over 19868.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2799, pruned_loss=0.07846, over 3894833.61 frames. ], batch size: 136, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:39:53,102 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1658, 2.1480, 2.4172, 2.2660, 2.6024, 2.5952, 3.0975, 3.2236], + device='cuda:2'), covar=tensor([0.0608, 0.1640, 0.1462, 0.2161, 0.1539, 0.1371, 0.0514, 0.0526], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0221, 0.0239, 0.0238, 0.0265, 0.0231, 0.0186, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 04:40:56,344 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23150.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:41:21,503 INFO [train.py:892] (2/4) Epoch 13, batch 900, loss[loss=0.1989, simple_loss=0.2698, pruned_loss=0.06404, over 19869.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2801, pruned_loss=0.07858, over 3907451.10 frames. 
], batch size: 84, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:41:39,578 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23169.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:42:05,294 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3204, 3.2567, 4.6369, 3.4419, 3.8830, 3.8259, 2.4143, 2.6443], + device='cuda:2'), covar=tensor([0.0569, 0.2532, 0.0379, 0.0711, 0.1223, 0.0891, 0.1765, 0.2116], + device='cuda:2'), in_proj_covar=tensor([0.0312, 0.0347, 0.0278, 0.0227, 0.0336, 0.0278, 0.0300, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 04:42:09,357 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23181.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:42:15,887 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-28 04:42:17,543 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6488, 4.3294, 4.3527, 4.7071, 4.2421, 4.8367, 4.8157, 4.9570], + device='cuda:2'), covar=tensor([0.0700, 0.0360, 0.0473, 0.0290, 0.0754, 0.0399, 0.0460, 0.0346], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0149, 0.0173, 0.0144, 0.0147, 0.0129, 0.0134, 0.0167], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 04:43:01,562 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.5577, 5.8679, 6.2164, 5.9406, 5.7378, 5.6179, 5.7648, 5.6964], + device='cuda:2'), covar=tensor([0.1281, 0.1114, 0.0755, 0.0938, 0.0627, 0.0692, 0.1722, 0.1901], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0258, 0.0312, 0.0239, 0.0231, 0.0225, 0.0303, 0.0335], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 04:43:15,286 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.371e+02 4.496e+02 5.311e+02 6.592e+02 1.240e+03, threshold=1.062e+03, percent-clipped=1.0 +2023-03-28 04:43:15,315 INFO [train.py:892] (2/4) Epoch 13, batch 950, loss[loss=0.1805, simple_loss=0.2527, pruned_loss=0.05418, over 19554.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2805, pruned_loss=0.07841, over 3914252.56 frames. ], batch size: 47, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:43:28,251 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23217.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:43:40,737 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2762, 1.6289, 1.8712, 2.4594, 2.7009, 2.7778, 2.7840, 2.8961], + device='cuda:2'), covar=tensor([0.0965, 0.2098, 0.1438, 0.0697, 0.0486, 0.0353, 0.0341, 0.0377], + device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0169, 0.0160, 0.0133, 0.0114, 0.0107, 0.0099, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 04:43:53,089 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23229.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:45:02,583 INFO [train.py:892] (2/4) Epoch 13, batch 1000, loss[loss=0.2167, simple_loss=0.2864, pruned_loss=0.07353, over 19793.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2792, pruned_loss=0.07778, over 3922846.53 frames. 
], batch size: 79, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:45:39,868 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23278.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:46:09,980 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23290.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:46:20,735 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23295.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:46:55,460 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 4.704e+02 5.526e+02 6.494e+02 9.984e+02, threshold=1.105e+03, percent-clipped=0.0 +2023-03-28 04:46:55,486 INFO [train.py:892] (2/4) Epoch 13, batch 1050, loss[loss=0.2156, simple_loss=0.2888, pruned_loss=0.07118, over 19832.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2796, pruned_loss=0.0783, over 3929986.45 frames. ], batch size: 57, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:48:08,221 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23343.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:48:49,795 INFO [train.py:892] (2/4) Epoch 13, batch 1100, loss[loss=0.1796, simple_loss=0.244, pruned_loss=0.05757, over 19754.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.2798, pruned_loss=0.0782, over 3934094.07 frames. ], batch size: 110, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:48:55,877 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3714, 3.3800, 3.6964, 3.3636, 3.1717, 3.6107, 3.3483, 3.7679], + device='cuda:2'), covar=tensor([0.1004, 0.0376, 0.0370, 0.0424, 0.1365, 0.0558, 0.0518, 0.0368], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0191, 0.0186, 0.0197, 0.0185, 0.0195, 0.0191, 0.0178], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 04:50:43,307 INFO [train.py:892] (2/4) Epoch 13, batch 1150, loss[loss=0.2281, simple_loss=0.2891, pruned_loss=0.0836, over 19780.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2813, pruned_loss=0.0791, over 3936319.88 frames. ], batch size: 215, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:50:45,464 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.703e+02 4.708e+02 5.742e+02 7.091e+02 1.301e+03, threshold=1.148e+03, percent-clipped=2.0 +2023-03-28 04:52:12,660 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23450.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:52:34,817 INFO [train.py:892] (2/4) Epoch 13, batch 1200, loss[loss=0.2235, simple_loss=0.2833, pruned_loss=0.08185, over 19774.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2812, pruned_loss=0.07866, over 3939716.94 frames. 
], batch size: 263, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:52:42,258 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23464.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:53:21,252 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23481.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:53:39,062 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23488.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:53:58,273 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23498.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:54:26,405 INFO [train.py:892] (2/4) Epoch 13, batch 1250, loss[loss=0.165, simple_loss=0.2356, pruned_loss=0.04721, over 19789.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2797, pruned_loss=0.07793, over 3941297.96 frames. ], batch size: 83, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:54:28,503 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.878e+02 4.638e+02 5.638e+02 7.017e+02 1.329e+03, threshold=1.128e+03, percent-clipped=4.0 +2023-03-28 04:55:03,135 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23529.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:55:48,741 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23549.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 04:55:59,034 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8881, 2.0511, 2.0938, 2.1452, 1.9775, 2.0990, 2.0124, 2.3165], + device='cuda:2'), covar=tensor([0.0331, 0.0222, 0.0239, 0.0187, 0.0296, 0.0212, 0.0344, 0.0132], + device='cuda:2'), in_proj_covar=tensor([0.0051, 0.0049, 0.0052, 0.0045, 0.0057, 0.0051, 0.0069, 0.0046], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 04:56:12,801 INFO [train.py:892] (2/4) Epoch 13, batch 1300, loss[loss=0.246, simple_loss=0.3045, pruned_loss=0.09373, over 19836.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2799, pruned_loss=0.07737, over 3942191.97 frames. ], batch size: 177, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:56:39,785 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23573.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:57:02,956 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-03-28 04:57:08,837 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23585.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:57:56,719 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6707, 2.0236, 2.4847, 2.9267, 3.4973, 3.5789, 3.6115, 3.7370], + device='cuda:2'), covar=tensor([0.0787, 0.1881, 0.1180, 0.0580, 0.0336, 0.0234, 0.0238, 0.0244], + device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0169, 0.0161, 0.0131, 0.0114, 0.0108, 0.0099, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 04:58:05,886 INFO [train.py:892] (2/4) Epoch 13, batch 1350, loss[loss=0.2262, simple_loss=0.2873, pruned_loss=0.08257, over 19774.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2787, pruned_loss=0.07627, over 3944502.97 frames. 
], batch size: 213, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 04:58:08,083 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.191e+02 4.401e+02 5.493e+02 6.790e+02 1.400e+03, threshold=1.099e+03, percent-clipped=0.0 +2023-03-28 04:59:59,155 INFO [train.py:892] (2/4) Epoch 13, batch 1400, loss[loss=0.2484, simple_loss=0.3101, pruned_loss=0.09336, over 19702.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2789, pruned_loss=0.0768, over 3945356.60 frames. ], batch size: 315, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:01:47,974 INFO [train.py:892] (2/4) Epoch 13, batch 1450, loss[loss=0.1889, simple_loss=0.2499, pruned_loss=0.06395, over 19612.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.28, pruned_loss=0.07739, over 3946617.36 frames. ], batch size: 46, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:01:50,147 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+02 4.743e+02 5.551e+02 6.654e+02 1.229e+03, threshold=1.110e+03, percent-clipped=4.0 +2023-03-28 05:03:37,024 INFO [train.py:892] (2/4) Epoch 13, batch 1500, loss[loss=0.2019, simple_loss=0.274, pruned_loss=0.06494, over 19787.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2782, pruned_loss=0.07663, over 3948381.39 frames. ], batch size: 40, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:03:45,788 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23764.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:04:54,357 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2498, 5.6584, 5.8145, 5.6826, 5.5341, 5.2368, 5.3793, 5.3389], + device='cuda:2'), covar=tensor([0.1263, 0.0844, 0.0823, 0.0832, 0.0633, 0.0745, 0.1899, 0.1610], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0257, 0.0316, 0.0239, 0.0232, 0.0226, 0.0307, 0.0332], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:05:01,085 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.61 vs. limit=5.0 +2023-03-28 05:05:29,852 INFO [train.py:892] (2/4) Epoch 13, batch 1550, loss[loss=0.1907, simple_loss=0.2461, pruned_loss=0.0676, over 19834.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2784, pruned_loss=0.07662, over 3947734.99 frames. 
], batch size: 128, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:05:31,559 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 4.439e+02 5.194e+02 6.421e+02 1.590e+03, threshold=1.039e+03, percent-clipped=3.0 +2023-03-28 05:05:32,332 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23812.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:06:39,312 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23844.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 05:06:41,295 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1314, 4.6303, 4.8105, 4.5615, 5.0390, 3.2949, 4.1663, 3.0994], + device='cuda:2'), covar=tensor([0.0154, 0.0176, 0.0121, 0.0157, 0.0103, 0.0751, 0.0694, 0.1032], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0123, 0.0102, 0.0117, 0.0103, 0.0122, 0.0132, 0.0118], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:06:47,091 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5085, 2.7163, 2.9566, 3.0288, 2.4502, 2.8122, 2.7341, 2.9576], + device='cuda:2'), covar=tensor([0.0209, 0.0247, 0.0226, 0.0160, 0.0307, 0.0218, 0.0286, 0.0312], + device='cuda:2'), in_proj_covar=tensor([0.0051, 0.0050, 0.0052, 0.0046, 0.0057, 0.0052, 0.0070, 0.0047], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 05:07:17,771 INFO [train.py:892] (2/4) Epoch 13, batch 1600, loss[loss=0.1809, simple_loss=0.2445, pruned_loss=0.05864, over 19777.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2781, pruned_loss=0.07619, over 3949163.96 frames. ], batch size: 116, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:07:40,833 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4029, 5.8276, 6.0456, 5.8452, 5.6767, 5.6017, 5.6622, 5.5802], + device='cuda:2'), covar=tensor([0.1250, 0.0968, 0.0816, 0.1003, 0.0683, 0.0652, 0.1849, 0.1888], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0260, 0.0317, 0.0241, 0.0234, 0.0228, 0.0309, 0.0335], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:07:46,874 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23873.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:08:12,009 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23885.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:08:45,012 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3749, 2.3474, 2.5767, 2.4234, 2.8320, 2.8332, 3.2178, 3.4287], + device='cuda:2'), covar=tensor([0.0605, 0.1556, 0.1463, 0.1858, 0.1516, 0.1301, 0.0520, 0.0457], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0222, 0.0242, 0.0239, 0.0267, 0.0234, 0.0189, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:09:11,678 INFO [train.py:892] (2/4) Epoch 13, batch 1650, loss[loss=0.1774, simple_loss=0.2428, pruned_loss=0.056, over 19815.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2782, pruned_loss=0.07619, over 3949136.28 frames. 
], batch size: 82, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:09:13,801 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.923e+02 4.526e+02 5.652e+02 6.938e+02 1.810e+03, threshold=1.130e+03, percent-clipped=2.0 +2023-03-28 05:09:36,898 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23921.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:10:02,334 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23933.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:11:04,315 INFO [train.py:892] (2/4) Epoch 13, batch 1700, loss[loss=0.1881, simple_loss=0.2577, pruned_loss=0.05922, over 19727.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2784, pruned_loss=0.07603, over 3947450.82 frames. ], batch size: 95, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:12:00,295 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-28 05:12:42,860 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6346, 4.6721, 5.1049, 4.5831, 4.2217, 4.7928, 4.8007, 5.2383], + device='cuda:2'), covar=tensor([0.0901, 0.0306, 0.0347, 0.0371, 0.0678, 0.0457, 0.0385, 0.0283], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0195, 0.0193, 0.0201, 0.0189, 0.0198, 0.0197, 0.0181], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:12:47,035 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7196, 2.7612, 1.5361, 3.2213, 2.9647, 3.1864, 3.2501, 2.5342], + device='cuda:2'), covar=tensor([0.0615, 0.0597, 0.1713, 0.0485, 0.0501, 0.0353, 0.0570, 0.0756], + device='cuda:2'), in_proj_covar=tensor([0.0128, 0.0122, 0.0135, 0.0127, 0.0109, 0.0105, 0.0122, 0.0127], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:12:55,567 INFO [train.py:892] (2/4) Epoch 13, batch 1750, loss[loss=0.2277, simple_loss=0.2864, pruned_loss=0.08457, over 19729.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2799, pruned_loss=0.07736, over 3945469.28 frames. ], batch size: 51, lr: 1.19e-02, grad_scale: 16.0 +2023-03-28 05:12:57,434 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.884e+02 4.345e+02 5.223e+02 6.698e+02 1.111e+03, threshold=1.045e+03, percent-clipped=0.0 +2023-03-28 05:13:46,278 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24037.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:14:14,730 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3724, 4.4647, 4.8936, 4.3265, 4.0996, 4.6084, 4.5161, 5.0142], + device='cuda:2'), covar=tensor([0.0911, 0.0284, 0.0309, 0.0370, 0.0722, 0.0410, 0.0342, 0.0276], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0191, 0.0190, 0.0198, 0.0186, 0.0194, 0.0194, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 05:14:30,471 INFO [train.py:892] (2/4) Epoch 13, batch 1800, loss[loss=0.228, simple_loss=0.301, pruned_loss=0.07751, over 19587.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2813, pruned_loss=0.07786, over 3944808.83 frames. 
], batch size: 53, lr: 1.19e-02, grad_scale: 16.0 +2023-03-28 05:15:37,769 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24098.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:15:45,380 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-28 05:15:50,581 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3115, 5.5998, 5.8575, 5.6844, 5.5736, 5.1784, 5.5177, 5.4122], + device='cuda:2'), covar=tensor([0.1208, 0.0963, 0.0845, 0.0950, 0.0641, 0.0803, 0.1823, 0.1812], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0256, 0.0315, 0.0239, 0.0229, 0.0225, 0.0301, 0.0332], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:16:01,679 INFO [train.py:892] (2/4) Epoch 13, batch 1850, loss[loss=0.2265, simple_loss=0.2975, pruned_loss=0.0778, over 19827.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.281, pruned_loss=0.07705, over 3946733.95 frames. ], batch size: 57, lr: 1.19e-02, grad_scale: 16.0 +2023-03-28 05:16:03,697 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.684e+02 5.642e+02 7.102e+02 1.280e+03, threshold=1.128e+03, percent-clipped=3.0 +2023-03-28 05:17:07,729 INFO [train.py:892] (2/4) Epoch 14, batch 0, loss[loss=0.1977, simple_loss=0.2574, pruned_loss=0.06906, over 19800.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2574, pruned_loss=0.06906, over 19800.00 frames. ], batch size: 150, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:17:07,729 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 05:17:24,520 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4040, 4.5888, 4.8726, 4.4980, 4.1684, 4.6306, 4.4899, 4.9092], + device='cuda:2'), covar=tensor([0.0870, 0.0298, 0.0315, 0.0362, 0.0678, 0.0394, 0.0399, 0.0306], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0196, 0.0195, 0.0203, 0.0190, 0.0199, 0.0200, 0.0183], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:17:42,313 INFO [train.py:926] (2/4) Epoch 14, validation: loss=0.1725, simple_loss=0.2522, pruned_loss=0.04642, over 2883724.00 frames. +2023-03-28 05:17:42,315 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 05:18:52,164 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24144.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 05:19:45,552 INFO [train.py:892] (2/4) Epoch 14, batch 50, loss[loss=0.2188, simple_loss=0.2847, pruned_loss=0.0764, over 19863.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2688, pruned_loss=0.07104, over 889553.96 frames. ], batch size: 48, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:20:44,925 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24192.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:21:30,231 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 4.338e+02 5.278e+02 6.303e+02 1.236e+03, threshold=1.056e+03, percent-clipped=1.0 +2023-03-28 05:21:38,380 INFO [train.py:892] (2/4) Epoch 14, batch 100, loss[loss=0.2081, simple_loss=0.2723, pruned_loss=0.07194, over 19815.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2743, pruned_loss=0.07375, over 1568151.27 frames. 
], batch size: 96, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:21:43,137 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0512, 2.2221, 2.2068, 1.6337, 2.3139, 1.9595, 2.1061, 2.2862], + device='cuda:2'), covar=tensor([0.0331, 0.0288, 0.0310, 0.0790, 0.0252, 0.0365, 0.0336, 0.0207], + device='cuda:2'), in_proj_covar=tensor([0.0060, 0.0062, 0.0065, 0.0093, 0.0061, 0.0060, 0.0057, 0.0050], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:22:33,409 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5954, 2.5107, 2.8285, 2.6240, 2.9664, 2.9349, 3.5337, 3.7664], + device='cuda:2'), covar=tensor([0.0592, 0.1631, 0.1540, 0.1889, 0.1526, 0.1368, 0.0528, 0.0552], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0223, 0.0243, 0.0240, 0.0269, 0.0236, 0.0190, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:23:30,577 INFO [train.py:892] (2/4) Epoch 14, batch 150, loss[loss=0.2288, simple_loss=0.2878, pruned_loss=0.0849, over 19775.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.276, pruned_loss=0.07475, over 2096630.73 frames. ], batch size: 263, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:25:15,397 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.679e+02 4.473e+02 5.483e+02 6.976e+02 1.116e+03, threshold=1.097e+03, percent-clipped=1.0 +2023-03-28 05:25:23,188 INFO [train.py:892] (2/4) Epoch 14, batch 200, loss[loss=0.2274, simple_loss=0.304, pruned_loss=0.07536, over 19889.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2793, pruned_loss=0.07593, over 2505995.67 frames. ], batch size: 71, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:26:41,635 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0 +2023-03-28 05:26:54,400 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.6026, 6.0038, 5.9936, 5.8809, 5.6898, 5.9752, 5.2325, 5.3631], + device='cuda:2'), covar=tensor([0.0323, 0.0330, 0.0462, 0.0356, 0.0489, 0.0484, 0.0594, 0.0833], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0214, 0.0249, 0.0211, 0.0205, 0.0196, 0.0222, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:27:13,857 INFO [train.py:892] (2/4) Epoch 14, batch 250, loss[loss=0.2088, simple_loss=0.2705, pruned_loss=0.07349, over 19875.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2768, pruned_loss=0.07476, over 2824798.41 frames. ], batch size: 159, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:28:09,438 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. 
limit=2.0 +2023-03-28 05:28:15,411 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24393.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:28:28,120 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2578, 2.3206, 2.4815, 2.3559, 2.7618, 2.7399, 3.1909, 3.3364], + device='cuda:2'), covar=tensor([0.0594, 0.1477, 0.1546, 0.1774, 0.1364, 0.1248, 0.0550, 0.0573], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0221, 0.0241, 0.0236, 0.0267, 0.0232, 0.0188, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:28:38,854 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2037, 3.1484, 4.5873, 3.4617, 3.8338, 3.8549, 2.2646, 2.5889], + device='cuda:2'), covar=tensor([0.0714, 0.2921, 0.0397, 0.0714, 0.1405, 0.0924, 0.2123, 0.2193], + device='cuda:2'), in_proj_covar=tensor([0.0313, 0.0346, 0.0284, 0.0229, 0.0339, 0.0285, 0.0304, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:28:44,977 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4311, 3.3336, 4.9265, 3.6651, 4.0771, 4.0369, 2.3949, 2.6018], + device='cuda:2'), covar=tensor([0.0644, 0.2789, 0.0349, 0.0686, 0.1314, 0.0794, 0.1940, 0.2248], + device='cuda:2'), in_proj_covar=tensor([0.0313, 0.0346, 0.0285, 0.0229, 0.0339, 0.0285, 0.0304, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:28:53,720 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.621e+02 4.746e+02 5.640e+02 6.808e+02 1.324e+03, threshold=1.128e+03, percent-clipped=3.0 +2023-03-28 05:29:04,994 INFO [train.py:892] (2/4) Epoch 14, batch 300, loss[loss=0.204, simple_loss=0.2649, pruned_loss=0.07156, over 19739.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2764, pruned_loss=0.07444, over 3074744.36 frames. ], batch size: 140, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:29:55,409 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6278, 3.0190, 3.4998, 3.2681, 3.8466, 3.7996, 4.5705, 4.9888], + device='cuda:2'), covar=tensor([0.0420, 0.1471, 0.1218, 0.1777, 0.1403, 0.1207, 0.0381, 0.0371], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0222, 0.0243, 0.0238, 0.0268, 0.0233, 0.0190, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:30:41,168 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-28 05:30:56,504 INFO [train.py:892] (2/4) Epoch 14, batch 350, loss[loss=0.2004, simple_loss=0.2766, pruned_loss=0.06211, over 19678.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2766, pruned_loss=0.07453, over 3266451.06 frames. 
], batch size: 49, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:31:06,302 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7883, 2.5568, 2.8906, 2.7248, 3.0894, 3.0031, 3.6751, 3.9413], + device='cuda:2'), covar=tensor([0.0537, 0.1643, 0.1555, 0.1882, 0.1792, 0.1462, 0.0439, 0.0439], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0223, 0.0244, 0.0239, 0.0270, 0.0234, 0.0190, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:32:18,073 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2750, 1.6819, 2.1267, 2.5962, 2.9148, 3.0599, 3.0084, 3.1618], + device='cuda:2'), covar=tensor([0.0918, 0.1879, 0.1293, 0.0585, 0.0430, 0.0284, 0.0315, 0.0273], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0170, 0.0165, 0.0133, 0.0117, 0.0110, 0.0103, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:32:38,985 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 4.463e+02 5.276e+02 6.433e+02 1.148e+03, threshold=1.055e+03, percent-clipped=1.0 +2023-03-28 05:32:42,801 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.67 vs. limit=5.0 +2023-03-28 05:32:48,484 INFO [train.py:892] (2/4) Epoch 14, batch 400, loss[loss=0.2203, simple_loss=0.2773, pruned_loss=0.0817, over 19803.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2767, pruned_loss=0.07477, over 3416468.76 frames. ], batch size: 167, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:34:00,565 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24547.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:34:41,063 INFO [train.py:892] (2/4) Epoch 14, batch 450, loss[loss=0.1912, simple_loss=0.2605, pruned_loss=0.06093, over 19750.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2778, pruned_loss=0.07502, over 3534481.63 frames. ], batch size: 110, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:34:48,619 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24569.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:36:16,276 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24608.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:36:25,000 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.188e+02 4.392e+02 5.020e+02 6.221e+02 1.818e+03, threshold=1.004e+03, percent-clipped=1.0 +2023-03-28 05:36:33,455 INFO [train.py:892] (2/4) Epoch 14, batch 500, loss[loss=0.212, simple_loss=0.2665, pruned_loss=0.07874, over 19865.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2753, pruned_loss=0.07399, over 3627636.29 frames. ], batch size: 122, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:37:04,334 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24630.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:37:04,635 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-03-28 05:37:10,376 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8921, 3.0809, 3.3313, 2.6271, 3.2497, 2.7005, 2.9566, 3.3436], + device='cuda:2'), covar=tensor([0.0405, 0.0328, 0.0389, 0.0621, 0.0315, 0.0347, 0.0436, 0.0211], + device='cuda:2'), in_proj_covar=tensor([0.0060, 0.0063, 0.0065, 0.0094, 0.0062, 0.0060, 0.0058, 0.0050], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:38:20,300 INFO [train.py:892] (2/4) Epoch 14, batch 550, loss[loss=0.2012, simple_loss=0.2635, pruned_loss=0.06944, over 19797.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2775, pruned_loss=0.07571, over 3699208.70 frames. ], batch size: 149, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:39:20,311 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24693.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:39:57,874 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.918e+02 4.778e+02 5.297e+02 6.778e+02 1.181e+03, threshold=1.059e+03, percent-clipped=3.0 +2023-03-28 05:40:07,594 INFO [train.py:892] (2/4) Epoch 14, batch 600, loss[loss=0.205, simple_loss=0.2733, pruned_loss=0.06838, over 19676.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.276, pruned_loss=0.07489, over 3755210.24 frames. ], batch size: 49, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:40:35,769 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4301, 3.0888, 3.2090, 3.5186, 3.2249, 3.3498, 3.6157, 3.7054], + device='cuda:2'), covar=tensor([0.0715, 0.0538, 0.0614, 0.0350, 0.0765, 0.0728, 0.0451, 0.0371], + device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0156, 0.0180, 0.0150, 0.0153, 0.0136, 0.0139, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 05:41:01,101 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24741.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:41:18,034 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-28 05:41:22,814 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7606, 2.6756, 1.4612, 3.2801, 2.9763, 3.2343, 3.3224, 2.5884], + device='cuda:2'), covar=tensor([0.0611, 0.0641, 0.1616, 0.0533, 0.0489, 0.0413, 0.0463, 0.0732], + device='cuda:2'), in_proj_covar=tensor([0.0126, 0.0123, 0.0134, 0.0129, 0.0110, 0.0107, 0.0123, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:41:54,313 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24765.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:41:55,330 INFO [train.py:892] (2/4) Epoch 14, batch 650, loss[loss=0.2084, simple_loss=0.273, pruned_loss=0.07184, over 19834.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2766, pruned_loss=0.07508, over 3796947.10 frames. 
], batch size: 57, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:42:39,828 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3299, 2.3451, 2.6432, 2.4250, 2.8209, 2.7082, 3.2266, 3.3707], + device='cuda:2'), covar=tensor([0.0615, 0.1553, 0.1445, 0.1792, 0.1432, 0.1428, 0.0521, 0.0519], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0220, 0.0240, 0.0234, 0.0266, 0.0232, 0.0187, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:42:49,572 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-28 05:43:34,883 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 4.693e+02 5.521e+02 6.311e+02 1.002e+03, threshold=1.104e+03, percent-clipped=0.0 +2023-03-28 05:43:42,884 INFO [train.py:892] (2/4) Epoch 14, batch 700, loss[loss=0.1848, simple_loss=0.2596, pruned_loss=0.05501, over 19612.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2765, pruned_loss=0.07466, over 3832695.25 frames. ], batch size: 48, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:44:08,925 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24826.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:44:55,394 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24848.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:45:12,694 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-28 05:45:36,127 INFO [train.py:892] (2/4) Epoch 14, batch 750, loss[loss=0.2374, simple_loss=0.3129, pruned_loss=0.08096, over 19662.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2766, pruned_loss=0.07464, over 3857557.44 frames. ], batch size: 55, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:46:11,708 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2838, 3.1362, 3.8444, 2.7421, 3.9588, 3.1340, 2.9892, 3.7433], + device='cuda:2'), covar=tensor([0.0932, 0.0410, 0.0328, 0.0674, 0.0197, 0.0312, 0.0419, 0.0320], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0064, 0.0065, 0.0095, 0.0062, 0.0060, 0.0059, 0.0052], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:46:58,236 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24903.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:47:10,891 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24909.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:47:18,006 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.878e+02 4.189e+02 5.333e+02 6.516e+02 1.195e+03, threshold=1.067e+03, percent-clipped=1.0 +2023-03-28 05:47:26,425 INFO [train.py:892] (2/4) Epoch 14, batch 800, loss[loss=0.2959, simple_loss=0.3786, pruned_loss=0.1066, over 18776.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2784, pruned_loss=0.07566, over 3875808.19 frames. 
], batch size: 564, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:47:37,390 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1063, 3.4738, 3.8143, 3.4009, 3.3698, 3.7303, 3.4305, 3.9012], + device='cuda:2'), covar=tensor([0.1699, 0.0499, 0.0554, 0.0537, 0.1562, 0.0672, 0.0715, 0.0531], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0194, 0.0190, 0.0201, 0.0188, 0.0197, 0.0198, 0.0181], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:47:45,220 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24925.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:49:09,927 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1068, 4.1549, 4.5622, 4.1453, 3.9001, 4.3432, 4.1637, 4.5887], + device='cuda:2'), covar=tensor([0.0937, 0.0343, 0.0293, 0.0348, 0.0870, 0.0410, 0.0410, 0.0289], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0196, 0.0191, 0.0202, 0.0190, 0.0200, 0.0200, 0.0182], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 05:49:15,224 INFO [train.py:892] (2/4) Epoch 14, batch 850, loss[loss=0.2261, simple_loss=0.2829, pruned_loss=0.08472, over 19768.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.278, pruned_loss=0.07542, over 3892946.03 frames. ], batch size: 247, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:49:28,160 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8587, 2.9753, 3.2208, 3.8339, 2.5896, 3.1136, 2.5802, 2.3103], + device='cuda:2'), covar=tensor([0.0413, 0.2072, 0.0893, 0.0290, 0.1947, 0.0672, 0.1196, 0.1720], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0331, 0.0229, 0.0161, 0.0239, 0.0181, 0.0203, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:50:22,824 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24995.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:51:00,270 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.835e+02 4.699e+02 5.581e+02 6.608e+02 1.039e+03, threshold=1.116e+03, percent-clipped=0.0 +2023-03-28 05:51:08,789 INFO [train.py:892] (2/4) Epoch 14, batch 900, loss[loss=0.1898, simple_loss=0.2613, pruned_loss=0.05909, over 19467.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2771, pruned_loss=0.0745, over 3905266.06 frames. ], batch size: 43, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:51:20,186 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3320, 1.6601, 2.0274, 2.6513, 3.0646, 3.1783, 3.0959, 3.2108], + device='cuda:2'), covar=tensor([0.0995, 0.2259, 0.1604, 0.0637, 0.0455, 0.0265, 0.0336, 0.0338], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0172, 0.0168, 0.0132, 0.0116, 0.0110, 0.0102, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 05:52:39,927 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25056.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 05:53:00,398 INFO [train.py:892] (2/4) Epoch 14, batch 950, loss[loss=0.222, simple_loss=0.2782, pruned_loss=0.08291, over 19866.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2775, pruned_loss=0.07425, over 3914835.69 frames. 
], batch size: 154, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:54:42,046 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.968e+02 4.212e+02 5.107e+02 6.466e+02 1.651e+03, threshold=1.021e+03, percent-clipped=2.0 +2023-03-28 05:54:50,413 INFO [train.py:892] (2/4) Epoch 14, batch 1000, loss[loss=0.2074, simple_loss=0.2647, pruned_loss=0.07499, over 19742.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2767, pruned_loss=0.07394, over 3923529.03 frames. ], batch size: 205, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:55:04,848 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25121.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:56:40,704 INFO [train.py:892] (2/4) Epoch 14, batch 1050, loss[loss=0.2099, simple_loss=0.2693, pruned_loss=0.07524, over 19836.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2769, pruned_loss=0.07377, over 3929398.60 frames. ], batch size: 204, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:58:05,293 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25203.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:58:07,463 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25204.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:58:23,778 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 4.493e+02 5.579e+02 6.984e+02 1.281e+03, threshold=1.116e+03, percent-clipped=2.0 +2023-03-28 05:58:32,625 INFO [train.py:892] (2/4) Epoch 14, batch 1100, loss[loss=0.2109, simple_loss=0.269, pruned_loss=0.07635, over 19651.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2765, pruned_loss=0.07385, over 3934121.73 frames. ], batch size: 67, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 05:58:53,629 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25225.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:58:59,932 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-28 05:59:55,686 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25251.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:00:27,158 INFO [train.py:892] (2/4) Epoch 14, batch 1150, loss[loss=0.1784, simple_loss=0.2508, pruned_loss=0.053, over 19683.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2771, pruned_loss=0.07438, over 3937009.29 frames. ], batch size: 64, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 06:00:46,037 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25273.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:01:27,596 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0 +2023-03-28 06:02:00,884 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-03-28 06:02:10,586 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 4.700e+02 5.345e+02 6.282e+02 1.184e+03, threshold=1.069e+03, percent-clipped=1.0 +2023-03-28 06:02:18,767 INFO [train.py:892] (2/4) Epoch 14, batch 1200, loss[loss=0.1834, simple_loss=0.2531, pruned_loss=0.05682, over 19733.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2778, pruned_loss=0.07446, over 3938405.48 frames. ], batch size: 118, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 06:02:24,811 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. 
limit=2.0 +2023-03-28 06:02:34,525 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-28 06:03:31,708 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6218, 3.8230, 4.1647, 4.8448, 3.3169, 3.1894, 3.3656, 3.0480], + device='cuda:2'), covar=tensor([0.0412, 0.2206, 0.0697, 0.0245, 0.1871, 0.1024, 0.0903, 0.1490], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0337, 0.0232, 0.0164, 0.0242, 0.0184, 0.0204, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 06:03:35,804 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25351.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 06:04:08,269 INFO [train.py:892] (2/4) Epoch 14, batch 1250, loss[loss=0.2924, simple_loss=0.3443, pruned_loss=0.1202, over 19637.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.277, pruned_loss=0.07431, over 3941504.85 frames. ], batch size: 351, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:05:51,971 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 4.665e+02 5.361e+02 6.465e+02 1.187e+03, threshold=1.072e+03, percent-clipped=1.0 +2023-03-28 06:06:00,924 INFO [train.py:892] (2/4) Epoch 14, batch 1300, loss[loss=0.2078, simple_loss=0.2747, pruned_loss=0.07049, over 19758.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.276, pruned_loss=0.07365, over 3943303.56 frames. ], batch size: 100, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:06:14,945 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25421.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:06:31,005 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25428.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:07:56,730 INFO [train.py:892] (2/4) Epoch 14, batch 1350, loss[loss=0.2141, simple_loss=0.278, pruned_loss=0.07508, over 19698.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.275, pruned_loss=0.07286, over 3945477.97 frames. ], batch size: 101, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:08:04,481 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25469.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:08:50,219 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25489.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 06:09:23,731 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25504.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:09:28,435 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.61 vs. limit=5.0 +2023-03-28 06:09:38,481 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.588e+02 5.403e+02 6.911e+02 1.423e+03, threshold=1.081e+03, percent-clipped=2.0 +2023-03-28 06:09:46,920 INFO [train.py:892] (2/4) Epoch 14, batch 1400, loss[loss=0.1814, simple_loss=0.2547, pruned_loss=0.05405, over 19780.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2746, pruned_loss=0.07318, over 3946091.33 frames. 
], batch size: 40, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:10:40,224 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4668, 3.6436, 3.7752, 4.7168, 2.8685, 3.2470, 2.9734, 2.5641], + device='cuda:2'), covar=tensor([0.0440, 0.2311, 0.0842, 0.0223, 0.2102, 0.0843, 0.1093, 0.1756], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0328, 0.0227, 0.0162, 0.0238, 0.0179, 0.0199, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 06:11:00,560 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-28 06:11:01,969 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1624, 3.1218, 3.0142, 2.7886, 3.0742, 2.5517, 2.3452, 1.4991], + device='cuda:2'), covar=tensor([0.0271, 0.0265, 0.0186, 0.0240, 0.0187, 0.0757, 0.0797, 0.1826], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0125, 0.0103, 0.0119, 0.0106, 0.0124, 0.0134, 0.0119], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 06:11:11,927 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25552.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:11:41,935 INFO [train.py:892] (2/4) Epoch 14, batch 1450, loss[loss=0.271, simple_loss=0.306, pruned_loss=0.118, over 19798.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2752, pruned_loss=0.07337, over 3945591.69 frames. ], batch size: 224, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:13:26,188 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 4.450e+02 5.479e+02 6.667e+02 1.081e+03, threshold=1.096e+03, percent-clipped=1.0 +2023-03-28 06:13:29,849 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0 +2023-03-28 06:13:32,616 INFO [train.py:892] (2/4) Epoch 14, batch 1500, loss[loss=0.209, simple_loss=0.2899, pruned_loss=0.06406, over 19560.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2742, pruned_loss=0.07294, over 3947290.38 frames. ], batch size: 53, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 06:13:35,846 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8753, 4.4862, 4.6044, 4.3774, 4.7360, 3.2204, 3.8723, 2.4360], + device='cuda:2'), covar=tensor([0.0137, 0.0182, 0.0124, 0.0155, 0.0149, 0.0778, 0.0789, 0.1379], + device='cuda:2'), in_proj_covar=tensor([0.0089, 0.0124, 0.0101, 0.0118, 0.0105, 0.0122, 0.0133, 0.0118], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 06:13:50,658 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.17 vs. 
limit=5.0 +2023-03-28 06:14:51,783 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25651.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 06:15:19,139 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3511, 2.6306, 2.2685, 1.6578, 2.3974, 2.5738, 2.6091, 2.5629], + device='cuda:2'), covar=tensor([0.0252, 0.0223, 0.0246, 0.0522, 0.0336, 0.0202, 0.0154, 0.0177], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0066, 0.0074, 0.0081, 0.0085, 0.0058, 0.0055, 0.0057], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 06:15:22,133 INFO [train.py:892] (2/4) Epoch 14, batch 1550, loss[loss=0.2083, simple_loss=0.2671, pruned_loss=0.07478, over 19836.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2734, pruned_loss=0.07263, over 3949186.45 frames. ], batch size: 146, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:16:39,235 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25699.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:17:04,763 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4372, 1.8810, 2.3519, 1.7457, 2.3422, 2.7480, 2.3125, 2.6036], + device='cuda:2'), covar=tensor([0.0351, 0.0854, 0.0111, 0.0330, 0.0119, 0.0203, 0.0159, 0.0166], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0092, 0.0074, 0.0144, 0.0068, 0.0083, 0.0076, 0.0068], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0004, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 06:17:12,373 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 4.271e+02 5.006e+02 6.472e+02 1.345e+03, threshold=1.001e+03, percent-clipped=3.0 +2023-03-28 06:17:19,586 INFO [train.py:892] (2/4) Epoch 14, batch 1600, loss[loss=0.1852, simple_loss=0.2469, pruned_loss=0.06172, over 19829.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2737, pruned_loss=0.07252, over 3946528.82 frames. ], batch size: 184, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:19:09,494 INFO [train.py:892] (2/4) Epoch 14, batch 1650, loss[loss=0.2285, simple_loss=0.2979, pruned_loss=0.07952, over 19785.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2746, pruned_loss=0.07318, over 3947919.76 frames. ], batch size: 48, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:19:49,957 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25784.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:20:52,548 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.640e+02 5.535e+02 6.802e+02 1.148e+03, threshold=1.107e+03, percent-clipped=2.0 +2023-03-28 06:21:01,437 INFO [train.py:892] (2/4) Epoch 14, batch 1700, loss[loss=0.1776, simple_loss=0.2473, pruned_loss=0.05391, over 19746.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2741, pruned_loss=0.07252, over 3948633.18 frames. ], batch size: 110, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:21:25,052 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-28 06:22:29,133 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25858.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 06:22:43,568 INFO [train.py:892] (2/4) Epoch 14, batch 1750, loss[loss=0.2059, simple_loss=0.2682, pruned_loss=0.07176, over 19853.00 frames. ], tot_loss[loss=0.2097, simple_loss=0.2742, pruned_loss=0.07263, over 3948137.06 frames. 
], batch size: 78, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:24:15,392 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.161e+02 4.534e+02 5.430e+02 6.216e+02 1.247e+03, threshold=1.086e+03, percent-clipped=3.0 +2023-03-28 06:24:21,521 INFO [train.py:892] (2/4) Epoch 14, batch 1800, loss[loss=0.17, simple_loss=0.2405, pruned_loss=0.04977, over 19757.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.275, pruned_loss=0.07311, over 3946467.20 frames. ], batch size: 97, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:24:28,181 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25919.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:24:52,938 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3422, 3.1694, 4.6909, 3.8928, 4.6174, 4.5724, 4.5881, 4.4667], + device='cuda:2'), covar=tensor([0.0232, 0.0747, 0.0085, 0.1002, 0.0090, 0.0183, 0.0113, 0.0110], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0092, 0.0074, 0.0145, 0.0068, 0.0083, 0.0076, 0.0068], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 06:25:52,592 INFO [train.py:892] (2/4) Epoch 14, batch 1850, loss[loss=0.2246, simple_loss=0.2971, pruned_loss=0.07606, over 19815.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2765, pruned_loss=0.07275, over 3945620.66 frames. ], batch size: 57, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:26:59,296 INFO [train.py:892] (2/4) Epoch 15, batch 0, loss[loss=0.1792, simple_loss=0.2436, pruned_loss=0.05738, over 19782.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2436, pruned_loss=0.05738, over 19782.00 frames. ], batch size: 120, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:26:59,297 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 06:27:35,344 INFO [train.py:926] (2/4) Epoch 15, validation: loss=0.1719, simple_loss=0.2516, pruned_loss=0.0461, over 2883724.00 frames. +2023-03-28 06:27:35,345 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 06:28:31,015 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-03-28 06:29:17,377 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.761e+02 4.443e+02 5.358e+02 6.382e+02 1.006e+03, threshold=1.072e+03, percent-clipped=0.0 +2023-03-28 06:29:35,980 INFO [train.py:892] (2/4) Epoch 15, batch 50, loss[loss=0.1819, simple_loss=0.2528, pruned_loss=0.05554, over 19731.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2615, pruned_loss=0.06679, over 892587.18 frames. ], batch size: 80, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:29:53,177 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8675, 2.7911, 2.9815, 2.7192, 3.1592, 3.0584, 3.7342, 4.0430], + device='cuda:2'), covar=tensor([0.0606, 0.1506, 0.1482, 0.2044, 0.1774, 0.1391, 0.0500, 0.0451], + device='cuda:2'), in_proj_covar=tensor([0.0224, 0.0225, 0.0249, 0.0241, 0.0274, 0.0238, 0.0194, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 06:31:24,926 INFO [train.py:892] (2/4) Epoch 15, batch 100, loss[loss=0.2115, simple_loss=0.2769, pruned_loss=0.07306, over 19805.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2705, pruned_loss=0.07033, over 1570183.32 frames. 
], batch size: 74, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:31:47,827 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0161, 3.6259, 3.7109, 4.0189, 3.7499, 3.9492, 4.1065, 4.2203], + device='cuda:2'), covar=tensor([0.0617, 0.0433, 0.0593, 0.0323, 0.0643, 0.0528, 0.0430, 0.0353], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0156, 0.0181, 0.0151, 0.0152, 0.0136, 0.0140, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 06:31:55,629 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26084.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 06:32:35,036 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26102.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:32:47,000 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0346, 3.9831, 4.3513, 3.9055, 3.8050, 4.2048, 4.0164, 4.4200], + device='cuda:2'), covar=tensor([0.0978, 0.0355, 0.0342, 0.0398, 0.0940, 0.0484, 0.0433, 0.0319], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0202, 0.0199, 0.0208, 0.0194, 0.0206, 0.0205, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 06:32:57,715 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.557e+02 4.232e+02 5.319e+02 6.491e+02 1.379e+03, threshold=1.064e+03, percent-clipped=5.0 +2023-03-28 06:33:14,713 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7809, 3.1576, 3.4801, 3.2530, 3.9398, 3.9191, 4.5149, 4.9785], + device='cuda:2'), covar=tensor([0.0396, 0.1487, 0.1341, 0.1974, 0.1389, 0.1202, 0.0466, 0.0429], + device='cuda:2'), in_proj_covar=tensor([0.0220, 0.0223, 0.0246, 0.0239, 0.0271, 0.0236, 0.0193, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 06:33:15,671 INFO [train.py:892] (2/4) Epoch 15, batch 150, loss[loss=0.2045, simple_loss=0.2803, pruned_loss=0.06431, over 19841.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.271, pruned_loss=0.06981, over 2096494.87 frames. ], batch size: 49, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:33:41,877 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26132.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 06:34:49,237 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.58 vs. limit=5.0 +2023-03-28 06:34:52,906 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26163.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:35:03,933 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26168.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:35:09,212 INFO [train.py:892] (2/4) Epoch 15, batch 200, loss[loss=0.2037, simple_loss=0.2881, pruned_loss=0.05961, over 19851.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2732, pruned_loss=0.07094, over 2508401.04 frames. 
], batch size: 58, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:36:35,456 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3284, 2.3541, 3.6590, 3.1536, 3.4972, 3.7178, 3.5312, 3.5308], + device='cuda:2'), covar=tensor([0.0290, 0.0794, 0.0100, 0.0620, 0.0112, 0.0195, 0.0152, 0.0144], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0091, 0.0074, 0.0143, 0.0067, 0.0083, 0.0076, 0.0068], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 06:36:44,889 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 4.298e+02 5.201e+02 6.207e+02 1.187e+03, threshold=1.040e+03, percent-clipped=4.0 +2023-03-28 06:36:48,278 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26214.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 06:37:02,575 INFO [train.py:892] (2/4) Epoch 15, batch 250, loss[loss=0.2225, simple_loss=0.2919, pruned_loss=0.07661, over 19799.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2709, pruned_loss=0.0698, over 2828571.44 frames. ], batch size: 47, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:37:24,957 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26229.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:38:12,326 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26250.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:38:44,004 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.2622, 1.2947, 1.4917, 1.3797, 1.2334, 1.4214, 1.2307, 1.3382], + device='cuda:2'), covar=tensor([0.0262, 0.0209, 0.0196, 0.0208, 0.0324, 0.0226, 0.0407, 0.0249], + device='cuda:2'), in_proj_covar=tensor([0.0055, 0.0053, 0.0057, 0.0050, 0.0062, 0.0058, 0.0076, 0.0053], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 06:38:58,537 INFO [train.py:892] (2/4) Epoch 15, batch 300, loss[loss=0.197, simple_loss=0.2702, pruned_loss=0.06191, over 19857.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2714, pruned_loss=0.07032, over 3076528.26 frames. ], batch size: 64, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:40:32,931 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26311.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:40:36,420 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.754e+02 4.352e+02 4.963e+02 6.294e+02 9.634e+02, threshold=9.926e+02, percent-clipped=0.0 +2023-03-28 06:40:55,207 INFO [train.py:892] (2/4) Epoch 15, batch 350, loss[loss=0.2317, simple_loss=0.2921, pruned_loss=0.08564, over 19753.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2722, pruned_loss=0.07055, over 3271081.53 frames. ], batch size: 276, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:41:38,518 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.2721, 1.2991, 1.4701, 1.3549, 1.1741, 1.4412, 1.1614, 1.3332], + device='cuda:2'), covar=tensor([0.0264, 0.0235, 0.0213, 0.0246, 0.0421, 0.0220, 0.0486, 0.0247], + device='cuda:2'), in_proj_covar=tensor([0.0055, 0.0053, 0.0057, 0.0050, 0.0062, 0.0057, 0.0075, 0.0052], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 06:42:49,754 INFO [train.py:892] (2/4) Epoch 15, batch 400, loss[loss=0.1811, simple_loss=0.2558, pruned_loss=0.05318, over 19886.00 frames. 
], tot_loss[loss=0.2061, simple_loss=0.2716, pruned_loss=0.07036, over 3422121.17 frames. ], batch size: 97, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:43:22,875 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.60 vs. limit=5.0 +2023-03-28 06:44:23,280 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 4.352e+02 5.044e+02 6.255e+02 1.053e+03, threshold=1.009e+03, percent-clipped=1.0 +2023-03-28 06:44:28,691 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26414.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:44:43,035 INFO [train.py:892] (2/4) Epoch 15, batch 450, loss[loss=0.2176, simple_loss=0.2738, pruned_loss=0.08071, over 19778.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2728, pruned_loss=0.07088, over 3539274.98 frames. ], batch size: 217, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:46:07,652 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26458.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:46:15,126 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6459, 2.8524, 3.0889, 2.6411, 2.5132, 2.8433, 2.5010, 3.0378], + device='cuda:2'), covar=tensor([0.0211, 0.0257, 0.0152, 0.0236, 0.0341, 0.0206, 0.0327, 0.0231], + device='cuda:2'), in_proj_covar=tensor([0.0055, 0.0053, 0.0056, 0.0050, 0.0061, 0.0057, 0.0074, 0.0052], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 06:46:36,550 INFO [train.py:892] (2/4) Epoch 15, batch 500, loss[loss=0.1918, simple_loss=0.249, pruned_loss=0.06731, over 19798.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2732, pruned_loss=0.07128, over 3629224.10 frames. ], batch size: 107, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:46:47,838 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26475.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:47:44,005 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-03-28 06:48:13,613 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.684e+02 4.387e+02 5.126e+02 6.696e+02 1.197e+03, threshold=1.025e+03, percent-clipped=1.0 +2023-03-28 06:48:17,058 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26514.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 06:48:30,961 INFO [train.py:892] (2/4) Epoch 15, batch 550, loss[loss=0.1831, simple_loss=0.2614, pruned_loss=0.05246, over 19776.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2744, pruned_loss=0.07199, over 3699902.50 frames. 
], batch size: 53, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:48:40,950 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26524.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:48:47,392 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7215, 2.7720, 4.1100, 2.9352, 3.5248, 3.2968, 2.1830, 2.2191], + device='cuda:2'), covar=tensor([0.0835, 0.2906, 0.0479, 0.0835, 0.1382, 0.1202, 0.2080, 0.2512], + device='cuda:2'), in_proj_covar=tensor([0.0319, 0.0348, 0.0292, 0.0236, 0.0349, 0.0296, 0.0311, 0.0284], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 06:50:07,888 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26562.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:50:26,047 INFO [train.py:892] (2/4) Epoch 15, batch 600, loss[loss=0.2723, simple_loss=0.3283, pruned_loss=0.1082, over 19624.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2748, pruned_loss=0.07212, over 3755521.61 frames. ], batch size: 351, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:50:31,533 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-28 06:51:45,269 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26606.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:52:00,971 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.578e+02 4.242e+02 5.300e+02 6.521e+02 1.210e+03, threshold=1.060e+03, percent-clipped=1.0 +2023-03-28 06:52:21,133 INFO [train.py:892] (2/4) Epoch 15, batch 650, loss[loss=0.211, simple_loss=0.2669, pruned_loss=0.07757, over 19847.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2753, pruned_loss=0.07277, over 3797813.43 frames. ], batch size: 109, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:52:59,156 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2543, 3.4201, 1.7106, 4.1960, 3.7024, 4.0303, 4.2186, 3.0070], + device='cuda:2'), covar=tensor([0.0640, 0.0522, 0.1788, 0.0458, 0.0487, 0.0439, 0.0489, 0.0790], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0125, 0.0133, 0.0129, 0.0112, 0.0107, 0.0123, 0.0130], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 06:53:18,482 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9263, 2.7032, 3.0858, 2.8482, 3.2590, 3.1236, 3.8293, 4.0658], + device='cuda:2'), covar=tensor([0.0545, 0.1551, 0.1472, 0.1882, 0.1655, 0.1346, 0.0489, 0.0433], + device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0222, 0.0244, 0.0239, 0.0271, 0.0235, 0.0195, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 06:54:05,232 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-28 06:54:12,411 INFO [train.py:892] (2/4) Epoch 15, batch 700, loss[loss=0.2043, simple_loss=0.2599, pruned_loss=0.07439, over 19788.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2737, pruned_loss=0.07195, over 3832878.98 frames. 
], batch size: 151, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:55:42,210 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3210, 4.1503, 4.6727, 4.4826, 4.5293, 4.0760, 4.3804, 4.2272], + device='cuda:2'), covar=tensor([0.1270, 0.1359, 0.0874, 0.1028, 0.0854, 0.0949, 0.1887, 0.1924], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0261, 0.0312, 0.0243, 0.0232, 0.0228, 0.0303, 0.0332], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 06:55:53,021 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 4.486e+02 5.723e+02 6.688e+02 1.072e+03, threshold=1.145e+03, percent-clipped=1.0 +2023-03-28 06:56:01,625 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-03-28 06:56:12,164 INFO [train.py:892] (2/4) Epoch 15, batch 750, loss[loss=0.1714, simple_loss=0.2326, pruned_loss=0.05503, over 19695.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2726, pruned_loss=0.07135, over 3858702.11 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:56:15,513 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9210, 2.3581, 3.1750, 3.3472, 3.8381, 4.4473, 4.3873, 4.3432], + device='cuda:2'), covar=tensor([0.0837, 0.1814, 0.1053, 0.0519, 0.0286, 0.0159, 0.0179, 0.0315], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0170, 0.0167, 0.0134, 0.0116, 0.0109, 0.0107, 0.0101], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 06:57:34,899 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26758.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:58:01,889 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26770.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:58:03,008 INFO [train.py:892] (2/4) Epoch 15, batch 800, loss[loss=0.2756, simple_loss=0.3621, pruned_loss=0.09457, over 18784.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2722, pruned_loss=0.07111, over 3879576.37 frames. ], batch size: 564, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:59:23,182 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26806.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:59:36,948 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 4.760e+02 5.593e+02 6.609e+02 1.473e+03, threshold=1.119e+03, percent-clipped=2.0 +2023-03-28 06:59:56,339 INFO [train.py:892] (2/4) Epoch 15, batch 850, loss[loss=0.1775, simple_loss=0.2388, pruned_loss=0.05812, over 19795.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2718, pruned_loss=0.07038, over 3895781.26 frames. ], batch size: 185, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:00:03,405 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26824.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:01:38,703 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. 
limit=2.0 +2023-03-28 07:01:46,588 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5398, 3.3934, 3.7058, 2.9631, 4.0875, 2.9295, 3.2654, 3.9959], + device='cuda:2'), covar=tensor([0.0537, 0.0338, 0.0445, 0.0630, 0.0292, 0.0437, 0.0343, 0.0218], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0065, 0.0067, 0.0095, 0.0062, 0.0061, 0.0059, 0.0052], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:01:47,497 INFO [train.py:892] (2/4) Epoch 15, batch 900, loss[loss=0.1884, simple_loss=0.2582, pruned_loss=0.05927, over 19527.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.271, pruned_loss=0.06987, over 3907513.21 frames. ], batch size: 41, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:01:50,749 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26872.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:02:29,607 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-28 07:03:03,264 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26904.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 07:03:07,752 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:03:23,391 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.859e+02 4.358e+02 4.979e+02 5.962e+02 9.737e+02, threshold=9.958e+02, percent-clipped=0.0 +2023-03-28 07:03:28,786 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-03-28 07:03:41,702 INFO [train.py:892] (2/4) Epoch 15, batch 950, loss[loss=0.1916, simple_loss=0.2523, pruned_loss=0.06544, over 19781.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2727, pruned_loss=0.07056, over 3916042.55 frames. ], batch size: 168, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:04:47,465 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26950.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:04:57,003 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26954.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:05:18,908 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26965.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 07:05:32,177 INFO [train.py:892] (2/4) Epoch 15, batch 1000, loss[loss=0.236, simple_loss=0.2974, pruned_loss=0.08728, over 19785.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2728, pruned_loss=0.07087, over 3923925.89 frames. ], batch size: 53, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:05:51,934 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-28 07:05:55,998 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. 
limit=2.0 +2023-03-28 07:06:28,322 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8277, 3.5156, 3.5933, 3.8466, 3.6024, 3.7070, 3.9788, 4.1151], + device='cuda:2'), covar=tensor([0.0622, 0.0384, 0.0493, 0.0303, 0.0662, 0.0581, 0.0377, 0.0274], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0156, 0.0181, 0.0150, 0.0153, 0.0134, 0.0138, 0.0172], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 07:07:01,942 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27011.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:07:05,034 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.723e+02 5.899e+02 7.505e+02 1.756e+03, threshold=1.180e+03, percent-clipped=5.0 +2023-03-28 07:07:24,367 INFO [train.py:892] (2/4) Epoch 15, batch 1050, loss[loss=0.1941, simple_loss=0.2532, pruned_loss=0.0675, over 19888.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2718, pruned_loss=0.07014, over 3929932.51 frames. ], batch size: 176, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:09:11,963 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27070.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:09:13,205 INFO [train.py:892] (2/4) Epoch 15, batch 1100, loss[loss=0.1998, simple_loss=0.2644, pruned_loss=0.06755, over 19721.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2717, pruned_loss=0.06972, over 3933166.92 frames. ], batch size: 63, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:10:49,080 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.986e+02 4.421e+02 5.325e+02 6.263e+02 1.209e+03, threshold=1.065e+03, percent-clipped=1.0 +2023-03-28 07:11:00,535 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27118.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:11:06,892 INFO [train.py:892] (2/4) Epoch 15, batch 1150, loss[loss=0.1936, simple_loss=0.262, pruned_loss=0.06254, over 19736.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2727, pruned_loss=0.07076, over 3937216.78 frames. ], batch size: 92, lr: 1.05e-02, grad_scale: 8.0 +2023-03-28 07:12:06,833 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27148.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:12:58,793 INFO [train.py:892] (2/4) Epoch 15, batch 1200, loss[loss=0.1862, simple_loss=0.2539, pruned_loss=0.05919, over 19891.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2741, pruned_loss=0.0716, over 3937732.36 frames. ], batch size: 91, lr: 1.05e-02, grad_scale: 8.0 +2023-03-28 07:14:25,583 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27209.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:14:34,158 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.952e+02 4.645e+02 5.567e+02 6.439e+02 1.177e+03, threshold=1.113e+03, percent-clipped=3.0 +2023-03-28 07:14:49,437 INFO [train.py:892] (2/4) Epoch 15, batch 1250, loss[loss=0.2, simple_loss=0.2689, pruned_loss=0.06561, over 19887.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2732, pruned_loss=0.07079, over 3941424.14 frames. 
], batch size: 87, lr: 1.05e-02, grad_scale: 8.0 +2023-03-28 07:16:16,246 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27260.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 07:16:40,192 INFO [train.py:892] (2/4) Epoch 15, batch 1300, loss[loss=0.1951, simple_loss=0.2566, pruned_loss=0.06678, over 19824.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2724, pruned_loss=0.07041, over 3944618.90 frames. ], batch size: 166, lr: 1.05e-02, grad_scale: 8.0 +2023-03-28 07:16:43,261 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4604, 2.5441, 2.7951, 2.5511, 2.5422, 2.6308, 2.4290, 2.8350], + device='cuda:2'), covar=tensor([0.0183, 0.0316, 0.0248, 0.0185, 0.0313, 0.0264, 0.0348, 0.0263], + device='cuda:2'), in_proj_covar=tensor([0.0055, 0.0054, 0.0056, 0.0050, 0.0061, 0.0057, 0.0074, 0.0052], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 07:18:00,098 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27306.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:18:17,023 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.149e+02 4.572e+02 5.387e+02 6.367e+02 1.430e+03, threshold=1.077e+03, percent-clipped=1.0 +2023-03-28 07:18:32,967 INFO [train.py:892] (2/4) Epoch 15, batch 1350, loss[loss=0.2057, simple_loss=0.2751, pruned_loss=0.06816, over 19626.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2723, pruned_loss=0.07027, over 3946297.92 frames. ], batch size: 68, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:19:53,152 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1492, 3.2934, 3.4789, 3.3534, 3.5935, 3.2017, 3.2167, 3.1288], + device='cuda:2'), covar=tensor([0.1949, 0.1512, 0.1476, 0.1376, 0.0857, 0.1151, 0.2264, 0.2560], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0267, 0.0326, 0.0251, 0.0236, 0.0233, 0.0310, 0.0340], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 07:20:09,396 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3478, 2.5854, 3.0299, 2.7325, 3.3753, 3.3755, 4.2606, 4.6272], + device='cuda:2'), covar=tensor([0.0465, 0.2008, 0.1574, 0.2317, 0.1699, 0.1601, 0.0436, 0.0395], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0225, 0.0250, 0.0240, 0.0273, 0.0239, 0.0197, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:20:21,498 INFO [train.py:892] (2/4) Epoch 15, batch 1400, loss[loss=0.199, simple_loss=0.2684, pruned_loss=0.06476, over 19684.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2713, pruned_loss=0.06979, over 3945131.71 frames. 
], batch size: 49, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:20:28,277 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9165, 2.9549, 3.3080, 2.2647, 3.4556, 2.7717, 3.0126, 3.2781], + device='cuda:2'), covar=tensor([0.0466, 0.0345, 0.0418, 0.0791, 0.0229, 0.0333, 0.0415, 0.0249], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0066, 0.0066, 0.0095, 0.0062, 0.0062, 0.0060, 0.0053], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:21:59,974 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.359e+02 4.655e+02 5.654e+02 6.548e+02 1.649e+03, threshold=1.131e+03, percent-clipped=3.0 +2023-03-28 07:22:14,687 INFO [train.py:892] (2/4) Epoch 15, batch 1450, loss[loss=0.223, simple_loss=0.2834, pruned_loss=0.08131, over 19793.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2717, pruned_loss=0.07017, over 3943954.74 frames. ], batch size: 193, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:22:25,590 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-28 07:24:06,281 INFO [train.py:892] (2/4) Epoch 15, batch 1500, loss[loss=0.2467, simple_loss=0.3101, pruned_loss=0.09166, over 19535.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2723, pruned_loss=0.07041, over 3943901.19 frames. ], batch size: 54, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:24:58,650 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2883, 3.2607, 3.6210, 3.2559, 3.1593, 3.5265, 3.3612, 3.6575], + device='cuda:2'), covar=tensor([0.1071, 0.0430, 0.0440, 0.0477, 0.1405, 0.0578, 0.0481, 0.0403], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0203, 0.0199, 0.0204, 0.0196, 0.0209, 0.0206, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 07:25:20,002 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27504.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:25:40,818 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1180, 2.6595, 4.4707, 3.9883, 4.3400, 4.4429, 4.3622, 4.1319], + device='cuda:2'), covar=tensor([0.0264, 0.0855, 0.0107, 0.0777, 0.0109, 0.0224, 0.0138, 0.0147], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0092, 0.0075, 0.0145, 0.0069, 0.0085, 0.0077, 0.0070], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:25:41,663 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.670e+02 4.316e+02 5.110e+02 6.263e+02 1.157e+03, threshold=1.022e+03, percent-clipped=1.0 +2023-03-28 07:25:58,305 INFO [train.py:892] (2/4) Epoch 15, batch 1550, loss[loss=0.173, simple_loss=0.2497, pruned_loss=0.04813, over 19415.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2719, pruned_loss=0.07042, over 3945095.64 frames. ], batch size: 40, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:27:25,874 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-28 07:27:27,770 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27560.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 07:27:50,935 INFO [train.py:892] (2/4) Epoch 15, batch 1600, loss[loss=0.1687, simple_loss=0.2324, pruned_loss=0.05247, over 19767.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2716, pruned_loss=0.07017, over 3946260.28 frames. 
], batch size: 116, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:28:42,293 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4993, 4.4654, 4.9046, 4.4901, 4.0953, 4.7250, 4.4872, 5.0137], + device='cuda:2'), covar=tensor([0.0862, 0.0330, 0.0339, 0.0333, 0.0891, 0.0419, 0.0488, 0.0293], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0204, 0.0201, 0.0206, 0.0198, 0.0210, 0.0208, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 07:29:11,834 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27606.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:29:15,493 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27608.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 07:29:28,190 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.222e+02 4.211e+02 5.223e+02 6.136e+02 1.197e+03, threshold=1.045e+03, percent-clipped=1.0 +2023-03-28 07:29:43,660 INFO [train.py:892] (2/4) Epoch 15, batch 1650, loss[loss=0.2838, simple_loss=0.3356, pruned_loss=0.116, over 19698.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2712, pruned_loss=0.07013, over 3947310.19 frames. ], batch size: 305, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:30:14,704 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3487, 3.5270, 3.7213, 4.4367, 2.9075, 3.3356, 2.8710, 2.7189], + device='cuda:2'), covar=tensor([0.0441, 0.2200, 0.0871, 0.0282, 0.2017, 0.0799, 0.1158, 0.1703], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0332, 0.0230, 0.0168, 0.0240, 0.0185, 0.0203, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:31:01,155 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27654.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:31:38,701 INFO [train.py:892] (2/4) Epoch 15, batch 1700, loss[loss=0.2115, simple_loss=0.2743, pruned_loss=0.07433, over 19786.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2723, pruned_loss=0.07052, over 3946552.38 frames. 
], batch size: 94, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:32:47,566 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3397, 4.3650, 4.7476, 4.3514, 3.9572, 4.5361, 4.4115, 4.8342], + device='cuda:2'), covar=tensor([0.0907, 0.0322, 0.0326, 0.0347, 0.0912, 0.0444, 0.0388, 0.0297], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0201, 0.0199, 0.0205, 0.0194, 0.0207, 0.0205, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 07:32:53,543 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1541, 3.7415, 3.9009, 4.1244, 3.9094, 4.0297, 4.2454, 4.4025], + device='cuda:2'), covar=tensor([0.0680, 0.0414, 0.0487, 0.0284, 0.0621, 0.0514, 0.0365, 0.0268], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0157, 0.0182, 0.0151, 0.0154, 0.0137, 0.0138, 0.0174], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 07:33:14,465 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.834e+02 4.579e+02 5.497e+02 6.695e+02 1.218e+03, threshold=1.099e+03, percent-clipped=2.0 +2023-03-28 07:33:15,287 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27714.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 07:33:27,526 INFO [train.py:892] (2/4) Epoch 15, batch 1750, loss[loss=0.199, simple_loss=0.2767, pruned_loss=0.06065, over 19952.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2705, pruned_loss=0.06961, over 3947076.46 frames. ], batch size: 53, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:33:44,806 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0 +2023-03-28 07:34:49,239 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5534, 3.5080, 4.8474, 3.6515, 4.0964, 4.0807, 2.4935, 2.8387], + device='cuda:2'), covar=tensor([0.0602, 0.2537, 0.0403, 0.0811, 0.1293, 0.0857, 0.2114, 0.2111], + device='cuda:2'), in_proj_covar=tensor([0.0322, 0.0354, 0.0296, 0.0239, 0.0349, 0.0302, 0.0315, 0.0286], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 07:35:04,128 INFO [train.py:892] (2/4) Epoch 15, batch 1800, loss[loss=0.1903, simple_loss=0.2442, pruned_loss=0.06824, over 19850.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2706, pruned_loss=0.06962, over 3947353.38 frames. ], batch size: 142, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:35:11,082 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27775.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 07:35:18,649 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. 
limit=2.0 +2023-03-28 07:35:54,586 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5566, 2.9095, 2.4648, 1.8530, 2.3755, 2.7594, 2.7241, 2.7776], + device='cuda:2'), covar=tensor([0.0229, 0.0200, 0.0220, 0.0509, 0.0372, 0.0213, 0.0165, 0.0153], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0069, 0.0077, 0.0083, 0.0087, 0.0060, 0.0058, 0.0060], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 07:36:05,803 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27804.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:36:22,621 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.736e+02 4.432e+02 5.190e+02 6.215e+02 1.891e+03, threshold=1.038e+03, percent-clipped=1.0 +2023-03-28 07:36:34,454 INFO [train.py:892] (2/4) Epoch 15, batch 1850, loss[loss=0.1886, simple_loss=0.2633, pruned_loss=0.05691, over 19847.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2724, pruned_loss=0.06954, over 3947657.07 frames. ], batch size: 58, lr: 1.04e-02, grad_scale: 8.0 +2023-03-28 07:37:42,185 INFO [train.py:892] (2/4) Epoch 16, batch 0, loss[loss=0.1983, simple_loss=0.2628, pruned_loss=0.06692, over 19801.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2628, pruned_loss=0.06692, over 19801.00 frames. ], batch size: 47, lr: 1.00e-02, grad_scale: 8.0 +2023-03-28 07:37:42,186 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 07:38:12,910 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0596, 3.4699, 2.8995, 2.4126, 2.7804, 3.4142, 3.0492, 3.3069], + device='cuda:2'), covar=tensor([0.0296, 0.0195, 0.0244, 0.0428, 0.0344, 0.0233, 0.0173, 0.0173], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0070, 0.0077, 0.0083, 0.0087, 0.0060, 0.0058, 0.0060], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 07:38:14,562 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8908, 3.1993, 2.6050, 2.2155, 2.6225, 3.0226, 2.7744, 3.2154], + device='cuda:2'), covar=tensor([0.0137, 0.0249, 0.0242, 0.0493, 0.0330, 0.0205, 0.0199, 0.0069], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0070, 0.0077, 0.0083, 0.0087, 0.0060, 0.0058, 0.0060], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 07:38:15,255 INFO [train.py:926] (2/4) Epoch 16, validation: loss=0.1716, simple_loss=0.2504, pruned_loss=0.04639, over 2883724.00 frames. +2023-03-28 07:38:15,256 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 07:38:45,611 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.10 vs. limit=5.0 +2023-03-28 07:39:17,620 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27852.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:40:09,182 INFO [train.py:892] (2/4) Epoch 16, batch 50, loss[loss=0.1884, simple_loss=0.2605, pruned_loss=0.0581, over 19803.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.268, pruned_loss=0.06809, over 889275.39 frames. 
], batch size: 72, lr: 1.00e-02, grad_scale: 8.0 +2023-03-28 07:41:32,613 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.518e+02 4.210e+02 5.030e+02 5.753e+02 1.491e+03, threshold=1.006e+03, percent-clipped=2.0 +2023-03-28 07:42:00,021 INFO [train.py:892] (2/4) Epoch 16, batch 100, loss[loss=0.1992, simple_loss=0.2709, pruned_loss=0.06374, over 19767.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2672, pruned_loss=0.06645, over 1567539.11 frames. ], batch size: 113, lr: 1.00e-02, grad_scale: 8.0 +2023-03-28 07:42:22,942 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2727, 3.4426, 1.9855, 4.2399, 3.7938, 4.1764, 4.2657, 3.1325], + device='cuda:2'), covar=tensor([0.0636, 0.0483, 0.1527, 0.0446, 0.0539, 0.0320, 0.0539, 0.0753], + device='cuda:2'), in_proj_covar=tensor([0.0130, 0.0126, 0.0135, 0.0131, 0.0114, 0.0111, 0.0126, 0.0132], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 07:42:46,061 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7211, 3.8353, 3.9547, 5.0871, 3.1942, 3.3692, 3.1546, 3.0019], + device='cuda:2'), covar=tensor([0.0456, 0.2470, 0.0932, 0.0224, 0.2102, 0.1036, 0.1158, 0.1777], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0331, 0.0229, 0.0168, 0.0240, 0.0184, 0.0203, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:43:49,703 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-28 07:43:53,909 INFO [train.py:892] (2/4) Epoch 16, batch 150, loss[loss=0.1734, simple_loss=0.2449, pruned_loss=0.05092, over 19706.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2657, pruned_loss=0.06628, over 2095965.76 frames. 
], batch size: 85, lr: 1.00e-02, grad_scale: 8.0 +2023-03-28 07:44:01,562 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2485, 2.2977, 3.8007, 3.4076, 3.7186, 3.8977, 3.5907, 3.6581], + device='cuda:2'), covar=tensor([0.0345, 0.0887, 0.0115, 0.0609, 0.0115, 0.0192, 0.0175, 0.0146], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0093, 0.0076, 0.0146, 0.0071, 0.0085, 0.0078, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:44:07,809 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2166, 2.8214, 3.2963, 2.9108, 3.4845, 3.5008, 4.1839, 4.5341], + device='cuda:2'), covar=tensor([0.0540, 0.1704, 0.1390, 0.2037, 0.1480, 0.1261, 0.0461, 0.0529], + device='cuda:2'), in_proj_covar=tensor([0.0220, 0.0221, 0.0244, 0.0237, 0.0268, 0.0234, 0.0195, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:45:25,173 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.765e+02 5.560e+02 7.224e+02 1.633e+03, threshold=1.112e+03, percent-clipped=5.0 +2023-03-28 07:45:42,470 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2478, 3.0804, 3.4832, 2.3733, 3.6653, 2.9198, 2.8521, 3.3394], + device='cuda:2'), covar=tensor([0.0632, 0.0491, 0.0579, 0.0845, 0.0277, 0.0365, 0.0552, 0.0350], + device='cuda:2'), in_proj_covar=tensor([0.0063, 0.0068, 0.0068, 0.0098, 0.0064, 0.0063, 0.0062, 0.0054], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:45:44,410 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6372, 4.9696, 4.9918, 4.9163, 4.5938, 4.9669, 4.4439, 4.5053], + device='cuda:2'), covar=tensor([0.0464, 0.0448, 0.0549, 0.0442, 0.0594, 0.0537, 0.0651, 0.0937], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0230, 0.0256, 0.0220, 0.0215, 0.0202, 0.0230, 0.0269], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 07:45:54,097 INFO [train.py:892] (2/4) Epoch 16, batch 200, loss[loss=0.2311, simple_loss=0.2974, pruned_loss=0.08245, over 19728.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2685, pruned_loss=0.06799, over 2506721.59 frames. ], batch size: 76, lr: 9.99e-03, grad_scale: 8.0 +2023-03-28 07:46:41,918 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.73 vs. limit=2.0 +2023-03-28 07:47:36,344 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28070.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 07:47:47,821 INFO [train.py:892] (2/4) Epoch 16, batch 250, loss[loss=0.2233, simple_loss=0.2859, pruned_loss=0.08031, over 19723.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2694, pruned_loss=0.06838, over 2827682.14 frames. ], batch size: 219, lr: 9.98e-03, grad_scale: 8.0 +2023-03-28 07:49:14,116 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.535e+02 4.468e+02 5.270e+02 6.395e+02 1.144e+03, threshold=1.054e+03, percent-clipped=2.0 +2023-03-28 07:49:40,346 INFO [train.py:892] (2/4) Epoch 16, batch 300, loss[loss=0.2498, simple_loss=0.3106, pruned_loss=0.09454, over 19702.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2695, pruned_loss=0.06833, over 3075931.52 frames. 
], batch size: 315, lr: 9.97e-03, grad_scale: 8.0 +2023-03-28 07:49:48,490 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.66 vs. limit=2.0 +2023-03-28 07:51:33,564 INFO [train.py:892] (2/4) Epoch 16, batch 350, loss[loss=0.1937, simple_loss=0.2742, pruned_loss=0.05662, over 19659.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2701, pruned_loss=0.06847, over 3267656.37 frames. ], batch size: 57, lr: 9.96e-03, grad_scale: 8.0 +2023-03-28 07:52:15,417 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3811, 3.3067, 3.7717, 2.7887, 3.8030, 2.9732, 3.1520, 3.8284], + device='cuda:2'), covar=tensor([0.0551, 0.0382, 0.0333, 0.0721, 0.0382, 0.0396, 0.0464, 0.0267], + device='cuda:2'), in_proj_covar=tensor([0.0063, 0.0067, 0.0066, 0.0097, 0.0064, 0.0063, 0.0060, 0.0054], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:52:58,644 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5358, 2.6956, 2.8429, 2.6476, 2.4330, 2.6449, 2.5615, 2.8066], + device='cuda:2'), covar=tensor([0.0200, 0.0192, 0.0193, 0.0207, 0.0330, 0.0248, 0.0380, 0.0284], + device='cuda:2'), in_proj_covar=tensor([0.0057, 0.0055, 0.0059, 0.0050, 0.0063, 0.0060, 0.0077, 0.0052], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 07:53:01,621 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.842e+02 4.257e+02 5.267e+02 6.673e+02 1.330e+03, threshold=1.053e+03, percent-clipped=2.0 +2023-03-28 07:53:27,005 INFO [train.py:892] (2/4) Epoch 16, batch 400, loss[loss=0.2127, simple_loss=0.2882, pruned_loss=0.06858, over 19857.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2698, pruned_loss=0.0683, over 3419259.90 frames. ], batch size: 64, lr: 9.95e-03, grad_scale: 8.0 +2023-03-28 07:53:35,165 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28229.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:54:32,153 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6674, 3.6453, 4.0120, 3.6705, 3.5693, 3.9157, 3.7461, 4.0560], + device='cuda:2'), covar=tensor([0.0990, 0.0366, 0.0355, 0.0362, 0.1050, 0.0482, 0.0405, 0.0323], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0201, 0.0199, 0.0205, 0.0195, 0.0209, 0.0207, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 07:54:36,307 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28255.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 07:54:59,642 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6201, 2.6088, 2.8660, 2.5961, 2.9787, 3.0374, 3.5344, 3.8154], + device='cuda:2'), covar=tensor([0.0638, 0.1624, 0.1536, 0.1950, 0.1599, 0.1430, 0.0510, 0.0522], + device='cuda:2'), in_proj_covar=tensor([0.0224, 0.0225, 0.0249, 0.0239, 0.0272, 0.0239, 0.0199, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 07:55:21,931 INFO [train.py:892] (2/4) Epoch 16, batch 450, loss[loss=0.2065, simple_loss=0.2724, pruned_loss=0.07028, over 19780.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2694, pruned_loss=0.06778, over 3536715.21 frames. 
], batch size: 66, lr: 9.95e-03, grad_scale: 8.0 +2023-03-28 07:55:48,520 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28287.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:55:55,224 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28290.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:56:47,444 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.542e+02 5.285e+02 6.486e+02 1.059e+03, threshold=1.057e+03, percent-clipped=1.0 +2023-03-28 07:56:54,370 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28316.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 07:57:14,803 INFO [train.py:892] (2/4) Epoch 16, batch 500, loss[loss=0.1868, simple_loss=0.2591, pruned_loss=0.05728, over 19715.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2703, pruned_loss=0.06875, over 3626738.75 frames. ], batch size: 109, lr: 9.94e-03, grad_scale: 8.0 +2023-03-28 07:57:40,838 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0957, 3.9858, 4.4718, 4.0955, 3.8936, 4.3171, 4.1172, 4.5685], + device='cuda:2'), covar=tensor([0.0987, 0.0430, 0.0404, 0.0397, 0.0937, 0.0509, 0.0509, 0.0343], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0203, 0.0200, 0.0206, 0.0195, 0.0211, 0.0208, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 07:58:04,599 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28348.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:58:55,452 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28370.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 07:59:07,680 INFO [train.py:892] (2/4) Epoch 16, batch 550, loss[loss=0.2017, simple_loss=0.2582, pruned_loss=0.07262, over 19801.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2697, pruned_loss=0.06857, over 3697898.83 frames. ], batch size: 173, lr: 9.93e-03, grad_scale: 8.0 +2023-03-28 08:00:36,160 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 4.219e+02 4.967e+02 6.198e+02 1.278e+03, threshold=9.934e+02, percent-clipped=3.0 +2023-03-28 08:00:46,625 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28418.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 08:01:02,581 INFO [train.py:892] (2/4) Epoch 16, batch 600, loss[loss=0.1854, simple_loss=0.253, pruned_loss=0.05892, over 19726.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2706, pruned_loss=0.06895, over 3753698.40 frames. ], batch size: 47, lr: 9.92e-03, grad_scale: 8.0 +2023-03-28 08:01:14,299 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-28 08:01:25,682 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-28 08:01:57,216 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2017, 4.4718, 4.9345, 4.3885, 4.2472, 4.8664, 4.5898, 5.1193], + device='cuda:2'), covar=tensor([0.1591, 0.0476, 0.0578, 0.0527, 0.0794, 0.0484, 0.0557, 0.0417], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0202, 0.0200, 0.0207, 0.0195, 0.0211, 0.0207, 0.0192], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 08:02:06,910 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. 
limit=2.0 +2023-03-28 08:02:22,186 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7780, 2.6270, 2.9619, 2.4403, 3.0222, 2.5113, 2.8325, 2.9075], + device='cuda:2'), covar=tensor([0.0498, 0.0524, 0.0458, 0.0704, 0.0313, 0.0386, 0.0475, 0.0313], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0069, 0.0068, 0.0099, 0.0065, 0.0064, 0.0062, 0.0054], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 08:02:53,447 INFO [train.py:892] (2/4) Epoch 16, batch 650, loss[loss=0.2113, simple_loss=0.2708, pruned_loss=0.0759, over 19817.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2704, pruned_loss=0.06911, over 3797355.99 frames. ], batch size: 204, lr: 9.91e-03, grad_scale: 8.0 +2023-03-28 08:03:00,905 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3713, 3.4981, 3.7410, 3.5197, 3.8147, 3.2099, 3.4147, 3.2903], + device='cuda:2'), covar=tensor([0.2059, 0.1551, 0.1383, 0.1429, 0.0936, 0.1556, 0.2466, 0.2630], + device='cuda:2'), in_proj_covar=tensor([0.0270, 0.0271, 0.0331, 0.0257, 0.0245, 0.0240, 0.0321, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 08:03:15,908 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28486.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:03:31,478 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28492.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:04:17,632 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.821e+02 4.448e+02 5.454e+02 6.222e+02 1.151e+03, threshold=1.091e+03, percent-clipped=4.0 +2023-03-28 08:04:44,526 INFO [train.py:892] (2/4) Epoch 16, batch 700, loss[loss=0.187, simple_loss=0.2575, pruned_loss=0.05827, over 19727.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2714, pruned_loss=0.06939, over 3828515.70 frames. ], batch size: 52, lr: 9.90e-03, grad_scale: 8.0 +2023-03-28 08:05:34,278 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28547.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:05:45,687 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28553.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:06:33,877 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9210, 4.8773, 5.3857, 5.2356, 5.1573, 4.7211, 5.0871, 4.9458], + device='cuda:2'), covar=tensor([0.1391, 0.1512, 0.0931, 0.1029, 0.0799, 0.0889, 0.1993, 0.1999], + device='cuda:2'), in_proj_covar=tensor([0.0269, 0.0272, 0.0330, 0.0257, 0.0245, 0.0238, 0.0321, 0.0346], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 08:06:37,717 INFO [train.py:892] (2/4) Epoch 16, batch 750, loss[loss=0.1676, simple_loss=0.2423, pruned_loss=0.04648, over 19880.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2713, pruned_loss=0.06913, over 3855874.78 frames. 
], batch size: 95, lr: 9.89e-03, grad_scale: 8.0 +2023-03-28 08:06:59,797 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28585.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:07:59,386 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28611.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 08:08:06,520 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.992e+02 4.406e+02 5.087e+02 6.345e+02 1.381e+03, threshold=1.017e+03, percent-clipped=1.0 +2023-03-28 08:08:34,058 INFO [train.py:892] (2/4) Epoch 16, batch 800, loss[loss=0.1883, simple_loss=0.2577, pruned_loss=0.05948, over 19761.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2691, pruned_loss=0.06784, over 3878161.17 frames. ], batch size: 113, lr: 9.89e-03, grad_scale: 8.0 +2023-03-28 08:09:12,846 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28643.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:10:29,837 INFO [train.py:892] (2/4) Epoch 16, batch 850, loss[loss=0.2006, simple_loss=0.2637, pruned_loss=0.06881, over 19655.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2688, pruned_loss=0.06786, over 3893833.68 frames. ], batch size: 67, lr: 9.88e-03, grad_scale: 8.0 +2023-03-28 08:10:37,885 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.04 vs. limit=5.0 +2023-03-28 08:11:56,547 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 4.659e+02 5.536e+02 6.490e+02 1.175e+03, threshold=1.107e+03, percent-clipped=2.0 +2023-03-28 08:12:21,633 INFO [train.py:892] (2/4) Epoch 16, batch 900, loss[loss=0.2276, simple_loss=0.2885, pruned_loss=0.08337, over 19720.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2695, pruned_loss=0.06817, over 3906133.46 frames. ], batch size: 283, lr: 9.87e-03, grad_scale: 8.0 +2023-03-28 08:13:06,845 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9844, 3.3702, 3.2641, 4.0855, 2.6207, 3.2034, 2.7458, 2.5053], + device='cuda:2'), covar=tensor([0.0507, 0.1965, 0.1115, 0.0324, 0.2136, 0.0720, 0.1213, 0.1775], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0338, 0.0234, 0.0175, 0.0242, 0.0188, 0.0207, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 08:14:18,786 INFO [train.py:892] (2/4) Epoch 16, batch 950, loss[loss=0.2119, simple_loss=0.2697, pruned_loss=0.07707, over 19877.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2708, pruned_loss=0.0687, over 3913003.63 frames. 
], batch size: 139, lr: 9.86e-03, grad_scale: 8.0 +2023-03-28 08:14:58,772 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7340, 4.7897, 3.0622, 5.0173, 5.2054, 2.2004, 4.4579, 3.7687], + device='cuda:2'), covar=tensor([0.0496, 0.0610, 0.2168, 0.0604, 0.0394, 0.2750, 0.0842, 0.0633], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0234, 0.0218, 0.0233, 0.0202, 0.0201, 0.0230, 0.0174], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 08:15:44,964 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 4.634e+02 5.431e+02 6.353e+02 1.268e+03, threshold=1.086e+03, percent-clipped=1.0 +2023-03-28 08:15:50,955 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5572, 3.2386, 3.7901, 2.8209, 3.8369, 2.9734, 3.1645, 3.7644], + device='cuda:2'), covar=tensor([0.0517, 0.0369, 0.0424, 0.0749, 0.0419, 0.0435, 0.0522, 0.0327], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0070, 0.0069, 0.0100, 0.0067, 0.0065, 0.0063, 0.0055], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 08:16:11,366 INFO [train.py:892] (2/4) Epoch 16, batch 1000, loss[loss=0.2058, simple_loss=0.2536, pruned_loss=0.07893, over 19875.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2694, pruned_loss=0.06744, over 3920859.50 frames. ], batch size: 136, lr: 9.85e-03, grad_scale: 8.0 +2023-03-28 08:16:45,750 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28842.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:16:59,646 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28848.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:17:31,392 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28861.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:18:01,603 INFO [train.py:892] (2/4) Epoch 16, batch 1050, loss[loss=0.1932, simple_loss=0.268, pruned_loss=0.0592, over 19899.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2699, pruned_loss=0.06805, over 3927150.15 frames. ], batch size: 71, lr: 9.84e-03, grad_scale: 8.0 +2023-03-28 08:18:23,667 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28885.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:19:23,567 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28911.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 08:19:30,252 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.700e+02 4.466e+02 5.126e+02 6.804e+02 1.153e+03, threshold=1.025e+03, percent-clipped=1.0 +2023-03-28 08:19:48,236 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28922.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:19:56,640 INFO [train.py:892] (2/4) Epoch 16, batch 1100, loss[loss=0.1806, simple_loss=0.2447, pruned_loss=0.05827, over 19781.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2703, pruned_loss=0.0681, over 3932806.23 frames. 
], batch size: 131, lr: 9.84e-03, grad_scale: 8.0 +2023-03-28 08:20:14,256 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28933.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:20:37,590 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28943.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:21:12,752 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28959.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 08:21:48,712 INFO [train.py:892] (2/4) Epoch 16, batch 1150, loss[loss=0.2335, simple_loss=0.2968, pruned_loss=0.08512, over 19778.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2713, pruned_loss=0.0686, over 3935363.69 frames. ], batch size: 191, lr: 9.83e-03, grad_scale: 8.0 +2023-03-28 08:22:23,820 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28991.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:22:50,945 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29002.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:23:15,277 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.550e+02 5.400e+02 6.345e+02 1.264e+03, threshold=1.080e+03, percent-clipped=1.0 +2023-03-28 08:23:43,696 INFO [train.py:892] (2/4) Epoch 16, batch 1200, loss[loss=0.1878, simple_loss=0.2601, pruned_loss=0.05772, over 19743.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2703, pruned_loss=0.06767, over 3938580.83 frames. ], batch size: 97, lr: 9.82e-03, grad_scale: 8.0 +2023-03-28 08:25:09,480 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29063.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:25:38,190 INFO [train.py:892] (2/4) Epoch 16, batch 1250, loss[loss=0.1767, simple_loss=0.2445, pruned_loss=0.05448, over 19840.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2691, pruned_loss=0.06738, over 3942703.43 frames. ], batch size: 160, lr: 9.81e-03, grad_scale: 8.0 +2023-03-28 08:25:47,161 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-03-28 08:27:06,756 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 4.171e+02 4.962e+02 5.864e+02 1.336e+03, threshold=9.924e+02, percent-clipped=4.0 +2023-03-28 08:27:33,971 INFO [train.py:892] (2/4) Epoch 16, batch 1300, loss[loss=0.2453, simple_loss=0.3099, pruned_loss=0.09041, over 19747.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2686, pruned_loss=0.0671, over 3945542.03 frames. ], batch size: 276, lr: 9.80e-03, grad_scale: 16.0 +2023-03-28 08:27:43,947 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0759, 3.3355, 2.5694, 2.2686, 2.7210, 3.2139, 3.1227, 3.1298], + device='cuda:2'), covar=tensor([0.0211, 0.0245, 0.0292, 0.0535, 0.0372, 0.0207, 0.0183, 0.0163], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0071, 0.0080, 0.0084, 0.0088, 0.0063, 0.0059, 0.0063], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-03-28 08:28:12,032 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29142.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:28:17,015 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. 
limit=2.0 +2023-03-28 08:28:24,981 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29148.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:29:27,192 INFO [train.py:892] (2/4) Epoch 16, batch 1350, loss[loss=0.1661, simple_loss=0.2384, pruned_loss=0.04693, over 19903.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2678, pruned_loss=0.06621, over 3944423.89 frames. ], batch size: 113, lr: 9.80e-03, grad_scale: 16.0 +2023-03-28 08:30:01,010 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29190.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:30:15,729 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29196.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:30:20,658 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-03-28 08:30:55,172 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 4.385e+02 5.390e+02 6.526e+02 1.400e+03, threshold=1.078e+03, percent-clipped=3.0 +2023-03-28 08:31:01,928 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29217.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:31:23,395 INFO [train.py:892] (2/4) Epoch 16, batch 1400, loss[loss=0.1656, simple_loss=0.238, pruned_loss=0.04664, over 19595.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2682, pruned_loss=0.06642, over 3946045.65 frames. ], batch size: 45, lr: 9.79e-03, grad_scale: 16.0 +2023-03-28 08:31:47,923 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29236.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:33:17,628 INFO [train.py:892] (2/4) Epoch 16, batch 1450, loss[loss=0.1893, simple_loss=0.2554, pruned_loss=0.06159, over 19723.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2681, pruned_loss=0.06614, over 3947095.28 frames. ], batch size: 104, lr: 9.78e-03, grad_scale: 16.0 +2023-03-28 08:34:08,468 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29297.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:34:28,604 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-28 08:34:43,963 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.554e+02 4.411e+02 5.175e+02 6.101e+02 1.424e+03, threshold=1.035e+03, percent-clipped=1.0 +2023-03-28 08:35:11,623 INFO [train.py:892] (2/4) Epoch 16, batch 1500, loss[loss=0.2087, simple_loss=0.2669, pruned_loss=0.07526, over 19845.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2678, pruned_loss=0.06599, over 3949441.84 frames. ], batch size: 137, lr: 9.77e-03, grad_scale: 16.0 +2023-03-28 08:36:25,354 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29358.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:37:05,538 INFO [train.py:892] (2/4) Epoch 16, batch 1550, loss[loss=0.2693, simple_loss=0.324, pruned_loss=0.1073, over 19654.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2678, pruned_loss=0.06611, over 3948822.47 frames. 
], batch size: 330, lr: 9.76e-03, grad_scale: 16.0 +2023-03-28 08:37:40,576 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4735, 2.6147, 3.9065, 2.9057, 3.2824, 3.1389, 2.1475, 2.1706], + device='cuda:2'), covar=tensor([0.0988, 0.3085, 0.0527, 0.0873, 0.1543, 0.1273, 0.2022, 0.2605], + device='cuda:2'), in_proj_covar=tensor([0.0326, 0.0356, 0.0302, 0.0244, 0.0351, 0.0309, 0.0320, 0.0294], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 08:38:32,266 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.365e+02 5.019e+02 6.130e+02 1.225e+03, threshold=1.004e+03, percent-clipped=3.0 +2023-03-28 08:39:00,857 INFO [train.py:892] (2/4) Epoch 16, batch 1600, loss[loss=0.1847, simple_loss=0.255, pruned_loss=0.05716, over 19832.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2677, pruned_loss=0.06519, over 3948344.50 frames. ], batch size: 52, lr: 9.76e-03, grad_scale: 16.0 +2023-03-28 08:39:23,374 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7656, 2.8980, 3.1632, 2.8302, 2.6613, 3.1339, 2.7542, 3.0385], + device='cuda:2'), covar=tensor([0.0207, 0.0236, 0.0228, 0.0228, 0.0336, 0.0195, 0.0324, 0.0289], + device='cuda:2'), in_proj_covar=tensor([0.0059, 0.0056, 0.0061, 0.0052, 0.0065, 0.0062, 0.0079, 0.0055], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 08:40:51,211 INFO [train.py:892] (2/4) Epoch 16, batch 1650, loss[loss=0.1687, simple_loss=0.2323, pruned_loss=0.05252, over 19815.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2662, pruned_loss=0.06458, over 3948814.88 frames. ], batch size: 82, lr: 9.75e-03, grad_scale: 16.0 +2023-03-28 08:42:16,597 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-28 08:42:19,141 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.587e+02 4.298e+02 4.820e+02 5.850e+02 1.066e+03, threshold=9.641e+02, percent-clipped=1.0 +2023-03-28 08:42:27,208 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29517.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:42:45,621 INFO [train.py:892] (2/4) Epoch 16, batch 1700, loss[loss=0.1889, simple_loss=0.2496, pruned_loss=0.06412, over 19875.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2663, pruned_loss=0.06483, over 3949416.58 frames. ], batch size: 125, lr: 9.74e-03, grad_scale: 16.0 +2023-03-28 08:43:45,537 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29551.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:44:03,210 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. 
limit=2.0 +2023-03-28 08:44:06,689 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4712, 3.6530, 3.7999, 4.5919, 2.9933, 3.4282, 2.9539, 2.7841], + device='cuda:2'), covar=tensor([0.0442, 0.2202, 0.0949, 0.0254, 0.2124, 0.0766, 0.1166, 0.1720], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0335, 0.0235, 0.0174, 0.0243, 0.0190, 0.0207, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 08:44:15,330 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29565.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:44:35,375 INFO [train.py:892] (2/4) Epoch 16, batch 1750, loss[loss=0.1963, simple_loss=0.2629, pruned_loss=0.06491, over 19834.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2666, pruned_loss=0.06526, over 3949966.90 frames. ], batch size: 101, lr: 9.73e-03, grad_scale: 16.0 +2023-03-28 08:44:56,175 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-03-28 08:45:10,842 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29592.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:45:15,136 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-28 08:45:39,053 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4828, 5.9130, 6.1164, 5.8682, 5.7103, 5.5078, 5.6604, 5.6364], + device='cuda:2'), covar=tensor([0.1604, 0.1147, 0.0982, 0.1158, 0.0687, 0.0873, 0.2362, 0.1912], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0269, 0.0324, 0.0255, 0.0241, 0.0234, 0.0313, 0.0344], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 08:45:50,273 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29612.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:45:52,890 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.648e+02 4.260e+02 5.446e+02 6.419e+02 1.506e+03, threshold=1.089e+03, percent-clipped=4.0 +2023-03-28 08:46:14,322 INFO [train.py:892] (2/4) Epoch 16, batch 1800, loss[loss=0.2848, simple_loss=0.373, pruned_loss=0.0983, over 18699.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2668, pruned_loss=0.06568, over 3949991.45 frames. ], batch size: 564, lr: 9.72e-03, grad_scale: 16.0 +2023-03-28 08:47:16,008 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29658.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:47:23,435 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29662.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:47:46,315 INFO [train.py:892] (2/4) Epoch 16, batch 1850, loss[loss=0.1954, simple_loss=0.2664, pruned_loss=0.06215, over 19567.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2688, pruned_loss=0.06634, over 3948619.60 frames. ], batch size: 53, lr: 9.72e-03, grad_scale: 16.0 +2023-03-28 08:48:53,454 INFO [train.py:892] (2/4) Epoch 17, batch 0, loss[loss=0.1738, simple_loss=0.2519, pruned_loss=0.04782, over 19613.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2519, pruned_loss=0.04782, over 19613.00 frames. 
], batch size: 51, lr: 9.42e-03, grad_scale: 16.0 +2023-03-28 08:48:53,455 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 08:49:26,238 INFO [train.py:926] (2/4) Epoch 17, validation: loss=0.1709, simple_loss=0.2495, pruned_loss=0.0462, over 2883724.00 frames. +2023-03-28 08:49:26,239 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 08:50:27,532 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29706.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:50:46,151 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 4.092e+02 5.169e+02 6.324e+02 1.457e+03, threshold=1.034e+03, percent-clipped=3.0 +2023-03-28 08:51:08,848 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29723.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 08:51:25,308 INFO [train.py:892] (2/4) Epoch 17, batch 50, loss[loss=0.1864, simple_loss=0.2717, pruned_loss=0.05062, over 19815.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.267, pruned_loss=0.06659, over 890385.83 frames. ], batch size: 57, lr: 9.41e-03, grad_scale: 16.0 +2023-03-28 08:53:21,581 INFO [train.py:892] (2/4) Epoch 17, batch 100, loss[loss=0.1922, simple_loss=0.2675, pruned_loss=0.05844, over 19776.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2654, pruned_loss=0.06586, over 1569130.65 frames. ], batch size: 69, lr: 9.41e-03, grad_scale: 16.0 +2023-03-28 08:54:13,459 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29802.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:54:32,855 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9304, 3.9997, 2.2105, 4.1287, 4.3312, 1.7517, 3.5966, 3.2555], + device='cuda:2'), covar=tensor([0.0580, 0.0734, 0.2721, 0.0636, 0.0364, 0.2912, 0.0863, 0.0667], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0232, 0.0216, 0.0231, 0.0202, 0.0199, 0.0225, 0.0172], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 08:54:38,190 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 4.290e+02 5.009e+02 6.167e+02 1.031e+03, threshold=1.002e+03, percent-clipped=0.0 +2023-03-28 08:55:00,918 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8030, 2.7229, 4.2825, 3.7377, 3.9710, 4.2173, 4.0975, 3.9060], + device='cuda:2'), covar=tensor([0.0313, 0.0758, 0.0110, 0.0651, 0.0136, 0.0223, 0.0142, 0.0150], + device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0092, 0.0076, 0.0146, 0.0071, 0.0085, 0.0079, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 08:55:19,242 INFO [train.py:892] (2/4) Epoch 17, batch 150, loss[loss=0.2194, simple_loss=0.2828, pruned_loss=0.07802, over 19894.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2668, pruned_loss=0.06631, over 2096869.51 frames. 
], batch size: 62, lr: 9.40e-03, grad_scale: 16.0 +2023-03-28 08:55:28,846 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9203, 3.3381, 2.7009, 2.2187, 2.7571, 3.1402, 3.1503, 3.1835], + device='cuda:2'), covar=tensor([0.0223, 0.0220, 0.0263, 0.0508, 0.0390, 0.0262, 0.0178, 0.0162], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0072, 0.0080, 0.0085, 0.0089, 0.0063, 0.0060, 0.0063], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:2') +2023-03-28 08:55:38,808 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2076, 3.8609, 4.0116, 4.1907, 3.9113, 4.1504, 4.3266, 4.4420], + device='cuda:2'), covar=tensor([0.0611, 0.0368, 0.0425, 0.0299, 0.0626, 0.0426, 0.0347, 0.0290], + device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0155, 0.0179, 0.0151, 0.0151, 0.0132, 0.0134, 0.0172], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 08:56:29,451 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29863.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:57:07,850 INFO [train.py:892] (2/4) Epoch 17, batch 200, loss[loss=0.181, simple_loss=0.2571, pruned_loss=0.05243, over 19801.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2657, pruned_loss=0.06567, over 2508601.97 frames. ], batch size: 114, lr: 9.39e-03, grad_scale: 16.0 +2023-03-28 08:57:33,262 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29892.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:58:05,742 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29907.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:58:19,263 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.430e+02 4.252e+02 5.177e+02 6.087e+02 1.116e+03, threshold=1.035e+03, percent-clipped=2.0 +2023-03-28 08:58:59,503 INFO [train.py:892] (2/4) Epoch 17, batch 250, loss[loss=0.2005, simple_loss=0.2704, pruned_loss=0.06532, over 19844.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2654, pruned_loss=0.06511, over 2828293.15 frames. ], batch size: 58, lr: 9.38e-03, grad_scale: 16.0 +2023-03-28 08:59:22,583 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29940.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:00:54,573 INFO [train.py:892] (2/4) Epoch 17, batch 300, loss[loss=0.1558, simple_loss=0.2288, pruned_loss=0.0414, over 19744.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2651, pruned_loss=0.06477, over 3077826.78 frames. 
], batch size: 89, lr: 9.37e-03, grad_scale: 16.0 +2023-03-28 09:01:27,138 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9529, 5.3126, 5.3265, 5.2394, 4.8835, 5.2396, 4.7462, 4.7337], + device='cuda:2'), covar=tensor([0.0399, 0.0388, 0.0508, 0.0435, 0.0588, 0.0543, 0.0696, 0.0915], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0231, 0.0259, 0.0222, 0.0217, 0.0209, 0.0231, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 09:02:13,027 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.673e+02 4.129e+02 4.962e+02 6.041e+02 9.266e+02, threshold=9.924e+02, percent-clipped=0.0 +2023-03-28 09:02:24,236 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30018.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 09:02:53,690 INFO [train.py:892] (2/4) Epoch 17, batch 350, loss[loss=0.1746, simple_loss=0.2516, pruned_loss=0.04878, over 19796.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2662, pruned_loss=0.06529, over 3271487.52 frames. ], batch size: 68, lr: 9.37e-03, grad_scale: 16.0 +2023-03-28 09:04:06,489 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9272, 4.5078, 4.5060, 4.9011, 4.6002, 5.0870, 5.0455, 5.1860], + device='cuda:2'), covar=tensor([0.0639, 0.0360, 0.0462, 0.0290, 0.0613, 0.0345, 0.0380, 0.0275], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0157, 0.0182, 0.0153, 0.0153, 0.0136, 0.0138, 0.0174], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 09:04:44,397 INFO [train.py:892] (2/4) Epoch 17, batch 400, loss[loss=0.2302, simple_loss=0.2948, pruned_loss=0.08278, over 19698.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2646, pruned_loss=0.0642, over 3424272.89 frames. ], batch size: 305, lr: 9.36e-03, grad_scale: 16.0 +2023-03-28 09:05:59,371 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 4.154e+02 5.101e+02 6.100e+02 1.061e+03, threshold=1.020e+03, percent-clipped=2.0 +2023-03-28 09:06:26,025 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30125.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:06:38,288 INFO [train.py:892] (2/4) Epoch 17, batch 450, loss[loss=0.1823, simple_loss=0.2489, pruned_loss=0.05787, over 19753.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2657, pruned_loss=0.06475, over 3540915.44 frames. ], batch size: 110, lr: 9.35e-03, grad_scale: 16.0 +2023-03-28 09:07:39,084 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2595, 4.7856, 4.8698, 4.5701, 5.2028, 2.9646, 4.1014, 2.6212], + device='cuda:2'), covar=tensor([0.0215, 0.0218, 0.0168, 0.0189, 0.0140, 0.1048, 0.0953, 0.1617], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0129, 0.0104, 0.0123, 0.0109, 0.0124, 0.0136, 0.0120], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 09:07:40,883 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30158.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:08:24,473 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-28 09:08:32,541 INFO [train.py:892] (2/4) Epoch 17, batch 500, loss[loss=0.1949, simple_loss=0.2599, pruned_loss=0.06495, over 19778.00 frames. 
], tot_loss[loss=0.1981, simple_loss=0.2658, pruned_loss=0.06524, over 3631618.16 frames. ], batch size: 40, lr: 9.34e-03, grad_scale: 16.0 +2023-03-28 09:08:43,983 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30186.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:09:31,013 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30207.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:09:35,214 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30209.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:09:38,987 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-03-28 09:09:45,649 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.879e+02 4.496e+02 5.220e+02 6.735e+02 1.165e+03, threshold=1.044e+03, percent-clipped=2.0 +2023-03-28 09:10:25,881 INFO [train.py:892] (2/4) Epoch 17, batch 550, loss[loss=0.2336, simple_loss=0.2965, pruned_loss=0.08541, over 19795.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2655, pruned_loss=0.06511, over 3704031.63 frames. ], batch size: 288, lr: 9.34e-03, grad_scale: 16.0 +2023-03-28 09:11:08,973 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5540, 3.4990, 5.0665, 3.8135, 4.1648, 4.1607, 2.6754, 3.0237], + device='cuda:2'), covar=tensor([0.0637, 0.2539, 0.0299, 0.0725, 0.1373, 0.0923, 0.2079, 0.1930], + device='cuda:2'), in_proj_covar=tensor([0.0326, 0.0352, 0.0303, 0.0244, 0.0350, 0.0312, 0.0320, 0.0291], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 09:11:17,347 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.71 vs. limit=5.0 +2023-03-28 09:11:19,688 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30255.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:11:53,904 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30270.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:12:19,495 INFO [train.py:892] (2/4) Epoch 17, batch 600, loss[loss=0.1681, simple_loss=0.2406, pruned_loss=0.04779, over 19909.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2658, pruned_loss=0.06533, over 3759308.33 frames. ], batch size: 45, lr: 9.33e-03, grad_scale: 16.0 +2023-03-28 09:13:11,378 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.7480, 1.8683, 1.9231, 1.8655, 1.7997, 1.8794, 1.8617, 1.9686], + device='cuda:2'), covar=tensor([0.0287, 0.0244, 0.0253, 0.0244, 0.0356, 0.0259, 0.0386, 0.0213], + device='cuda:2'), in_proj_covar=tensor([0.0059, 0.0056, 0.0061, 0.0053, 0.0066, 0.0063, 0.0079, 0.0055], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 09:13:22,838 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.90 vs. limit=5.0 +2023-03-28 09:13:33,410 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.467e+02 5.041e+02 6.108e+02 1.228e+03, threshold=1.008e+03, percent-clipped=2.0 +2023-03-28 09:13:44,457 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30318.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 09:14:12,616 INFO [train.py:892] (2/4) Epoch 17, batch 650, loss[loss=0.1968, simple_loss=0.2666, pruned_loss=0.0635, over 19696.00 frames. 
], tot_loss[loss=0.1988, simple_loss=0.2661, pruned_loss=0.06572, over 3800938.59 frames. ], batch size: 56, lr: 9.32e-03, grad_scale: 16.0 +2023-03-28 09:15:18,775 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30360.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:15:32,421 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30366.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:15:43,012 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5074, 3.0976, 3.3713, 3.0880, 3.6935, 3.7037, 4.3321, 4.6987], + device='cuda:2'), covar=tensor([0.0497, 0.1597, 0.1373, 0.2008, 0.1615, 0.1252, 0.0531, 0.0477], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0228, 0.0247, 0.0240, 0.0273, 0.0239, 0.0202, 0.0216], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 09:16:03,578 INFO [train.py:892] (2/4) Epoch 17, batch 700, loss[loss=0.1622, simple_loss=0.2294, pruned_loss=0.04752, over 19742.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2662, pruned_loss=0.06508, over 3834132.44 frames. ], batch size: 44, lr: 9.31e-03, grad_scale: 16.0 +2023-03-28 09:16:06,959 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-28 09:17:22,247 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.759e+02 4.180e+02 5.300e+02 6.975e+02 1.622e+03, threshold=1.060e+03, percent-clipped=3.0 +2023-03-28 09:17:39,009 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30421.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:17:59,217 INFO [train.py:892] (2/4) Epoch 17, batch 750, loss[loss=0.1823, simple_loss=0.2469, pruned_loss=0.05883, over 19866.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2664, pruned_loss=0.06506, over 3859297.26 frames. ], batch size: 104, lr: 9.31e-03, grad_scale: 16.0 +2023-03-28 09:19:02,371 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30458.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:19:53,020 INFO [train.py:892] (2/4) Epoch 17, batch 800, loss[loss=0.2234, simple_loss=0.2929, pruned_loss=0.07692, over 19712.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2659, pruned_loss=0.06487, over 3880222.02 frames. ], batch size: 61, lr: 9.30e-03, grad_scale: 16.0 +2023-03-28 09:19:54,177 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30481.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:20:52,608 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30506.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:21:10,976 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.987e+02 4.028e+02 4.946e+02 5.803e+02 1.096e+03, threshold=9.891e+02, percent-clipped=1.0 +2023-03-28 09:21:23,808 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-28 09:21:46,466 INFO [train.py:892] (2/4) Epoch 17, batch 850, loss[loss=0.221, simple_loss=0.2968, pruned_loss=0.07261, over 19677.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2658, pruned_loss=0.06475, over 3896957.35 frames. 
], batch size: 64, lr: 9.29e-03, grad_scale: 16.0 +2023-03-28 09:23:05,879 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30565.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:23:40,245 INFO [train.py:892] (2/4) Epoch 17, batch 900, loss[loss=0.1781, simple_loss=0.2442, pruned_loss=0.05596, over 19824.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.265, pruned_loss=0.06419, over 3907693.97 frames. ], batch size: 117, lr: 9.28e-03, grad_scale: 16.0 +2023-03-28 09:24:54,335 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.960e+02 4.086e+02 5.123e+02 6.200e+02 1.238e+03, threshold=1.025e+03, percent-clipped=1.0 +2023-03-28 09:25:34,357 INFO [train.py:892] (2/4) Epoch 17, batch 950, loss[loss=0.1914, simple_loss=0.2631, pruned_loss=0.05981, over 19797.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2661, pruned_loss=0.06519, over 3917382.96 frames. ], batch size: 51, lr: 9.28e-03, grad_scale: 16.0 +2023-03-28 09:26:28,409 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30655.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:27:25,987 INFO [train.py:892] (2/4) Epoch 17, batch 1000, loss[loss=0.1809, simple_loss=0.247, pruned_loss=0.05737, over 19800.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2655, pruned_loss=0.06492, over 3923898.23 frames. ], batch size: 150, lr: 9.27e-03, grad_scale: 16.0 +2023-03-28 09:28:28,107 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.10 vs. limit=2.0 +2023-03-28 09:28:42,761 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.493e+02 4.412e+02 5.308e+02 6.255e+02 1.077e+03, threshold=1.062e+03, percent-clipped=1.0 +2023-03-28 09:28:48,021 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30716.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:28:48,204 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30716.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 09:29:12,184 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.32 vs. limit=5.0 +2023-03-28 09:29:20,840 INFO [train.py:892] (2/4) Epoch 17, batch 1050, loss[loss=0.1823, simple_loss=0.2478, pruned_loss=0.05838, over 19603.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2658, pruned_loss=0.06468, over 3929335.29 frames. ], batch size: 44, lr: 9.26e-03, grad_scale: 16.0 +2023-03-28 09:31:15,320 INFO [train.py:892] (2/4) Epoch 17, batch 1100, loss[loss=0.2942, simple_loss=0.3798, pruned_loss=0.1043, over 18060.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.267, pruned_loss=0.06527, over 3931964.05 frames. 
], batch size: 633, lr: 9.26e-03, grad_scale: 16.0 +2023-03-28 09:31:16,144 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30781.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:32:31,409 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.889e+02 4.403e+02 5.206e+02 6.352e+02 1.143e+03, threshold=1.041e+03, percent-clipped=2.0 +2023-03-28 09:32:58,581 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1661, 5.3966, 5.6839, 5.4048, 5.3836, 5.1579, 5.3225, 5.1473], + device='cuda:2'), covar=tensor([0.1285, 0.1160, 0.0740, 0.1113, 0.0630, 0.0706, 0.1728, 0.1947], + device='cuda:2'), in_proj_covar=tensor([0.0269, 0.0276, 0.0327, 0.0259, 0.0244, 0.0240, 0.0319, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 09:33:05,664 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30829.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:33:08,516 INFO [train.py:892] (2/4) Epoch 17, batch 1150, loss[loss=0.1799, simple_loss=0.2366, pruned_loss=0.06155, over 19870.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2667, pruned_loss=0.06538, over 3934957.44 frames. ], batch size: 157, lr: 9.25e-03, grad_scale: 16.0 +2023-03-28 09:34:25,786 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30865.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:35:00,473 INFO [train.py:892] (2/4) Epoch 17, batch 1200, loss[loss=0.2031, simple_loss=0.2709, pruned_loss=0.06767, over 19730.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2656, pruned_loss=0.06445, over 3937585.32 frames. ], batch size: 62, lr: 9.24e-03, grad_scale: 16.0 +2023-03-28 09:35:15,829 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.6116, 4.9600, 5.2675, 4.9297, 5.4909, 3.3507, 4.2267, 2.9007], + device='cuda:2'), covar=tensor([0.0138, 0.0167, 0.0111, 0.0171, 0.0112, 0.0768, 0.0963, 0.1284], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0130, 0.0103, 0.0123, 0.0109, 0.0125, 0.0134, 0.0119], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 09:35:50,785 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30903.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:36:17,103 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30913.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:36:18,328 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.660e+02 3.834e+02 4.757e+02 6.314e+02 9.786e+02, threshold=9.513e+02, percent-clipped=0.0 +2023-03-28 09:36:30,347 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4523, 4.4948, 4.8745, 4.4336, 4.1188, 4.6674, 4.5107, 4.9800], + device='cuda:2'), covar=tensor([0.0883, 0.0330, 0.0338, 0.0332, 0.0763, 0.0451, 0.0399, 0.0322], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0206, 0.0206, 0.0212, 0.0198, 0.0214, 0.0212, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 09:36:42,646 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30925.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:36:53,359 INFO [train.py:892] (2/4) Epoch 17, batch 1250, loss[loss=0.1957, simple_loss=0.2684, pruned_loss=0.06148, over 19583.00 frames. 
], tot_loss[loss=0.1968, simple_loss=0.265, pruned_loss=0.06429, over 3940590.08 frames. ], batch size: 44, lr: 9.23e-03, grad_scale: 16.0 +2023-03-28 09:36:56,882 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6553, 2.7440, 3.9811, 3.1464, 3.4204, 3.3157, 2.1729, 2.2884], + device='cuda:2'), covar=tensor([0.0868, 0.2589, 0.0505, 0.0767, 0.1300, 0.1094, 0.1997, 0.2371], + device='cuda:2'), in_proj_covar=tensor([0.0329, 0.0355, 0.0306, 0.0246, 0.0351, 0.0315, 0.0323, 0.0295], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 09:37:01,678 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30934.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:37:52,304 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0047, 2.3972, 2.9014, 3.1873, 3.7642, 4.1360, 3.9875, 4.2121], + device='cuda:2'), covar=tensor([0.0780, 0.1676, 0.1206, 0.0570, 0.0346, 0.0177, 0.0258, 0.0283], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0167, 0.0167, 0.0139, 0.0120, 0.0113, 0.0108, 0.0102], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 09:38:10,806 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30964.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:38:45,959 INFO [train.py:892] (2/4) Epoch 17, batch 1300, loss[loss=0.3306, simple_loss=0.386, pruned_loss=0.1376, over 19224.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2647, pruned_loss=0.06453, over 3942933.21 frames. ], batch size: 483, lr: 9.23e-03, grad_scale: 16.0 +2023-03-28 09:39:00,848 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30986.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:39:22,245 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30995.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:39:56,580 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31011.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 09:39:56,713 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31011.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 09:40:01,502 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 4.725e+02 5.560e+02 6.623e+02 9.875e+02, threshold=1.112e+03, percent-clipped=1.0 +2023-03-28 09:40:06,333 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31016.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:40:39,840 INFO [train.py:892] (2/4) Epoch 17, batch 1350, loss[loss=0.1834, simple_loss=0.2503, pruned_loss=0.05827, over 19719.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2639, pruned_loss=0.06375, over 3945289.46 frames. 
], batch size: 62, lr: 9.22e-03, grad_scale: 16.0 +2023-03-28 09:41:40,591 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5129, 2.4285, 3.9047, 3.4315, 3.7543, 3.9216, 3.7824, 3.5970], + device='cuda:2'), covar=tensor([0.0315, 0.0839, 0.0102, 0.0658, 0.0124, 0.0217, 0.0151, 0.0175], + device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0095, 0.0077, 0.0147, 0.0072, 0.0086, 0.0080, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 09:41:54,685 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31064.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:42:13,090 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31072.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 09:42:31,186 INFO [train.py:892] (2/4) Epoch 17, batch 1400, loss[loss=0.2314, simple_loss=0.2938, pruned_loss=0.08448, over 19804.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2645, pruned_loss=0.06432, over 3947423.99 frames. ], batch size: 288, lr: 9.21e-03, grad_scale: 16.0 +2023-03-28 09:43:40,799 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.073e+02 5.032e+02 6.013e+02 1.066e+03, threshold=1.006e+03, percent-clipped=0.0 +2023-03-28 09:44:19,114 INFO [train.py:892] (2/4) Epoch 17, batch 1450, loss[loss=0.2057, simple_loss=0.2708, pruned_loss=0.07031, over 19738.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2645, pruned_loss=0.06424, over 3949118.06 frames. ], batch size: 205, lr: 9.20e-03, grad_scale: 32.0 +2023-03-28 09:44:40,775 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3549, 3.6054, 1.9591, 4.3508, 3.8498, 4.2556, 4.3070, 3.2767], + device='cuda:2'), covar=tensor([0.0596, 0.0493, 0.1555, 0.0550, 0.0410, 0.0295, 0.0621, 0.0653], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0128, 0.0136, 0.0132, 0.0114, 0.0114, 0.0130, 0.0132], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 09:46:05,770 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-03-28 09:46:17,139 INFO [train.py:892] (2/4) Epoch 17, batch 1500, loss[loss=0.1982, simple_loss=0.2597, pruned_loss=0.06831, over 19840.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2651, pruned_loss=0.06439, over 3949600.76 frames. ], batch size: 144, lr: 9.20e-03, grad_scale: 32.0 +2023-03-28 09:47:32,198 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.841e+02 4.138e+02 4.953e+02 5.911e+02 9.562e+02, threshold=9.905e+02, percent-clipped=0.0 +2023-03-28 09:48:10,818 INFO [train.py:892] (2/4) Epoch 17, batch 1550, loss[loss=0.194, simple_loss=0.27, pruned_loss=0.05899, over 19473.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2656, pruned_loss=0.06476, over 3949072.45 frames. 
], batch size: 43, lr: 9.19e-03, grad_scale: 32.0 +2023-03-28 09:49:12,638 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31259.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:49:15,024 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6909, 3.7354, 2.2580, 3.9915, 4.1187, 1.8100, 3.2686, 3.0722], + device='cuda:2'), covar=tensor([0.0730, 0.0858, 0.2794, 0.0705, 0.0444, 0.3124, 0.1213, 0.0839], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0241, 0.0221, 0.0241, 0.0212, 0.0202, 0.0233, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 09:50:00,500 INFO [train.py:892] (2/4) Epoch 17, batch 1600, loss[loss=0.1978, simple_loss=0.2591, pruned_loss=0.06827, over 19789.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2652, pruned_loss=0.06468, over 3949009.11 frames. ], batch size: 236, lr: 9.18e-03, grad_scale: 32.0 +2023-03-28 09:50:02,767 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31281.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:50:23,691 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31290.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:51:07,718 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31311.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:51:07,846 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1799, 3.1671, 3.4528, 2.5513, 3.5476, 2.8840, 3.2638, 3.4079], + device='cuda:2'), covar=tensor([0.0617, 0.0389, 0.0371, 0.0750, 0.0255, 0.0392, 0.0346, 0.0291], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0071, 0.0069, 0.0099, 0.0067, 0.0065, 0.0064, 0.0057], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 09:51:12,797 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.169e+02 4.273e+02 5.402e+02 6.708e+02 1.273e+03, threshold=1.080e+03, percent-clipped=3.0 +2023-03-28 09:51:41,199 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6867, 2.1951, 2.5360, 3.0595, 3.4235, 3.5678, 3.5205, 3.6148], + device='cuda:2'), covar=tensor([0.0905, 0.1657, 0.1213, 0.0573, 0.0375, 0.0288, 0.0323, 0.0401], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0166, 0.0166, 0.0138, 0.0119, 0.0113, 0.0107, 0.0102], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 09:51:54,931 INFO [train.py:892] (2/4) Epoch 17, batch 1650, loss[loss=0.2769, simple_loss=0.3367, pruned_loss=0.1086, over 19603.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2655, pruned_loss=0.06523, over 3949123.13 frames. ], batch size: 367, lr: 9.18e-03, grad_scale: 32.0 +2023-03-28 09:52:57,496 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31359.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:53:13,471 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31367.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 09:53:43,709 INFO [train.py:892] (2/4) Epoch 17, batch 1700, loss[loss=0.2402, simple_loss=0.3089, pruned_loss=0.08571, over 19636.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2667, pruned_loss=0.06548, over 3947857.79 frames. 
], batch size: 330, lr: 9.17e-03, grad_scale: 32.0 +2023-03-28 09:54:48,776 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31409.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:54:59,310 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.015e+02 4.580e+02 5.253e+02 6.145e+02 1.351e+03, threshold=1.051e+03, percent-clipped=1.0 +2023-03-28 09:55:32,409 INFO [train.py:892] (2/4) Epoch 17, batch 1750, loss[loss=0.1767, simple_loss=0.2508, pruned_loss=0.05128, over 19656.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2656, pruned_loss=0.06459, over 3948123.60 frames. ], batch size: 43, lr: 9.16e-03, grad_scale: 32.0 +2023-03-28 09:56:11,859 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9547, 2.3229, 2.0886, 1.4773, 2.1141, 2.2878, 2.2155, 2.3334], + device='cuda:2'), covar=tensor([0.0308, 0.0232, 0.0268, 0.0562, 0.0352, 0.0241, 0.0202, 0.0191], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0074, 0.0082, 0.0087, 0.0090, 0.0065, 0.0062, 0.0065], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 09:56:48,035 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31470.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:56:57,295 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3376, 3.2129, 3.4373, 2.5612, 3.5909, 2.9144, 3.1615, 3.5045], + device='cuda:2'), covar=tensor([0.0608, 0.0366, 0.0509, 0.0713, 0.0362, 0.0360, 0.0428, 0.0290], + device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0070, 0.0068, 0.0098, 0.0066, 0.0064, 0.0063, 0.0056], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 09:57:07,924 INFO [train.py:892] (2/4) Epoch 17, batch 1800, loss[loss=0.1947, simple_loss=0.2679, pruned_loss=0.06076, over 19656.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2651, pruned_loss=0.06445, over 3948432.59 frames. ], batch size: 58, lr: 9.15e-03, grad_scale: 32.0 +2023-03-28 09:57:26,767 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31491.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 09:57:43,853 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8083, 3.0933, 2.6202, 2.1708, 2.6936, 2.9905, 2.9944, 3.1568], + device='cuda:2'), covar=tensor([0.0203, 0.0305, 0.0244, 0.0457, 0.0315, 0.0390, 0.0144, 0.0160], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0073, 0.0081, 0.0086, 0.0089, 0.0064, 0.0061, 0.0064], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 09:58:07,134 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.684e+02 3.864e+02 5.000e+02 5.920e+02 9.753e+02, threshold=1.000e+03, percent-clipped=0.0 +2023-03-28 09:58:38,267 INFO [train.py:892] (2/4) Epoch 17, batch 1850, loss[loss=0.2129, simple_loss=0.292, pruned_loss=0.06685, over 19684.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2674, pruned_loss=0.06416, over 3946760.68 frames. ], batch size: 56, lr: 9.15e-03, grad_scale: 32.0 +2023-03-28 09:59:46,574 INFO [train.py:892] (2/4) Epoch 18, batch 0, loss[loss=0.1515, simple_loss=0.2199, pruned_loss=0.04161, over 19752.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2199, pruned_loss=0.04161, over 19752.00 frames. 
], batch size: 100, lr: 8.89e-03, grad_scale: 32.0 +2023-03-28 09:59:46,575 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 10:00:15,455 INFO [train.py:926] (2/4) Epoch 18, validation: loss=0.171, simple_loss=0.2489, pruned_loss=0.04657, over 2883724.00 frames. +2023-03-28 10:00:15,457 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 10:00:43,156 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9057, 3.7111, 3.7881, 3.5003, 3.9007, 2.8678, 3.1322, 1.8229], + device='cuda:2'), covar=tensor([0.0216, 0.0236, 0.0146, 0.0185, 0.0158, 0.0925, 0.0745, 0.1710], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0130, 0.0103, 0.0124, 0.0110, 0.0124, 0.0135, 0.0118], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:00:52,950 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31552.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 10:01:09,686 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31559.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:02:01,489 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31581.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:02:11,836 INFO [train.py:892] (2/4) Epoch 18, batch 50, loss[loss=0.1992, simple_loss=0.2706, pruned_loss=0.06393, over 19779.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2602, pruned_loss=0.06193, over 890128.78 frames. ], batch size: 87, lr: 8.88e-03, grad_scale: 32.0 +2023-03-28 10:02:21,721 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31590.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:02:47,496 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8100, 3.4396, 3.5278, 3.8193, 3.5793, 3.7155, 3.8753, 4.0339], + device='cuda:2'), covar=tensor([0.0689, 0.0438, 0.0528, 0.0346, 0.0662, 0.0571, 0.0441, 0.0350], + device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0160, 0.0187, 0.0157, 0.0158, 0.0139, 0.0141, 0.0179], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 10:02:52,033 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6104, 3.6741, 2.1928, 3.8661, 4.0210, 1.7714, 3.2359, 3.0528], + device='cuda:2'), covar=tensor([0.0810, 0.0992, 0.2851, 0.0863, 0.0545, 0.3066, 0.1225, 0.0830], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0239, 0.0220, 0.0241, 0.0212, 0.0203, 0.0230, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 10:03:03,304 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31607.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:03:16,245 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.973e+02 4.915e+02 6.184e+02 1.019e+03, threshold=9.829e+02, percent-clipped=1.0 +2023-03-28 10:03:52,358 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31629.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:04:05,497 INFO [train.py:892] (2/4) Epoch 18, batch 100, loss[loss=0.203, simple_loss=0.2562, pruned_loss=0.07494, over 19811.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2629, pruned_loss=0.06323, over 1568171.18 frames. 
], batch size: 148, lr: 8.87e-03, grad_scale: 32.0 +2023-03-28 10:04:10,150 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31638.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:04:57,961 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8049, 4.6973, 5.2338, 5.0107, 5.0434, 4.5252, 4.8856, 4.8010], + device='cuda:2'), covar=tensor([0.1523, 0.1630, 0.0889, 0.1171, 0.0836, 0.0996, 0.1937, 0.2033], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0276, 0.0328, 0.0260, 0.0240, 0.0240, 0.0321, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:05:17,218 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31667.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 10:06:00,550 INFO [train.py:892] (2/4) Epoch 18, batch 150, loss[loss=0.1611, simple_loss=0.2324, pruned_loss=0.04487, over 19730.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2638, pruned_loss=0.06354, over 2096975.81 frames. ], batch size: 47, lr: 8.86e-03, grad_scale: 32.0 +2023-03-28 10:06:42,167 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31704.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:07:06,651 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 4.429e+02 5.296e+02 6.261e+02 1.038e+03, threshold=1.059e+03, percent-clipped=5.0 +2023-03-28 10:07:07,559 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31715.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 10:07:37,035 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0831, 3.0062, 3.1431, 2.3535, 3.2647, 2.7268, 2.9443, 3.2382], + device='cuda:2'), covar=tensor([0.0487, 0.0359, 0.0580, 0.0776, 0.0347, 0.0371, 0.0434, 0.0280], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0071, 0.0069, 0.0100, 0.0066, 0.0066, 0.0065, 0.0057], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 10:07:53,047 INFO [train.py:892] (2/4) Epoch 18, batch 200, loss[loss=0.1604, simple_loss=0.2316, pruned_loss=0.04456, over 19697.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2666, pruned_loss=0.06464, over 2505946.36 frames. ], batch size: 48, lr: 8.86e-03, grad_scale: 16.0 +2023-03-28 10:09:00,504 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:09:00,789 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:09:47,138 INFO [train.py:892] (2/4) Epoch 18, batch 250, loss[loss=0.2042, simple_loss=0.2613, pruned_loss=0.07357, over 19750.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.266, pruned_loss=0.06456, over 2826104.55 frames. ], batch size: 139, lr: 8.85e-03, grad_scale: 16.0 +2023-03-28 10:09:53,067 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31788.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:10:27,303 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. 
limit=2.0 +2023-03-28 10:10:52,978 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 4.013e+02 4.672e+02 5.827e+02 1.248e+03, threshold=9.344e+02, percent-clipped=1.0 +2023-03-28 10:11:15,792 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31823.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:11:20,280 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0366, 2.0297, 2.2644, 2.2042, 1.9837, 2.1629, 2.2044, 2.1854], + device='cuda:2'), covar=tensor([0.0256, 0.0255, 0.0248, 0.0230, 0.0347, 0.0285, 0.0358, 0.0279], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0057, 0.0061, 0.0053, 0.0067, 0.0062, 0.0079, 0.0055], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 10:11:41,797 INFO [train.py:892] (2/4) Epoch 18, batch 300, loss[loss=0.2063, simple_loss=0.2681, pruned_loss=0.07218, over 19837.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2656, pruned_loss=0.06468, over 3074905.54 frames. ], batch size: 143, lr: 8.84e-03, grad_scale: 16.0 +2023-03-28 10:12:08,485 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31847.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 10:12:13,506 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31849.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:12:34,770 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3409, 4.4350, 4.8998, 4.4553, 4.1947, 4.8198, 4.6332, 5.0708], + device='cuda:2'), covar=tensor([0.1314, 0.0426, 0.0527, 0.0415, 0.0721, 0.0447, 0.0480, 0.0400], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0207, 0.0209, 0.0215, 0.0193, 0.0218, 0.0214, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:12:42,748 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4199, 2.3715, 2.8794, 2.7425, 2.3386, 2.6981, 2.5221, 2.6507], + device='cuda:2'), covar=tensor([0.0298, 0.0253, 0.0263, 0.0197, 0.0331, 0.0210, 0.0323, 0.0342], + device='cuda:2'), in_proj_covar=tensor([0.0060, 0.0056, 0.0060, 0.0052, 0.0066, 0.0061, 0.0077, 0.0054], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 10:13:18,409 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31878.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:13:32,382 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31884.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:13:35,313 INFO [train.py:892] (2/4) Epoch 18, batch 350, loss[loss=0.1959, simple_loss=0.2613, pruned_loss=0.0652, over 19855.00 frames. ], tot_loss[loss=0.196, simple_loss=0.265, pruned_loss=0.06351, over 3267519.30 frames. ], batch size: 64, lr: 8.84e-03, grad_scale: 16.0 +2023-03-28 10:14:41,483 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.968e+02 4.069e+02 4.792e+02 5.707e+02 1.077e+03, threshold=9.584e+02, percent-clipped=3.0 +2023-03-28 10:14:42,343 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31915.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:15:31,558 INFO [train.py:892] (2/4) Epoch 18, batch 400, loss[loss=0.1897, simple_loss=0.2635, pruned_loss=0.05795, over 19785.00 frames. 
], tot_loss[loss=0.1969, simple_loss=0.2657, pruned_loss=0.06402, over 3417613.84 frames. ], batch size: 91, lr: 8.83e-03, grad_scale: 16.0 +2023-03-28 10:15:38,992 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31939.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:16:04,269 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31950.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:16:39,590 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5615, 2.8687, 2.4156, 1.9985, 2.5338, 2.7103, 2.8116, 2.8185], + device='cuda:2'), covar=tensor([0.0252, 0.0261, 0.0264, 0.0493, 0.0327, 0.0285, 0.0183, 0.0182], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0073, 0.0081, 0.0085, 0.0089, 0.0065, 0.0061, 0.0064], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 10:17:03,336 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31976.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:17:16,148 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9011, 4.9148, 5.4282, 4.9681, 4.3136, 5.0730, 5.0807, 5.5892], + device='cuda:2'), covar=tensor([0.1037, 0.0383, 0.0400, 0.0370, 0.0794, 0.0462, 0.0481, 0.0308], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0207, 0.0207, 0.0214, 0.0193, 0.0217, 0.0214, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:17:23,478 INFO [train.py:892] (2/4) Epoch 18, batch 450, loss[loss=0.2375, simple_loss=0.3267, pruned_loss=0.07409, over 18957.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2672, pruned_loss=0.06455, over 3535732.37 frames. ], batch size: 514, lr: 8.82e-03, grad_scale: 16.0 +2023-03-28 10:17:32,115 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5495, 4.5517, 2.6686, 4.8964, 5.1150, 2.1361, 4.1920, 3.8496], + device='cuda:2'), covar=tensor([0.0543, 0.0780, 0.2695, 0.0701, 0.0392, 0.3025, 0.0945, 0.0685], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0239, 0.0220, 0.0242, 0.0213, 0.0200, 0.0230, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 10:18:28,654 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32011.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:18:36,655 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.048e+02 4.283e+02 4.991e+02 5.829e+02 1.431e+03, threshold=9.983e+02, percent-clipped=4.0 +2023-03-28 10:18:59,665 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4475, 4.1157, 4.1181, 4.4492, 4.1232, 4.5127, 4.6239, 4.7329], + device='cuda:2'), covar=tensor([0.0639, 0.0358, 0.0515, 0.0307, 0.0629, 0.0447, 0.0419, 0.0282], + device='cuda:2'), in_proj_covar=tensor([0.0137, 0.0158, 0.0183, 0.0154, 0.0157, 0.0138, 0.0136, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 10:19:26,550 INFO [train.py:892] (2/4) Epoch 18, batch 500, loss[loss=0.1868, simple_loss=0.2578, pruned_loss=0.05793, over 19713.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2655, pruned_loss=0.0636, over 3627923.96 frames. 
], batch size: 60, lr: 8.82e-03, grad_scale: 16.0 +2023-03-28 10:20:20,728 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32060.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:20:33,278 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32065.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:21:22,502 INFO [train.py:892] (2/4) Epoch 18, batch 550, loss[loss=0.179, simple_loss=0.2524, pruned_loss=0.05277, over 19769.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2647, pruned_loss=0.06329, over 3700487.72 frames. ], batch size: 119, lr: 8.81e-03, grad_scale: 16.0 +2023-03-28 10:21:32,510 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2516, 2.2481, 2.4728, 2.4367, 2.3994, 2.2471, 2.4335, 2.5100], + device='cuda:2'), covar=tensor([0.0256, 0.0270, 0.0230, 0.0218, 0.0317, 0.0331, 0.0335, 0.0261], + device='cuda:2'), in_proj_covar=tensor([0.0060, 0.0056, 0.0061, 0.0053, 0.0067, 0.0062, 0.0078, 0.0054], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 10:22:25,779 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32113.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:22:30,527 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 4.056e+02 4.821e+02 6.235e+02 9.910e+02, threshold=9.642e+02, percent-clipped=0.0 +2023-03-28 10:23:18,292 INFO [train.py:892] (2/4) Epoch 18, batch 600, loss[loss=0.1993, simple_loss=0.2699, pruned_loss=0.06431, over 19581.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2638, pruned_loss=0.06299, over 3756866.49 frames. ], batch size: 49, lr: 8.80e-03, grad_scale: 16.0 +2023-03-28 10:23:38,028 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32144.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:23:44,671 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32147.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 10:24:56,498 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32179.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:25:12,429 INFO [train.py:892] (2/4) Epoch 18, batch 650, loss[loss=0.1657, simple_loss=0.2408, pruned_loss=0.04531, over 19820.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.264, pruned_loss=0.06309, over 3797629.19 frames. ], batch size: 93, lr: 8.80e-03, grad_scale: 16.0 +2023-03-28 10:25:36,497 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32195.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 10:25:38,953 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-28 10:25:54,314 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.86 vs. 
limit=5.0 +2023-03-28 10:26:20,620 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.941e+02 4.475e+02 5.110e+02 5.947e+02 1.058e+03, threshold=1.022e+03, percent-clipped=2.0 +2023-03-28 10:26:34,226 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2915, 4.7307, 4.9798, 4.6762, 5.1923, 3.2627, 4.1920, 2.5273], + device='cuda:2'), covar=tensor([0.0158, 0.0210, 0.0115, 0.0169, 0.0118, 0.0817, 0.0839, 0.1395], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0131, 0.0103, 0.0123, 0.0109, 0.0124, 0.0135, 0.0119], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:27:06,411 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32234.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:27:09,560 INFO [train.py:892] (2/4) Epoch 18, batch 700, loss[loss=0.1903, simple_loss=0.2577, pruned_loss=0.06146, over 19813.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2653, pruned_loss=0.06355, over 3829097.12 frames. ], batch size: 132, lr: 8.79e-03, grad_scale: 16.0 +2023-03-28 10:28:31,689 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32271.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:29:04,739 INFO [train.py:892] (2/4) Epoch 18, batch 750, loss[loss=0.1855, simple_loss=0.2489, pruned_loss=0.06107, over 19834.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.265, pruned_loss=0.06374, over 3856402.86 frames. ], batch size: 143, lr: 8.78e-03, grad_scale: 16.0 +2023-03-28 10:29:52,388 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32306.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:30:09,295 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 4.187e+02 5.149e+02 5.952e+02 1.160e+03, threshold=1.030e+03, percent-clipped=2.0 +2023-03-28 10:30:59,330 INFO [train.py:892] (2/4) Epoch 18, batch 800, loss[loss=0.1863, simple_loss=0.2476, pruned_loss=0.06254, over 19851.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2655, pruned_loss=0.06393, over 3877544.61 frames. ], batch size: 124, lr: 8.78e-03, grad_scale: 16.0 +2023-03-28 10:31:14,989 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8121, 2.0455, 1.8601, 1.2461, 1.9264, 1.9664, 1.9021, 2.0187], + device='cuda:2'), covar=tensor([0.0307, 0.0232, 0.0263, 0.0502, 0.0347, 0.0235, 0.0196, 0.0209], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0074, 0.0083, 0.0088, 0.0091, 0.0066, 0.0063, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 10:31:51,874 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32360.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:32:47,930 INFO [train.py:892] (2/4) Epoch 18, batch 850, loss[loss=0.1963, simple_loss=0.2701, pruned_loss=0.06122, over 19840.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2647, pruned_loss=0.06336, over 3894039.56 frames. 
], batch size: 59, lr: 8.77e-03, grad_scale: 16.0 +2023-03-28 10:33:03,133 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7967, 3.7454, 4.1133, 3.7514, 3.5610, 3.9617, 3.8352, 4.1443], + device='cuda:2'), covar=tensor([0.0850, 0.0337, 0.0329, 0.0389, 0.1078, 0.0510, 0.0420, 0.0337], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0206, 0.0207, 0.0216, 0.0195, 0.0218, 0.0214, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:33:24,118 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7937, 2.3439, 2.7377, 3.0988, 3.4732, 3.6691, 3.6917, 3.6691], + device='cuda:2'), covar=tensor([0.0872, 0.1612, 0.1132, 0.0611, 0.0423, 0.0240, 0.0281, 0.0371], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0168, 0.0168, 0.0139, 0.0122, 0.0114, 0.0106, 0.0102], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 10:33:33,054 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4058, 2.3569, 1.4229, 2.6665, 2.5303, 2.5727, 2.6885, 2.1445], + device='cuda:2'), covar=tensor([0.0627, 0.0740, 0.1453, 0.0548, 0.0563, 0.0469, 0.0521, 0.0868], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0130, 0.0137, 0.0135, 0.0117, 0.0116, 0.0131, 0.0134], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:33:36,800 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32408.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:33:37,126 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4289, 3.7701, 3.9561, 4.7852, 2.9048, 3.3507, 2.7152, 2.8185], + device='cuda:2'), covar=tensor([0.0488, 0.2108, 0.0849, 0.0262, 0.2249, 0.0987, 0.1360, 0.1801], + device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0334, 0.0233, 0.0180, 0.0239, 0.0192, 0.0206, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 10:33:53,380 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.074e+02 4.132e+02 4.901e+02 5.957e+02 1.046e+03, threshold=9.802e+02, percent-clipped=1.0 +2023-03-28 10:34:39,898 INFO [train.py:892] (2/4) Epoch 18, batch 900, loss[loss=0.1932, simple_loss=0.2627, pruned_loss=0.06185, over 19802.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2635, pruned_loss=0.06286, over 3907311.11 frames. 
], batch size: 74, lr: 8.76e-03, grad_scale: 16.0 +2023-03-28 10:34:58,549 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32444.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:36:13,457 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6098, 2.0187, 2.3376, 2.8450, 3.1917, 3.3069, 3.2619, 3.3727], + device='cuda:2'), covar=tensor([0.0927, 0.1778, 0.1325, 0.0684, 0.0478, 0.0312, 0.0378, 0.0356], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0168, 0.0168, 0.0139, 0.0122, 0.0114, 0.0106, 0.0102], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 10:36:20,291 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32479.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:36:36,673 INFO [train.py:892] (2/4) Epoch 18, batch 950, loss[loss=0.1827, simple_loss=0.2526, pruned_loss=0.05638, over 19722.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2631, pruned_loss=0.06243, over 3917101.81 frames. ], batch size: 78, lr: 8.76e-03, grad_scale: 16.0 +2023-03-28 10:36:53,375 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32492.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:37:10,057 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-03-28 10:37:13,459 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4316, 4.7461, 4.7886, 4.7086, 4.3935, 4.7834, 4.2647, 4.3673], + device='cuda:2'), covar=tensor([0.0513, 0.0497, 0.0497, 0.0451, 0.0622, 0.0513, 0.0684, 0.0944], + device='cuda:2'), in_proj_covar=tensor([0.0234, 0.0239, 0.0268, 0.0229, 0.0229, 0.0216, 0.0239, 0.0283], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:37:41,379 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.554e+02 4.412e+02 5.212e+02 6.577e+02 2.177e+03, threshold=1.042e+03, percent-clipped=5.0 +2023-03-28 10:38:11,324 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32527.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:38:28,554 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32534.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:38:32,107 INFO [train.py:892] (2/4) Epoch 18, batch 1000, loss[loss=0.1925, simple_loss=0.2639, pruned_loss=0.06052, over 19854.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2633, pruned_loss=0.06228, over 3924673.24 frames. 
], batch size: 112, lr: 8.75e-03, grad_scale: 16.0 +2023-03-28 10:38:55,957 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0613, 3.0543, 1.7520, 3.7062, 3.4523, 3.6322, 3.7308, 2.9616], + device='cuda:2'), covar=tensor([0.0596, 0.0626, 0.1781, 0.0496, 0.0495, 0.0403, 0.0488, 0.0744], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0128, 0.0135, 0.0133, 0.0115, 0.0115, 0.0130, 0.0133], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:39:52,742 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32571.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:40:16,948 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32582.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:40:24,638 INFO [train.py:892] (2/4) Epoch 18, batch 1050, loss[loss=0.2021, simple_loss=0.2698, pruned_loss=0.06715, over 19809.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2641, pruned_loss=0.06289, over 3929895.97 frames. ], batch size: 123, lr: 8.74e-03, grad_scale: 16.0 +2023-03-28 10:40:54,099 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5696, 2.6259, 1.4468, 3.0432, 2.8218, 2.9670, 3.0824, 2.4784], + device='cuda:2'), covar=tensor([0.0680, 0.0663, 0.1606, 0.0501, 0.0520, 0.0458, 0.0518, 0.0785], + device='cuda:2'), in_proj_covar=tensor([0.0130, 0.0128, 0.0134, 0.0133, 0.0116, 0.0115, 0.0130, 0.0133], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:41:12,743 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32606.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:41:30,003 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.345e+02 4.935e+02 6.065e+02 2.066e+03, threshold=9.870e+02, percent-clipped=4.0 +2023-03-28 10:41:38,229 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32619.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:42:16,178 INFO [train.py:892] (2/4) Epoch 18, batch 1100, loss[loss=0.1828, simple_loss=0.2607, pruned_loss=0.05245, over 19889.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2653, pruned_loss=0.06349, over 3933385.48 frames. ], batch size: 62, lr: 8.74e-03, grad_scale: 16.0 +2023-03-28 10:42:35,876 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.00 vs. limit=5.0 +2023-03-28 10:42:54,943 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32654.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:43:06,405 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.71 vs. limit=5.0 +2023-03-28 10:44:01,346 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-28 10:44:10,627 INFO [train.py:892] (2/4) Epoch 18, batch 1150, loss[loss=0.2059, simple_loss=0.2678, pruned_loss=0.07194, over 19759.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2645, pruned_loss=0.06351, over 3937644.92 frames. 
], batch size: 221, lr: 8.73e-03, grad_scale: 16.0 +2023-03-28 10:44:49,711 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5640, 2.5842, 4.0642, 3.6129, 3.9282, 4.0725, 4.0114, 3.9069], + device='cuda:2'), covar=tensor([0.0440, 0.0963, 0.0135, 0.0891, 0.0173, 0.0259, 0.0174, 0.0176], + device='cuda:2'), in_proj_covar=tensor([0.0089, 0.0096, 0.0079, 0.0151, 0.0075, 0.0087, 0.0083, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 10:45:16,102 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.651e+02 4.519e+02 5.355e+02 6.274e+02 1.728e+03, threshold=1.071e+03, percent-clipped=4.0 +2023-03-28 10:46:02,483 INFO [train.py:892] (2/4) Epoch 18, batch 1200, loss[loss=0.192, simple_loss=0.2423, pruned_loss=0.07084, over 19830.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.264, pruned_loss=0.06361, over 3941920.77 frames. ], batch size: 128, lr: 8.72e-03, grad_scale: 16.0 +2023-03-28 10:48:00,614 INFO [train.py:892] (2/4) Epoch 18, batch 1250, loss[loss=0.2482, simple_loss=0.3135, pruned_loss=0.09145, over 19801.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2638, pruned_loss=0.06381, over 3943490.88 frames. ], batch size: 162, lr: 8.72e-03, grad_scale: 16.0 +2023-03-28 10:48:10,473 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3759, 2.1856, 3.9088, 3.5896, 3.9764, 3.9608, 3.8349, 3.8745], + device='cuda:2'), covar=tensor([0.0450, 0.1150, 0.0129, 0.0729, 0.0126, 0.0245, 0.0191, 0.0174], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0096, 0.0079, 0.0150, 0.0075, 0.0087, 0.0082, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 10:49:03,818 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.128e+02 4.132e+02 4.830e+02 5.865e+02 8.224e+02, threshold=9.660e+02, percent-clipped=0.0 +2023-03-28 10:49:41,484 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2346, 3.1454, 4.6240, 3.5426, 3.7562, 3.7769, 2.4794, 2.6696], + device='cuda:2'), covar=tensor([0.0758, 0.2856, 0.0398, 0.0768, 0.1550, 0.1073, 0.2191, 0.2310], + device='cuda:2'), in_proj_covar=tensor([0.0326, 0.0357, 0.0309, 0.0248, 0.0351, 0.0319, 0.0327, 0.0298], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 10:49:53,660 INFO [train.py:892] (2/4) Epoch 18, batch 1300, loss[loss=0.1842, simple_loss=0.2592, pruned_loss=0.05459, over 19803.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2631, pruned_loss=0.06326, over 3945006.76 frames. 
], batch size: 74, lr: 8.71e-03, grad_scale: 16.0 +2023-03-28 10:50:27,656 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3713, 3.8082, 4.0696, 3.6505, 3.6428, 3.9926, 3.7953, 4.1409], + device='cuda:2'), covar=tensor([0.1599, 0.0416, 0.0599, 0.0540, 0.1194, 0.0686, 0.0571, 0.0501], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0208, 0.0207, 0.0218, 0.0198, 0.0220, 0.0217, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:51:36,847 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0225, 3.4373, 2.9728, 2.2719, 2.7777, 3.2616, 3.1285, 3.2015], + device='cuda:2'), covar=tensor([0.0229, 0.0188, 0.0219, 0.0479, 0.0316, 0.0229, 0.0192, 0.0188], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0074, 0.0083, 0.0087, 0.0090, 0.0065, 0.0063, 0.0065], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 10:51:47,878 INFO [train.py:892] (2/4) Epoch 18, batch 1350, loss[loss=0.1938, simple_loss=0.26, pruned_loss=0.0638, over 19868.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2627, pruned_loss=0.06269, over 3945705.16 frames. ], batch size: 165, lr: 8.71e-03, grad_scale: 16.0 +2023-03-28 10:52:16,804 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-28 10:52:20,702 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1842, 2.9215, 3.3475, 2.5034, 3.4402, 2.7212, 2.9747, 3.3605], + device='cuda:2'), covar=tensor([0.0412, 0.0431, 0.0478, 0.0704, 0.0238, 0.0394, 0.0465, 0.0249], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0072, 0.0070, 0.0100, 0.0067, 0.0067, 0.0066, 0.0058], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 10:52:51,876 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8324, 2.2774, 2.5447, 3.1154, 3.5450, 4.0567, 3.9059, 4.0684], + device='cuda:2'), covar=tensor([0.0964, 0.1797, 0.1394, 0.0625, 0.0410, 0.0194, 0.0317, 0.0216], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0169, 0.0168, 0.0140, 0.0123, 0.0113, 0.0107, 0.0102], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 10:52:55,305 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.479e+02 4.452e+02 5.064e+02 6.052e+02 1.021e+03, threshold=1.013e+03, percent-clipped=2.0 +2023-03-28 10:53:22,168 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.5399, 5.8242, 5.8549, 5.6585, 5.5370, 5.8409, 5.0625, 5.2364], + device='cuda:2'), covar=tensor([0.0406, 0.0433, 0.0505, 0.0431, 0.0539, 0.0480, 0.0653, 0.0856], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0236, 0.0266, 0.0228, 0.0227, 0.0218, 0.0236, 0.0279], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:53:36,251 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8756, 2.9419, 4.2739, 3.2317, 3.6540, 3.4432, 2.3541, 2.4694], + device='cuda:2'), covar=tensor([0.0881, 0.2728, 0.0498, 0.0945, 0.1402, 0.1187, 0.2256, 0.2613], + device='cuda:2'), in_proj_covar=tensor([0.0332, 0.0362, 0.0314, 0.0252, 0.0355, 0.0323, 0.0333, 0.0303], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') 
+2023-03-28 10:53:44,828 INFO [train.py:892] (2/4) Epoch 18, batch 1400, loss[loss=0.2144, simple_loss=0.2805, pruned_loss=0.07409, over 19816.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2625, pruned_loss=0.06264, over 3946669.45 frames. ], batch size: 288, lr: 8.70e-03, grad_scale: 16.0 +2023-03-28 10:54:52,805 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32966.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:55:06,663 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3503, 4.3202, 4.7079, 4.5356, 4.6219, 4.0301, 4.4122, 4.2783], + device='cuda:2'), covar=tensor([0.1357, 0.1537, 0.0939, 0.1137, 0.0821, 0.1114, 0.1807, 0.1919], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0283, 0.0331, 0.0261, 0.0241, 0.0242, 0.0319, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 10:55:38,293 INFO [train.py:892] (2/4) Epoch 18, batch 1450, loss[loss=0.1699, simple_loss=0.2357, pruned_loss=0.05204, over 19749.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2618, pruned_loss=0.06193, over 3948000.65 frames. ], batch size: 129, lr: 8.69e-03, grad_scale: 16.0 +2023-03-28 10:56:44,802 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.979e+02 4.312e+02 5.122e+02 6.077e+02 1.174e+03, threshold=1.024e+03, percent-clipped=2.0 +2023-03-28 10:57:10,254 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-28 10:57:12,029 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33027.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:57:32,904 INFO [train.py:892] (2/4) Epoch 18, batch 1500, loss[loss=0.2211, simple_loss=0.2805, pruned_loss=0.08083, over 19836.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2612, pruned_loss=0.0616, over 3949358.54 frames. ], batch size: 171, lr: 8.69e-03, grad_scale: 16.0 +2023-03-28 10:58:45,818 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33069.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 10:59:25,354 INFO [train.py:892] (2/4) Epoch 18, batch 1550, loss[loss=0.2553, simple_loss=0.3203, pruned_loss=0.09515, over 19614.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2631, pruned_loss=0.06278, over 3948629.80 frames. ], batch size: 367, lr: 8.68e-03, grad_scale: 16.0 +2023-03-28 11:00:25,340 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.832e+02 4.176e+02 4.933e+02 6.239e+02 1.615e+03, threshold=9.866e+02, percent-clipped=3.0 +2023-03-28 11:00:55,001 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9047, 3.8462, 3.8032, 3.5725, 3.9698, 2.9296, 3.1208, 1.9215], + device='cuda:2'), covar=tensor([0.0315, 0.0253, 0.0216, 0.0242, 0.0238, 0.0974, 0.1037, 0.1874], + device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0132, 0.0105, 0.0125, 0.0110, 0.0126, 0.0137, 0.0120], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 11:00:59,180 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33130.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:01:09,747 INFO [train.py:892] (2/4) Epoch 18, batch 1600, loss[loss=0.1961, simple_loss=0.2682, pruned_loss=0.06196, over 19796.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2636, pruned_loss=0.06251, over 3946213.95 frames. 
], batch size: 114, lr: 8.67e-03, grad_scale: 16.0 +2023-03-28 11:01:53,660 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33155.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:01:57,712 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5306, 4.7484, 4.8249, 4.7483, 4.4765, 4.7818, 4.2774, 4.3875], + device='cuda:2'), covar=tensor([0.0468, 0.0521, 0.0505, 0.0442, 0.0641, 0.0535, 0.0708, 0.0902], + device='cuda:2'), in_proj_covar=tensor([0.0231, 0.0235, 0.0264, 0.0228, 0.0226, 0.0217, 0.0236, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 11:02:55,056 INFO [train.py:892] (2/4) Epoch 18, batch 1650, loss[loss=0.2545, simple_loss=0.3153, pruned_loss=0.0969, over 19589.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2631, pruned_loss=0.06251, over 3947571.35 frames. ], batch size: 376, lr: 8.67e-03, grad_scale: 16.0 +2023-03-28 11:03:39,446 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4175, 5.7219, 5.8494, 5.6986, 5.5479, 5.4730, 5.4868, 5.4690], + device='cuda:2'), covar=tensor([0.1197, 0.1077, 0.0822, 0.1084, 0.0571, 0.0802, 0.1896, 0.1884], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0281, 0.0329, 0.0258, 0.0239, 0.0240, 0.0316, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 11:04:01,026 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.567e+02 4.484e+02 5.220e+02 6.414e+02 1.768e+03, threshold=1.044e+03, percent-clipped=5.0 +2023-03-28 11:04:05,546 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33216.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:04:50,002 INFO [train.py:892] (2/4) Epoch 18, batch 1700, loss[loss=0.1784, simple_loss=0.2593, pruned_loss=0.04878, over 19837.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2617, pruned_loss=0.06131, over 3949332.60 frames. ], batch size: 90, lr: 8.66e-03, grad_scale: 16.0 +2023-03-28 11:06:44,503 INFO [train.py:892] (2/4) Epoch 18, batch 1750, loss[loss=0.2582, simple_loss=0.3278, pruned_loss=0.09429, over 19499.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2624, pruned_loss=0.06139, over 3947724.69 frames. ], batch size: 396, lr: 8.65e-03, grad_scale: 16.0 +2023-03-28 11:07:39,469 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3874, 3.3610, 1.8595, 4.2239, 3.6798, 4.1661, 4.1594, 3.1903], + device='cuda:2'), covar=tensor([0.0519, 0.0529, 0.1673, 0.0400, 0.0563, 0.0272, 0.0513, 0.0694], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0129, 0.0136, 0.0132, 0.0117, 0.0116, 0.0128, 0.0133], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 11:07:40,437 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.001e+02 4.864e+02 5.584e+02 1.277e+03, threshold=9.729e+02, percent-clipped=1.0 +2023-03-28 11:07:54,352 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33322.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:08:21,748 INFO [train.py:892] (2/4) Epoch 18, batch 1800, loss[loss=0.1849, simple_loss=0.2416, pruned_loss=0.06406, over 19849.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2641, pruned_loss=0.06263, over 3946777.87 frames. 
], batch size: 137, lr: 8.65e-03, grad_scale: 16.0 +2023-03-28 11:09:11,538 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9370, 2.2615, 2.8613, 3.3230, 3.7087, 4.0103, 3.9744, 4.0006], + device='cuda:2'), covar=tensor([0.0916, 0.1942, 0.1345, 0.0567, 0.0384, 0.0248, 0.0333, 0.0356], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0170, 0.0171, 0.0142, 0.0123, 0.0115, 0.0108, 0.0104], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 11:09:57,880 INFO [train.py:892] (2/4) Epoch 18, batch 1850, loss[loss=0.1864, simple_loss=0.2655, pruned_loss=0.05363, over 19827.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2654, pruned_loss=0.06249, over 3946514.32 frames. ], batch size: 57, lr: 8.64e-03, grad_scale: 16.0 +2023-03-28 11:11:02,086 INFO [train.py:892] (2/4) Epoch 19, batch 0, loss[loss=0.1616, simple_loss=0.228, pruned_loss=0.04762, over 19668.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.228, pruned_loss=0.04762, over 19668.00 frames. ], batch size: 58, lr: 8.41e-03, grad_scale: 16.0 +2023-03-28 11:11:02,087 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 11:11:35,983 INFO [train.py:926] (2/4) Epoch 19, validation: loss=0.1703, simple_loss=0.2482, pruned_loss=0.04619, over 2883724.00 frames. +2023-03-28 11:11:35,984 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 11:12:33,178 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.922e+02 3.899e+02 4.712e+02 6.072e+02 1.255e+03, threshold=9.424e+02, percent-clipped=1.0 +2023-03-28 11:12:54,552 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33425.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:13:17,843 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8042, 4.1620, 4.0286, 5.0665, 3.2424, 3.8772, 3.1928, 2.9967], + device='cuda:2'), covar=tensor([0.0414, 0.1854, 0.0777, 0.0230, 0.1826, 0.0759, 0.1047, 0.1504], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0336, 0.0234, 0.0180, 0.0241, 0.0194, 0.0207, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 11:13:32,566 INFO [train.py:892] (2/4) Epoch 19, batch 50, loss[loss=0.18, simple_loss=0.2504, pruned_loss=0.05486, over 19825.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2569, pruned_loss=0.05951, over 890150.18 frames. ], batch size: 103, lr: 8.40e-03, grad_scale: 16.0 +2023-03-28 11:14:40,756 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33472.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:15:25,824 INFO [train.py:892] (2/4) Epoch 19, batch 100, loss[loss=0.1891, simple_loss=0.2646, pruned_loss=0.05683, over 19781.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2616, pruned_loss=0.06165, over 1567906.27 frames. 
], batch size: 65, lr: 8.39e-03, grad_scale: 16.0 +2023-03-28 11:15:45,828 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3855, 3.6933, 3.6741, 4.5067, 3.0618, 3.4933, 2.7102, 2.5629], + device='cuda:2'), covar=tensor([0.0493, 0.1978, 0.0998, 0.0320, 0.2172, 0.0894, 0.1407, 0.2003], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0337, 0.0237, 0.0181, 0.0242, 0.0195, 0.0210, 0.0218], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 11:16:09,705 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33511.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:16:17,082 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.445e+02 4.091e+02 4.969e+02 5.976e+02 1.351e+03, threshold=9.939e+02, percent-clipped=3.0 +2023-03-28 11:16:30,357 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9027, 3.5640, 3.6563, 3.8593, 3.6041, 3.8642, 3.9808, 4.1335], + device='cuda:2'), covar=tensor([0.0655, 0.0429, 0.0541, 0.0379, 0.0677, 0.0539, 0.0435, 0.0330], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0158, 0.0182, 0.0153, 0.0157, 0.0137, 0.0136, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 11:16:59,178 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9878, 2.9607, 3.2959, 2.5635, 3.3428, 2.8012, 3.0461, 3.2569], + device='cuda:2'), covar=tensor([0.0580, 0.0376, 0.0406, 0.0704, 0.0331, 0.0381, 0.0489, 0.0285], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0072, 0.0072, 0.0101, 0.0067, 0.0068, 0.0065, 0.0059], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 11:16:59,221 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33533.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:17:16,234 INFO [train.py:892] (2/4) Epoch 19, batch 150, loss[loss=0.2058, simple_loss=0.2765, pruned_loss=0.06752, over 19729.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2591, pruned_loss=0.0604, over 2096444.06 frames. ], batch size: 76, lr: 8.39e-03, grad_scale: 16.0 +2023-03-28 11:17:50,808 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1666, 4.6743, 4.8752, 4.5783, 5.0848, 3.2280, 4.1045, 2.5424], + device='cuda:2'), covar=tensor([0.0157, 0.0183, 0.0121, 0.0174, 0.0103, 0.0836, 0.0746, 0.1380], + device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0133, 0.0106, 0.0126, 0.0111, 0.0127, 0.0138, 0.0120], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 11:18:57,075 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7185, 3.5678, 3.6263, 3.3360, 3.7224, 2.7175, 3.0270, 1.7630], + device='cuda:2'), covar=tensor([0.0220, 0.0260, 0.0174, 0.0225, 0.0166, 0.1057, 0.0734, 0.1664], + device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0133, 0.0106, 0.0126, 0.0111, 0.0127, 0.0138, 0.0121], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 11:19:04,451 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.99 vs. limit=5.0 +2023-03-28 11:19:06,860 INFO [train.py:892] (2/4) Epoch 19, batch 200, loss[loss=0.159, simple_loss=0.2417, pruned_loss=0.03818, over 19743.00 frames. 
], tot_loss[loss=0.1918, simple_loss=0.2611, pruned_loss=0.06126, over 2507797.93 frames. ], batch size: 71, lr: 8.38e-03, grad_scale: 16.0 +2023-03-28 11:19:19,615 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4501, 2.7963, 3.3362, 2.9627, 3.4942, 3.4641, 4.2172, 4.6141], + device='cuda:2'), covar=tensor([0.0452, 0.1814, 0.1425, 0.2095, 0.1622, 0.1480, 0.0476, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0230, 0.0232, 0.0252, 0.0243, 0.0279, 0.0243, 0.0208, 0.0224], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 11:19:43,219 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1145, 2.9553, 3.4192, 2.4642, 3.5693, 2.8682, 3.0149, 3.4141], + device='cuda:2'), covar=tensor([0.0575, 0.0438, 0.0460, 0.0837, 0.0272, 0.0488, 0.0527, 0.0319], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0074, 0.0072, 0.0102, 0.0068, 0.0069, 0.0067, 0.0059], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 11:19:56,413 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.308e+02 4.349e+02 4.912e+02 5.878e+02 1.071e+03, threshold=9.825e+02, percent-clipped=1.0 +2023-03-28 11:19:59,506 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-28 11:20:15,506 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33622.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:20:54,817 INFO [train.py:892] (2/4) Epoch 19, batch 250, loss[loss=0.171, simple_loss=0.247, pruned_loss=0.04747, over 19666.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.261, pruned_loss=0.06088, over 2827500.87 frames. ], batch size: 64, lr: 8.38e-03, grad_scale: 16.0 +2023-03-28 11:21:29,877 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-28 11:21:58,895 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33670.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:22:13,402 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33677.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:22:42,182 INFO [train.py:892] (2/4) Epoch 19, batch 300, loss[loss=0.1623, simple_loss=0.2369, pruned_loss=0.04387, over 19859.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2624, pruned_loss=0.06194, over 3077474.51 frames. ], batch size: 106, lr: 8.37e-03, grad_scale: 16.0 +2023-03-28 11:23:27,270 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. 
limit=2.0 +2023-03-28 11:23:39,621 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 4.475e+02 5.212e+02 6.221e+02 1.054e+03, threshold=1.042e+03, percent-clipped=4.0 +2023-03-28 11:24:02,864 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33725.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:24:29,746 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1777, 3.1527, 3.5351, 2.5084, 3.5783, 2.9589, 3.0517, 3.6116], + device='cuda:2'), covar=tensor([0.0550, 0.0388, 0.0467, 0.0767, 0.0298, 0.0378, 0.0569, 0.0290], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0072, 0.0071, 0.0100, 0.0067, 0.0067, 0.0065, 0.0058], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 11:24:31,847 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33738.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 11:24:38,586 INFO [train.py:892] (2/4) Epoch 19, batch 350, loss[loss=0.1748, simple_loss=0.2499, pruned_loss=0.04987, over 19713.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2621, pruned_loss=0.06142, over 3270505.78 frames. ], batch size: 78, lr: 8.36e-03, grad_scale: 32.0 +2023-03-28 11:25:51,613 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33773.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:26:29,469 INFO [train.py:892] (2/4) Epoch 19, batch 400, loss[loss=0.2534, simple_loss=0.3234, pruned_loss=0.09173, over 19603.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2617, pruned_loss=0.06121, over 3421192.01 frames. ], batch size: 376, lr: 8.36e-03, grad_scale: 32.0 +2023-03-28 11:26:33,497 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.50 vs. limit=5.0 +2023-03-28 11:27:18,851 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33811.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:27:26,572 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 3.967e+02 4.675e+02 5.503e+02 1.191e+03, threshold=9.350e+02, percent-clipped=1.0 +2023-03-28 11:27:57,255 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33828.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:28:24,665 INFO [train.py:892] (2/4) Epoch 19, batch 450, loss[loss=0.1895, simple_loss=0.2504, pruned_loss=0.0643, over 19777.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2621, pruned_loss=0.06089, over 3538156.86 frames. 
], batch size: 168, lr: 8.35e-03, grad_scale: 16.0 +2023-03-28 11:29:08,885 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33859.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:29:13,399 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2961, 1.6567, 1.9218, 2.5792, 2.8838, 2.9501, 2.8426, 2.9121], + device='cuda:2'), covar=tensor([0.1022, 0.1885, 0.1502, 0.0660, 0.0434, 0.0295, 0.0417, 0.0374], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0169, 0.0171, 0.0143, 0.0123, 0.0116, 0.0109, 0.0106], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 11:29:36,815 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6970, 2.7561, 2.9542, 2.3179, 3.0573, 2.4584, 2.7641, 2.9678], + device='cuda:2'), covar=tensor([0.0612, 0.0392, 0.0461, 0.0755, 0.0312, 0.0449, 0.0448, 0.0318], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0073, 0.0071, 0.0100, 0.0066, 0.0068, 0.0065, 0.0059], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 11:30:18,967 INFO [train.py:892] (2/4) Epoch 19, batch 500, loss[loss=0.1805, simple_loss=0.2502, pruned_loss=0.05545, over 19818.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2611, pruned_loss=0.06056, over 3630199.79 frames. ], batch size: 72, lr: 8.35e-03, grad_scale: 16.0 +2023-03-28 11:31:15,655 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.605e+02 4.236e+02 4.841e+02 6.500e+02 9.898e+02, threshold=9.682e+02, percent-clipped=2.0 +2023-03-28 11:32:08,680 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33939.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:32:11,675 INFO [train.py:892] (2/4) Epoch 19, batch 550, loss[loss=0.1698, simple_loss=0.2395, pruned_loss=0.05004, over 19817.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2593, pruned_loss=0.05944, over 3702567.20 frames. ], batch size: 133, lr: 8.34e-03, grad_scale: 16.0 +2023-03-28 11:32:14,560 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.8348, 6.0673, 6.1536, 5.9431, 5.9163, 6.0816, 5.3280, 5.4395], + device='cuda:2'), covar=tensor([0.0279, 0.0366, 0.0397, 0.0356, 0.0420, 0.0469, 0.0578, 0.0860], + device='cuda:2'), in_proj_covar=tensor([0.0230, 0.0238, 0.0265, 0.0228, 0.0228, 0.0216, 0.0237, 0.0280], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 11:33:07,359 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1778, 3.3765, 2.1152, 4.0260, 3.5909, 4.0000, 3.9764, 3.1079], + device='cuda:2'), covar=tensor([0.0603, 0.0522, 0.1381, 0.0459, 0.0524, 0.0321, 0.0680, 0.0763], + device='cuda:2'), in_proj_covar=tensor([0.0132, 0.0130, 0.0136, 0.0134, 0.0117, 0.0117, 0.0131, 0.0135], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 11:34:04,585 INFO [train.py:892] (2/4) Epoch 19, batch 600, loss[loss=0.3408, simple_loss=0.3798, pruned_loss=0.1509, over 19240.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2611, pruned_loss=0.06079, over 3756589.05 frames. 
], batch size: 452, lr: 8.33e-03, grad_scale: 16.0 +2023-03-28 11:34:18,334 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9919, 4.8427, 4.7147, 5.1301, 4.8901, 5.6056, 5.1286, 5.2236], + device='cuda:2'), covar=tensor([0.0901, 0.0406, 0.0504, 0.0398, 0.0605, 0.0247, 0.0502, 0.0575], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0158, 0.0181, 0.0152, 0.0156, 0.0136, 0.0137, 0.0174], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 11:34:31,010 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34000.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:34:35,143 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-28 11:35:01,391 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.382e+02 4.429e+02 5.203e+02 6.509e+02 1.553e+03, threshold=1.041e+03, percent-clipped=5.0 +2023-03-28 11:35:42,669 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34033.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 11:36:02,219 INFO [train.py:892] (2/4) Epoch 19, batch 650, loss[loss=0.1821, simple_loss=0.2498, pruned_loss=0.05724, over 19842.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2607, pruned_loss=0.06079, over 3799740.15 frames. ], batch size: 90, lr: 8.33e-03, grad_scale: 16.0 +2023-03-28 11:36:10,141 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8932, 2.4925, 2.8883, 3.3237, 3.6995, 4.0540, 4.0393, 4.0308], + device='cuda:2'), covar=tensor([0.0914, 0.1706, 0.1284, 0.0605, 0.0383, 0.0248, 0.0324, 0.0347], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0170, 0.0172, 0.0144, 0.0123, 0.0117, 0.0110, 0.0106], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 11:36:10,163 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34044.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:37:10,499 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3044, 2.5704, 3.5413, 2.9556, 3.0546, 2.9792, 2.0212, 2.1988], + device='cuda:2'), covar=tensor([0.1056, 0.2565, 0.0600, 0.0928, 0.1586, 0.1237, 0.2310, 0.2488], + device='cuda:2'), in_proj_covar=tensor([0.0334, 0.0365, 0.0315, 0.0254, 0.0358, 0.0326, 0.0336, 0.0304], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 11:37:31,303 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.13 vs. limit=5.0 +2023-03-28 11:37:53,401 INFO [train.py:892] (2/4) Epoch 19, batch 700, loss[loss=0.1727, simple_loss=0.2488, pruned_loss=0.04831, over 19736.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2613, pruned_loss=0.06116, over 3833207.35 frames. 
], batch size: 99, lr: 8.32e-03, grad_scale: 16.0 +2023-03-28 11:38:13,689 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4179, 4.0460, 4.0902, 4.4407, 4.0816, 4.5320, 4.5545, 4.6925], + device='cuda:2'), covar=tensor([0.0654, 0.0407, 0.0506, 0.0329, 0.0684, 0.0422, 0.0370, 0.0291], + device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0161, 0.0185, 0.0155, 0.0160, 0.0139, 0.0140, 0.0178], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 11:38:28,839 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34105.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:38:52,054 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.720e+02 4.319e+02 5.100e+02 5.966e+02 1.380e+03, threshold=1.020e+03, percent-clipped=2.0 +2023-03-28 11:39:10,105 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-28 11:39:19,156 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34128.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:39:45,528 INFO [train.py:892] (2/4) Epoch 19, batch 750, loss[loss=0.1911, simple_loss=0.2555, pruned_loss=0.06334, over 19791.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2605, pruned_loss=0.06075, over 3859532.46 frames. ], batch size: 73, lr: 8.32e-03, grad_scale: 16.0 +2023-03-28 11:39:57,474 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34145.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:40:52,115 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.71 vs. limit=5.0 +2023-03-28 11:41:06,999 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34176.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:41:15,000 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8750, 2.8044, 4.5449, 3.9512, 4.3208, 4.4842, 4.3062, 4.1227], + device='cuda:2'), covar=tensor([0.0325, 0.0853, 0.0109, 0.0809, 0.0131, 0.0232, 0.0160, 0.0149], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0097, 0.0079, 0.0149, 0.0075, 0.0088, 0.0083, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 11:41:40,074 INFO [train.py:892] (2/4) Epoch 19, batch 800, loss[loss=0.1697, simple_loss=0.2417, pruned_loss=0.04889, over 19793.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2614, pruned_loss=0.06097, over 3879754.62 frames. ], batch size: 94, lr: 8.31e-03, grad_scale: 16.0 +2023-03-28 11:42:14,817 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34206.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:42:36,504 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.346e+02 5.123e+02 5.919e+02 1.290e+03, threshold=1.025e+03, percent-clipped=2.0 +2023-03-28 11:43:36,136 INFO [train.py:892] (2/4) Epoch 19, batch 850, loss[loss=0.209, simple_loss=0.2782, pruned_loss=0.06991, over 19827.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2627, pruned_loss=0.06119, over 3895683.24 frames. ], batch size: 229, lr: 8.30e-03, grad_scale: 16.0 +2023-03-28 11:45:03,702 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-03-28 11:45:25,867 INFO [train.py:892] (2/4) Epoch 19, batch 900, loss[loss=0.1546, simple_loss=0.2344, pruned_loss=0.03735, over 19749.00 frames. 
], tot_loss[loss=0.1915, simple_loss=0.2616, pruned_loss=0.06073, over 3907254.01 frames. ], batch size: 44, lr: 8.30e-03, grad_scale: 8.0 +2023-03-28 11:45:38,101 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34295.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:46:25,646 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 4.098e+02 5.258e+02 6.127e+02 1.043e+03, threshold=1.052e+03, percent-clipped=1.0 +2023-03-28 11:47:00,643 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34333.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:47:11,523 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-28 11:47:16,658 INFO [train.py:892] (2/4) Epoch 19, batch 950, loss[loss=0.1585, simple_loss=0.2392, pruned_loss=0.03885, over 19771.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2609, pruned_loss=0.06031, over 3918051.54 frames. ], batch size: 108, lr: 8.29e-03, grad_scale: 8.0 +2023-03-28 11:47:43,696 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2600, 4.7876, 4.8395, 5.2033, 4.8631, 5.5296, 5.3212, 5.5222], + device='cuda:2'), covar=tensor([0.0560, 0.0338, 0.0387, 0.0270, 0.0544, 0.0268, 0.0376, 0.0256], + device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0160, 0.0183, 0.0154, 0.0158, 0.0139, 0.0139, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 11:48:24,883 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4388, 3.6611, 3.8116, 4.5728, 2.9182, 3.4108, 2.8355, 2.6557], + device='cuda:2'), covar=tensor([0.0463, 0.2136, 0.0878, 0.0293, 0.2201, 0.0874, 0.1255, 0.1775], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0332, 0.0235, 0.0181, 0.0240, 0.0192, 0.0206, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 11:48:49,466 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34381.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:49:08,914 INFO [train.py:892] (2/4) Epoch 19, batch 1000, loss[loss=0.1905, simple_loss=0.2564, pruned_loss=0.06229, over 19890.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2611, pruned_loss=0.06066, over 3925461.92 frames. ], batch size: 63, lr: 8.29e-03, grad_scale: 8.0 +2023-03-28 11:49:29,376 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34400.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:49:57,261 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-28 11:50:08,244 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.058e+02 4.216e+02 4.828e+02 6.014e+02 1.612e+03, threshold=9.655e+02, percent-clipped=1.0 +2023-03-28 11:51:00,684 INFO [train.py:892] (2/4) Epoch 19, batch 1050, loss[loss=0.2005, simple_loss=0.27, pruned_loss=0.06545, over 19810.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2602, pruned_loss=0.06012, over 3931735.48 frames. 
], batch size: 67, lr: 8.28e-03, grad_scale: 8.0 +2023-03-28 11:52:27,779 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0627, 4.6898, 4.7455, 5.0993, 4.7273, 5.3430, 5.2143, 5.3833], + device='cuda:2'), covar=tensor([0.0666, 0.0366, 0.0458, 0.0288, 0.0679, 0.0313, 0.0434, 0.0335], + device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0160, 0.0184, 0.0154, 0.0158, 0.0139, 0.0139, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 11:52:51,925 INFO [train.py:892] (2/4) Epoch 19, batch 1100, loss[loss=0.2362, simple_loss=0.3031, pruned_loss=0.08461, over 19696.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2612, pruned_loss=0.06076, over 3934976.26 frames. ], batch size: 265, lr: 8.27e-03, grad_scale: 8.0 +2023-03-28 11:53:15,516 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34501.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:53:50,398 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 4.200e+02 4.924e+02 5.856e+02 9.802e+02, threshold=9.848e+02, percent-clipped=1.0 +2023-03-28 11:54:41,901 INFO [train.py:892] (2/4) Epoch 19, batch 1150, loss[loss=0.1926, simple_loss=0.2715, pruned_loss=0.0568, over 19723.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2621, pruned_loss=0.0611, over 3936949.11 frames. ], batch size: 52, lr: 8.27e-03, grad_scale: 8.0 +2023-03-28 11:55:14,599 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-28 11:56:35,002 INFO [train.py:892] (2/4) Epoch 19, batch 1200, loss[loss=0.1641, simple_loss=0.2366, pruned_loss=0.04581, over 19719.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2621, pruned_loss=0.06078, over 3940060.00 frames. ], batch size: 54, lr: 8.26e-03, grad_scale: 8.0 +2023-03-28 11:56:41,571 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34593.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:56:45,742 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34595.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:57:14,245 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0095, 2.9726, 3.2165, 2.6392, 3.2151, 2.7066, 3.0154, 3.2120], + device='cuda:2'), covar=tensor([0.0400, 0.0430, 0.0388, 0.0634, 0.0294, 0.0411, 0.0454, 0.0311], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0073, 0.0072, 0.0100, 0.0067, 0.0068, 0.0066, 0.0059], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 11:57:36,125 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.821e+02 4.325e+02 5.082e+02 6.313e+02 1.101e+03, threshold=1.016e+03, percent-clipped=4.0 +2023-03-28 11:57:49,063 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34622.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:57:57,883 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34626.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:58:30,489 INFO [train.py:892] (2/4) Epoch 19, batch 1250, loss[loss=0.1865, simple_loss=0.2523, pruned_loss=0.06033, over 19847.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2608, pruned_loss=0.06025, over 3942998.51 frames. 
], batch size: 177, lr: 8.26e-03, grad_scale: 8.0 +2023-03-28 11:58:35,841 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34643.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:58:59,643 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34654.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:00:04,848 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34683.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 12:00:13,940 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34687.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:00:21,338 INFO [train.py:892] (2/4) Epoch 19, batch 1300, loss[loss=0.1914, simple_loss=0.2721, pruned_loss=0.05535, over 19816.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2619, pruned_loss=0.06026, over 3940985.70 frames. ], batch size: 50, lr: 8.25e-03, grad_scale: 8.0 +2023-03-28 12:00:44,874 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34700.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:00:54,677 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-28 12:01:06,046 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34710.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 12:01:21,588 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.806e+02 3.918e+02 4.981e+02 5.928e+02 9.169e+02, threshold=9.963e+02, percent-clipped=0.0 +2023-03-28 12:01:26,957 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2923, 3.5883, 3.7069, 4.3400, 2.8895, 3.3505, 2.8176, 2.5917], + device='cuda:2'), covar=tensor([0.0537, 0.1895, 0.0935, 0.0395, 0.2091, 0.0978, 0.1196, 0.1743], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0333, 0.0237, 0.0181, 0.0240, 0.0193, 0.0207, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 12:02:16,200 INFO [train.py:892] (2/4) Epoch 19, batch 1350, loss[loss=0.172, simple_loss=0.2398, pruned_loss=0.05211, over 19874.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2613, pruned_loss=0.05953, over 3941872.54 frames. ], batch size: 136, lr: 8.24e-03, grad_scale: 8.0 +2023-03-28 12:02:34,277 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34748.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:03:22,017 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34771.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 12:04:08,654 INFO [train.py:892] (2/4) Epoch 19, batch 1400, loss[loss=0.1795, simple_loss=0.2458, pruned_loss=0.05658, over 19826.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2617, pruned_loss=0.05984, over 3942747.64 frames. ], batch size: 127, lr: 8.24e-03, grad_scale: 8.0 +2023-03-28 12:04:31,237 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34801.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:05:06,146 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.813e+02 4.248e+02 4.961e+02 6.105e+02 8.838e+02, threshold=9.921e+02, percent-clipped=0.0 +2023-03-28 12:05:56,249 INFO [train.py:892] (2/4) Epoch 19, batch 1450, loss[loss=0.1767, simple_loss=0.2562, pruned_loss=0.04854, over 19709.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2616, pruned_loss=0.0597, over 3944833.07 frames. 
], batch size: 101, lr: 8.23e-03, grad_scale: 8.0 +2023-03-28 12:06:16,984 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34849.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:06:22,882 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-28 12:07:51,364 INFO [train.py:892] (2/4) Epoch 19, batch 1500, loss[loss=0.163, simple_loss=0.2362, pruned_loss=0.04489, over 19750.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2617, pruned_loss=0.05975, over 3945682.01 frames. ], batch size: 89, lr: 8.23e-03, grad_scale: 8.0 +2023-03-28 12:08:14,927 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7020, 3.5509, 3.5508, 3.3473, 3.6855, 2.7672, 3.0255, 1.7485], + device='cuda:2'), covar=tensor([0.0215, 0.0260, 0.0176, 0.0198, 0.0169, 0.1111, 0.0651, 0.1716], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0135, 0.0107, 0.0127, 0.0112, 0.0129, 0.0140, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 12:08:22,959 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2584, 4.8036, 4.9103, 5.2381, 4.8885, 5.4955, 5.3845, 5.5724], + device='cuda:2'), covar=tensor([0.0705, 0.0413, 0.0454, 0.0342, 0.0726, 0.0349, 0.0363, 0.0321], + device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0162, 0.0186, 0.0157, 0.0160, 0.0141, 0.0141, 0.0181], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 12:08:47,107 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.472e+02 5.197e+02 6.398e+02 1.021e+03, threshold=1.039e+03, percent-clipped=2.0 +2023-03-28 12:09:30,000 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3049, 1.6953, 1.9579, 2.5762, 2.8972, 2.8937, 2.8213, 2.9673], + device='cuda:2'), covar=tensor([0.1017, 0.1910, 0.1592, 0.0656, 0.0467, 0.0375, 0.0415, 0.0393], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0168, 0.0170, 0.0141, 0.0125, 0.0117, 0.0109, 0.0106], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 12:09:43,742 INFO [train.py:892] (2/4) Epoch 19, batch 1550, loss[loss=0.1985, simple_loss=0.2654, pruned_loss=0.06574, over 19837.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2627, pruned_loss=0.06025, over 3945887.40 frames. 
], batch size: 146, lr: 8.22e-03, grad_scale: 8.0 +2023-03-28 12:10:01,130 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34949.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:11:03,969 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0924, 3.8877, 3.9041, 3.6939, 4.0856, 2.9328, 3.3591, 1.8285], + device='cuda:2'), covar=tensor([0.0235, 0.0244, 0.0179, 0.0215, 0.0173, 0.0980, 0.0786, 0.1720], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0135, 0.0106, 0.0127, 0.0112, 0.0129, 0.0140, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 12:11:05,760 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34978.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 12:11:13,809 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34982.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:11:32,653 INFO [train.py:892] (2/4) Epoch 19, batch 1600, loss[loss=0.1624, simple_loss=0.2342, pruned_loss=0.04526, over 19742.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2622, pruned_loss=0.05981, over 3946327.43 frames. ], batch size: 106, lr: 8.22e-03, grad_scale: 8.0 +2023-03-28 12:12:24,027 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.74 vs. limit=5.0 +2023-03-28 12:12:30,733 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 4.214e+02 4.806e+02 5.980e+02 1.376e+03, threshold=9.612e+02, percent-clipped=1.0 +2023-03-28 12:13:27,280 INFO [train.py:892] (2/4) Epoch 19, batch 1650, loss[loss=0.231, simple_loss=0.3006, pruned_loss=0.08071, over 19787.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2607, pruned_loss=0.05954, over 3947461.89 frames. ], batch size: 241, lr: 8.21e-03, grad_scale: 8.0 +2023-03-28 12:14:22,942 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35066.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 12:15:21,216 INFO [train.py:892] (2/4) Epoch 19, batch 1700, loss[loss=0.1633, simple_loss=0.2348, pruned_loss=0.04589, over 19788.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2616, pruned_loss=0.05978, over 3947924.06 frames. ], batch size: 83, lr: 8.20e-03, grad_scale: 8.0 +2023-03-28 12:15:25,084 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35092.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:16:20,385 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 3.844e+02 4.787e+02 6.177e+02 1.345e+03, threshold=9.574e+02, percent-clipped=3.0 +2023-03-28 12:17:09,575 INFO [train.py:892] (2/4) Epoch 19, batch 1750, loss[loss=0.2016, simple_loss=0.2654, pruned_loss=0.06895, over 19625.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2604, pruned_loss=0.05937, over 3948959.37 frames. ], batch size: 65, lr: 8.20e-03, grad_scale: 8.0 +2023-03-28 12:17:32,588 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35153.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:18:09,250 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-28 12:18:42,338 INFO [train.py:892] (2/4) Epoch 19, batch 1800, loss[loss=0.1741, simple_loss=0.2526, pruned_loss=0.04783, over 19621.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2609, pruned_loss=0.05985, over 3948962.65 frames. 
], batch size: 52, lr: 8.19e-03, grad_scale: 8.0 +2023-03-28 12:19:29,993 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.773e+02 3.767e+02 4.672e+02 5.957e+02 1.054e+03, threshold=9.344e+02, percent-clipped=2.0 +2023-03-28 12:20:12,658 INFO [train.py:892] (2/4) Epoch 19, batch 1850, loss[loss=0.1924, simple_loss=0.2589, pruned_loss=0.063, over 19681.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.262, pruned_loss=0.05951, over 3949835.14 frames. ], batch size: 55, lr: 8.19e-03, grad_scale: 8.0 +2023-03-28 12:21:18,364 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-28 12:21:18,803 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-28 12:21:19,180 INFO [train.py:892] (2/4) Epoch 20, batch 0, loss[loss=0.1652, simple_loss=0.2366, pruned_loss=0.0469, over 19789.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2366, pruned_loss=0.0469, over 19789.00 frames. ], batch size: 191, lr: 7.98e-03, grad_scale: 8.0 +2023-03-28 12:21:19,180 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 12:21:49,712 INFO [train.py:926] (2/4) Epoch 20, validation: loss=0.1718, simple_loss=0.2485, pruned_loss=0.04755, over 2883724.00 frames. +2023-03-28 12:21:49,713 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 12:21:57,712 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35249.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:22:01,571 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8644, 1.9300, 2.0472, 1.8854, 1.8503, 1.8490, 1.9390, 2.0771], + device='cuda:2'), covar=tensor([0.0328, 0.0261, 0.0238, 0.0261, 0.0390, 0.0310, 0.0380, 0.0215], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0063, 0.0066, 0.0059, 0.0073, 0.0068, 0.0085, 0.0059], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 12:23:03,195 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35278.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 12:23:12,037 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35282.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:23:43,049 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8690, 4.7687, 5.2955, 4.7885, 4.2896, 5.0553, 4.8746, 5.4262], + device='cuda:2'), covar=tensor([0.0908, 0.0386, 0.0373, 0.0377, 0.0702, 0.0442, 0.0498, 0.0320], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0217, 0.0213, 0.0222, 0.0204, 0.0225, 0.0222, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 12:23:44,078 INFO [train.py:892] (2/4) Epoch 20, batch 50, loss[loss=0.1878, simple_loss=0.2609, pruned_loss=0.05732, over 19775.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2567, pruned_loss=0.05818, over 892032.37 frames. 
], batch size: 52, lr: 7.97e-03, grad_scale: 8.0 +2023-03-28 12:23:47,122 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35297.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:24:30,953 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.131e+02 4.865e+02 6.041e+02 1.249e+03, threshold=9.730e+02, percent-clipped=3.0 +2023-03-28 12:24:53,316 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35326.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:25:03,353 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35330.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:25:04,000 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-28 12:25:26,330 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-28 12:25:38,686 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35345.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 12:25:39,712 INFO [train.py:892] (2/4) Epoch 20, batch 100, loss[loss=0.179, simple_loss=0.2588, pruned_loss=0.04957, over 19814.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2576, pruned_loss=0.05866, over 1570622.94 frames. ], batch size: 50, lr: 7.96e-03, grad_scale: 8.0 +2023-03-28 12:26:04,108 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1304, 3.1320, 3.3554, 2.5607, 3.4486, 2.7663, 2.8855, 3.4510], + device='cuda:2'), covar=tensor([0.0707, 0.0361, 0.0526, 0.0741, 0.0376, 0.0412, 0.0564, 0.0403], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0075, 0.0073, 0.0102, 0.0068, 0.0069, 0.0068, 0.0060], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 12:26:25,085 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35366.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 12:26:42,405 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35374.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:27:26,056 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.33 vs. limit=5.0 +2023-03-28 12:27:28,759 INFO [train.py:892] (2/4) Epoch 20, batch 150, loss[loss=0.1818, simple_loss=0.2511, pruned_loss=0.05625, over 19656.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2588, pruned_loss=0.05942, over 2097985.44 frames. ], batch size: 47, lr: 7.96e-03, grad_scale: 8.0 +2023-03-28 12:27:54,725 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35406.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 12:28:12,781 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35414.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 12:28:19,116 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.855e+02 4.347e+02 5.138e+02 6.356e+02 9.999e+02, threshold=1.028e+03, percent-clipped=1.0 +2023-03-28 12:28:58,628 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35435.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:29:25,245 INFO [train.py:892] (2/4) Epoch 20, batch 200, loss[loss=0.1889, simple_loss=0.2744, pruned_loss=0.05164, over 19891.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2586, pruned_loss=0.05892, over 2508504.10 frames. 
], batch size: 52, lr: 7.95e-03, grad_scale: 8.0 +2023-03-28 12:29:30,576 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35448.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:30:25,121 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7278, 4.3669, 4.4671, 4.2384, 4.6753, 3.1909, 3.8209, 2.4672], + device='cuda:2'), covar=tensor([0.0148, 0.0192, 0.0137, 0.0173, 0.0122, 0.0836, 0.0789, 0.1303], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0136, 0.0107, 0.0128, 0.0112, 0.0129, 0.0140, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 12:31:20,208 INFO [train.py:892] (2/4) Epoch 20, batch 250, loss[loss=0.1655, simple_loss=0.235, pruned_loss=0.04797, over 19858.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2582, pruned_loss=0.05903, over 2830178.86 frames. ], batch size: 99, lr: 7.95e-03, grad_scale: 8.0 +2023-03-28 12:32:02,731 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.157e+02 5.011e+02 5.895e+02 1.207e+03, threshold=1.002e+03, percent-clipped=2.0 +2023-03-28 12:32:35,664 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35530.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:33:07,877 INFO [train.py:892] (2/4) Epoch 20, batch 300, loss[loss=0.2227, simple_loss=0.2846, pruned_loss=0.08033, over 19757.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2592, pruned_loss=0.05916, over 3077685.50 frames. ], batch size: 209, lr: 7.94e-03, grad_scale: 8.0 +2023-03-28 12:33:39,239 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7128, 4.6814, 5.1283, 4.9036, 4.9645, 4.4085, 4.7981, 4.6527], + device='cuda:2'), covar=tensor([0.1364, 0.1488, 0.0868, 0.1170, 0.0773, 0.1025, 0.1878, 0.1894], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0287, 0.0336, 0.0269, 0.0251, 0.0248, 0.0325, 0.0357], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 12:34:26,370 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-28 12:34:49,249 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35591.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:34:58,972 INFO [train.py:892] (2/4) Epoch 20, batch 350, loss[loss=0.2299, simple_loss=0.2954, pruned_loss=0.08218, over 19706.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2588, pruned_loss=0.05883, over 3271920.61 frames. ], batch size: 315, lr: 7.94e-03, grad_scale: 8.0 +2023-03-28 12:35:45,652 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.727e+02 4.459e+02 5.252e+02 6.207e+02 1.161e+03, threshold=1.050e+03, percent-clipped=2.0 +2023-03-28 12:36:55,535 INFO [train.py:892] (2/4) Epoch 20, batch 400, loss[loss=0.1903, simple_loss=0.2589, pruned_loss=0.06084, over 19760.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2574, pruned_loss=0.05765, over 3423055.76 frames. ], batch size: 253, lr: 7.93e-03, grad_scale: 8.0 +2023-03-28 12:38:46,372 INFO [train.py:892] (2/4) Epoch 20, batch 450, loss[loss=0.181, simple_loss=0.258, pruned_loss=0.05197, over 19750.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2591, pruned_loss=0.05851, over 3539115.07 frames. ], batch size: 84, lr: 7.93e-03, grad_scale: 8.0 +2023-03-28 12:38:47,693 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.74 vs. 
limit=5.0 +2023-03-28 12:38:58,495 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35701.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 12:39:31,595 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.836e+02 4.301e+02 4.934e+02 5.630e+02 1.538e+03, threshold=9.868e+02, percent-clipped=1.0 +2023-03-28 12:39:58,937 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35730.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:40:31,621 INFO [train.py:892] (2/4) Epoch 20, batch 500, loss[loss=0.1843, simple_loss=0.2648, pruned_loss=0.05187, over 19904.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2589, pruned_loss=0.05885, over 3630245.75 frames. ], batch size: 50, lr: 7.92e-03, grad_scale: 8.0 +2023-03-28 12:40:37,959 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35748.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:41:26,225 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35771.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:42:19,111 INFO [train.py:892] (2/4) Epoch 20, batch 550, loss[loss=0.1794, simple_loss=0.2463, pruned_loss=0.05625, over 19657.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2597, pruned_loss=0.05944, over 3700259.21 frames. ], batch size: 43, lr: 7.92e-03, grad_scale: 8.0 +2023-03-28 12:42:19,808 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35796.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:42:51,779 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35811.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:43:04,499 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 4.108e+02 4.834e+02 5.739e+02 9.285e+02, threshold=9.669e+02, percent-clipped=0.0 +2023-03-28 12:43:11,696 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-03-28 12:43:18,609 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-28 12:43:35,119 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3984, 3.3979, 2.0308, 4.1768, 3.6247, 4.0952, 4.1274, 3.2018], + device='cuda:2'), covar=tensor([0.0579, 0.0568, 0.1595, 0.0447, 0.0557, 0.0349, 0.0531, 0.0750], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0134, 0.0138, 0.0136, 0.0121, 0.0121, 0.0133, 0.0137], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 12:43:39,432 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35832.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:44:08,197 INFO [train.py:892] (2/4) Epoch 20, batch 600, loss[loss=0.1723, simple_loss=0.2491, pruned_loss=0.0478, over 19688.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2598, pruned_loss=0.059, over 3753631.24 frames. 
], batch size: 75, lr: 7.91e-03, grad_scale: 8.0 +2023-03-28 12:45:06,826 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35872.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:45:28,120 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9285, 4.9014, 5.3894, 4.9162, 4.2971, 5.1145, 5.0540, 5.5301], + device='cuda:2'), covar=tensor([0.0913, 0.0323, 0.0315, 0.0347, 0.0745, 0.0395, 0.0355, 0.0278], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0217, 0.0214, 0.0223, 0.0202, 0.0225, 0.0223, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 12:45:36,176 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35886.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:45:54,692 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6413, 4.8083, 4.8702, 4.8896, 4.4514, 4.8896, 4.4127, 4.1171], + device='cuda:2'), covar=tensor([0.0991, 0.1097, 0.1107, 0.0788, 0.1320, 0.1136, 0.1589, 0.2375], + device='cuda:2'), in_proj_covar=tensor([0.0236, 0.0242, 0.0266, 0.0231, 0.0231, 0.0223, 0.0238, 0.0279], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 12:45:55,662 INFO [train.py:892] (2/4) Epoch 20, batch 650, loss[loss=0.1851, simple_loss=0.2506, pruned_loss=0.05982, over 19789.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2595, pruned_loss=0.05883, over 3795487.91 frames. ], batch size: 151, lr: 7.90e-03, grad_scale: 8.0 +2023-03-28 12:46:04,800 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.02 vs. limit=5.0 +2023-03-28 12:46:10,865 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3328, 4.4010, 2.6251, 4.7093, 4.8374, 2.1308, 3.9788, 3.5838], + device='cuda:2'), covar=tensor([0.0611, 0.0760, 0.2474, 0.0553, 0.0438, 0.2779, 0.0960, 0.0734], + device='cuda:2'), in_proj_covar=tensor([0.0220, 0.0242, 0.0223, 0.0249, 0.0220, 0.0202, 0.0230, 0.0181], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 12:46:42,643 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.720e+02 4.236e+02 5.120e+02 6.447e+02 1.973e+03, threshold=1.024e+03, percent-clipped=4.0 +2023-03-28 12:47:47,286 INFO [train.py:892] (2/4) Epoch 20, batch 700, loss[loss=0.1845, simple_loss=0.2461, pruned_loss=0.06146, over 19781.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2593, pruned_loss=0.05886, over 3828637.12 frames. ], batch size: 131, lr: 7.90e-03, grad_scale: 8.0 +2023-03-28 12:48:41,235 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1891, 4.4573, 4.4954, 4.4015, 4.1489, 4.4575, 4.0094, 4.0836], + device='cuda:2'), covar=tensor([0.0501, 0.0485, 0.0535, 0.0493, 0.0649, 0.0597, 0.0706, 0.0964], + device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0244, 0.0267, 0.0232, 0.0233, 0.0223, 0.0240, 0.0280], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 12:49:38,984 INFO [train.py:892] (2/4) Epoch 20, batch 750, loss[loss=0.1586, simple_loss=0.2292, pruned_loss=0.04406, over 19895.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2582, pruned_loss=0.05838, over 3854262.98 frames. 
], batch size: 91, lr: 7.89e-03, grad_scale: 8.0 +2023-03-28 12:49:57,214 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36001.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 12:50:31,042 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.681e+02 4.396e+02 5.340e+02 6.377e+02 1.000e+03, threshold=1.068e+03, percent-clipped=0.0 +2023-03-28 12:50:42,248 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.82 vs. limit=5.0 +2023-03-28 12:51:02,825 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36030.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:51:36,005 INFO [train.py:892] (2/4) Epoch 20, batch 800, loss[loss=0.1805, simple_loss=0.2557, pruned_loss=0.0526, over 19793.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2583, pruned_loss=0.05848, over 3874929.34 frames. ], batch size: 94, lr: 7.89e-03, grad_scale: 8.0 +2023-03-28 12:51:42,560 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36049.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 12:52:48,786 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36078.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:53:26,937 INFO [train.py:892] (2/4) Epoch 20, batch 850, loss[loss=0.1779, simple_loss=0.2506, pruned_loss=0.05261, over 19734.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2588, pruned_loss=0.05861, over 3892541.87 frames. ], batch size: 62, lr: 7.88e-03, grad_scale: 8.0 +2023-03-28 12:54:15,632 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.913e+02 4.436e+02 5.161e+02 6.355e+02 1.299e+03, threshold=1.032e+03, percent-clipped=1.0 +2023-03-28 12:54:36,498 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36127.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:55:19,416 INFO [train.py:892] (2/4) Epoch 20, batch 900, loss[loss=0.1614, simple_loss=0.2307, pruned_loss=0.04603, over 19836.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2593, pruned_loss=0.0584, over 3905479.96 frames. ], batch size: 90, lr: 7.88e-03, grad_scale: 8.0 +2023-03-28 12:55:48,575 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3865, 5.7732, 5.9141, 5.7051, 5.5304, 5.3361, 5.5299, 5.4720], + device='cuda:2'), covar=tensor([0.1251, 0.1255, 0.0905, 0.1132, 0.0643, 0.0882, 0.1725, 0.1712], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0289, 0.0337, 0.0271, 0.0252, 0.0249, 0.0325, 0.0357], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 12:56:08,568 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36167.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:56:39,322 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36181.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:56:51,669 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36186.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:57:11,803 INFO [train.py:892] (2/4) Epoch 20, batch 950, loss[loss=0.1774, simple_loss=0.2558, pruned_loss=0.04952, over 19855.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2591, pruned_loss=0.05842, over 3916049.26 frames. 
], batch size: 81, lr: 7.87e-03, grad_scale: 8.0 +2023-03-28 12:57:58,660 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.907e+02 4.513e+02 5.155e+02 5.945e+02 1.102e+03, threshold=1.031e+03, percent-clipped=1.0 +2023-03-28 12:58:39,632 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36234.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:58:57,934 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36242.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:59:05,349 INFO [train.py:892] (2/4) Epoch 20, batch 1000, loss[loss=0.1717, simple_loss=0.2561, pruned_loss=0.0436, over 19800.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2589, pruned_loss=0.05792, over 3921541.96 frames. ], batch size: 86, lr: 7.87e-03, grad_scale: 8.0 +2023-03-28 13:00:32,248 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4325, 2.7127, 3.2241, 2.9145, 3.3623, 3.3443, 4.2736, 4.6398], + device='cuda:2'), covar=tensor([0.0520, 0.1966, 0.1581, 0.2244, 0.1971, 0.1674, 0.0503, 0.0511], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0232, 0.0253, 0.0246, 0.0281, 0.0245, 0.0213, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 13:01:03,841 INFO [train.py:892] (2/4) Epoch 20, batch 1050, loss[loss=0.1773, simple_loss=0.2408, pruned_loss=0.05688, over 19829.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2595, pruned_loss=0.05817, over 3928515.73 frames. ], batch size: 146, lr: 7.86e-03, grad_scale: 16.0 +2023-03-28 13:01:49,682 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.691e+02 4.131e+02 5.046e+02 5.738e+02 1.025e+03, threshold=1.009e+03, percent-clipped=0.0 +2023-03-28 13:02:33,956 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1649, 2.5348, 3.1137, 2.7963, 3.1381, 3.1089, 4.1258, 4.3833], + device='cuda:2'), covar=tensor([0.0559, 0.2171, 0.1562, 0.2362, 0.1764, 0.1822, 0.0457, 0.0536], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0231, 0.0252, 0.0246, 0.0280, 0.0244, 0.0212, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 13:02:46,785 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-28 13:02:56,404 INFO [train.py:892] (2/4) Epoch 20, batch 1100, loss[loss=0.1817, simple_loss=0.2532, pruned_loss=0.05507, over 19661.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2601, pruned_loss=0.05887, over 3933422.53 frames. ], batch size: 67, lr: 7.86e-03, grad_scale: 16.0 +2023-03-28 13:04:06,114 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-28 13:04:48,944 INFO [train.py:892] (2/4) Epoch 20, batch 1150, loss[loss=0.1908, simple_loss=0.2738, pruned_loss=0.05386, over 19535.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2595, pruned_loss=0.05899, over 3937616.67 frames. 
], batch size: 54, lr: 7.85e-03, grad_scale: 16.0 +2023-03-28 13:05:36,783 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.688e+02 4.392e+02 5.039e+02 5.876e+02 1.119e+03, threshold=1.008e+03, percent-clipped=1.0 +2023-03-28 13:06:01,288 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36427.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:06:43,818 INFO [train.py:892] (2/4) Epoch 20, batch 1200, loss[loss=0.1846, simple_loss=0.2497, pruned_loss=0.05973, over 19773.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2602, pruned_loss=0.05882, over 3941494.73 frames. ], batch size: 152, lr: 7.85e-03, grad_scale: 16.0 +2023-03-28 13:07:24,904 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4874, 2.8577, 2.6274, 1.9942, 2.6561, 2.8244, 2.6796, 2.8769], + device='cuda:2'), covar=tensor([0.0309, 0.0265, 0.0264, 0.0527, 0.0326, 0.0262, 0.0222, 0.0192], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0079, 0.0087, 0.0090, 0.0093, 0.0069, 0.0068, 0.0070], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 13:07:31,515 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36467.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:07:50,179 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36475.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:08:37,661 INFO [train.py:892] (2/4) Epoch 20, batch 1250, loss[loss=0.1838, simple_loss=0.2481, pruned_loss=0.05971, over 19752.00 frames. ], tot_loss[loss=0.188, simple_loss=0.259, pruned_loss=0.05851, over 3942820.73 frames. ], batch size: 205, lr: 7.84e-03, grad_scale: 16.0 +2023-03-28 13:09:11,963 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3241, 5.6886, 5.7215, 5.6285, 5.3142, 5.7051, 5.0591, 5.1382], + device='cuda:2'), covar=tensor([0.0468, 0.0439, 0.0499, 0.0391, 0.0580, 0.0459, 0.0655, 0.0910], + device='cuda:2'), in_proj_covar=tensor([0.0238, 0.0246, 0.0267, 0.0231, 0.0233, 0.0224, 0.0239, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 13:09:22,501 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36515.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:09:27,480 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.037e+02 4.208e+02 4.891e+02 6.117e+02 9.708e+02, threshold=9.782e+02, percent-clipped=0.0 +2023-03-28 13:10:11,609 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36537.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:10:29,787 INFO [train.py:892] (2/4) Epoch 20, batch 1300, loss[loss=0.1901, simple_loss=0.2565, pruned_loss=0.06184, over 19832.00 frames. ], tot_loss[loss=0.188, simple_loss=0.259, pruned_loss=0.05846, over 3944497.80 frames. 
], batch size: 128, lr: 7.84e-03, grad_scale: 16.0 +2023-03-28 13:10:30,924 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2128, 2.5672, 2.3759, 1.7329, 2.3098, 2.6100, 2.5071, 2.5330], + device='cuda:2'), covar=tensor([0.0350, 0.0272, 0.0261, 0.0536, 0.0386, 0.0245, 0.0224, 0.0218], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0079, 0.0087, 0.0090, 0.0093, 0.0069, 0.0068, 0.0070], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 13:12:24,050 INFO [train.py:892] (2/4) Epoch 20, batch 1350, loss[loss=0.2022, simple_loss=0.2771, pruned_loss=0.06369, over 19722.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2581, pruned_loss=0.05809, over 3946513.75 frames. ], batch size: 295, lr: 7.83e-03, grad_scale: 16.0 +2023-03-28 13:12:45,791 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36605.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:12:51,685 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36608.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:13:00,920 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36612.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:13:12,354 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.844e+02 4.275e+02 5.272e+02 6.307e+02 1.102e+03, threshold=1.054e+03, percent-clipped=1.0 +2023-03-28 13:14:08,482 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9658, 2.9613, 3.1343, 2.4309, 3.3615, 2.8576, 3.0714, 3.3895], + device='cuda:2'), covar=tensor([0.0554, 0.0414, 0.0479, 0.0770, 0.0297, 0.0378, 0.0439, 0.0305], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0074, 0.0073, 0.0101, 0.0069, 0.0069, 0.0068, 0.0061], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 13:14:16,032 INFO [train.py:892] (2/4) Epoch 20, batch 1400, loss[loss=0.1763, simple_loss=0.2503, pruned_loss=0.05121, over 19814.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2579, pruned_loss=0.05821, over 3947889.42 frames. ], batch size: 96, lr: 7.82e-03, grad_scale: 16.0 +2023-03-28 13:14:41,734 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0297, 2.0933, 2.5217, 2.8781, 1.9315, 2.6189, 1.8849, 1.8809], + device='cuda:2'), covar=tensor([0.0656, 0.1153, 0.1117, 0.0480, 0.2443, 0.0702, 0.1472, 0.1702], + device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0330, 0.0235, 0.0180, 0.0238, 0.0194, 0.0206, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 13:15:03,580 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36666.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:15:09,218 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36669.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:15:16,956 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36673.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:16:06,746 INFO [train.py:892] (2/4) Epoch 20, batch 1450, loss[loss=0.1809, simple_loss=0.2488, pruned_loss=0.05656, over 19742.00 frames. ], tot_loss[loss=0.188, simple_loss=0.259, pruned_loss=0.05849, over 3944981.15 frames. 
], batch size: 44, lr: 7.82e-03, grad_scale: 16.0 +2023-03-28 13:16:17,620 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36700.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 13:16:54,915 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.742e+02 4.075e+02 4.928e+02 6.205e+02 1.251e+03, threshold=9.857e+02, percent-clipped=1.0 +2023-03-28 13:17:56,091 INFO [train.py:892] (2/4) Epoch 20, batch 1500, loss[loss=0.1632, simple_loss=0.2376, pruned_loss=0.04439, over 19831.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2588, pruned_loss=0.05786, over 3945675.85 frames. ], batch size: 146, lr: 7.81e-03, grad_scale: 16.0 +2023-03-28 13:18:05,862 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2041, 2.9007, 4.6640, 3.9686, 4.4300, 4.6379, 4.4712, 4.3958], + device='cuda:2'), covar=tensor([0.0288, 0.0824, 0.0089, 0.0867, 0.0094, 0.0184, 0.0152, 0.0119], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0098, 0.0080, 0.0149, 0.0075, 0.0089, 0.0084, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 13:18:30,384 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36761.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 13:19:49,259 INFO [train.py:892] (2/4) Epoch 20, batch 1550, loss[loss=0.1948, simple_loss=0.2658, pruned_loss=0.06186, over 19782.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2596, pruned_loss=0.05783, over 3943460.30 frames. ], batch size: 52, lr: 7.81e-03, grad_scale: 16.0 +2023-03-28 13:20:10,167 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36805.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 13:20:35,959 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.615e+02 4.163e+02 4.935e+02 6.337e+02 1.233e+03, threshold=9.870e+02, percent-clipped=4.0 +2023-03-28 13:21:24,719 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36837.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:21:43,520 INFO [train.py:892] (2/4) Epoch 20, batch 1600, loss[loss=0.2062, simple_loss=0.2762, pruned_loss=0.0681, over 19726.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2608, pruned_loss=0.05829, over 3942350.49 frames. ], batch size: 269, lr: 7.80e-03, grad_scale: 16.0 +2023-03-28 13:22:16,984 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-28 13:22:33,932 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36866.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 13:23:15,930 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36885.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:23:38,880 INFO [train.py:892] (2/4) Epoch 20, batch 1650, loss[loss=0.1701, simple_loss=0.253, pruned_loss=0.04359, over 19694.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2607, pruned_loss=0.05822, over 3944136.38 frames. 
], batch size: 48, lr: 7.80e-03, grad_scale: 16.0 +2023-03-28 13:24:13,750 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36911.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:24:26,322 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 3.920e+02 4.713e+02 5.573e+02 1.416e+03, threshold=9.427e+02, percent-clipped=1.0 +2023-03-28 13:25:31,476 INFO [train.py:892] (2/4) Epoch 20, batch 1700, loss[loss=0.168, simple_loss=0.2409, pruned_loss=0.04759, over 19751.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2595, pruned_loss=0.05761, over 3945761.17 frames. ], batch size: 102, lr: 7.79e-03, grad_scale: 16.0 +2023-03-28 13:25:58,493 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36958.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:26:04,502 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36961.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:26:10,888 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36964.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:26:21,359 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36968.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:26:29,996 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36972.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:27:09,091 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1901, 3.4831, 3.4324, 4.3327, 2.7597, 3.2591, 2.8775, 2.5977], + device='cuda:2'), covar=tensor([0.0544, 0.2166, 0.1172, 0.0340, 0.2180, 0.0887, 0.1273, 0.1817], + device='cuda:2'), in_proj_covar=tensor([0.0229, 0.0334, 0.0239, 0.0182, 0.0241, 0.0197, 0.0210, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 13:27:17,383 INFO [train.py:892] (2/4) Epoch 20, batch 1750, loss[loss=0.1765, simple_loss=0.2514, pruned_loss=0.0508, over 19703.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2606, pruned_loss=0.05824, over 3945795.58 frames. ], batch size: 101, lr: 7.79e-03, grad_scale: 16.0 +2023-03-28 13:27:35,589 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37004.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:28:00,089 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.609e+02 4.040e+02 4.922e+02 6.023e+02 1.252e+03, threshold=9.844e+02, percent-clipped=2.0 +2023-03-28 13:28:04,681 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37019.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:28:55,521 INFO [train.py:892] (2/4) Epoch 20, batch 1800, loss[loss=0.1997, simple_loss=0.2701, pruned_loss=0.06466, over 19837.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2605, pruned_loss=0.05805, over 3944639.97 frames. ], batch size: 239, lr: 7.78e-03, grad_scale: 16.0 +2023-03-28 13:29:13,131 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37056.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 13:29:29,121 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37065.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:30:21,869 INFO [train.py:892] (2/4) Epoch 20, batch 1850, loss[loss=0.1908, simple_loss=0.2724, pruned_loss=0.05464, over 19593.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2626, pruned_loss=0.05812, over 3941727.72 frames. 
], batch size: 53, lr: 7.78e-03, grad_scale: 16.0 +2023-03-28 13:31:23,897 INFO [train.py:892] (2/4) Epoch 21, batch 0, loss[loss=0.1846, simple_loss=0.2541, pruned_loss=0.05758, over 19798.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2541, pruned_loss=0.05758, over 19798.00 frames. ], batch size: 211, lr: 7.59e-03, grad_scale: 16.0 +2023-03-28 13:31:23,898 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 13:31:56,331 INFO [train.py:926] (2/4) Epoch 21, validation: loss=0.1717, simple_loss=0.248, pruned_loss=0.04765, over 2883724.00 frames. +2023-03-28 13:31:56,332 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 13:32:31,700 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.162e+02 4.969e+02 6.097e+02 9.968e+02, threshold=9.939e+02, percent-clipped=2.0 +2023-03-28 13:33:08,760 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-28 13:33:23,721 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6560, 2.6099, 1.6015, 3.1289, 2.8490, 3.0257, 3.1195, 2.4761], + device='cuda:2'), covar=tensor([0.0632, 0.0683, 0.1650, 0.0500, 0.0552, 0.0468, 0.0548, 0.0851], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0135, 0.0140, 0.0139, 0.0121, 0.0121, 0.0135, 0.0137], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 13:33:48,509 INFO [train.py:892] (2/4) Epoch 21, batch 50, loss[loss=0.2013, simple_loss=0.2798, pruned_loss=0.06146, over 19852.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.247, pruned_loss=0.05295, over 892024.69 frames. ], batch size: 78, lr: 7.58e-03, grad_scale: 16.0 +2023-03-28 13:34:10,847 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37161.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 13:34:13,464 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-28 13:35:00,923 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0081, 4.5918, 4.7267, 4.4730, 4.9352, 3.1463, 4.0007, 2.5541], + device='cuda:2'), covar=tensor([0.0163, 0.0203, 0.0141, 0.0184, 0.0124, 0.0905, 0.0832, 0.1432], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0139, 0.0110, 0.0131, 0.0114, 0.0131, 0.0140, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 13:35:40,043 INFO [train.py:892] (2/4) Epoch 21, batch 100, loss[loss=0.1773, simple_loss=0.2555, pruned_loss=0.04959, over 19853.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2519, pruned_loss=0.05407, over 1570341.71 frames. ], batch size: 104, lr: 7.58e-03, grad_scale: 16.0 +2023-03-28 13:36:14,434 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.842e+02 4.106e+02 4.671e+02 5.605e+02 1.435e+03, threshold=9.342e+02, percent-clipped=1.0 +2023-03-28 13:37:33,190 INFO [train.py:892] (2/4) Epoch 21, batch 150, loss[loss=0.2126, simple_loss=0.287, pruned_loss=0.06907, over 19652.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2548, pruned_loss=0.05492, over 2097871.61 frames. 
], batch size: 299, lr: 7.57e-03, grad_scale: 16.0 +2023-03-28 13:37:57,910 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37261.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:38:04,264 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37264.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:38:11,679 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37267.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:38:14,128 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37268.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:38:14,160 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37268.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:38:14,200 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9096, 1.8667, 2.0138, 1.9457, 1.8890, 1.9387, 1.8834, 2.0507], + device='cuda:2'), covar=tensor([0.0271, 0.0285, 0.0264, 0.0246, 0.0379, 0.0309, 0.0393, 0.0248], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0063, 0.0067, 0.0059, 0.0073, 0.0068, 0.0085, 0.0060], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 13:39:25,435 INFO [train.py:892] (2/4) Epoch 21, batch 200, loss[loss=0.1947, simple_loss=0.2651, pruned_loss=0.06212, over 19622.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2569, pruned_loss=0.05667, over 2508767.62 frames. ], batch size: 65, lr: 7.57e-03, grad_scale: 16.0 +2023-03-28 13:39:45,071 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37309.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:39:49,523 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5131, 3.1908, 3.4614, 3.1941, 3.7243, 3.7685, 4.3485, 4.8127], + device='cuda:2'), covar=tensor([0.0604, 0.1557, 0.1421, 0.1944, 0.1653, 0.1297, 0.0542, 0.0489], + device='cuda:2'), in_proj_covar=tensor([0.0238, 0.0233, 0.0255, 0.0246, 0.0281, 0.0245, 0.0213, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 13:39:51,126 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37312.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:39:55,516 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37314.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:40:00,790 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37316.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:40:02,145 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.481e+02 4.044e+02 5.052e+02 5.778e+02 1.190e+03, threshold=1.010e+03, percent-clipped=3.0 +2023-03-28 13:40:32,460 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37329.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:41:19,139 INFO [train.py:892] (2/4) Epoch 21, batch 250, loss[loss=0.1674, simple_loss=0.2338, pruned_loss=0.05046, over 19814.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2563, pruned_loss=0.05693, over 2828379.56 frames. 
], batch size: 123, lr: 7.56e-03, grad_scale: 16.0 +2023-03-28 13:41:30,948 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37356.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 13:41:39,260 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37360.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:41:47,458 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37364.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:43:12,252 INFO [train.py:892] (2/4) Epoch 21, batch 300, loss[loss=0.1666, simple_loss=0.2326, pruned_loss=0.05031, over 19836.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2577, pruned_loss=0.05725, over 3076661.25 frames. ], batch size: 144, lr: 7.56e-03, grad_scale: 16.0 +2023-03-28 13:43:21,043 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37404.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 13:43:32,347 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-28 13:43:49,250 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 4.191e+02 4.785e+02 5.606e+02 9.286e+02, threshold=9.571e+02, percent-clipped=0.0 +2023-03-28 13:43:59,593 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8421, 2.3893, 3.7232, 3.1916, 3.6154, 3.8197, 3.5542, 3.5333], + device='cuda:2'), covar=tensor([0.0544, 0.0927, 0.0120, 0.0642, 0.0136, 0.0198, 0.0186, 0.0179], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0098, 0.0081, 0.0149, 0.0076, 0.0090, 0.0085, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 13:44:06,869 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37425.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:44:16,327 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-28 13:44:42,256 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.23 vs. limit=5.0 +2023-03-28 13:45:03,287 INFO [train.py:892] (2/4) Epoch 21, batch 350, loss[loss=0.1744, simple_loss=0.242, pruned_loss=0.05342, over 19804.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2577, pruned_loss=0.05707, over 3270716.68 frames. ], batch size: 126, lr: 7.55e-03, grad_scale: 16.0 +2023-03-28 13:45:26,318 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37461.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 13:46:57,110 INFO [train.py:892] (2/4) Epoch 21, batch 400, loss[loss=0.1599, simple_loss=0.2409, pruned_loss=0.03945, over 19612.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2573, pruned_loss=0.0562, over 3420972.77 frames. ], batch size: 46, lr: 7.55e-03, grad_scale: 16.0 +2023-03-28 13:47:08,114 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-28 13:47:09,942 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-03-28 13:47:14,269 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37509.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 13:47:33,496 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.804e+02 3.892e+02 4.730e+02 5.808e+02 1.803e+03, threshold=9.460e+02, percent-clipped=3.0 +2023-03-28 13:48:32,479 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8282, 3.9948, 4.2467, 4.9743, 3.1999, 3.6216, 3.2781, 2.8843], + device='cuda:2'), covar=tensor([0.0394, 0.1854, 0.0791, 0.0254, 0.2107, 0.0923, 0.1099, 0.1717], + device='cuda:2'), in_proj_covar=tensor([0.0229, 0.0334, 0.0239, 0.0183, 0.0243, 0.0197, 0.0209, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 13:48:49,659 INFO [train.py:892] (2/4) Epoch 21, batch 450, loss[loss=0.2504, simple_loss=0.3177, pruned_loss=0.09159, over 19614.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2565, pruned_loss=0.05581, over 3538558.14 frames. ], batch size: 387, lr: 7.54e-03, grad_scale: 16.0 +2023-03-28 13:49:28,662 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37567.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:49:42,658 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2961, 2.5252, 3.5662, 2.9648, 3.0716, 2.9668, 2.1189, 2.1602], + device='cuda:2'), covar=tensor([0.1088, 0.2954, 0.0626, 0.0942, 0.1525, 0.1328, 0.2258, 0.2705], + device='cuda:2'), in_proj_covar=tensor([0.0337, 0.0367, 0.0322, 0.0261, 0.0359, 0.0340, 0.0344, 0.0315], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 13:50:01,614 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8401, 4.4585, 4.4625, 4.8276, 4.5618, 4.9872, 4.9143, 5.0753], + device='cuda:2'), covar=tensor([0.0616, 0.0341, 0.0436, 0.0290, 0.0504, 0.0337, 0.0358, 0.0265], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0163, 0.0188, 0.0161, 0.0162, 0.0145, 0.0142, 0.0185], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 13:50:25,073 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-28 13:50:42,359 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2010, 3.0567, 3.5904, 2.6335, 3.5883, 3.0301, 3.3199, 3.7515], + device='cuda:2'), covar=tensor([0.0764, 0.0466, 0.0422, 0.0797, 0.0497, 0.0414, 0.0432, 0.0420], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0077, 0.0074, 0.0104, 0.0070, 0.0071, 0.0069, 0.0062], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 13:50:43,320 INFO [train.py:892] (2/4) Epoch 21, batch 500, loss[loss=0.1779, simple_loss=0.2572, pruned_loss=0.04933, over 19797.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2581, pruned_loss=0.05612, over 3628389.10 frames. ], batch size: 68, lr: 7.54e-03, grad_scale: 16.0 +2023-03-28 13:50:44,789 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-28 13:50:49,629 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.86 vs. 
limit=5.0 +2023-03-28 13:51:16,842 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37614.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:51:19,013 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37615.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:51:22,190 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.832e+02 3.951e+02 4.577e+02 5.485e+02 9.092e+02, threshold=9.154e+02, percent-clipped=0.0 +2023-03-28 13:51:37,846 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37624.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:52:39,178 INFO [train.py:892] (2/4) Epoch 21, batch 550, loss[loss=0.196, simple_loss=0.2658, pruned_loss=0.06311, over 19643.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2574, pruned_loss=0.05608, over 3700995.56 frames. ], batch size: 72, lr: 7.53e-03, grad_scale: 16.0 +2023-03-28 13:52:59,461 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37660.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:53:03,608 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37662.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:54:32,552 INFO [train.py:892] (2/4) Epoch 21, batch 600, loss[loss=0.1574, simple_loss=0.2298, pruned_loss=0.04249, over 19645.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2572, pruned_loss=0.05648, over 3755040.64 frames. ], batch size: 47, lr: 7.53e-03, grad_scale: 16.0 +2023-03-28 13:54:49,155 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37708.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:55:09,544 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.449e+02 4.150e+02 4.838e+02 5.835e+02 9.838e+02, threshold=9.675e+02, percent-clipped=3.0 +2023-03-28 13:55:15,568 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.45 vs. limit=5.0 +2023-03-28 13:55:17,248 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37720.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:55:59,975 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4372, 2.6952, 3.7575, 3.0722, 3.2112, 3.1138, 2.2119, 2.2674], + device='cuda:2'), covar=tensor([0.1008, 0.2706, 0.0601, 0.0909, 0.1514, 0.1316, 0.2254, 0.2478], + device='cuda:2'), in_proj_covar=tensor([0.0339, 0.0370, 0.0324, 0.0262, 0.0362, 0.0342, 0.0348, 0.0317], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 13:56:25,838 INFO [train.py:892] (2/4) Epoch 21, batch 650, loss[loss=0.1947, simple_loss=0.2719, pruned_loss=0.05874, over 19699.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2581, pruned_loss=0.05708, over 3796825.27 frames. ], batch size: 74, lr: 7.52e-03, grad_scale: 16.0 +2023-03-28 13:58:19,128 INFO [train.py:892] (2/4) Epoch 21, batch 700, loss[loss=0.275, simple_loss=0.3756, pruned_loss=0.08721, over 17901.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.258, pruned_loss=0.05717, over 3827292.69 frames. 
], batch size: 633, lr: 7.52e-03, grad_scale: 16.0 +2023-03-28 13:58:55,422 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.218e+02 4.924e+02 5.585e+02 8.798e+02, threshold=9.849e+02, percent-clipped=0.0 +2023-03-28 13:59:14,384 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8571, 2.9030, 4.1858, 3.2330, 3.5784, 3.3298, 2.3957, 2.4204], + device='cuda:2'), covar=tensor([0.0862, 0.2648, 0.0558, 0.0926, 0.1428, 0.1302, 0.2164, 0.2669], + device='cuda:2'), in_proj_covar=tensor([0.0338, 0.0368, 0.0322, 0.0261, 0.0361, 0.0341, 0.0346, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 13:59:29,911 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-28 14:00:09,470 INFO [train.py:892] (2/4) Epoch 21, batch 750, loss[loss=0.1706, simple_loss=0.2441, pruned_loss=0.04854, over 19866.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2569, pruned_loss=0.05642, over 3855141.39 frames. ], batch size: 154, lr: 7.51e-03, grad_scale: 16.0 +2023-03-28 14:00:33,717 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.40 vs. limit=5.0 +2023-03-28 14:01:18,365 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-28 14:02:04,449 INFO [train.py:892] (2/4) Epoch 21, batch 800, loss[loss=0.1738, simple_loss=0.2464, pruned_loss=0.05057, over 19838.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.257, pruned_loss=0.05668, over 3877364.92 frames. ], batch size: 75, lr: 7.51e-03, grad_scale: 16.0 +2023-03-28 14:02:39,795 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 4.314e+02 5.039e+02 6.116e+02 1.150e+03, threshold=1.008e+03, percent-clipped=3.0 +2023-03-28 14:02:56,988 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37924.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:03:45,780 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-28 14:03:57,500 INFO [train.py:892] (2/4) Epoch 21, batch 850, loss[loss=0.1756, simple_loss=0.2524, pruned_loss=0.04935, over 19832.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2568, pruned_loss=0.05616, over 3894549.30 frames. ], batch size: 101, lr: 7.50e-03, grad_scale: 16.0 +2023-03-28 14:04:45,711 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37972.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:05:01,915 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37979.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:05:06,135 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.8418, 6.1920, 6.1994, 6.1121, 5.9519, 6.2418, 5.5095, 5.5993], + device='cuda:2'), covar=tensor([0.0365, 0.0376, 0.0456, 0.0342, 0.0448, 0.0427, 0.0630, 0.0868], + device='cuda:2'), in_proj_covar=tensor([0.0241, 0.0251, 0.0272, 0.0235, 0.0238, 0.0228, 0.0245, 0.0286], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 14:05:53,010 INFO [train.py:892] (2/4) Epoch 21, batch 900, loss[loss=0.1647, simple_loss=0.2399, pruned_loss=0.04476, over 19894.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2573, pruned_loss=0.05662, over 3907207.91 frames. 
], batch size: 80, lr: 7.50e-03, grad_scale: 16.0 +2023-03-28 14:06:31,293 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.842e+02 4.121e+02 4.904e+02 6.115e+02 1.131e+03, threshold=9.807e+02, percent-clipped=1.0 +2023-03-28 14:06:38,823 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38020.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:07:21,074 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38040.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:07:46,395 INFO [train.py:892] (2/4) Epoch 21, batch 950, loss[loss=0.1754, simple_loss=0.256, pruned_loss=0.04741, over 19897.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2564, pruned_loss=0.05624, over 3917372.34 frames. ], batch size: 50, lr: 7.49e-03, grad_scale: 16.0 +2023-03-28 14:08:17,599 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0234, 1.2628, 1.5253, 2.2431, 2.3446, 2.4025, 2.3329, 2.3867], + device='cuda:2'), covar=tensor([0.0955, 0.2109, 0.1762, 0.0747, 0.0600, 0.0392, 0.0434, 0.0456], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0169, 0.0172, 0.0143, 0.0126, 0.0119, 0.0111, 0.0107], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 14:08:23,054 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38068.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:09:34,128 INFO [train.py:892] (2/4) Epoch 21, batch 1000, loss[loss=0.2044, simple_loss=0.2707, pruned_loss=0.06906, over 19622.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2578, pruned_loss=0.05648, over 3924014.87 frames. ], batch size: 299, lr: 7.49e-03, grad_scale: 16.0 +2023-03-28 14:10:09,400 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.902e+02 4.380e+02 5.195e+02 6.424e+02 1.121e+03, threshold=1.039e+03, percent-clipped=5.0 +2023-03-28 14:10:53,238 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0955, 3.7632, 3.8996, 4.0721, 3.8381, 4.1029, 4.2533, 4.3795], + device='cuda:2'), covar=tensor([0.0639, 0.0420, 0.0534, 0.0355, 0.0719, 0.0504, 0.0447, 0.0363], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0167, 0.0193, 0.0164, 0.0165, 0.0149, 0.0145, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 14:11:25,975 INFO [train.py:892] (2/4) Epoch 21, batch 1050, loss[loss=0.1741, simple_loss=0.244, pruned_loss=0.05208, over 19578.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2574, pruned_loss=0.05614, over 3929277.31 frames. ], batch size: 42, lr: 7.48e-03, grad_scale: 16.0 +2023-03-28 14:12:10,974 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0949, 3.1044, 1.8547, 3.8565, 3.4370, 3.8488, 3.8960, 2.9632], + device='cuda:2'), covar=tensor([0.0681, 0.0612, 0.1718, 0.0660, 0.0624, 0.0343, 0.0527, 0.0796], + device='cuda:2'), in_proj_covar=tensor([0.0137, 0.0134, 0.0140, 0.0138, 0.0123, 0.0124, 0.0136, 0.0138], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 14:13:22,209 INFO [train.py:892] (2/4) Epoch 21, batch 1100, loss[loss=0.1701, simple_loss=0.2423, pruned_loss=0.04894, over 19774.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2572, pruned_loss=0.05609, over 3932521.38 frames. 
], batch size: 46, lr: 7.48e-03, grad_scale: 16.0 +2023-03-28 14:13:46,087 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4765, 2.5377, 1.5083, 2.8891, 2.6571, 2.7965, 2.9176, 2.3047], + device='cuda:2'), covar=tensor([0.0703, 0.0696, 0.1596, 0.0510, 0.0640, 0.0515, 0.0552, 0.0847], + device='cuda:2'), in_proj_covar=tensor([0.0137, 0.0135, 0.0141, 0.0139, 0.0123, 0.0125, 0.0136, 0.0139], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 14:13:55,406 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.714e+02 3.883e+02 4.794e+02 6.298e+02 1.135e+03, threshold=9.588e+02, percent-clipped=2.0 +2023-03-28 14:15:12,024 INFO [train.py:892] (2/4) Epoch 21, batch 1150, loss[loss=0.2141, simple_loss=0.2849, pruned_loss=0.0716, over 19787.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2572, pruned_loss=0.05664, over 3936502.09 frames. ], batch size: 193, lr: 7.47e-03, grad_scale: 16.0 +2023-03-28 14:17:02,222 INFO [train.py:892] (2/4) Epoch 21, batch 1200, loss[loss=0.1738, simple_loss=0.24, pruned_loss=0.05382, over 19756.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2566, pruned_loss=0.05591, over 3940351.16 frames. ], batch size: 102, lr: 7.47e-03, grad_scale: 32.0 +2023-03-28 14:17:37,169 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.715e+02 4.012e+02 4.628e+02 5.373e+02 1.374e+03, threshold=9.255e+02, percent-clipped=1.0 +2023-03-28 14:18:05,236 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1416, 2.4567, 3.3141, 2.7594, 2.9298, 2.8889, 2.0384, 2.1590], + device='cuda:2'), covar=tensor([0.1087, 0.2669, 0.0744, 0.0981, 0.1611, 0.1287, 0.2284, 0.2470], + device='cuda:2'), in_proj_covar=tensor([0.0337, 0.0367, 0.0322, 0.0261, 0.0363, 0.0339, 0.0343, 0.0313], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 14:18:18,186 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38335.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:18:18,269 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3350, 3.0670, 3.1883, 3.3174, 3.2340, 3.2277, 3.4706, 3.5833], + device='cuda:2'), covar=tensor([0.0714, 0.0486, 0.0571, 0.0413, 0.0659, 0.0728, 0.0438, 0.0358], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0166, 0.0191, 0.0164, 0.0163, 0.0148, 0.0144, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 14:18:53,058 INFO [train.py:892] (2/4) Epoch 21, batch 1250, loss[loss=0.1715, simple_loss=0.2586, pruned_loss=0.04221, over 19931.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2578, pruned_loss=0.05697, over 3941903.33 frames. ], batch size: 49, lr: 7.46e-03, grad_scale: 32.0 +2023-03-28 14:19:13,180 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38360.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:20:46,462 INFO [train.py:892] (2/4) Epoch 21, batch 1300, loss[loss=0.1682, simple_loss=0.2431, pruned_loss=0.04658, over 19731.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.258, pruned_loss=0.05685, over 3944032.91 frames. 
], batch size: 99, lr: 7.46e-03, grad_scale: 32.0 +2023-03-28 14:21:20,980 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.582e+02 4.064e+02 4.748e+02 5.705e+02 1.088e+03, threshold=9.497e+02, percent-clipped=1.0 +2023-03-28 14:21:27,797 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7625, 4.8598, 5.1598, 5.0164, 4.9910, 4.5803, 4.8627, 4.7016], + device='cuda:2'), covar=tensor([0.1382, 0.1180, 0.0820, 0.0996, 0.0757, 0.0898, 0.1863, 0.1857], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0298, 0.0347, 0.0276, 0.0255, 0.0259, 0.0336, 0.0366], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 14:21:32,063 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38421.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:21:43,035 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9203, 2.0977, 2.1327, 1.8033, 2.1945, 1.7643, 2.0570, 2.1823], + device='cuda:2'), covar=tensor([0.0458, 0.0446, 0.0449, 0.0907, 0.0401, 0.0486, 0.0480, 0.0330], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0076, 0.0074, 0.0104, 0.0070, 0.0071, 0.0069, 0.0062], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 14:22:38,959 INFO [train.py:892] (2/4) Epoch 21, batch 1350, loss[loss=0.2063, simple_loss=0.2808, pruned_loss=0.0659, over 19713.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2583, pruned_loss=0.05697, over 3945059.59 frames. ], batch size: 295, lr: 7.45e-03, grad_scale: 32.0 +2023-03-28 14:23:49,382 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1441, 3.4228, 3.5267, 4.1142, 2.7347, 3.2464, 2.5704, 2.5553], + device='cuda:2'), covar=tensor([0.0471, 0.2099, 0.0908, 0.0363, 0.2066, 0.0842, 0.1433, 0.1720], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0333, 0.0240, 0.0184, 0.0240, 0.0196, 0.0209, 0.0213], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 14:24:34,315 INFO [train.py:892] (2/4) Epoch 21, batch 1400, loss[loss=0.2054, simple_loss=0.2728, pruned_loss=0.06899, over 19794.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2595, pruned_loss=0.05768, over 3942447.78 frames. ], batch size: 193, lr: 7.45e-03, grad_scale: 32.0 +2023-03-28 14:25:09,935 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.194e+02 4.264e+02 4.951e+02 5.984e+02 1.387e+03, threshold=9.901e+02, percent-clipped=2.0 +2023-03-28 14:26:03,928 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-28 14:26:26,395 INFO [train.py:892] (2/4) Epoch 21, batch 1450, loss[loss=0.1635, simple_loss=0.2325, pruned_loss=0.04726, over 19780.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2586, pruned_loss=0.05691, over 3945308.13 frames. 
], batch size: 168, lr: 7.45e-03, grad_scale: 32.0 +2023-03-28 14:28:09,536 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6855, 2.1397, 2.3675, 3.1048, 3.3996, 3.5669, 3.4580, 3.4328], + device='cuda:2'), covar=tensor([0.0949, 0.1701, 0.1384, 0.0597, 0.0420, 0.0265, 0.0412, 0.0467], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0167, 0.0172, 0.0142, 0.0124, 0.0119, 0.0112, 0.0108], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 14:28:22,054 INFO [train.py:892] (2/4) Epoch 21, batch 1500, loss[loss=0.1777, simple_loss=0.2461, pruned_loss=0.05467, over 19927.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2578, pruned_loss=0.05651, over 3947029.33 frames. ], batch size: 51, lr: 7.44e-03, grad_scale: 32.0 +2023-03-28 14:28:58,329 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.180e+02 4.793e+02 6.063e+02 1.172e+03, threshold=9.587e+02, percent-clipped=1.0 +2023-03-28 14:29:40,659 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38635.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:30:15,090 INFO [train.py:892] (2/4) Epoch 21, batch 1550, loss[loss=0.1671, simple_loss=0.2345, pruned_loss=0.04984, over 19772.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.257, pruned_loss=0.05594, over 3948142.17 frames. ], batch size: 154, lr: 7.44e-03, grad_scale: 32.0 +2023-03-28 14:31:30,390 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38683.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:32:09,934 INFO [train.py:892] (2/4) Epoch 21, batch 1600, loss[loss=0.1823, simple_loss=0.2508, pruned_loss=0.05695, over 19549.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2586, pruned_loss=0.05699, over 3945226.01 frames. ], batch size: 41, lr: 7.43e-03, grad_scale: 32.0 +2023-03-28 14:32:35,462 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-03-28 14:32:47,586 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38716.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:32:48,618 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 3.919e+02 4.752e+02 6.022e+02 1.186e+03, threshold=9.505e+02, percent-clipped=3.0 +2023-03-28 14:32:58,205 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5536, 4.4286, 4.3782, 4.1078, 4.6314, 3.0069, 3.5725, 2.1185], + device='cuda:2'), covar=tensor([0.0305, 0.0242, 0.0223, 0.0246, 0.0243, 0.1035, 0.1058, 0.1903], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0137, 0.0109, 0.0129, 0.0115, 0.0128, 0.0138, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 14:34:02,103 INFO [train.py:892] (2/4) Epoch 21, batch 1650, loss[loss=0.1563, simple_loss=0.2393, pruned_loss=0.03667, over 19608.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2569, pruned_loss=0.05616, over 3947216.89 frames. 
], batch size: 46, lr: 7.43e-03, grad_scale: 32.0 +2023-03-28 14:35:16,183 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3653, 2.3759, 2.4115, 1.9752, 2.5720, 2.1780, 2.4710, 2.4969], + device='cuda:2'), covar=tensor([0.0436, 0.0467, 0.0508, 0.0868, 0.0329, 0.0445, 0.0415, 0.0324], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0077, 0.0075, 0.0105, 0.0071, 0.0071, 0.0070, 0.0062], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 14:35:20,011 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.96 vs. limit=5.0 +2023-03-28 14:35:57,132 INFO [train.py:892] (2/4) Epoch 21, batch 1700, loss[loss=0.1887, simple_loss=0.2582, pruned_loss=0.05962, over 19832.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2582, pruned_loss=0.05643, over 3946410.90 frames. ], batch size: 177, lr: 7.42e-03, grad_scale: 32.0 +2023-03-28 14:36:32,826 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.773e+02 3.919e+02 4.516e+02 5.441e+02 1.197e+03, threshold=9.032e+02, percent-clipped=2.0 +2023-03-28 14:36:48,324 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38823.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:37:43,275 INFO [train.py:892] (2/4) Epoch 21, batch 1750, loss[loss=0.1899, simple_loss=0.2548, pruned_loss=0.06245, over 19837.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2572, pruned_loss=0.05628, over 3947700.19 frames. ], batch size: 239, lr: 7.42e-03, grad_scale: 32.0 +2023-03-28 14:38:32,040 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.76 vs. limit=5.0 +2023-03-28 14:38:47,647 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38884.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:39:18,434 INFO [train.py:892] (2/4) Epoch 21, batch 1800, loss[loss=0.1981, simple_loss=0.2639, pruned_loss=0.06618, over 19734.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2573, pruned_loss=0.05605, over 3947454.63 frames. ], batch size: 134, lr: 7.41e-03, grad_scale: 32.0 +2023-03-28 14:39:48,597 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.425e+02 4.042e+02 5.128e+02 6.021e+02 1.055e+03, threshold=1.026e+03, percent-clipped=2.0 +2023-03-28 14:40:47,467 INFO [train.py:892] (2/4) Epoch 21, batch 1850, loss[loss=0.1866, simple_loss=0.2691, pruned_loss=0.05199, over 19594.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2593, pruned_loss=0.05641, over 3946448.07 frames. ], batch size: 53, lr: 7.41e-03, grad_scale: 32.0 +2023-03-28 14:41:51,757 INFO [train.py:892] (2/4) Epoch 22, batch 0, loss[loss=0.1664, simple_loss=0.2392, pruned_loss=0.04681, over 19620.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2392, pruned_loss=0.04681, over 19620.00 frames. ], batch size: 65, lr: 7.23e-03, grad_scale: 32.0 +2023-03-28 14:41:51,758 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 14:42:30,100 INFO [train.py:926] (2/4) Epoch 22, validation: loss=0.1727, simple_loss=0.2482, pruned_loss=0.04859, over 2883724.00 frames. 
+2023-03-28 14:42:30,101 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 14:42:36,134 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6817, 3.0619, 2.6288, 2.2287, 2.6848, 2.9159, 3.0445, 3.0554], + device='cuda:2'), covar=tensor([0.0325, 0.0288, 0.0278, 0.0509, 0.0405, 0.0306, 0.0173, 0.0213], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0081, 0.0087, 0.0091, 0.0094, 0.0071, 0.0069, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 14:44:26,276 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3594, 4.7767, 4.8910, 4.6100, 5.2627, 3.3393, 4.1619, 2.9061], + device='cuda:2'), covar=tensor([0.0164, 0.0203, 0.0150, 0.0203, 0.0122, 0.0874, 0.0907, 0.1303], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0138, 0.0110, 0.0130, 0.0115, 0.0129, 0.0140, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 14:44:29,527 INFO [train.py:892] (2/4) Epoch 22, batch 50, loss[loss=0.1909, simple_loss=0.2588, pruned_loss=0.06146, over 19858.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.244, pruned_loss=0.05043, over 892081.79 frames. ], batch size: 157, lr: 7.23e-03, grad_scale: 32.0 +2023-03-28 14:44:52,644 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39016.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:44:53,816 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.550e+02 3.535e+02 4.354e+02 5.558e+02 1.145e+03, threshold=8.708e+02, percent-clipped=3.0 +2023-03-28 14:46:23,800 INFO [train.py:892] (2/4) Epoch 22, batch 100, loss[loss=0.1754, simple_loss=0.2542, pruned_loss=0.04827, over 19783.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2508, pruned_loss=0.053, over 1569792.00 frames. ], batch size: 48, lr: 7.22e-03, grad_scale: 32.0 +2023-03-28 14:46:43,236 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39064.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:48:04,968 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4008, 3.2608, 3.4715, 2.6058, 3.8316, 2.9779, 3.2499, 3.8516], + device='cuda:2'), covar=tensor([0.0802, 0.0405, 0.0744, 0.0834, 0.0304, 0.0477, 0.0585, 0.0252], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0077, 0.0075, 0.0105, 0.0072, 0.0072, 0.0070, 0.0062], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 14:48:19,879 INFO [train.py:892] (2/4) Epoch 22, batch 150, loss[loss=0.1637, simple_loss=0.2374, pruned_loss=0.04499, over 19773.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2543, pruned_loss=0.05448, over 2094703.39 frames. 
], batch size: 169, lr: 7.22e-03, grad_scale: 32.0 +2023-03-28 14:48:33,014 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6023, 2.5647, 2.8591, 2.5252, 2.9760, 2.9825, 3.4704, 3.7794], + device='cuda:2'), covar=tensor([0.0648, 0.1756, 0.1532, 0.2109, 0.1551, 0.1536, 0.0588, 0.0601], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0235, 0.0256, 0.0247, 0.0285, 0.0248, 0.0215, 0.0237], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 14:48:48,465 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.956e+02 3.952e+02 4.652e+02 5.506e+02 8.622e+02, threshold=9.304e+02, percent-clipped=0.0 +2023-03-28 14:50:15,011 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5895, 3.4518, 4.0530, 2.8083, 4.1414, 3.1777, 3.4556, 3.9892], + device='cuda:2'), covar=tensor([0.0726, 0.0384, 0.0284, 0.0825, 0.0380, 0.0404, 0.0491, 0.0316], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0077, 0.0075, 0.0105, 0.0071, 0.0071, 0.0069, 0.0062], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 14:50:18,183 INFO [train.py:892] (2/4) Epoch 22, batch 200, loss[loss=0.2212, simple_loss=0.2892, pruned_loss=0.07657, over 19709.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2546, pruned_loss=0.05458, over 2505610.06 frames. ], batch size: 295, lr: 7.22e-03, grad_scale: 32.0 +2023-03-28 14:51:07,118 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39179.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:51:21,858 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39185.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:52:10,112 INFO [train.py:892] (2/4) Epoch 22, batch 250, loss[loss=0.1895, simple_loss=0.2568, pruned_loss=0.06114, over 19771.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2555, pruned_loss=0.05518, over 2824732.19 frames. ], batch size: 233, lr: 7.21e-03, grad_scale: 32.0 +2023-03-28 14:52:16,046 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6629, 4.3769, 4.4091, 4.2162, 4.6496, 3.1464, 3.8268, 2.3373], + device='cuda:2'), covar=tensor([0.0171, 0.0201, 0.0142, 0.0187, 0.0125, 0.0858, 0.0817, 0.1360], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0137, 0.0109, 0.0129, 0.0114, 0.0128, 0.0139, 0.0121], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 14:52:34,875 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.591e+02 3.931e+02 4.730e+02 5.720e+02 9.531e+02, threshold=9.460e+02, percent-clipped=1.0 +2023-03-28 14:53:42,655 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39246.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:54:03,339 INFO [train.py:892] (2/4) Epoch 22, batch 300, loss[loss=0.1653, simple_loss=0.2365, pruned_loss=0.04711, over 19708.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2541, pruned_loss=0.05426, over 3075561.48 frames. ], batch size: 81, lr: 7.21e-03, grad_scale: 32.0 +2023-03-28 14:55:08,569 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39284.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:55:59,304 INFO [train.py:892] (2/4) Epoch 22, batch 350, loss[loss=0.1581, simple_loss=0.2414, pruned_loss=0.03735, over 19687.00 frames. 
], tot_loss[loss=0.1823, simple_loss=0.255, pruned_loss=0.05474, over 3270025.45 frames. ], batch size: 45, lr: 7.20e-03, grad_scale: 32.0 +2023-03-28 14:56:21,663 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.483e+02 3.997e+02 4.728e+02 5.910e+02 1.079e+03, threshold=9.457e+02, percent-clipped=2.0 +2023-03-28 14:56:47,615 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5283, 4.2079, 4.2880, 4.5417, 4.1630, 4.6359, 4.6503, 4.7870], + device='cuda:2'), covar=tensor([0.0639, 0.0365, 0.0428, 0.0303, 0.0682, 0.0390, 0.0404, 0.0305], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0165, 0.0190, 0.0164, 0.0163, 0.0148, 0.0144, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 14:57:26,603 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-03-28 14:57:27,952 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39345.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:57:49,322 INFO [train.py:892] (2/4) Epoch 22, batch 400, loss[loss=0.171, simple_loss=0.2524, pruned_loss=0.04482, over 19801.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2547, pruned_loss=0.05433, over 3420005.86 frames. ], batch size: 65, lr: 7.20e-03, grad_scale: 32.0 +2023-03-28 14:58:57,993 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7497, 3.0578, 2.6382, 2.2331, 2.6837, 2.9519, 2.8076, 2.9521], + device='cuda:2'), covar=tensor([0.0261, 0.0260, 0.0284, 0.0518, 0.0357, 0.0275, 0.0253, 0.0207], + device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0082, 0.0088, 0.0093, 0.0095, 0.0071, 0.0071, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 14:59:41,762 INFO [train.py:892] (2/4) Epoch 22, batch 450, loss[loss=0.1867, simple_loss=0.2678, pruned_loss=0.05283, over 19856.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2552, pruned_loss=0.05488, over 3538616.59 frames. ], batch size: 60, lr: 7.19e-03, grad_scale: 32.0 +2023-03-28 15:00:08,525 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 3.895e+02 4.549e+02 5.283e+02 8.975e+02, threshold=9.097e+02, percent-clipped=0.0 +2023-03-28 15:01:37,218 INFO [train.py:892] (2/4) Epoch 22, batch 500, loss[loss=0.1515, simple_loss=0.2289, pruned_loss=0.03711, over 19811.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2542, pruned_loss=0.05422, over 3630710.47 frames. ], batch size: 98, lr: 7.19e-03, grad_scale: 32.0 +2023-03-28 15:01:53,510 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39463.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:02:31,991 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39479.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:03:33,879 INFO [train.py:892] (2/4) Epoch 22, batch 550, loss[loss=0.1916, simple_loss=0.2564, pruned_loss=0.06338, over 19808.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2554, pruned_loss=0.05531, over 3701215.58 frames. 
], batch size: 132, lr: 7.18e-03, grad_scale: 32.0 +2023-03-28 15:03:58,570 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.760e+02 4.104e+02 4.987e+02 6.113e+02 1.669e+03, threshold=9.973e+02, percent-clipped=4.0 +2023-03-28 15:04:16,077 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39524.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:04:22,107 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39527.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:04:51,800 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39541.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:04:56,474 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9290, 2.3137, 2.8490, 2.9938, 3.5477, 3.9478, 3.7777, 3.9608], + device='cuda:2'), covar=tensor([0.0896, 0.1744, 0.1253, 0.0679, 0.0376, 0.0224, 0.0384, 0.0370], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0168, 0.0171, 0.0141, 0.0124, 0.0120, 0.0114, 0.0108], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 15:05:16,640 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-28 15:05:26,075 INFO [train.py:892] (2/4) Epoch 22, batch 600, loss[loss=0.1817, simple_loss=0.2518, pruned_loss=0.05577, over 19891.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2538, pruned_loss=0.0545, over 3757829.45 frames. ], batch size: 92, lr: 7.18e-03, grad_scale: 32.0 +2023-03-28 15:06:15,359 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6065, 4.4279, 4.4229, 4.7840, 4.5189, 5.1651, 4.7383, 4.7747], + device='cuda:2'), covar=tensor([0.0957, 0.0572, 0.0691, 0.0476, 0.0842, 0.0407, 0.0629, 0.0709], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0165, 0.0190, 0.0163, 0.0163, 0.0147, 0.0143, 0.0187], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 15:07:20,941 INFO [train.py:892] (2/4) Epoch 22, batch 650, loss[loss=0.1544, simple_loss=0.2325, pruned_loss=0.0381, over 19794.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2538, pruned_loss=0.05483, over 3801607.08 frames. ], batch size: 79, lr: 7.17e-03, grad_scale: 32.0 +2023-03-28 15:07:45,280 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.674e+02 3.926e+02 4.968e+02 5.717e+02 7.985e+02, threshold=9.935e+02, percent-clipped=0.0 +2023-03-28 15:08:37,631 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39639.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 15:08:39,446 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39640.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:09:12,696 INFO [train.py:892] (2/4) Epoch 22, batch 700, loss[loss=0.156, simple_loss=0.23, pruned_loss=0.04101, over 19897.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2549, pruned_loss=0.05569, over 3835423.41 frames. ], batch size: 113, lr: 7.17e-03, grad_scale: 32.0 +2023-03-28 15:10:00,218 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. 
limit=2.0 +2023-03-28 15:10:53,923 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39700.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 15:11:05,441 INFO [train.py:892] (2/4) Epoch 22, batch 750, loss[loss=0.1521, simple_loss=0.2293, pruned_loss=0.03748, over 19653.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2546, pruned_loss=0.05498, over 3861079.95 frames. ], batch size: 58, lr: 7.17e-03, grad_scale: 16.0 +2023-03-28 15:11:32,103 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.919e+02 4.119e+02 5.270e+02 6.689e+02 1.138e+03, threshold=1.054e+03, percent-clipped=1.0 +2023-03-28 15:13:01,461 INFO [train.py:892] (2/4) Epoch 22, batch 800, loss[loss=0.1607, simple_loss=0.2373, pruned_loss=0.04207, over 19811.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2552, pruned_loss=0.05516, over 3881343.22 frames. ], batch size: 93, lr: 7.16e-03, grad_scale: 16.0 +2023-03-28 15:13:25,087 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39767.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:14:37,142 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4244, 4.3040, 4.7587, 4.3675, 4.0829, 4.5605, 4.3848, 4.8699], + device='cuda:2'), covar=tensor([0.0838, 0.0390, 0.0359, 0.0380, 0.0832, 0.0500, 0.0453, 0.0343], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0208, 0.0208, 0.0220, 0.0199, 0.0220, 0.0217, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 15:14:52,001 INFO [train.py:892] (2/4) Epoch 22, batch 850, loss[loss=0.1923, simple_loss=0.2673, pruned_loss=0.05866, over 19840.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2546, pruned_loss=0.05468, over 3896882.67 frames. ], batch size: 145, lr: 7.16e-03, grad_scale: 16.0 +2023-03-28 15:15:12,767 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39815.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:15:18,343 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.581e+02 3.870e+02 4.647e+02 5.944e+02 1.250e+03, threshold=9.293e+02, percent-clipped=1.0 +2023-03-28 15:15:21,401 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39819.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:15:42,476 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39828.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:16:09,978 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39841.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:16:42,170 INFO [train.py:892] (2/4) Epoch 22, batch 900, loss[loss=0.2201, simple_loss=0.2876, pruned_loss=0.07631, over 19663.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2534, pruned_loss=0.05415, over 3909424.74 frames. ], batch size: 299, lr: 7.15e-03, grad_scale: 16.0 +2023-03-28 15:17:30,864 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39876.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:18:01,288 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39889.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:18:37,556 INFO [train.py:892] (2/4) Epoch 22, batch 950, loss[loss=0.1576, simple_loss=0.234, pruned_loss=0.04054, over 19821.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2535, pruned_loss=0.05369, over 3917808.40 frames. 
], batch size: 103, lr: 7.15e-03, grad_scale: 16.0 +2023-03-28 15:19:06,557 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.773e+02 4.111e+02 4.981e+02 6.379e+02 1.110e+03, threshold=9.962e+02, percent-clipped=1.0 +2023-03-28 15:19:32,909 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.61 vs. limit=5.0 +2023-03-28 15:19:55,979 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39940.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:20:06,313 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-28 15:20:28,354 INFO [train.py:892] (2/4) Epoch 22, batch 1000, loss[loss=0.1947, simple_loss=0.2627, pruned_loss=0.06337, over 19871.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2529, pruned_loss=0.05352, over 3926202.34 frames. ], batch size: 134, lr: 7.14e-03, grad_scale: 16.0 +2023-03-28 15:20:53,195 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0567, 3.1713, 4.4530, 3.4452, 3.8332, 3.6096, 2.5199, 2.6761], + device='cuda:2'), covar=tensor([0.0827, 0.2781, 0.0462, 0.0856, 0.1373, 0.1133, 0.2158, 0.2466], + device='cuda:2'), in_proj_covar=tensor([0.0338, 0.0368, 0.0326, 0.0262, 0.0363, 0.0344, 0.0347, 0.0317], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 15:21:42,516 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39988.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:21:59,360 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39995.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 15:22:27,064 INFO [train.py:892] (2/4) Epoch 22, batch 1050, loss[loss=0.1807, simple_loss=0.2598, pruned_loss=0.0508, over 19926.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2532, pruned_loss=0.05325, over 3930716.81 frames. ], batch size: 51, lr: 7.14e-03, grad_scale: 16.0 +2023-03-28 15:22:53,657 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.755e+02 4.195e+02 4.862e+02 6.171e+02 1.256e+03, threshold=9.724e+02, percent-clipped=2.0 +2023-03-28 15:24:16,345 INFO [train.py:892] (2/4) Epoch 22, batch 1100, loss[loss=0.1643, simple_loss=0.2346, pruned_loss=0.04697, over 19817.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2527, pruned_loss=0.05355, over 3935584.35 frames. ], batch size: 133, lr: 7.13e-03, grad_scale: 16.0 +2023-03-28 15:24:56,342 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.91 vs. 
limit=5.0 +2023-03-28 15:25:30,174 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4457, 1.8452, 2.2079, 2.7035, 3.0093, 3.1490, 3.0611, 3.1599], + device='cuda:2'), covar=tensor([0.0921, 0.1769, 0.1347, 0.0647, 0.0480, 0.0335, 0.0384, 0.0384], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0167, 0.0170, 0.0142, 0.0124, 0.0119, 0.0113, 0.0108], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 15:25:52,664 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6082, 3.9094, 4.0474, 4.7453, 2.9230, 3.3529, 2.7008, 2.7747], + device='cuda:2'), covar=tensor([0.0464, 0.2239, 0.0878, 0.0325, 0.2444, 0.1095, 0.1467, 0.1877], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0329, 0.0241, 0.0184, 0.0241, 0.0198, 0.0209, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 15:26:09,783 INFO [train.py:892] (2/4) Epoch 22, batch 1150, loss[loss=0.2659, simple_loss=0.3182, pruned_loss=0.1068, over 19720.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2543, pruned_loss=0.05467, over 3937980.09 frames. ], batch size: 295, lr: 7.13e-03, grad_scale: 16.0 +2023-03-28 15:26:11,534 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-28 15:26:33,615 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 4.168e+02 4.947e+02 6.030e+02 1.277e+03, threshold=9.894e+02, percent-clipped=1.0 +2023-03-28 15:26:37,572 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40119.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:26:46,623 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40123.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:28:02,048 INFO [train.py:892] (2/4) Epoch 22, batch 1200, loss[loss=0.1847, simple_loss=0.2618, pruned_loss=0.05384, over 19834.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2529, pruned_loss=0.05397, over 3941482.80 frames. 
], batch size: 75, lr: 7.13e-03, grad_scale: 16.0 +2023-03-28 15:28:26,261 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40167.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:28:35,094 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40171.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:28:46,644 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.5592, 5.9085, 5.9566, 5.8173, 5.5361, 5.9320, 5.2464, 5.3672], + device='cuda:2'), covar=tensor([0.0414, 0.0394, 0.0462, 0.0358, 0.0525, 0.0477, 0.0694, 0.0881], + device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0258, 0.0278, 0.0242, 0.0242, 0.0230, 0.0250, 0.0293], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 15:29:32,231 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40195.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:29:36,219 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2033, 3.0619, 1.9449, 3.7624, 3.4600, 3.7457, 3.7719, 2.9983], + device='cuda:2'), covar=tensor([0.0563, 0.0667, 0.1618, 0.0584, 0.0517, 0.0440, 0.0551, 0.0760], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0134, 0.0139, 0.0139, 0.0123, 0.0124, 0.0136, 0.0138], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 15:29:54,916 INFO [train.py:892] (2/4) Epoch 22, batch 1250, loss[loss=0.1865, simple_loss=0.259, pruned_loss=0.05696, over 19638.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2525, pruned_loss=0.05341, over 3944129.62 frames. ], batch size: 299, lr: 7.12e-03, grad_scale: 16.0 +2023-03-28 15:30:21,937 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 3.858e+02 4.574e+02 5.518e+02 1.243e+03, threshold=9.148e+02, percent-clipped=1.0 +2023-03-28 15:30:23,139 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40218.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:31:33,927 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5275, 4.5932, 4.9008, 4.6965, 4.8319, 4.5137, 4.6125, 4.4307], + device='cuda:2'), covar=tensor([0.1520, 0.1395, 0.1015, 0.1267, 0.0793, 0.0908, 0.2108, 0.2044], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0299, 0.0348, 0.0278, 0.0257, 0.0258, 0.0337, 0.0363], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 15:31:49,163 INFO [train.py:892] (2/4) Epoch 22, batch 1300, loss[loss=0.1787, simple_loss=0.2453, pruned_loss=0.056, over 19731.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2534, pruned_loss=0.05427, over 3945526.25 frames. 
], batch size: 219, lr: 7.12e-03, grad_scale: 16.0 +2023-03-28 15:31:50,185 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40256.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:32:26,059 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40272.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:32:41,656 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40279.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:33:20,655 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40295.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 15:33:43,562 INFO [train.py:892] (2/4) Epoch 22, batch 1350, loss[loss=0.1385, simple_loss=0.2156, pruned_loss=0.03075, over 19588.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2532, pruned_loss=0.05416, over 3946183.42 frames. ], batch size: 44, lr: 7.11e-03, grad_scale: 16.0 +2023-03-28 15:34:03,662 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-28 15:34:11,583 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.667e+02 4.340e+02 4.978e+02 5.730e+02 1.244e+03, threshold=9.955e+02, percent-clipped=1.0 +2023-03-28 15:34:39,899 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40330.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:34:46,754 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40333.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:34:51,241 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1005, 3.1234, 1.9292, 3.7872, 3.4489, 3.6118, 3.7749, 2.9402], + device='cuda:2'), covar=tensor([0.0674, 0.0658, 0.1549, 0.0631, 0.0487, 0.0575, 0.0679, 0.0788], + device='cuda:2'), in_proj_covar=tensor([0.0137, 0.0134, 0.0139, 0.0139, 0.0123, 0.0124, 0.0136, 0.0138], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 15:35:00,382 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2858, 4.9208, 4.9699, 4.6957, 5.2470, 3.1665, 4.1467, 2.4433], + device='cuda:2'), covar=tensor([0.0189, 0.0170, 0.0131, 0.0187, 0.0128, 0.0966, 0.0935, 0.1566], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0137, 0.0109, 0.0130, 0.0113, 0.0130, 0.0139, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 15:35:09,356 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40343.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 15:35:13,413 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0729, 2.3452, 2.0600, 1.5296, 2.2002, 2.3095, 2.1880, 2.3651], + device='cuda:2'), covar=tensor([0.0405, 0.0333, 0.0364, 0.0667, 0.0421, 0.0299, 0.0298, 0.0253], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0084, 0.0091, 0.0095, 0.0097, 0.0074, 0.0073, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 15:35:17,564 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1473, 1.8073, 2.6501, 1.8125, 2.6368, 2.8087, 2.4344, 2.7731], + device='cuda:2'), covar=tensor([0.0695, 0.0989, 0.0167, 0.0426, 0.0153, 0.0240, 0.0210, 0.0192], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0099, 0.0082, 0.0149, 0.0079, 0.0092, 0.0085, 0.0079], + device='cuda:2'), 
out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 15:35:21,642 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6944, 2.5826, 2.8897, 2.6073, 3.0341, 2.9704, 3.4632, 3.8003], + device='cuda:2'), covar=tensor([0.0590, 0.1646, 0.1571, 0.2081, 0.1602, 0.1556, 0.0636, 0.0533], + device='cuda:2'), in_proj_covar=tensor([0.0239, 0.0231, 0.0255, 0.0246, 0.0284, 0.0246, 0.0214, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 15:35:35,538 INFO [train.py:892] (2/4) Epoch 22, batch 1400, loss[loss=0.174, simple_loss=0.2537, pruned_loss=0.04716, over 19910.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2536, pruned_loss=0.05424, over 3945994.72 frames. ], batch size: 53, lr: 7.11e-03, grad_scale: 16.0 +2023-03-28 15:36:58,915 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40391.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:37:29,877 INFO [train.py:892] (2/4) Epoch 22, batch 1450, loss[loss=0.1956, simple_loss=0.2737, pruned_loss=0.05869, over 19650.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2536, pruned_loss=0.05401, over 3947262.82 frames. ], batch size: 69, lr: 7.10e-03, grad_scale: 16.0 +2023-03-28 15:37:57,982 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.955e+02 4.197e+02 4.850e+02 6.324e+02 1.307e+03, threshold=9.700e+02, percent-clipped=2.0 +2023-03-28 15:38:11,889 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40423.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:38:47,616 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-03-28 15:39:20,779 INFO [train.py:892] (2/4) Epoch 22, batch 1500, loss[loss=0.1799, simple_loss=0.2667, pruned_loss=0.04654, over 19532.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2532, pruned_loss=0.05358, over 3948300.98 frames. ], batch size: 54, lr: 7.10e-03, grad_scale: 16.0 +2023-03-28 15:39:55,916 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40471.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:39:56,013 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40471.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:40:41,447 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40492.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:41:12,879 INFO [train.py:892] (2/4) Epoch 22, batch 1550, loss[loss=0.1696, simple_loss=0.2425, pruned_loss=0.04829, over 19723.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2526, pruned_loss=0.05331, over 3949156.95 frames. 
], batch size: 104, lr: 7.10e-03, grad_scale: 16.0 +2023-03-28 15:41:39,826 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.411e+02 3.822e+02 4.612e+02 5.584e+02 1.112e+03, threshold=9.223e+02, percent-clipped=2.0 +2023-03-28 15:41:43,099 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40519.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:41:47,622 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2614, 3.9402, 4.0702, 4.2982, 3.9914, 4.2919, 4.3798, 4.5747], + device='cuda:2'), covar=tensor([0.0630, 0.0385, 0.0459, 0.0345, 0.0660, 0.0446, 0.0396, 0.0277], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0165, 0.0189, 0.0164, 0.0163, 0.0147, 0.0143, 0.0187], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 15:42:25,437 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2743, 2.1677, 2.3795, 2.2848, 2.2803, 2.4234, 2.3305, 2.4702], + device='cuda:2'), covar=tensor([0.0291, 0.0316, 0.0260, 0.0238, 0.0369, 0.0255, 0.0341, 0.0247], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0066, 0.0069, 0.0063, 0.0075, 0.0069, 0.0087, 0.0061], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 15:42:55,036 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40551.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:42:59,817 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40553.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:43:05,440 INFO [train.py:892] (2/4) Epoch 22, batch 1600, loss[loss=0.2022, simple_loss=0.2716, pruned_loss=0.06644, over 19790.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2539, pruned_loss=0.05373, over 3947224.51 frames. ], batch size: 193, lr: 7.09e-03, grad_scale: 16.0 +2023-03-28 15:43:46,497 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40574.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:44:55,500 INFO [train.py:892] (2/4) Epoch 22, batch 1650, loss[loss=0.1745, simple_loss=0.2538, pruned_loss=0.04763, over 19648.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2539, pruned_loss=0.05354, over 3948197.36 frames. ], batch size: 68, lr: 7.09e-03, grad_scale: 16.0 +2023-03-28 15:45:03,815 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-28 15:45:24,170 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.699e+02 4.293e+02 4.863e+02 5.850e+02 9.887e+02, threshold=9.726e+02, percent-clipped=2.0 +2023-03-28 15:45:29,826 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.01 vs. limit=5.0 +2023-03-28 15:45:46,672 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40628.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:46:34,546 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40651.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:46:35,028 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-28 15:46:46,990 INFO [train.py:892] (2/4) Epoch 22, batch 1700, loss[loss=0.1833, simple_loss=0.251, pruned_loss=0.05776, over 19850.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2529, pruned_loss=0.05308, over 3950433.34 frames. 
], batch size: 144, lr: 7.08e-03, grad_scale: 16.0 +2023-03-28 15:47:22,037 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9930, 2.2921, 2.0594, 1.4012, 2.0362, 2.2329, 2.0655, 2.1722], + device='cuda:2'), covar=tensor([0.0303, 0.0244, 0.0292, 0.0550, 0.0386, 0.0238, 0.0259, 0.0240], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0083, 0.0089, 0.0093, 0.0096, 0.0073, 0.0072, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 15:47:52,432 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40686.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:48:31,904 INFO [train.py:892] (2/4) Epoch 22, batch 1750, loss[loss=0.1964, simple_loss=0.267, pruned_loss=0.06286, over 19764.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2533, pruned_loss=0.05324, over 3949700.81 frames. ], batch size: 244, lr: 7.08e-03, grad_scale: 16.0 +2023-03-28 15:48:44,979 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40712.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:48:56,706 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.471e+02 3.805e+02 4.645e+02 5.444e+02 1.360e+03, threshold=9.290e+02, percent-clipped=2.0 +2023-03-28 15:50:07,467 INFO [train.py:892] (2/4) Epoch 22, batch 1800, loss[loss=0.1735, simple_loss=0.2412, pruned_loss=0.05285, over 19800.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2533, pruned_loss=0.05316, over 3950279.61 frames. ], batch size: 200, lr: 7.07e-03, grad_scale: 16.0 +2023-03-28 15:50:57,641 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-28 15:51:11,357 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-28 15:51:37,483 INFO [train.py:892] (2/4) Epoch 22, batch 1850, loss[loss=0.1804, simple_loss=0.2685, pruned_loss=0.04611, over 19821.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2552, pruned_loss=0.05343, over 3950039.23 frames. ], batch size: 57, lr: 7.07e-03, grad_scale: 16.0 +2023-03-28 15:51:41,055 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4620, 3.2955, 3.7170, 2.7149, 3.9492, 3.2189, 3.4622, 3.8966], + device='cuda:2'), covar=tensor([0.0658, 0.0435, 0.0566, 0.0808, 0.0282, 0.0378, 0.0429, 0.0278], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0078, 0.0074, 0.0104, 0.0071, 0.0072, 0.0070, 0.0063], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 15:52:42,422 INFO [train.py:892] (2/4) Epoch 23, batch 0, loss[loss=0.2191, simple_loss=0.2894, pruned_loss=0.07439, over 19616.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2894, pruned_loss=0.07439, over 19616.00 frames. 
], batch size: 351, lr: 6.91e-03, grad_scale: 16.0 +2023-03-28 15:52:42,422 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 15:53:12,914 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8665, 3.7147, 3.7277, 3.9567, 3.8040, 3.6895, 3.9745, 4.1494], + device='cuda:2'), covar=tensor([0.0605, 0.0391, 0.0454, 0.0306, 0.0552, 0.0593, 0.0403, 0.0260], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0166, 0.0190, 0.0164, 0.0163, 0.0148, 0.0143, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 15:53:16,291 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0548, 2.7901, 3.4646, 3.3535, 3.7864, 4.1839, 4.1638, 4.1138], + device='cuda:2'), covar=tensor([0.0755, 0.1493, 0.0926, 0.0583, 0.0309, 0.0204, 0.0260, 0.0540], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0168, 0.0173, 0.0144, 0.0125, 0.0121, 0.0115, 0.0109], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 15:53:21,301 INFO [train.py:926] (2/4) Epoch 23, validation: loss=0.1723, simple_loss=0.2475, pruned_loss=0.04853, over 2883724.00 frames. +2023-03-28 15:53:21,304 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 15:53:38,424 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.877e+02 3.829e+02 4.262e+02 4.921e+02 1.071e+03, threshold=8.525e+02, percent-clipped=1.0 +2023-03-28 15:54:49,717 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40848.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:54:56,755 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40851.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:55:18,627 INFO [train.py:892] (2/4) Epoch 23, batch 50, loss[loss=0.1529, simple_loss=0.2268, pruned_loss=0.03951, over 19901.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2514, pruned_loss=0.05318, over 891359.80 frames. 
], batch size: 94, lr: 6.91e-03, grad_scale: 16.0 +2023-03-28 15:55:28,717 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40865.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:55:33,002 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2371, 4.7712, 4.8751, 4.6423, 5.1365, 3.2763, 4.1138, 2.4382], + device='cuda:2'), covar=tensor([0.0178, 0.0202, 0.0138, 0.0189, 0.0126, 0.0900, 0.0923, 0.1596], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0140, 0.0111, 0.0131, 0.0115, 0.0131, 0.0141, 0.0124], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 15:55:44,838 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2069, 2.5371, 2.2166, 1.5754, 2.2935, 2.3537, 2.3328, 2.4319], + device='cuda:2'), covar=tensor([0.0320, 0.0245, 0.0274, 0.0577, 0.0352, 0.0285, 0.0241, 0.0212], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0082, 0.0089, 0.0093, 0.0096, 0.0073, 0.0072, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 15:55:48,737 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40874.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:55:52,638 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2860, 3.2128, 3.4434, 2.6896, 3.6272, 2.9090, 3.2385, 3.5709], + device='cuda:2'), covar=tensor([0.0478, 0.0378, 0.0466, 0.0723, 0.0309, 0.0474, 0.0474, 0.0270], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0078, 0.0075, 0.0105, 0.0072, 0.0073, 0.0071, 0.0063], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 15:56:49,854 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40899.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:56:52,108 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40900.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:57:16,968 INFO [train.py:892] (2/4) Epoch 23, batch 100, loss[loss=0.1788, simple_loss=0.2577, pruned_loss=0.04995, over 19658.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2517, pruned_loss=0.0529, over 1569032.75 frames. 
], batch size: 50, lr: 6.90e-03, grad_scale: 16.0 +2023-03-28 15:57:32,516 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 4.075e+02 4.882e+02 5.777e+02 1.089e+03, threshold=9.764e+02, percent-clipped=3.0 +2023-03-28 15:57:44,682 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40922.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:57:54,329 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40926.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:57:58,476 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40928.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:58:48,847 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0676, 3.1919, 2.0414, 3.2384, 3.3280, 1.5782, 2.7013, 2.5718], + device='cuda:2'), covar=tensor([0.0873, 0.0837, 0.2538, 0.0750, 0.0582, 0.2530, 0.1150, 0.0933], + device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0245, 0.0225, 0.0256, 0.0232, 0.0198, 0.0231, 0.0184], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 15:58:54,914 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0848, 4.1064, 4.4716, 4.2434, 4.4151, 3.9551, 4.1747, 3.9717], + device='cuda:2'), covar=tensor([0.1492, 0.1773, 0.1073, 0.1325, 0.0950, 0.1189, 0.2030, 0.2263], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0304, 0.0354, 0.0283, 0.0262, 0.0263, 0.0339, 0.0368], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 15:59:13,806 INFO [train.py:892] (2/4) Epoch 23, batch 150, loss[loss=0.1739, simple_loss=0.2579, pruned_loss=0.04495, over 19714.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2536, pruned_loss=0.05336, over 2095550.37 frames. ], batch size: 78, lr: 6.90e-03, grad_scale: 16.0 +2023-03-28 15:59:14,895 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40961.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 15:59:46,968 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40976.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:00:11,799 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40986.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:00:40,929 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40997.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:01:02,002 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41007.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:01:09,789 INFO [train.py:892] (2/4) Epoch 23, batch 200, loss[loss=0.2044, simple_loss=0.2767, pruned_loss=0.06599, over 19750.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2535, pruned_loss=0.05309, over 2506798.34 frames. 
], batch size: 250, lr: 6.89e-03, grad_scale: 16.0 +2023-03-28 16:01:24,970 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.565e+02 4.412e+02 5.143e+02 6.492e+02 1.088e+03, threshold=1.029e+03, percent-clipped=2.0 +2023-03-28 16:02:08,346 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41034.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:02:30,358 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41045.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:03:00,133 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41058.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:03:05,858 INFO [train.py:892] (2/4) Epoch 23, batch 250, loss[loss=0.1522, simple_loss=0.2178, pruned_loss=0.04333, over 19841.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2533, pruned_loss=0.05341, over 2826465.16 frames. ], batch size: 128, lr: 6.89e-03, grad_scale: 16.0 +2023-03-28 16:04:44,481 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41102.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:04:55,297 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41106.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:05:07,151 INFO [train.py:892] (2/4) Epoch 23, batch 300, loss[loss=0.1621, simple_loss=0.2466, pruned_loss=0.03881, over 19765.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2547, pruned_loss=0.05374, over 3073415.71 frames. ], batch size: 70, lr: 6.89e-03, grad_scale: 16.0 +2023-03-28 16:05:23,580 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.930e+02 3.981e+02 4.919e+02 5.993e+02 1.063e+03, threshold=9.839e+02, percent-clipped=1.0 +2023-03-28 16:06:37,233 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41148.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:06:39,847 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.89 vs. limit=5.0 +2023-03-28 16:07:05,671 INFO [train.py:892] (2/4) Epoch 23, batch 350, loss[loss=0.1537, simple_loss=0.2319, pruned_loss=0.03774, over 19875.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2557, pruned_loss=0.05423, over 3265813.92 frames. ], batch size: 84, lr: 6.88e-03, grad_scale: 16.0 +2023-03-28 16:07:11,186 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41163.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:08:27,636 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41196.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:09:03,395 INFO [train.py:892] (2/4) Epoch 23, batch 400, loss[loss=0.1789, simple_loss=0.2448, pruned_loss=0.05651, over 19876.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2543, pruned_loss=0.05337, over 3418096.79 frames. 
], batch size: 136, lr: 6.88e-03, grad_scale: 16.0 +2023-03-28 16:09:22,971 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.551e+02 4.181e+02 5.035e+02 6.093e+02 9.382e+02, threshold=1.007e+03, percent-clipped=0.0 +2023-03-28 16:09:31,465 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41221.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:10:58,683 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41256.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 16:11:03,039 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6814, 3.1040, 2.5816, 2.1517, 2.7542, 2.9881, 2.9747, 3.0342], + device='cuda:2'), covar=tensor([0.0313, 0.0274, 0.0305, 0.0587, 0.0351, 0.0285, 0.0211, 0.0199], + device='cuda:2'), in_proj_covar=tensor([0.0091, 0.0085, 0.0091, 0.0095, 0.0098, 0.0074, 0.0074, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 16:11:08,155 INFO [train.py:892] (2/4) Epoch 23, batch 450, loss[loss=0.1577, simple_loss=0.2421, pruned_loss=0.03669, over 19870.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2549, pruned_loss=0.05345, over 3535561.00 frames. ], batch size: 48, lr: 6.87e-03, grad_scale: 16.0 +2023-03-28 16:11:24,022 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-03-28 16:12:47,464 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4997, 5.9320, 5.9697, 5.7740, 5.6899, 5.6243, 5.5175, 5.4400], + device='cuda:2'), covar=tensor([0.1241, 0.0950, 0.0815, 0.1075, 0.0593, 0.0652, 0.1752, 0.2061], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0304, 0.0351, 0.0282, 0.0262, 0.0262, 0.0335, 0.0367], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 16:12:57,517 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41307.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:13:05,196 INFO [train.py:892] (2/4) Epoch 23, batch 500, loss[loss=0.1673, simple_loss=0.2443, pruned_loss=0.04518, over 19794.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2541, pruned_loss=0.05322, over 3627744.95 frames. 
], batch size: 83, lr: 6.87e-03, grad_scale: 16.0 +2023-03-28 16:13:24,677 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.561e+02 3.916e+02 4.619e+02 5.416e+02 1.072e+03, threshold=9.239e+02, percent-clipped=2.0 +2023-03-28 16:13:40,636 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9591, 4.5571, 4.6236, 4.8783, 4.6337, 5.1139, 5.0536, 5.2047], + device='cuda:2'), covar=tensor([0.0519, 0.0325, 0.0488, 0.0308, 0.0547, 0.0369, 0.0441, 0.0258], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0166, 0.0188, 0.0163, 0.0162, 0.0147, 0.0141, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 16:14:26,463 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8333, 3.8654, 2.2999, 4.0719, 4.1941, 1.9839, 3.4495, 3.2844], + device='cuda:2'), covar=tensor([0.0701, 0.0930, 0.2938, 0.0925, 0.0572, 0.2854, 0.1141, 0.0840], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0243, 0.0223, 0.0253, 0.0229, 0.0198, 0.0229, 0.0184], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 16:14:51,521 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41353.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:14:56,244 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41355.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:15:11,437 INFO [train.py:892] (2/4) Epoch 23, batch 550, loss[loss=0.1658, simple_loss=0.2346, pruned_loss=0.0485, over 19661.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2548, pruned_loss=0.05312, over 3696694.50 frames. ], batch size: 43, lr: 6.87e-03, grad_scale: 16.0 +2023-03-28 16:16:51,470 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41401.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:17:14,938 INFO [train.py:892] (2/4) Epoch 23, batch 600, loss[loss=0.1482, simple_loss=0.2258, pruned_loss=0.03525, over 19766.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2544, pruned_loss=0.05338, over 3752732.47 frames. ], batch size: 113, lr: 6.86e-03, grad_scale: 16.0 +2023-03-28 16:17:30,476 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.341e+02 3.739e+02 4.752e+02 5.799e+02 9.691e+02, threshold=9.504e+02, percent-clipped=1.0 +2023-03-28 16:18:16,877 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-28 16:19:02,739 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41458.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:19:09,198 INFO [train.py:892] (2/4) Epoch 23, batch 650, loss[loss=0.2218, simple_loss=0.2871, pruned_loss=0.0782, over 19587.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2549, pruned_loss=0.05394, over 3796986.39 frames. ], batch size: 45, lr: 6.86e-03, grad_scale: 16.0 +2023-03-28 16:21:07,514 INFO [train.py:892] (2/4) Epoch 23, batch 700, loss[loss=0.202, simple_loss=0.2752, pruned_loss=0.06442, over 19940.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.254, pruned_loss=0.05351, over 3832061.10 frames. 
], batch size: 52, lr: 6.85e-03, grad_scale: 16.0 +2023-03-28 16:21:25,088 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.559e+02 3.930e+02 4.652e+02 5.850e+02 9.862e+02, threshold=9.304e+02, percent-clipped=2.0 +2023-03-28 16:21:31,962 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41521.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:21:52,799 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-28 16:22:52,879 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2137, 4.7626, 4.8537, 4.6432, 5.1124, 3.4250, 4.1762, 2.8866], + device='cuda:2'), covar=tensor([0.0175, 0.0192, 0.0136, 0.0173, 0.0135, 0.0733, 0.0820, 0.1240], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0138, 0.0109, 0.0129, 0.0114, 0.0128, 0.0140, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 16:22:57,174 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41556.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 16:23:06,376 INFO [train.py:892] (2/4) Epoch 23, batch 750, loss[loss=0.3004, simple_loss=0.355, pruned_loss=0.1229, over 19433.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2544, pruned_loss=0.05395, over 3858331.47 frames. ], batch size: 412, lr: 6.85e-03, grad_scale: 16.0 +2023-03-28 16:23:24,385 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41569.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:24:47,242 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41604.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:25:00,930 INFO [train.py:892] (2/4) Epoch 23, batch 800, loss[loss=0.1747, simple_loss=0.253, pruned_loss=0.0482, over 19741.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2547, pruned_loss=0.05363, over 3877611.12 frames. ], batch size: 221, lr: 6.85e-03, grad_scale: 16.0 +2023-03-28 16:25:18,848 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.557e+02 3.999e+02 4.823e+02 6.128e+02 1.113e+03, threshold=9.646e+02, percent-clipped=2.0 +2023-03-28 16:26:16,268 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.10 vs. limit=5.0 +2023-03-28 16:26:31,944 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0142, 2.4660, 2.8396, 3.1663, 3.6483, 3.9191, 3.9033, 3.9037], + device='cuda:2'), covar=tensor([0.0822, 0.1585, 0.1242, 0.0604, 0.0354, 0.0218, 0.0283, 0.0375], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0165, 0.0169, 0.0141, 0.0123, 0.0119, 0.0113, 0.0107], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 16:26:41,338 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41653.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:26:45,736 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41654.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:27:02,023 INFO [train.py:892] (2/4) Epoch 23, batch 850, loss[loss=0.1586, simple_loss=0.2307, pruned_loss=0.04329, over 19749.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2559, pruned_loss=0.05436, over 3892983.21 frames. 
], batch size: 100, lr: 6.84e-03, grad_scale: 16.0 +2023-03-28 16:27:23,414 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0199, 2.9692, 3.2648, 2.4960, 3.3771, 2.7648, 2.9579, 3.2755], + device='cuda:2'), covar=tensor([0.0567, 0.0505, 0.0501, 0.0813, 0.0302, 0.0472, 0.0533, 0.0297], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0078, 0.0076, 0.0106, 0.0072, 0.0074, 0.0071, 0.0064], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 16:28:35,328 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41701.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:28:35,403 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41701.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:28:59,723 INFO [train.py:892] (2/4) Epoch 23, batch 900, loss[loss=0.1766, simple_loss=0.2545, pruned_loss=0.04933, over 19889.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2565, pruned_loss=0.05426, over 3904461.49 frames. ], batch size: 84, lr: 6.84e-03, grad_scale: 16.0 +2023-03-28 16:29:10,586 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41715.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 16:29:18,875 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.536e+02 3.998e+02 4.733e+02 5.736e+02 9.757e+02, threshold=9.466e+02, percent-clipped=2.0 +2023-03-28 16:29:20,690 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-28 16:30:02,587 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2984, 2.3881, 2.4820, 2.4653, 2.4466, 2.4829, 2.4730, 2.4924], + device='cuda:2'), covar=tensor([0.0298, 0.0271, 0.0285, 0.0218, 0.0363, 0.0280, 0.0378, 0.0292], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0068, 0.0071, 0.0064, 0.0077, 0.0072, 0.0089, 0.0063], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 16:30:31,516 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41749.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:30:54,443 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41758.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:31:00,101 INFO [train.py:892] (2/4) Epoch 23, batch 950, loss[loss=0.1916, simple_loss=0.2572, pruned_loss=0.06294, over 19822.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2571, pruned_loss=0.05431, over 3915242.73 frames. ], batch size: 72, lr: 6.83e-03, grad_scale: 16.0 +2023-03-28 16:32:53,035 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41806.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:33:02,652 INFO [train.py:892] (2/4) Epoch 23, batch 1000, loss[loss=0.1806, simple_loss=0.262, pruned_loss=0.04963, over 19828.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.257, pruned_loss=0.05441, over 3921622.31 frames. ], batch size: 57, lr: 6.83e-03, grad_scale: 16.0 +2023-03-28 16:33:19,749 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.497e+02 4.077e+02 4.736e+02 5.828e+02 1.008e+03, threshold=9.473e+02, percent-clipped=1.0 +2023-03-28 16:33:39,951 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. 
limit=2.0 +2023-03-28 16:35:00,305 INFO [train.py:892] (2/4) Epoch 23, batch 1050, loss[loss=0.1766, simple_loss=0.2454, pruned_loss=0.05387, over 19809.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2559, pruned_loss=0.05382, over 3927703.73 frames. ], batch size: 132, lr: 6.83e-03, grad_scale: 16.0 +2023-03-28 16:36:36,380 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-28 16:36:49,550 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:37:01,115 INFO [train.py:892] (2/4) Epoch 23, batch 1100, loss[loss=0.1462, simple_loss=0.225, pruned_loss=0.03368, over 19819.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2566, pruned_loss=0.05435, over 3930938.46 frames. ], batch size: 82, lr: 6.82e-03, grad_scale: 16.0 +2023-03-28 16:37:20,547 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.656e+02 4.250e+02 5.076e+02 6.187e+02 1.225e+03, threshold=1.015e+03, percent-clipped=1.0 +2023-03-28 16:37:30,969 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1017, 4.0199, 4.4758, 4.2878, 4.4098, 3.8816, 4.1863, 4.0180], + device='cuda:2'), covar=tensor([0.1550, 0.1787, 0.1040, 0.1342, 0.0939, 0.1172, 0.1902, 0.2169], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0304, 0.0351, 0.0282, 0.0262, 0.0262, 0.0336, 0.0364], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 16:37:53,709 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1132, 2.1387, 2.1949, 2.0735, 2.1946, 2.2751, 2.1427, 2.2716], + device='cuda:2'), covar=tensor([0.0278, 0.0266, 0.0290, 0.0295, 0.0378, 0.0255, 0.0391, 0.0260], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0067, 0.0070, 0.0064, 0.0076, 0.0071, 0.0088, 0.0063], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-28 16:38:31,179 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41948.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 16:38:58,874 INFO [train.py:892] (2/4) Epoch 23, batch 1150, loss[loss=0.1659, simple_loss=0.2374, pruned_loss=0.04722, over 19768.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.257, pruned_loss=0.0547, over 3934438.01 frames. ], batch size: 130, lr: 6.82e-03, grad_scale: 16.0 +2023-03-28 16:39:13,899 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41967.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:40:59,051 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42009.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 16:41:01,145 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42010.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 16:41:03,074 INFO [train.py:892] (2/4) Epoch 23, batch 1200, loss[loss=0.17, simple_loss=0.2445, pruned_loss=0.04773, over 19803.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2549, pruned_loss=0.05391, over 3939182.74 frames. 
], batch size: 148, lr: 6.81e-03, grad_scale: 16.0 +2023-03-28 16:41:23,892 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.867e+02 3.967e+02 4.687e+02 5.246e+02 9.664e+02, threshold=9.374e+02, percent-clipped=0.0 +2023-03-28 16:41:33,977 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0447, 2.8954, 3.1751, 2.4326, 3.1750, 2.6680, 3.0340, 3.2678], + device='cuda:2'), covar=tensor([0.0436, 0.0454, 0.0399, 0.0800, 0.0395, 0.0469, 0.0372, 0.0256], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0079, 0.0077, 0.0106, 0.0073, 0.0074, 0.0071, 0.0064], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 16:42:20,158 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9122, 3.1767, 3.2902, 3.8358, 2.7969, 3.1876, 2.4346, 2.4620], + device='cuda:2'), covar=tensor([0.0513, 0.2163, 0.1096, 0.0386, 0.1919, 0.0778, 0.1397, 0.1739], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0329, 0.0241, 0.0185, 0.0241, 0.0198, 0.0211, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 16:43:04,805 INFO [train.py:892] (2/4) Epoch 23, batch 1250, loss[loss=0.1748, simple_loss=0.2539, pruned_loss=0.04786, over 19823.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2536, pruned_loss=0.05289, over 3941534.02 frames. ], batch size: 72, lr: 6.81e-03, grad_scale: 16.0 +2023-03-28 16:44:53,095 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.80 vs. limit=5.0 +2023-03-28 16:44:59,356 INFO [train.py:892] (2/4) Epoch 23, batch 1300, loss[loss=0.2052, simple_loss=0.2872, pruned_loss=0.06156, over 19698.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2538, pruned_loss=0.05291, over 3942759.60 frames. ], batch size: 56, lr: 6.81e-03, grad_scale: 16.0 +2023-03-28 16:45:16,101 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.506e+02 3.507e+02 4.427e+02 5.595e+02 1.023e+03, threshold=8.855e+02, percent-clipped=1.0 +2023-03-28 16:45:24,493 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4401, 3.0232, 3.3480, 3.0698, 3.7108, 3.6791, 4.3122, 4.7626], + device='cuda:2'), covar=tensor([0.0568, 0.1761, 0.1540, 0.2158, 0.1593, 0.1389, 0.0620, 0.0539], + device='cuda:2'), in_proj_covar=tensor([0.0240, 0.0233, 0.0257, 0.0245, 0.0286, 0.0249, 0.0216, 0.0239], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 16:46:50,591 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1884, 2.9098, 3.3611, 2.5999, 3.3255, 2.7348, 3.0282, 3.3203], + device='cuda:2'), covar=tensor([0.0473, 0.0486, 0.0592, 0.0768, 0.0372, 0.0474, 0.0441, 0.0308], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0079, 0.0077, 0.0107, 0.0073, 0.0074, 0.0071, 0.0065], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 16:46:59,028 INFO [train.py:892] (2/4) Epoch 23, batch 1350, loss[loss=0.1665, simple_loss=0.2401, pruned_loss=0.04649, over 19762.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2534, pruned_loss=0.05251, over 3944882.54 frames. 
], batch size: 217, lr: 6.80e-03, grad_scale: 16.0 +2023-03-28 16:47:44,704 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0359, 3.3411, 2.7264, 2.3788, 2.8761, 3.3848, 3.1572, 3.2356], + device='cuda:2'), covar=tensor([0.0249, 0.0263, 0.0273, 0.0510, 0.0321, 0.0193, 0.0201, 0.0217], + device='cuda:2'), in_proj_covar=tensor([0.0091, 0.0085, 0.0091, 0.0095, 0.0098, 0.0075, 0.0074, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 16:48:06,482 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9483, 2.3942, 2.9827, 3.1611, 3.6824, 4.2011, 4.1799, 4.1957], + device='cuda:2'), covar=tensor([0.0996, 0.1963, 0.1384, 0.0710, 0.0421, 0.0233, 0.0286, 0.0323], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0168, 0.0172, 0.0143, 0.0125, 0.0122, 0.0116, 0.0109], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 16:48:58,465 INFO [train.py:892] (2/4) Epoch 23, batch 1400, loss[loss=0.1741, simple_loss=0.2464, pruned_loss=0.05091, over 19820.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2521, pruned_loss=0.05169, over 3945472.16 frames. ], batch size: 147, lr: 6.80e-03, grad_scale: 16.0 +2023-03-28 16:49:18,202 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.596e+02 4.046e+02 4.840e+02 5.531e+02 1.167e+03, threshold=9.681e+02, percent-clipped=2.0 +2023-03-28 16:50:51,481 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.66 vs. limit=5.0 +2023-03-28 16:50:55,953 INFO [train.py:892] (2/4) Epoch 23, batch 1450, loss[loss=0.1671, simple_loss=0.2401, pruned_loss=0.0471, over 19429.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2535, pruned_loss=0.05251, over 3946469.48 frames. ], batch size: 40, lr: 6.79e-03, grad_scale: 16.0 +2023-03-28 16:50:59,117 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42262.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:52:15,462 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6762, 3.3038, 3.4952, 3.2809, 3.8775, 3.7763, 4.4753, 5.0262], + device='cuda:2'), covar=tensor([0.0485, 0.1532, 0.1398, 0.1982, 0.1534, 0.1330, 0.0511, 0.0409], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0235, 0.0259, 0.0248, 0.0288, 0.0250, 0.0219, 0.0242], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 16:52:37,634 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4718, 3.4138, 4.9610, 3.5567, 4.1456, 3.8724, 2.6562, 2.8866], + device='cuda:2'), covar=tensor([0.0779, 0.2725, 0.0397, 0.0926, 0.1430, 0.1168, 0.2270, 0.2357], + device='cuda:2'), in_proj_covar=tensor([0.0340, 0.0370, 0.0328, 0.0265, 0.0361, 0.0348, 0.0350, 0.0319], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 16:52:39,464 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42304.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 16:52:55,095 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42310.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 16:52:56,158 INFO [train.py:892] (2/4) Epoch 23, batch 1500, loss[loss=0.1795, simple_loss=0.2486, pruned_loss=0.05516, over 19866.00 frames. 
], tot_loss[loss=0.1797, simple_loss=0.2541, pruned_loss=0.05264, over 3946468.66 frames. ], batch size: 136, lr: 6.79e-03, grad_scale: 16.0 +2023-03-28 16:53:09,970 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-28 16:53:12,434 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 3.834e+02 4.491e+02 5.475e+02 9.229e+02, threshold=8.983e+02, percent-clipped=0.0 +2023-03-28 16:54:46,315 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42358.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:54:51,542 INFO [train.py:892] (2/4) Epoch 23, batch 1550, loss[loss=0.1789, simple_loss=0.2468, pruned_loss=0.0555, over 19817.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2545, pruned_loss=0.05267, over 3946824.61 frames. ], batch size: 167, lr: 6.79e-03, grad_scale: 16.0 +2023-03-28 16:55:25,783 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1697, 2.6453, 3.0539, 2.7305, 3.3513, 3.1800, 4.1660, 4.5054], + device='cuda:2'), covar=tensor([0.0596, 0.2214, 0.1685, 0.2410, 0.1898, 0.1823, 0.0515, 0.0464], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0236, 0.0260, 0.0248, 0.0288, 0.0250, 0.0219, 0.0241], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 16:55:38,082 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42379.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:56:55,952 INFO [train.py:892] (2/4) Epoch 23, batch 1600, loss[loss=0.1531, simple_loss=0.2292, pruned_loss=0.03846, over 19713.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.254, pruned_loss=0.05256, over 3947424.54 frames. ], batch size: 109, lr: 6.78e-03, grad_scale: 16.0 +2023-03-28 16:57:13,629 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.225e+02 3.903e+02 4.499e+02 5.665e+02 1.044e+03, threshold=8.998e+02, percent-clipped=1.0 +2023-03-28 16:58:07,303 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42440.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:58:30,392 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5921, 2.7804, 4.2600, 3.7754, 4.1225, 4.2796, 4.1356, 3.8950], + device='cuda:2'), covar=tensor([0.0406, 0.0848, 0.0106, 0.0674, 0.0128, 0.0182, 0.0143, 0.0165], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0101, 0.0083, 0.0151, 0.0080, 0.0094, 0.0086, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 16:58:53,567 INFO [train.py:892] (2/4) Epoch 23, batch 1650, loss[loss=0.158, simple_loss=0.2406, pruned_loss=0.03771, over 19584.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2531, pruned_loss=0.05219, over 3947929.98 frames. 
], batch size: 44, lr: 6.78e-03, grad_scale: 16.0 +2023-03-28 17:00:35,988 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9183, 3.0427, 4.7462, 4.1326, 4.6778, 4.6992, 4.5605, 4.4140], + device='cuda:2'), covar=tensor([0.0408, 0.0816, 0.0097, 0.0735, 0.0103, 0.0178, 0.0153, 0.0132], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0100, 0.0082, 0.0149, 0.0079, 0.0093, 0.0086, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 17:00:37,924 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42506.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:00:47,790 INFO [train.py:892] (2/4) Epoch 23, batch 1700, loss[loss=0.1694, simple_loss=0.2322, pruned_loss=0.05329, over 19773.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.253, pruned_loss=0.05178, over 3947513.17 frames. ], batch size: 169, lr: 6.77e-03, grad_scale: 16.0 +2023-03-28 17:01:09,673 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 4.052e+02 4.477e+02 5.328e+02 1.019e+03, threshold=8.953e+02, percent-clipped=3.0 +2023-03-28 17:01:39,164 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2152, 4.2188, 2.5876, 4.4584, 4.6015, 2.0521, 3.9035, 3.4968], + device='cuda:2'), covar=tensor([0.0619, 0.0807, 0.2666, 0.0794, 0.0589, 0.2693, 0.0907, 0.0795], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0247, 0.0224, 0.0258, 0.0236, 0.0199, 0.0232, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 17:02:36,480 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1823, 4.1347, 4.0641, 3.8655, 4.2580, 2.9274, 3.3898, 2.0602], + device='cuda:2'), covar=tensor([0.0323, 0.0252, 0.0228, 0.0235, 0.0249, 0.1173, 0.1061, 0.1928], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0139, 0.0110, 0.0129, 0.0114, 0.0130, 0.0139, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:02:39,538 INFO [train.py:892] (2/4) Epoch 23, batch 1750, loss[loss=0.1702, simple_loss=0.2576, pruned_loss=0.04136, over 19827.00 frames. ], tot_loss[loss=0.178, simple_loss=0.252, pruned_loss=0.05198, over 3948789.72 frames. ], batch size: 57, lr: 6.77e-03, grad_scale: 16.0 +2023-03-28 17:02:40,582 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. 
limit=2.0 +2023-03-28 17:02:43,395 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42562.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:02:52,557 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3073, 4.9327, 4.9645, 5.2516, 4.9733, 5.6006, 5.4502, 5.6457], + device='cuda:2'), covar=tensor([0.0608, 0.0302, 0.0392, 0.0308, 0.0660, 0.0273, 0.0354, 0.0242], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0169, 0.0193, 0.0166, 0.0166, 0.0149, 0.0144, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 17:02:52,678 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42567.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:03:21,361 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8496, 3.5232, 3.6085, 3.8320, 3.6037, 3.8583, 3.9618, 4.1279], + device='cuda:2'), covar=tensor([0.0718, 0.0503, 0.0615, 0.0401, 0.0803, 0.0583, 0.0492, 0.0348], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0170, 0.0194, 0.0167, 0.0167, 0.0150, 0.0145, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 17:04:07,778 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42604.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 17:04:19,975 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42610.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:04:21,415 INFO [train.py:892] (2/4) Epoch 23, batch 1800, loss[loss=0.3189, simple_loss=0.3787, pruned_loss=0.1296, over 19193.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2531, pruned_loss=0.05205, over 3946871.64 frames. 
], batch size: 452, lr: 6.77e-03, grad_scale: 16.0 +2023-03-28 17:04:37,540 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.634e+02 3.841e+02 4.565e+02 5.704e+02 1.447e+03, threshold=9.129e+02, percent-clipped=3.0 +2023-03-28 17:05:05,661 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5099, 4.5551, 5.0662, 4.5968, 4.2716, 4.9480, 4.7834, 5.2965], + device='cuda:2'), covar=tensor([0.1201, 0.0430, 0.0515, 0.0401, 0.0777, 0.0500, 0.0461, 0.0359], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0214, 0.0214, 0.0223, 0.0201, 0.0228, 0.0224, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:05:16,516 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3146, 5.5987, 5.8596, 5.7265, 5.5816, 5.4464, 5.4962, 5.4689], + device='cuda:2'), covar=tensor([0.1484, 0.1378, 0.0874, 0.1061, 0.0684, 0.0831, 0.1938, 0.1931], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0307, 0.0352, 0.0281, 0.0264, 0.0262, 0.0336, 0.0366], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:05:32,117 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42649.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:05:37,679 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42652.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 17:05:49,617 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2920, 4.8228, 4.9403, 4.7333, 5.2425, 3.1323, 4.1289, 2.3001], + device='cuda:2'), covar=tensor([0.0189, 0.0195, 0.0141, 0.0189, 0.0118, 0.1020, 0.1017, 0.1663], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0140, 0.0110, 0.0129, 0.0115, 0.0130, 0.0140, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:05:54,184 INFO [train.py:892] (2/4) Epoch 23, batch 1850, loss[loss=0.172, simple_loss=0.2537, pruned_loss=0.04513, over 19810.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2544, pruned_loss=0.05173, over 3947451.44 frames. ], batch size: 57, lr: 6.76e-03, grad_scale: 16.0 +2023-03-28 17:06:55,097 INFO [train.py:892] (2/4) Epoch 24, batch 0, loss[loss=0.2545, simple_loss=0.3277, pruned_loss=0.09061, over 19591.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3277, pruned_loss=0.09061, over 19591.00 frames. ], batch size: 387, lr: 6.62e-03, grad_scale: 16.0 +2023-03-28 17:06:55,097 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 17:07:24,173 INFO [train.py:926] (2/4) Epoch 24, validation: loss=0.1738, simple_loss=0.2478, pruned_loss=0.0499, over 2883724.00 frames. 
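A few notes on decoding the recurring line types in this log, with minimal Python sketches. These are hedged readings inferred from the logged numbers and the hyper-parameters printed at the top of the log; the exact implementations live in icefall's train.py, optim.py, zipformer.py and scaling.py and may differ in detail.

First, the per-batch `loss` is a weighted combination of the two transducer losses. With `simple_loss_scale=0.5` from the hyper-parameters, the logged values are consistent with `loss = 0.5 * simple_loss + pruned_loss` once training is well past `warm_step=2000` (e.g. 0.5 * 0.2549 + 0.05345 = 0.1809 for Epoch 23, batch 450). A sketch of the post-warm-up combination only; the warm-up schedule that down-weights the pruned term early on is not modelled here:

```python
def combined_loss(simple_loss: float, pruned_loss: float,
                  simple_loss_scale: float = 0.5) -> float:
    """Post-warm-up combination of the pruned-transducer losses.

    Matches the logged values in this section, all of which are far
    past warm_step=2000, where the early-training re-weighting no
    longer applies.
    """
    return simple_loss_scale * simple_loss + pruned_loss


# Epoch 23, batch 450 in the log: loss=0.1809, simple=0.2549, pruned=0.05345
assert abs(combined_loss(0.2549, 0.05345) - 0.1809) < 5e-4
```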
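`tot_loss` is not a plain epoch mean: its frame counts are fractional (e.g. `3944882.54 frames`) and saturate near 200 times the size of a typical ~19.7k-frame batch, which matches a decayed running aggregate driven by `reset_interval=200` from the hyper-parameters. A sketch of that interpretation; the exact decay form is an assumption, not taken from the source:

```python
def update_tot_loss(tot_frames: float, tot_loss_sum: float,
                    batch_frames: float, batch_loss_sum: float,
                    reset_interval: int = 200):
    """Decayed aggregate behind 'tot_loss ... over N frames'.

    In steady state tot_frames approaches reset_interval * batch_frames,
    which is why the logged counts level off around 3.9M frames.
    """
    decay = 1.0 - 1.0 / reset_interval
    tot_frames = tot_frames * decay + batch_frames
    tot_loss_sum = tot_loss_sum * decay + batch_loss_sum
    return tot_frames, tot_loss_sum, tot_loss_sum / tot_frames
```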
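The `Clipping_scale=2.0, grad-norm quartiles ...` lines from optim.py report the min/25%/50%/75%/max of recent total gradient norms. In every such line here the printed `threshold` equals `clipping_scale` times the middle quartile (e.g. 2.0 x 5.035e+02 = 1.007e+03 in the Epoch 23, batch 450 line), so the clipping threshold tracks the running median; `percent-clipped` is the fraction of recent steps whose norm exceeded it. A sketch of that rule, with the window bookkeeping simplified:

```python
import torch

def clip_factor(recent_norms: torch.Tensor, this_norm: float,
                clipping_scale: float = 2.0) -> float:
    """Scale factor applied to this step's gradient.

    recent_norms: 1-D float tensor holding a window of recent total
    gradient norms, from which the quartiles in the log are computed.
    """
    quartiles = torch.quantile(
        recent_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * quartiles[2].item()  # 2.0 * median
    return min(1.0, threshold / (this_norm + 1.0e-20))
```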
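The slowly decaying `lr` (6.88e-03 down to 6.48e-03 across this stretch, with a visible step down at the Epoch 23 to 24 boundary) is consistent with icefall's Eden schedule given `base_lr=0.05`, `lr_batches=5000`, `lr_epochs=3.5` from the hyper-parameters. A sketch; the epoch-counting convention used below is an assumption and may be off by one relative to the source:

```python
def eden_lr(base_lr: float, batch: float, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    """Eden-style schedule: independent quartic-root decay in both the
    global batch count and the epoch count."""
    return (base_lr
            * ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
            * ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25)


# ~6.81e-03 around batch_count 42000 during Epoch 23, matching the log
print(f"{eden_lr(0.05, 42000, 22):.2e}")
```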
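`grad_scale` is the fp16 dynamic loss scale (`use_fp16=True` in the hyper-parameters), not a tuning knob: it is grown after a run of overflow-free steps and halved whenever a step produces inf/nan gradients, which is why it moves between 8.0, 16.0 and 32.0 over the course of Epoch 24. The standard PyTorch mechanism, shown here with assumed grow/backoff settings chosen to reproduce that behaviour:

```python
import torch

# Constructing a scaler like this reproduces the 8 -> 16 -> 32 style
# movement of grad_scale in the log; the recipe's actual settings are
# an assumption here.
scaler = torch.cuda.amp.GradScaler(
    init_scale=16.0,      # matches the values seen in this log
    growth_factor=2.0,    # doubled after growth_interval clean steps
    backoff_factor=0.5,   # halved on inf/nan gradients
    growth_interval=2000,
)
```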
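The `zipformer.py:625` lines report stochastic layer skipping: each encoder stack has a warm-up window in `batch_count` units (`warmup_begin`/`warmup_end`, staggered per stack up to 4000.0), inside which whole layers are dropped at an elevated rate, with a small residual drop rate kept afterwards, hence the occasional `num_to_drop=1` entries here at batch_count ~42k, far past `warmup_end=4000`. A sketch with assumed rates; the real schedule in zipformer.py differs in detail:

```python
import random

def layers_to_drop(num_layers: int, batch_count: float,
                   warmup_end: float) -> set:
    """Stochastic depth: pick whole encoder layers to skip this batch."""
    # Assumed rates for illustration only: heavier dropping during the
    # stack's warm-up window, a small residual probability afterwards.
    p = 0.5 if batch_count < warmup_end else 0.075
    return {i for i in range(num_layers) if random.random() < p}
```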
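The `scaling.py:679` Whitening lines are sampled diagnostics from a constraint on feature covariance: a whiteness statistic is computed per channel group and compared to a limit (2.0 for the grouped `num_groups=8` layers here, 5.0 for the full-dimension `num_groups=1, num_channels=384` ones), and a penalty gradient is applied only when the metric exceeds its limit. One plausible form of the statistic, scale-invariant and equal to 1.0 for a perfectly white covariance; this form is an assumption, see scaling.py for the exact definition:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """x: (num_frames, num_channels) feature matrix."""
    n, c = x.shape
    k = c // num_groups
    xg = x.reshape(n, num_groups, k).transpose(0, 1)   # (groups, n, k)
    cov = torch.matmul(xg.transpose(1, 2), xg) / n     # (groups, k, k)
    tr_cov = torch.diagonal(cov, dim1=1, dim2=2).sum(dim=1)
    tr_cov_sq = (cov ** 2).sum(dim=(1, 2))             # trace(C @ C), C symmetric
    # k * trace(C^2) / trace(C)^2 is 1.0 when all eigenvalues are equal
    # and grows toward k as variance concentrates in one direction.
    return (k * tr_cov_sq / tr_cov ** 2).mean().item()
```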
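Finally, the recurring `attn_weights_entropy` dumps are per-head diagnostics: the entropy of each attention head's softmax weights, printed together with the running `covar`/`in_proj_covar`/`out_proj_covar` statistics of the associated projections. Heads whose entropy collapses toward 0 attend to single positions; uniformly high values suggest heads that have not specialized. A sketch of the entropy part only, under the assumption that the logged values are averaged over query positions:

```python
import torch

def head_entropy(attn: torch.Tensor) -> torch.Tensor:
    """attn: (num_heads, num_queries, num_keys), rows softmax-normalized.

    Returns the per-head entropy of the attention distribution,
    averaged over query positions.
    """
    p = attn.clamp_min(1.0e-20)
    return -(p * p.log()).sum(dim=-1).mean(dim=-1)  # shape: (num_heads,)
```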
+2023-03-28 17:07:24,174 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 17:07:25,251 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9000, 4.8499, 5.4286, 4.9149, 4.3332, 5.1750, 5.0778, 5.6218], + device='cuda:2'), covar=tensor([0.1033, 0.0379, 0.0346, 0.0378, 0.0706, 0.0446, 0.0463, 0.0288], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0215, 0.0214, 0.0224, 0.0201, 0.0229, 0.0224, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:09:11,918 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42710.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:09:24,509 INFO [train.py:892] (2/4) Epoch 24, batch 50, loss[loss=0.1715, simple_loss=0.2408, pruned_loss=0.0511, over 19771.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2537, pruned_loss=0.05378, over 890895.31 frames. ], batch size: 217, lr: 6.61e-03, grad_scale: 16.0 +2023-03-28 17:09:30,669 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.706e+02 3.984e+02 4.809e+02 5.646e+02 9.126e+02, threshold=9.617e+02, percent-clipped=0.0 +2023-03-28 17:10:09,823 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42735.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:11:17,049 INFO [train.py:892] (2/4) Epoch 24, batch 100, loss[loss=0.1673, simple_loss=0.2399, pruned_loss=0.04732, over 19797.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2487, pruned_loss=0.05055, over 1569224.25 frames. ], batch size: 67, lr: 6.61e-03, grad_scale: 16.0 +2023-03-28 17:11:39,376 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.48 vs. limit=5.0 +2023-03-28 17:12:45,592 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7092, 3.7302, 2.1840, 3.9448, 4.0051, 1.7655, 3.2435, 3.1129], + device='cuda:2'), covar=tensor([0.0693, 0.0887, 0.2960, 0.0872, 0.0678, 0.2901, 0.1174, 0.0851], + device='cuda:2'), in_proj_covar=tensor([0.0224, 0.0248, 0.0225, 0.0258, 0.0236, 0.0199, 0.0233, 0.0187], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 17:13:12,236 INFO [train.py:892] (2/4) Epoch 24, batch 150, loss[loss=0.1748, simple_loss=0.2391, pruned_loss=0.05522, over 19827.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.248, pruned_loss=0.05012, over 2097508.79 frames. 
], batch size: 202, lr: 6.60e-03, grad_scale: 16.0 +2023-03-28 17:13:19,949 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.303e+02 3.674e+02 4.384e+02 5.229e+02 7.320e+02, threshold=8.767e+02, percent-clipped=0.0 +2023-03-28 17:14:17,385 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9132, 2.1623, 1.9500, 1.3268, 2.0042, 2.1289, 1.9677, 2.0606], + device='cuda:2'), covar=tensor([0.0345, 0.0238, 0.0299, 0.0554, 0.0386, 0.0255, 0.0252, 0.0270], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0083, 0.0089, 0.0093, 0.0096, 0.0074, 0.0073, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 17:14:33,831 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3974, 3.5036, 2.2998, 4.2131, 3.8073, 4.1323, 4.2034, 3.1226], + device='cuda:2'), covar=tensor([0.0626, 0.0571, 0.1446, 0.0564, 0.0528, 0.0421, 0.0491, 0.0800], + device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0136, 0.0140, 0.0142, 0.0125, 0.0124, 0.0138, 0.0140], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:14:41,089 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9166, 4.7771, 5.3251, 4.8008, 4.3131, 5.1437, 4.9749, 5.5376], + device='cuda:2'), covar=tensor([0.0953, 0.0371, 0.0338, 0.0359, 0.0766, 0.0374, 0.0353, 0.0267], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0215, 0.0214, 0.0224, 0.0202, 0.0228, 0.0223, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:14:53,503 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4173, 5.8526, 5.8697, 5.7142, 5.5637, 5.8017, 5.2140, 5.2936], + device='cuda:2'), covar=tensor([0.0386, 0.0341, 0.0359, 0.0370, 0.0453, 0.0470, 0.0631, 0.0896], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0261, 0.0280, 0.0240, 0.0244, 0.0232, 0.0250, 0.0297], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:14:53,582 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42859.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:15:01,982 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42862.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:15:09,292 INFO [train.py:892] (2/4) Epoch 24, batch 200, loss[loss=0.1597, simple_loss=0.2293, pruned_loss=0.04502, over 19741.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2489, pruned_loss=0.05001, over 2507545.47 frames. 
], batch size: 134, lr: 6.60e-03, grad_scale: 16.0 +2023-03-28 17:15:39,937 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42879.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:16:57,870 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.6342, 1.8314, 1.6951, 1.0633, 1.6804, 1.7858, 1.7078, 1.7556], + device='cuda:2'), covar=tensor([0.0365, 0.0254, 0.0307, 0.0558, 0.0444, 0.0251, 0.0270, 0.0254], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0084, 0.0090, 0.0094, 0.0097, 0.0074, 0.0074, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 17:17:07,498 INFO [train.py:892] (2/4) Epoch 24, batch 250, loss[loss=0.1859, simple_loss=0.2585, pruned_loss=0.0567, over 19786.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.251, pruned_loss=0.05141, over 2826918.10 frames. ], batch size: 247, lr: 6.60e-03, grad_scale: 16.0 +2023-03-28 17:17:15,130 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.878e+02 4.086e+02 4.768e+02 5.713e+02 1.218e+03, threshold=9.536e+02, percent-clipped=1.0 +2023-03-28 17:17:18,085 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42920.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:18:08,680 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42940.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:19:11,293 INFO [train.py:892] (2/4) Epoch 24, batch 300, loss[loss=0.2028, simple_loss=0.273, pruned_loss=0.06628, over 19852.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.25, pruned_loss=0.05048, over 3075231.13 frames. ], batch size: 64, lr: 6.59e-03, grad_scale: 16.0 +2023-03-28 17:19:28,651 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4385, 3.3425, 3.7052, 3.3864, 3.1852, 3.6180, 3.4986, 3.7558], + device='cuda:2'), covar=tensor([0.0818, 0.0368, 0.0390, 0.0403, 0.1543, 0.0561, 0.0449, 0.0369], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0215, 0.0214, 0.0225, 0.0202, 0.0229, 0.0224, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:20:47,289 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43005.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:21:09,374 INFO [train.py:892] (2/4) Epoch 24, batch 350, loss[loss=0.1657, simple_loss=0.2461, pruned_loss=0.04265, over 19727.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.249, pruned_loss=0.05001, over 3269452.26 frames. ], batch size: 52, lr: 6.59e-03, grad_scale: 16.0 +2023-03-28 17:21:15,713 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 4.222e+02 4.669e+02 5.474e+02 9.136e+02, threshold=9.338e+02, percent-clipped=0.0 +2023-03-28 17:21:58,418 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43035.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:23:09,365 INFO [train.py:892] (2/4) Epoch 24, batch 400, loss[loss=0.1686, simple_loss=0.2429, pruned_loss=0.04712, over 19844.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2493, pruned_loss=0.0498, over 3421056.78 frames. 
], batch size: 142, lr: 6.59e-03, grad_scale: 16.0 +2023-03-28 17:23:55,616 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43083.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:25:13,654 INFO [train.py:892] (2/4) Epoch 24, batch 450, loss[loss=0.1768, simple_loss=0.2574, pruned_loss=0.04813, over 19828.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2506, pruned_loss=0.05031, over 3538294.62 frames. ], batch size: 93, lr: 6.58e-03, grad_scale: 16.0 +2023-03-28 17:25:20,844 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.554e+02 4.148e+02 4.850e+02 5.814e+02 7.870e+02, threshold=9.701e+02, percent-clipped=0.0 +2023-03-28 17:27:01,448 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43162.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:27:08,688 INFO [train.py:892] (2/4) Epoch 24, batch 500, loss[loss=0.1666, simple_loss=0.2345, pruned_loss=0.04935, over 19823.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2515, pruned_loss=0.05098, over 3627040.69 frames. ], batch size: 146, lr: 6.58e-03, grad_scale: 16.0 +2023-03-28 17:28:57,139 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43210.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:28:59,701 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.55 vs. limit=5.0 +2023-03-28 17:29:09,439 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43215.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:29:10,719 INFO [train.py:892] (2/4) Epoch 24, batch 550, loss[loss=0.1529, simple_loss=0.2202, pruned_loss=0.04278, over 19703.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2513, pruned_loss=0.05103, over 3699040.18 frames. ], batch size: 46, lr: 6.57e-03, grad_scale: 16.0 +2023-03-28 17:29:18,520 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.622e+02 3.772e+02 4.624e+02 5.545e+02 8.960e+02, threshold=9.249e+02, percent-clipped=0.0 +2023-03-28 17:29:59,349 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43235.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:30:01,487 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3737, 4.9049, 4.9971, 4.6692, 5.2363, 3.3134, 4.0683, 2.5765], + device='cuda:2'), covar=tensor([0.0153, 0.0178, 0.0139, 0.0197, 0.0152, 0.0871, 0.1020, 0.1572], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0142, 0.0111, 0.0131, 0.0117, 0.0132, 0.0143, 0.0124], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:31:16,248 INFO [train.py:892] (2/4) Epoch 24, batch 600, loss[loss=0.1607, simple_loss=0.2351, pruned_loss=0.04318, over 19797.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2505, pruned_loss=0.05098, over 3754954.01 frames. 
], batch size: 191, lr: 6.57e-03, grad_scale: 16.0 +2023-03-28 17:31:53,637 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1019, 5.0001, 5.5567, 5.0583, 4.3465, 5.3144, 5.2204, 5.7229], + device='cuda:2'), covar=tensor([0.0821, 0.0331, 0.0302, 0.0295, 0.0635, 0.0370, 0.0322, 0.0270], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0216, 0.0214, 0.0226, 0.0201, 0.0228, 0.0224, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:32:50,179 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43305.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:32:54,333 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7934, 3.0916, 3.1734, 3.7120, 2.5869, 3.2101, 2.4331, 2.4112], + device='cuda:2'), covar=tensor([0.0525, 0.1845, 0.1067, 0.0399, 0.2010, 0.0765, 0.1312, 0.1685], + device='cuda:2'), in_proj_covar=tensor([0.0233, 0.0332, 0.0242, 0.0190, 0.0242, 0.0199, 0.0210, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 17:33:14,109 INFO [train.py:892] (2/4) Epoch 24, batch 650, loss[loss=0.1782, simple_loss=0.2593, pruned_loss=0.04858, over 19802.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2496, pruned_loss=0.05066, over 3798539.42 frames. ], batch size: 51, lr: 6.57e-03, grad_scale: 16.0 +2023-03-28 17:33:20,463 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.750e+02 3.990e+02 4.608e+02 6.131e+02 1.046e+03, threshold=9.216e+02, percent-clipped=2.0 +2023-03-28 17:34:03,970 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8238, 3.8178, 2.3326, 4.0669, 4.1764, 1.8193, 3.3806, 3.2167], + device='cuda:2'), covar=tensor([0.0764, 0.1157, 0.2792, 0.1007, 0.0620, 0.2922, 0.1156, 0.0872], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0248, 0.0226, 0.0260, 0.0237, 0.0199, 0.0233, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 17:34:34,768 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43353.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:35:00,965 INFO [train.py:892] (2/4) Epoch 24, batch 700, loss[loss=0.1637, simple_loss=0.2359, pruned_loss=0.04573, over 19802.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2499, pruned_loss=0.05068, over 3832969.64 frames. 
], batch size: 195, lr: 6.56e-03, grad_scale: 16.0 +2023-03-28 17:35:09,740 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2630, 2.5488, 2.3397, 1.7821, 2.4344, 2.6047, 2.5173, 2.4698], + device='cuda:2'), covar=tensor([0.0383, 0.0320, 0.0303, 0.0598, 0.0379, 0.0282, 0.0256, 0.0287], + device='cuda:2'), in_proj_covar=tensor([0.0091, 0.0085, 0.0091, 0.0095, 0.0098, 0.0075, 0.0075, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 17:36:10,998 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0964, 2.5805, 4.0843, 3.5827, 3.9721, 4.0798, 3.8838, 3.8491], + device='cuda:2'), covar=tensor([0.0533, 0.0869, 0.0110, 0.0619, 0.0145, 0.0199, 0.0168, 0.0164], + device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0100, 0.0083, 0.0150, 0.0080, 0.0094, 0.0087, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:37:05,660 INFO [train.py:892] (2/4) Epoch 24, batch 750, loss[loss=0.1878, simple_loss=0.2689, pruned_loss=0.05332, over 19781.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2508, pruned_loss=0.0512, over 3859320.28 frames. ], batch size: 42, lr: 6.56e-03, grad_scale: 16.0 +2023-03-28 17:37:13,196 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.432e+02 3.941e+02 4.702e+02 5.595e+02 1.048e+03, threshold=9.403e+02, percent-clipped=2.0 +2023-03-28 17:37:26,715 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9318, 3.5701, 3.7189, 3.9138, 3.5876, 3.8295, 4.0047, 4.1668], + device='cuda:2'), covar=tensor([0.0595, 0.0444, 0.0491, 0.0360, 0.0694, 0.0569, 0.0392, 0.0308], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0169, 0.0191, 0.0165, 0.0164, 0.0146, 0.0142, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 17:37:42,449 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-28 17:39:06,659 INFO [train.py:892] (2/4) Epoch 24, batch 800, loss[loss=0.1688, simple_loss=0.2481, pruned_loss=0.04474, over 19768.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2506, pruned_loss=0.05117, over 3880553.67 frames. ], batch size: 69, lr: 6.56e-03, grad_scale: 16.0 +2023-03-28 17:41:04,073 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43514.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:41:05,992 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43515.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:41:06,978 INFO [train.py:892] (2/4) Epoch 24, batch 850, loss[loss=0.1937, simple_loss=0.265, pruned_loss=0.0612, over 19821.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2514, pruned_loss=0.05135, over 3896166.59 frames. 
], batch size: 184, lr: 6.55e-03, grad_scale: 16.0 +2023-03-28 17:41:14,150 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.358e+02 3.878e+02 4.715e+02 5.531e+02 7.871e+02, threshold=9.429e+02, percent-clipped=0.0 +2023-03-28 17:41:52,098 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43535.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:43:00,646 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43563.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:43:06,588 INFO [train.py:892] (2/4) Epoch 24, batch 900, loss[loss=0.1779, simple_loss=0.2438, pruned_loss=0.05606, over 19817.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2511, pruned_loss=0.05105, over 3906837.65 frames. ], batch size: 133, lr: 6.55e-03, grad_scale: 16.0 +2023-03-28 17:43:07,741 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3023, 4.0533, 4.1314, 4.3613, 4.2378, 4.5570, 4.3104, 4.4447], + device='cuda:2'), covar=tensor([0.0929, 0.0548, 0.0706, 0.0484, 0.0769, 0.0539, 0.0688, 0.0747], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0171, 0.0194, 0.0167, 0.0166, 0.0149, 0.0144, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 17:43:27,466 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43575.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:43:45,945 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43583.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:45:04,121 INFO [train.py:892] (2/4) Epoch 24, batch 950, loss[loss=0.1756, simple_loss=0.2517, pruned_loss=0.04978, over 19875.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2519, pruned_loss=0.05116, over 3916629.94 frames. ], batch size: 159, lr: 6.54e-03, grad_scale: 16.0 +2023-03-28 17:45:11,471 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.960e+02 4.203e+02 4.948e+02 5.601e+02 1.021e+03, threshold=9.897e+02, percent-clipped=1.0 +2023-03-28 17:45:47,415 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7602, 3.6929, 4.0324, 3.6915, 3.5126, 3.9249, 3.7649, 4.1051], + device='cuda:2'), covar=tensor([0.0786, 0.0328, 0.0356, 0.0356, 0.1045, 0.0491, 0.0427, 0.0338], + device='cuda:2'), in_proj_covar=tensor([0.0271, 0.0213, 0.0211, 0.0222, 0.0200, 0.0226, 0.0221, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:47:04,242 INFO [train.py:892] (2/4) Epoch 24, batch 1000, loss[loss=0.1802, simple_loss=0.263, pruned_loss=0.0487, over 19603.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2513, pruned_loss=0.05086, over 3924696.70 frames. ], batch size: 50, lr: 6.54e-03, grad_scale: 16.0 +2023-03-28 17:49:06,756 INFO [train.py:892] (2/4) Epoch 24, batch 1050, loss[loss=0.1612, simple_loss=0.239, pruned_loss=0.04167, over 19853.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2524, pruned_loss=0.05137, over 3930377.89 frames. ], batch size: 85, lr: 6.54e-03, grad_scale: 32.0 +2023-03-28 17:49:14,073 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.754e+02 4.065e+02 4.703e+02 5.528e+02 1.039e+03, threshold=9.406e+02, percent-clipped=2.0 +2023-03-28 17:51:07,531 INFO [train.py:892] (2/4) Epoch 24, batch 1100, loss[loss=0.1882, simple_loss=0.2675, pruned_loss=0.05449, over 19875.00 frames. 
], tot_loss[loss=0.1789, simple_loss=0.2535, pruned_loss=0.05221, over 3932268.57 frames. ], batch size: 46, lr: 6.53e-03, grad_scale: 16.0 +2023-03-28 17:51:32,664 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3401, 3.4949, 2.1500, 4.2469, 3.7047, 4.1532, 4.1616, 3.1239], + device='cuda:2'), covar=tensor([0.0604, 0.0550, 0.1397, 0.0491, 0.0545, 0.0359, 0.0649, 0.0819], + device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0138, 0.0140, 0.0143, 0.0127, 0.0127, 0.0140, 0.0141], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:51:38,309 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4330, 4.3341, 4.7416, 4.3363, 3.9569, 4.5948, 4.4014, 4.8497], + device='cuda:2'), covar=tensor([0.0816, 0.0339, 0.0366, 0.0381, 0.0980, 0.0454, 0.0432, 0.0322], + device='cuda:2'), in_proj_covar=tensor([0.0272, 0.0213, 0.0214, 0.0224, 0.0201, 0.0227, 0.0223, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 17:51:56,973 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43787.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:52:23,042 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6339, 3.6749, 2.3401, 3.8287, 3.9528, 1.8459, 3.2926, 3.1040], + device='cuda:2'), covar=tensor([0.0747, 0.0993, 0.2642, 0.0931, 0.0633, 0.2673, 0.1129, 0.0811], + device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0251, 0.0228, 0.0262, 0.0241, 0.0201, 0.0235, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 17:53:09,050 INFO [train.py:892] (2/4) Epoch 24, batch 1150, loss[loss=0.1873, simple_loss=0.2516, pruned_loss=0.06148, over 19815.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2533, pruned_loss=0.05239, over 3935222.69 frames. ], batch size: 229, lr: 6.53e-03, grad_scale: 16.0 +2023-03-28 17:53:19,342 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.514e+02 3.854e+02 4.753e+02 5.981e+02 1.175e+03, threshold=9.505e+02, percent-clipped=4.0 +2023-03-28 17:54:27,323 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43848.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:55:06,384 INFO [train.py:892] (2/4) Epoch 24, batch 1200, loss[loss=0.153, simple_loss=0.2243, pruned_loss=0.0409, over 19792.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2532, pruned_loss=0.05234, over 3938313.34 frames. 
], batch size: 151, lr: 6.53e-03, grad_scale: 16.0 +2023-03-28 17:55:15,602 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43870.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:56:17,595 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5114, 3.5925, 2.1968, 3.7153, 3.8221, 1.7923, 3.1719, 2.9193], + device='cuda:2'), covar=tensor([0.0816, 0.0904, 0.2846, 0.0854, 0.0656, 0.2861, 0.1163, 0.0946], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0251, 0.0228, 0.0262, 0.0241, 0.0202, 0.0235, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 17:56:47,541 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43909.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:57:03,655 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1191, 2.8351, 3.3062, 2.9917, 3.3735, 3.2584, 4.0043, 4.3300], + device='cuda:2'), covar=tensor([0.0629, 0.1883, 0.1624, 0.1973, 0.1730, 0.1641, 0.0610, 0.0568], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0237, 0.0260, 0.0248, 0.0289, 0.0250, 0.0220, 0.0243], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 17:57:04,534 INFO [train.py:892] (2/4) Epoch 24, batch 1250, loss[loss=0.1647, simple_loss=0.2304, pruned_loss=0.04949, over 19826.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2519, pruned_loss=0.05187, over 3942159.95 frames. ], batch size: 121, lr: 6.52e-03, grad_scale: 8.0 +2023-03-28 17:57:16,403 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.338e+02 3.868e+02 4.714e+02 5.719e+02 9.399e+02, threshold=9.429e+02, percent-clipped=0.0 +2023-03-28 17:58:46,081 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43959.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:59:01,801 INFO [train.py:892] (2/4) Epoch 24, batch 1300, loss[loss=0.1695, simple_loss=0.2515, pruned_loss=0.04368, over 19893.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2508, pruned_loss=0.05126, over 3944710.38 frames. ], batch size: 91, lr: 6.52e-03, grad_scale: 8.0 +2023-03-28 17:59:13,339 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43970.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:59:19,246 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.63 vs. limit=5.0 +2023-03-28 18:00:59,568 INFO [train.py:892] (2/4) Epoch 24, batch 1350, loss[loss=0.1553, simple_loss=0.2317, pruned_loss=0.03948, over 19699.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2509, pruned_loss=0.05103, over 3946178.04 frames. ], batch size: 75, lr: 6.52e-03, grad_scale: 8.0 +2023-03-28 18:01:08,795 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44020.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:01:09,753 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.867e+02 4.545e+02 5.331e+02 8.729e+02, threshold=9.091e+02, percent-clipped=0.0 +2023-03-28 18:02:45,797 INFO [train.py:892] (2/4) Epoch 24, batch 1400, loss[loss=0.1744, simple_loss=0.2605, pruned_loss=0.04409, over 19857.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2499, pruned_loss=0.05056, over 3945906.19 frames. ], batch size: 58, lr: 6.51e-03, grad_scale: 8.0 +2023-03-28 18:03:36,431 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. 
limit=2.0 +2023-03-28 18:04:35,900 INFO [train.py:892] (2/4) Epoch 24, batch 1450, loss[loss=0.2438, simple_loss=0.3115, pruned_loss=0.08803, over 19446.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2513, pruned_loss=0.05077, over 3944139.19 frames. ], batch size: 396, lr: 6.51e-03, grad_scale: 8.0 +2023-03-28 18:04:46,668 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.585e+02 3.832e+02 4.632e+02 5.399e+02 1.168e+03, threshold=9.265e+02, percent-clipped=4.0 +2023-03-28 18:05:40,623 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44143.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:06:31,947 INFO [train.py:892] (2/4) Epoch 24, batch 1500, loss[loss=0.1486, simple_loss=0.2208, pruned_loss=0.03817, over 19803.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2505, pruned_loss=0.05021, over 3946232.43 frames. ], batch size: 211, lr: 6.50e-03, grad_scale: 8.0 +2023-03-28 18:06:40,968 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44170.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:08:09,488 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.62 vs. limit=5.0 +2023-03-28 18:08:30,427 INFO [train.py:892] (2/4) Epoch 24, batch 1550, loss[loss=0.2586, simple_loss=0.3374, pruned_loss=0.08988, over 19267.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2502, pruned_loss=0.05009, over 3947205.24 frames. ], batch size: 483, lr: 6.50e-03, grad_scale: 8.0 +2023-03-28 18:08:35,834 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44218.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:08:41,917 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 4.103e+02 5.047e+02 5.818e+02 1.108e+03, threshold=1.009e+03, percent-clipped=1.0 +2023-03-28 18:09:29,681 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-28 18:09:57,803 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1799, 2.8572, 4.9862, 4.1044, 4.6622, 4.9133, 4.8022, 4.4787], + device='cuda:2'), covar=tensor([0.0369, 0.0930, 0.0103, 0.0957, 0.0153, 0.0183, 0.0149, 0.0169], + device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0101, 0.0082, 0.0150, 0.0080, 0.0094, 0.0087, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:10:28,858 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44265.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:10:30,420 INFO [train.py:892] (2/4) Epoch 24, batch 1600, loss[loss=0.2662, simple_loss=0.3452, pruned_loss=0.09361, over 19469.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2496, pruned_loss=0.04964, over 3948252.51 frames. 
], batch size: 396, lr: 6.50e-03, grad_scale: 8.0 +2023-03-28 18:11:32,082 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8666, 2.1912, 3.5448, 2.9736, 3.5742, 3.6154, 3.4154, 3.4014], + device='cuda:2'), covar=tensor([0.0544, 0.1068, 0.0126, 0.0557, 0.0137, 0.0224, 0.0210, 0.0194], + device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0101, 0.0082, 0.0151, 0.0080, 0.0094, 0.0087, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:11:56,364 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1587, 4.1752, 2.4688, 4.4192, 4.5612, 2.0058, 3.8491, 3.2859], + device='cuda:2'), covar=tensor([0.0660, 0.0860, 0.2835, 0.0757, 0.0592, 0.2823, 0.0981, 0.0880], + device='cuda:2'), in_proj_covar=tensor([0.0224, 0.0249, 0.0227, 0.0262, 0.0239, 0.0200, 0.0233, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 18:12:24,888 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44315.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:12:26,295 INFO [train.py:892] (2/4) Epoch 24, batch 1650, loss[loss=0.1567, simple_loss=0.2333, pruned_loss=0.04004, over 19856.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2497, pruned_loss=0.04999, over 3946969.77 frames. ], batch size: 104, lr: 6.49e-03, grad_scale: 8.0 +2023-03-28 18:12:36,792 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.188e+02 4.041e+02 4.652e+02 5.558e+02 9.682e+02, threshold=9.304e+02, percent-clipped=0.0 +2023-03-28 18:14:04,318 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4454, 4.3678, 4.8000, 4.4013, 4.0730, 4.5525, 4.4382, 4.9026], + device='cuda:2'), covar=tensor([0.0859, 0.0320, 0.0340, 0.0360, 0.0857, 0.0477, 0.0438, 0.0266], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0218, 0.0217, 0.0228, 0.0205, 0.0232, 0.0226, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:14:21,543 INFO [train.py:892] (2/4) Epoch 24, batch 1700, loss[loss=0.2018, simple_loss=0.2804, pruned_loss=0.06162, over 19747.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2505, pruned_loss=0.05035, over 3948495.33 frames. ], batch size: 276, lr: 6.49e-03, grad_scale: 8.0 +2023-03-28 18:14:54,463 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6872, 5.0160, 5.0482, 4.9457, 4.7132, 5.0141, 4.5172, 4.5188], + device='cuda:2'), covar=tensor([0.0465, 0.0460, 0.0516, 0.0427, 0.0581, 0.0550, 0.0674, 0.0960], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0259, 0.0279, 0.0240, 0.0243, 0.0234, 0.0250, 0.0295], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:16:13,210 INFO [train.py:892] (2/4) Epoch 24, batch 1750, loss[loss=0.1392, simple_loss=0.2187, pruned_loss=0.02987, over 19739.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.25, pruned_loss=0.04993, over 3948225.85 frames. 
], batch size: 95, lr: 6.49e-03, grad_scale: 8.0 +2023-03-28 18:16:22,189 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.732e+02 4.086e+02 4.781e+02 5.963e+02 1.155e+03, threshold=9.562e+02, percent-clipped=1.0 +2023-03-28 18:17:12,495 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44443.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:17:56,847 INFO [train.py:892] (2/4) Epoch 24, batch 1800, loss[loss=0.2133, simple_loss=0.2871, pruned_loss=0.06979, over 19643.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2497, pruned_loss=0.05012, over 3949651.81 frames. ], batch size: 330, lr: 6.48e-03, grad_scale: 8.0 +2023-03-28 18:18:42,768 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44491.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:19:13,693 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0480, 3.3102, 2.8635, 2.5074, 2.9417, 3.2778, 3.2044, 3.2235], + device='cuda:2'), covar=tensor([0.0280, 0.0245, 0.0253, 0.0438, 0.0319, 0.0267, 0.0183, 0.0188], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0087, 0.0093, 0.0096, 0.0098, 0.0077, 0.0076, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 18:19:30,405 INFO [train.py:892] (2/4) Epoch 24, batch 1850, loss[loss=0.162, simple_loss=0.2453, pruned_loss=0.0393, over 19684.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2513, pruned_loss=0.05005, over 3949978.23 frames. ], batch size: 55, lr: 6.48e-03, grad_scale: 8.0 +2023-03-28 18:20:24,348 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 4.242e+02 4.967e+02 6.010e+02 1.010e+03, threshold=9.934e+02, percent-clipped=1.0 +2023-03-28 18:20:24,373 INFO [train.py:892] (2/4) Epoch 25, batch 0, loss[loss=0.1679, simple_loss=0.2335, pruned_loss=0.05119, over 19817.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2335, pruned_loss=0.05119, over 19817.00 frames. ], batch size: 133, lr: 6.35e-03, grad_scale: 8.0 +2023-03-28 18:20:24,373 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 18:20:47,697 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5553, 4.0186, 3.8772, 3.9080, 3.9415, 3.9075, 3.8267, 3.5843], + device='cuda:2'), covar=tensor([0.2184, 0.1246, 0.1593, 0.1311, 0.1237, 0.0907, 0.1747, 0.2412], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0310, 0.0354, 0.0283, 0.0264, 0.0261, 0.0340, 0.0370], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:20:53,332 INFO [train.py:926] (2/4) Epoch 25, validation: loss=0.1751, simple_loss=0.2485, pruned_loss=0.05079, over 2883724.00 frames. +2023-03-28 18:20:53,333 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 18:21:07,847 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44527.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:21:57,564 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44550.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:22:06,408 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.80 vs. 
limit=5.0 +2023-03-28 18:22:33,079 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44565.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:22:45,067 INFO [train.py:892] (2/4) Epoch 25, batch 50, loss[loss=0.188, simple_loss=0.264, pruned_loss=0.05605, over 19701.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2407, pruned_loss=0.04562, over 892382.82 frames. ], batch size: 48, lr: 6.34e-03, grad_scale: 8.0 +2023-03-28 18:23:23,617 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44588.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:23:56,792 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0634, 5.2467, 5.4608, 5.1940, 5.3205, 4.9333, 5.1597, 4.9600], + device='cuda:2'), covar=tensor([0.1328, 0.1221, 0.0892, 0.1176, 0.0666, 0.0829, 0.1895, 0.1930], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0311, 0.0356, 0.0284, 0.0266, 0.0263, 0.0340, 0.0371], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:24:18,594 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44611.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:24:24,367 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44613.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:24:29,315 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44615.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:24:45,061 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.512e+02 3.985e+02 4.536e+02 5.355e+02 8.886e+02, threshold=9.072e+02, percent-clipped=0.0 +2023-03-28 18:24:45,088 INFO [train.py:892] (2/4) Epoch 25, batch 100, loss[loss=0.1916, simple_loss=0.265, pruned_loss=0.05912, over 19785.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2486, pruned_loss=0.04872, over 1567033.34 frames. ], batch size: 69, lr: 6.34e-03, grad_scale: 8.0 +2023-03-28 18:26:30,247 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44663.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:26:47,938 INFO [train.py:892] (2/4) Epoch 25, batch 150, loss[loss=0.1568, simple_loss=0.2389, pruned_loss=0.03733, over 19610.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2494, pruned_loss=0.04911, over 2095959.58 frames. ], batch size: 46, lr: 6.33e-03, grad_scale: 8.0 +2023-03-28 18:26:51,051 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0848, 4.0322, 4.4549, 4.1025, 3.8371, 4.3377, 4.0929, 4.5646], + device='cuda:2'), covar=tensor([0.1120, 0.0464, 0.0499, 0.0477, 0.1024, 0.0605, 0.0602, 0.0442], + device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0218, 0.0218, 0.0228, 0.0205, 0.0231, 0.0227, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:28:52,029 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 4.007e+02 4.679e+02 6.121e+02 1.422e+03, threshold=9.359e+02, percent-clipped=5.0 +2023-03-28 18:28:52,059 INFO [train.py:892] (2/4) Epoch 25, batch 200, loss[loss=0.1597, simple_loss=0.2383, pruned_loss=0.04056, over 19921.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2492, pruned_loss=0.04955, over 2506979.16 frames. 
], batch size: 45, lr: 6.33e-03, grad_scale: 8.0 +2023-03-28 18:29:36,184 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8812, 3.0861, 3.0733, 3.0636, 2.7837, 3.0271, 2.9049, 2.9828], + device='cuda:2'), covar=tensor([0.0282, 0.0247, 0.0308, 0.0259, 0.0395, 0.0291, 0.0315, 0.0412], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0071, 0.0073, 0.0068, 0.0081, 0.0074, 0.0091, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 18:30:53,945 INFO [train.py:892] (2/4) Epoch 25, batch 250, loss[loss=0.1596, simple_loss=0.2312, pruned_loss=0.04402, over 19796.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2479, pruned_loss=0.04882, over 2827216.75 frames. ], batch size: 211, lr: 6.33e-03, grad_scale: 8.0 +2023-03-28 18:31:09,299 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1868, 4.8588, 4.8786, 5.1517, 4.7445, 5.3609, 5.3160, 5.4758], + device='cuda:2'), covar=tensor([0.0669, 0.0378, 0.0459, 0.0351, 0.0751, 0.0371, 0.0396, 0.0282], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0170, 0.0193, 0.0168, 0.0168, 0.0149, 0.0146, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 18:31:40,999 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3857, 1.8686, 2.0863, 2.6172, 2.8919, 2.9631, 2.8554, 3.0096], + device='cuda:2'), covar=tensor([0.0982, 0.1778, 0.1401, 0.0770, 0.0504, 0.0415, 0.0491, 0.0438], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0167, 0.0172, 0.0146, 0.0128, 0.0125, 0.0118, 0.0110], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 18:32:57,529 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.592e+02 3.844e+02 4.514e+02 5.418e+02 9.178e+02, threshold=9.028e+02, percent-clipped=0.0 +2023-03-28 18:32:57,560 INFO [train.py:892] (2/4) Epoch 25, batch 300, loss[loss=0.2125, simple_loss=0.2824, pruned_loss=0.07128, over 19753.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.249, pruned_loss=0.04968, over 3075520.06 frames. ], batch size: 233, lr: 6.32e-03, grad_scale: 8.0 +2023-03-28 18:33:47,843 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7239, 4.3778, 4.4727, 4.7350, 4.3992, 4.8788, 4.8507, 5.0127], + device='cuda:2'), covar=tensor([0.0667, 0.0423, 0.0440, 0.0342, 0.0673, 0.0393, 0.0393, 0.0294], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0171, 0.0194, 0.0169, 0.0169, 0.0150, 0.0146, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 18:35:01,674 INFO [train.py:892] (2/4) Epoch 25, batch 350, loss[loss=0.1814, simple_loss=0.2514, pruned_loss=0.05564, over 19847.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2506, pruned_loss=0.05017, over 3268239.45 frames. 
], batch size: 43, lr: 6.32e-03, grad_scale: 8.0 +2023-03-28 18:35:28,935 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44883.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:36:26,420 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:36:54,307 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2857, 5.7095, 5.9061, 5.6766, 5.5429, 5.4558, 5.5315, 5.4033], + device='cuda:2'), covar=tensor([0.1544, 0.0974, 0.0730, 0.1038, 0.0695, 0.0769, 0.1751, 0.1842], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0309, 0.0354, 0.0285, 0.0264, 0.0262, 0.0339, 0.0370], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:36:57,356 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-28 18:37:01,642 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.886e+02 4.144e+02 4.807e+02 5.961e+02 1.159e+03, threshold=9.615e+02, percent-clipped=3.0 +2023-03-28 18:37:01,669 INFO [train.py:892] (2/4) Epoch 25, batch 400, loss[loss=0.1701, simple_loss=0.2489, pruned_loss=0.04563, over 19892.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2482, pruned_loss=0.04901, over 3420306.21 frames. ], batch size: 87, lr: 6.32e-03, grad_scale: 8.0 +2023-03-28 18:38:49,456 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3782, 2.4516, 2.5892, 2.4869, 2.3389, 2.5630, 2.3887, 2.5637], + device='cuda:2'), covar=tensor([0.0311, 0.0283, 0.0240, 0.0237, 0.0399, 0.0270, 0.0400, 0.0305], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0071, 0.0073, 0.0067, 0.0081, 0.0074, 0.0092, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 18:39:03,904 INFO [train.py:892] (2/4) Epoch 25, batch 450, loss[loss=0.182, simple_loss=0.2557, pruned_loss=0.0542, over 19780.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2488, pruned_loss=0.04916, over 3535998.05 frames. ], batch size: 215, lr: 6.31e-03, grad_scale: 8.0 +2023-03-28 18:40:59,618 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.643e+02 4.015e+02 4.807e+02 5.728e+02 9.598e+02, threshold=9.613e+02, percent-clipped=0.0 +2023-03-28 18:40:59,647 INFO [train.py:892] (2/4) Epoch 25, batch 500, loss[loss=0.1713, simple_loss=0.2523, pruned_loss=0.04514, over 19897.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2503, pruned_loss=0.04981, over 3627748.65 frames. ], batch size: 87, lr: 6.31e-03, grad_scale: 8.0 +2023-03-28 18:41:11,535 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7808, 4.4675, 4.5554, 4.2849, 4.7315, 3.2177, 3.8772, 2.3687], + device='cuda:2'), covar=tensor([0.0182, 0.0210, 0.0134, 0.0191, 0.0138, 0.0919, 0.0802, 0.1409], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0144, 0.0111, 0.0132, 0.0117, 0.0132, 0.0142, 0.0125], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:42:20,425 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45056.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:42:52,453 INFO [train.py:892] (2/4) Epoch 25, batch 550, loss[loss=0.1771, simple_loss=0.2442, pruned_loss=0.05501, over 19755.00 frames. 
], tot_loss[loss=0.1756, simple_loss=0.251, pruned_loss=0.05004, over 3698617.03 frames. ], batch size: 213, lr: 6.31e-03, grad_scale: 8.0 +2023-03-28 18:43:06,704 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2196, 2.9527, 3.3988, 2.7210, 3.3111, 2.8090, 3.1207, 3.2625], + device='cuda:2'), covar=tensor([0.0528, 0.0471, 0.0544, 0.0732, 0.0357, 0.0508, 0.0473, 0.0382], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0079, 0.0078, 0.0106, 0.0074, 0.0075, 0.0073, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 18:43:21,267 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45081.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:44:44,778 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45117.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 18:44:51,744 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.460e+02 3.947e+02 4.547e+02 5.526e+02 8.636e+02, threshold=9.094e+02, percent-clipped=0.0 +2023-03-28 18:44:51,771 INFO [train.py:892] (2/4) Epoch 25, batch 600, loss[loss=0.1676, simple_loss=0.248, pruned_loss=0.04357, over 19803.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2509, pruned_loss=0.05014, over 3754023.65 frames. ], batch size: 65, lr: 6.30e-03, grad_scale: 8.0 +2023-03-28 18:45:04,117 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4456, 4.7521, 4.7752, 4.6652, 4.4246, 4.7241, 4.3000, 4.3045], + device='cuda:2'), covar=tensor([0.0541, 0.0502, 0.0548, 0.0488, 0.0637, 0.0618, 0.0669, 0.1036], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0263, 0.0283, 0.0244, 0.0248, 0.0237, 0.0253, 0.0299], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:45:04,218 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5201, 1.8658, 2.1991, 2.6944, 3.0040, 3.0689, 3.0559, 3.0002], + device='cuda:2'), covar=tensor([0.1016, 0.1843, 0.1537, 0.0762, 0.0480, 0.0376, 0.0445, 0.0597], + device='cuda:2'), in_proj_covar=tensor([0.0156, 0.0170, 0.0175, 0.0148, 0.0129, 0.0126, 0.0119, 0.0113], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 18:45:40,629 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3761, 2.4100, 2.5321, 2.4875, 2.4056, 2.5137, 2.4066, 2.6030], + device='cuda:2'), covar=tensor([0.0346, 0.0304, 0.0298, 0.0257, 0.0377, 0.0310, 0.0406, 0.0275], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0070, 0.0073, 0.0067, 0.0081, 0.0074, 0.0091, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 18:45:45,472 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45142.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:46:55,685 INFO [train.py:892] (2/4) Epoch 25, batch 650, loss[loss=0.1828, simple_loss=0.2656, pruned_loss=0.04997, over 19700.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2513, pruned_loss=0.05082, over 3794832.25 frames. 
], batch size: 265, lr: 6.30e-03, grad_scale: 8.0 +2023-03-28 18:47:21,499 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6063, 2.1690, 3.3786, 2.9085, 3.4543, 3.5634, 3.2835, 3.3750], + device='cuda:2'), covar=tensor([0.0643, 0.1035, 0.0126, 0.0515, 0.0146, 0.0217, 0.0208, 0.0178], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0102, 0.0084, 0.0153, 0.0081, 0.0095, 0.0088, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 18:47:25,374 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45183.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:48:24,809 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45206.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:48:58,096 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.622e+02 4.071e+02 4.790e+02 5.596e+02 1.238e+03, threshold=9.579e+02, percent-clipped=2.0 +2023-03-28 18:48:58,130 INFO [train.py:892] (2/4) Epoch 25, batch 700, loss[loss=0.168, simple_loss=0.2336, pruned_loss=0.05119, over 19787.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2521, pruned_loss=0.0512, over 3829498.29 frames. ], batch size: 168, lr: 6.30e-03, grad_scale: 8.0 +2023-03-28 18:49:21,634 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45231.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:50:14,557 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45254.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:50:54,526 INFO [train.py:892] (2/4) Epoch 25, batch 750, loss[loss=0.1709, simple_loss=0.2476, pruned_loss=0.04709, over 19688.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2512, pruned_loss=0.05077, over 3856177.03 frames. ], batch size: 45, lr: 6.29e-03, grad_scale: 8.0 +2023-03-28 18:51:37,728 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-28 18:52:49,365 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.772e+02 4.659e+02 5.559e+02 8.961e+02, threshold=9.318e+02, percent-clipped=0.0 +2023-03-28 18:52:49,391 INFO [train.py:892] (2/4) Epoch 25, batch 800, loss[loss=0.1687, simple_loss=0.236, pruned_loss=0.05064, over 19742.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2517, pruned_loss=0.05074, over 3875517.38 frames. ], batch size: 140, lr: 6.29e-03, grad_scale: 8.0 +2023-03-28 18:53:27,741 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-28 18:53:53,977 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45348.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 18:54:43,300 INFO [train.py:892] (2/4) Epoch 25, batch 850, loss[loss=0.1651, simple_loss=0.2478, pruned_loss=0.04126, over 19736.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2521, pruned_loss=0.05065, over 3891966.99 frames. 
], batch size: 76, lr: 6.29e-03, grad_scale: 8.0 +2023-03-28 18:56:15,197 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45409.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 18:56:20,849 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45412.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 18:56:43,405 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.654e+02 4.187e+02 5.025e+02 5.805e+02 1.375e+03, threshold=1.005e+03, percent-clipped=1.0 +2023-03-28 18:56:43,435 INFO [train.py:892] (2/4) Epoch 25, batch 900, loss[loss=0.1647, simple_loss=0.2354, pruned_loss=0.04702, over 19800.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2513, pruned_loss=0.05081, over 3905594.06 frames. ], batch size: 200, lr: 6.28e-03, grad_scale: 8.0 +2023-03-28 18:57:24,619 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45437.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:58:28,308 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1950, 2.6653, 3.1486, 3.4696, 3.7334, 4.3433, 4.3027, 4.4176], + device='cuda:2'), covar=tensor([0.0739, 0.1601, 0.1231, 0.0530, 0.0381, 0.0201, 0.0231, 0.0244], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0167, 0.0173, 0.0145, 0.0127, 0.0123, 0.0116, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 18:58:45,866 INFO [train.py:892] (2/4) Epoch 25, batch 950, loss[loss=0.169, simple_loss=0.2475, pruned_loss=0.04521, over 19736.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2512, pruned_loss=0.0504, over 3913943.03 frames. ], batch size: 99, lr: 6.28e-03, grad_scale: 8.0 +2023-03-28 19:00:40,926 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.498e+02 3.760e+02 4.893e+02 5.936e+02 1.409e+03, threshold=9.787e+02, percent-clipped=1.0 +2023-03-28 19:00:40,955 INFO [train.py:892] (2/4) Epoch 25, batch 1000, loss[loss=0.1706, simple_loss=0.2524, pruned_loss=0.04441, over 19719.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2517, pruned_loss=0.05074, over 3922039.53 frames. ], batch size: 54, lr: 6.28e-03, grad_scale: 8.0 +2023-03-28 19:01:55,540 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7205, 3.7313, 3.6563, 3.4093, 3.8041, 2.7911, 2.9578, 1.6594], + device='cuda:2'), covar=tensor([0.0363, 0.0312, 0.0234, 0.0285, 0.0257, 0.1362, 0.1104, 0.2314], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0143, 0.0111, 0.0131, 0.0117, 0.0131, 0.0141, 0.0125], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 19:02:32,313 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6665, 2.7769, 2.9229, 2.7644, 2.6487, 2.8359, 2.6389, 2.8891], + device='cuda:2'), covar=tensor([0.0277, 0.0323, 0.0238, 0.0247, 0.0397, 0.0308, 0.0348, 0.0370], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0071, 0.0074, 0.0068, 0.0081, 0.0074, 0.0091, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 19:02:41,449 INFO [train.py:892] (2/4) Epoch 25, batch 1050, loss[loss=0.1772, simple_loss=0.2496, pruned_loss=0.05242, over 19719.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2524, pruned_loss=0.05081, over 3928372.42 frames. 
], batch size: 60, lr: 6.27e-03, grad_scale: 8.0 +2023-03-28 19:03:11,276 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5357, 4.6430, 2.7361, 4.8896, 5.0540, 2.2588, 4.4173, 3.5859], + device='cuda:2'), covar=tensor([0.0612, 0.0730, 0.2630, 0.0799, 0.0498, 0.2678, 0.0828, 0.0816], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0251, 0.0228, 0.0265, 0.0243, 0.0200, 0.0233, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 19:03:57,373 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-28 19:04:40,788 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 3.919e+02 4.482e+02 5.377e+02 1.345e+03, threshold=8.964e+02, percent-clipped=5.0 +2023-03-28 19:04:40,816 INFO [train.py:892] (2/4) Epoch 25, batch 1100, loss[loss=0.1578, simple_loss=0.24, pruned_loss=0.03775, over 19805.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2515, pruned_loss=0.05008, over 3933895.26 frames. ], batch size: 98, lr: 6.27e-03, grad_scale: 8.0 +2023-03-28 19:06:36,324 INFO [train.py:892] (2/4) Epoch 25, batch 1150, loss[loss=0.1682, simple_loss=0.2429, pruned_loss=0.04675, over 19872.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2513, pruned_loss=0.05013, over 3936384.16 frames. ], batch size: 138, lr: 6.27e-03, grad_scale: 8.0 +2023-03-28 19:07:52,557 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45702.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:07:58,013 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45704.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 19:08:19,266 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45712.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 19:08:39,153 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+02 4.389e+02 4.900e+02 5.957e+02 9.190e+02, threshold=9.801e+02, percent-clipped=1.0 +2023-03-28 19:08:39,183 INFO [train.py:892] (2/4) Epoch 25, batch 1200, loss[loss=0.1493, simple_loss=0.2233, pruned_loss=0.03762, over 19744.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2516, pruned_loss=0.05064, over 3939386.90 frames. ], batch size: 118, lr: 6.26e-03, grad_scale: 8.0 +2023-03-28 19:09:16,055 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45737.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:10:08,281 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45760.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:10:11,949 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8619, 3.0288, 3.0208, 2.9467, 2.8784, 2.8909, 2.8987, 3.2338], + device='cuda:2'), covar=tensor([0.0270, 0.0284, 0.0335, 0.0294, 0.0347, 0.0307, 0.0376, 0.0214], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0072, 0.0074, 0.0069, 0.0082, 0.0076, 0.0093, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 19:10:13,993 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45763.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:10:31,150 INFO [train.py:892] (2/4) Epoch 25, batch 1250, loss[loss=0.1801, simple_loss=0.255, pruned_loss=0.05262, over 19883.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2513, pruned_loss=0.05065, over 3942759.74 frames. 
], batch size: 88, lr: 6.26e-03, grad_scale: 8.0 +2023-03-28 19:10:38,085 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45774.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:11:03,082 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45785.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:12:11,849 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8658, 4.0147, 2.3559, 4.1590, 4.3202, 1.9695, 3.6113, 3.3405], + device='cuda:2'), covar=tensor([0.0727, 0.0740, 0.2656, 0.0744, 0.0521, 0.2631, 0.0973, 0.0747], + device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0250, 0.0227, 0.0263, 0.0241, 0.0200, 0.0233, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 19:12:24,905 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.740e+02 4.060e+02 4.817e+02 5.850e+02 1.056e+03, threshold=9.634e+02, percent-clipped=3.0 +2023-03-28 19:12:24,940 INFO [train.py:892] (2/4) Epoch 25, batch 1300, loss[loss=0.1838, simple_loss=0.2625, pruned_loss=0.05252, over 19804.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2501, pruned_loss=0.04994, over 3944411.27 frames. ], batch size: 68, lr: 6.26e-03, grad_scale: 8.0 +2023-03-28 19:13:01,639 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45835.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:14:00,852 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45859.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:14:08,873 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-28 19:14:29,508 INFO [train.py:892] (2/4) Epoch 25, batch 1350, loss[loss=0.1806, simple_loss=0.2639, pruned_loss=0.04863, over 19885.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2494, pruned_loss=0.04919, over 3946293.38 frames. ], batch size: 71, lr: 6.25e-03, grad_scale: 8.0 +2023-03-28 19:14:43,465 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4771, 2.5280, 2.7438, 2.4619, 2.9196, 2.8781, 3.3277, 3.6060], + device='cuda:2'), covar=tensor([0.0697, 0.1683, 0.1758, 0.2222, 0.1565, 0.1471, 0.0693, 0.0662], + device='cuda:2'), in_proj_covar=tensor([0.0246, 0.0238, 0.0261, 0.0250, 0.0290, 0.0252, 0.0224, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 19:16:24,350 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45920.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:16:25,385 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.758e+02 4.142e+02 4.668e+02 5.534e+02 8.223e+02, threshold=9.336e+02, percent-clipped=0.0 +2023-03-28 19:16:25,415 INFO [train.py:892] (2/4) Epoch 25, batch 1400, loss[loss=0.1589, simple_loss=0.2247, pruned_loss=0.04658, over 19866.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2483, pruned_loss=0.04873, over 3947797.87 frames. ], batch size: 129, lr: 6.25e-03, grad_scale: 16.0 +2023-03-28 19:18:21,753 INFO [train.py:892] (2/4) Epoch 25, batch 1450, loss[loss=0.1732, simple_loss=0.2516, pruned_loss=0.04741, over 19565.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2495, pruned_loss=0.04898, over 3948030.73 frames. 
], batch size: 41, lr: 6.25e-03, grad_scale: 16.0 +2023-03-28 19:19:53,653 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46004.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 19:20:30,232 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.575e+02 3.735e+02 4.746e+02 5.527e+02 8.310e+02, threshold=9.492e+02, percent-clipped=0.0 +2023-03-28 19:20:30,266 INFO [train.py:892] (2/4) Epoch 25, batch 1500, loss[loss=0.1583, simple_loss=0.2334, pruned_loss=0.04156, over 19791.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2494, pruned_loss=0.04939, over 3948766.62 frames. ], batch size: 105, lr: 6.24e-03, grad_scale: 16.0 +2023-03-28 19:20:59,324 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1546, 4.3149, 2.4404, 4.5756, 4.8289, 2.0252, 3.9509, 3.2636], + device='cuda:2'), covar=tensor([0.0732, 0.0796, 0.2872, 0.0764, 0.0482, 0.2884, 0.1095, 0.1059], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0251, 0.0228, 0.0265, 0.0244, 0.0202, 0.0236, 0.0192], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 19:21:06,282 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.27 vs. limit=5.0 +2023-03-28 19:21:41,915 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-28 19:21:43,810 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46052.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 19:21:57,569 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46058.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:22:29,055 INFO [train.py:892] (2/4) Epoch 25, batch 1550, loss[loss=0.1583, simple_loss=0.2253, pruned_loss=0.0457, over 19781.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2503, pruned_loss=0.0497, over 3947686.51 frames. ], batch size: 131, lr: 6.24e-03, grad_scale: 16.0 +2023-03-28 19:23:54,913 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.77 vs. limit=5.0 +2023-03-28 19:24:26,939 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.641e+02 4.209e+02 5.084e+02 6.041e+02 9.841e+02, threshold=1.017e+03, percent-clipped=2.0 +2023-03-28 19:24:26,968 INFO [train.py:892] (2/4) Epoch 25, batch 1600, loss[loss=0.1975, simple_loss=0.2694, pruned_loss=0.06282, over 19682.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2498, pruned_loss=0.04962, over 3949199.73 frames. ], batch size: 265, lr: 6.24e-03, grad_scale: 16.0 +2023-03-28 19:24:36,703 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4961, 2.5407, 2.7960, 2.5627, 2.9684, 2.9051, 3.3685, 3.6475], + device='cuda:2'), covar=tensor([0.0707, 0.1672, 0.1634, 0.2037, 0.1490, 0.1432, 0.0667, 0.0705], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0238, 0.0261, 0.0250, 0.0289, 0.0251, 0.0225, 0.0245], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 19:24:49,226 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46130.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:26:20,484 INFO [train.py:892] (2/4) Epoch 25, batch 1650, loss[loss=0.1611, simple_loss=0.2339, pruned_loss=0.04416, over 19851.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2477, pruned_loss=0.04906, over 3950222.14 frames. 
], batch size: 104, lr: 6.23e-03, grad_scale: 16.0 +2023-03-28 19:28:05,059 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1732, 3.0574, 1.8883, 3.7699, 3.4070, 3.6924, 3.7497, 2.9425], + device='cuda:2'), covar=tensor([0.0641, 0.0669, 0.1843, 0.0599, 0.0586, 0.0395, 0.0648, 0.0820], + device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0139, 0.0141, 0.0145, 0.0129, 0.0129, 0.0142, 0.0143], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 19:28:09,020 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46215.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:28:20,993 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.501e+02 4.063e+02 5.043e+02 6.372e+02 1.431e+03, threshold=1.009e+03, percent-clipped=3.0 +2023-03-28 19:28:21,024 INFO [train.py:892] (2/4) Epoch 25, batch 1700, loss[loss=0.1533, simple_loss=0.2288, pruned_loss=0.03897, over 19764.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2481, pruned_loss=0.0492, over 3948936.53 frames. ], batch size: 122, lr: 6.23e-03, grad_scale: 16.0 +2023-03-28 19:29:21,094 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0470, 3.0585, 3.4364, 2.6338, 3.5766, 2.9268, 3.1438, 3.5276], + device='cuda:2'), covar=tensor([0.0804, 0.0484, 0.0517, 0.0776, 0.0345, 0.0436, 0.0500, 0.0299], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0080, 0.0077, 0.0106, 0.0073, 0.0076, 0.0073, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 19:29:32,704 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.66 vs. limit=5.0 +2023-03-28 19:29:42,098 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1209, 4.2892, 2.5476, 4.5497, 4.6978, 1.9747, 3.9313, 3.4422], + device='cuda:2'), covar=tensor([0.0701, 0.0721, 0.2487, 0.0642, 0.0533, 0.2709, 0.0944, 0.0797], + device='cuda:2'), in_proj_covar=tensor([0.0228, 0.0250, 0.0227, 0.0265, 0.0244, 0.0202, 0.0235, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 19:30:18,587 INFO [train.py:892] (2/4) Epoch 25, batch 1750, loss[loss=0.1603, simple_loss=0.2375, pruned_loss=0.04149, over 19659.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2489, pruned_loss=0.04978, over 3948155.63 frames. ], batch size: 58, lr: 6.23e-03, grad_scale: 16.0 +2023-03-28 19:30:28,969 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46276.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:30:56,032 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3879, 3.6942, 3.8975, 4.5377, 3.0033, 3.4113, 2.8958, 2.8109], + device='cuda:2'), covar=tensor([0.0514, 0.1973, 0.0851, 0.0338, 0.1901, 0.0913, 0.1163, 0.1611], + device='cuda:2'), in_proj_covar=tensor([0.0238, 0.0334, 0.0243, 0.0196, 0.0244, 0.0205, 0.0214, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 19:31:48,278 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.66 vs. 
limit=5.0 +2023-03-28 19:32:03,796 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.718e+02 4.184e+02 4.852e+02 5.831e+02 2.262e+03, threshold=9.705e+02, percent-clipped=1.0 +2023-03-28 19:32:03,820 INFO [train.py:892] (2/4) Epoch 25, batch 1800, loss[loss=0.1636, simple_loss=0.2405, pruned_loss=0.0433, over 19793.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2491, pruned_loss=0.04969, over 3948445.05 frames. ], batch size: 191, lr: 6.22e-03, grad_scale: 16.0 +2023-03-28 19:32:32,159 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46337.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:32:40,727 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1106, 5.4172, 5.4246, 5.2966, 5.0796, 5.4084, 4.8654, 4.8970], + device='cuda:2'), covar=tensor([0.0416, 0.0399, 0.0428, 0.0389, 0.0562, 0.0454, 0.0556, 0.0890], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0268, 0.0286, 0.0249, 0.0251, 0.0238, 0.0256, 0.0303], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 19:32:56,975 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2297, 3.5289, 3.7360, 4.2911, 2.9102, 3.3563, 2.8193, 2.7306], + device='cuda:2'), covar=tensor([0.0480, 0.2171, 0.0942, 0.0371, 0.2016, 0.0893, 0.1243, 0.1659], + device='cuda:2'), in_proj_covar=tensor([0.0238, 0.0335, 0.0244, 0.0196, 0.0244, 0.0205, 0.0214, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 19:33:15,022 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46358.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:33:38,590 INFO [train.py:892] (2/4) Epoch 25, batch 1850, loss[loss=0.1721, simple_loss=0.2542, pruned_loss=0.04504, over 19822.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2499, pruned_loss=0.04958, over 3948515.91 frames. ], batch size: 57, lr: 6.22e-03, grad_scale: 16.0 +2023-03-28 19:34:37,731 INFO [train.py:892] (2/4) Epoch 26, batch 0, loss[loss=0.1544, simple_loss=0.2246, pruned_loss=0.04209, over 19735.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2246, pruned_loss=0.04209, over 19735.00 frames. ], batch size: 134, lr: 6.10e-03, grad_scale: 16.0 +2023-03-28 19:34:37,731 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 19:34:56,254 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3707, 3.2529, 3.7193, 3.0592, 3.9960, 3.1936, 3.3099, 3.9028], + device='cuda:2'), covar=tensor([0.0879, 0.0420, 0.0640, 0.0702, 0.0290, 0.0427, 0.0538, 0.0298], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0079, 0.0077, 0.0105, 0.0073, 0.0076, 0.0073, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 19:35:04,408 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0478, 3.1232, 3.1160, 3.0636, 2.9155, 3.0741, 2.9628, 3.2661], + device='cuda:2'), covar=tensor([0.0253, 0.0317, 0.0315, 0.0286, 0.0353, 0.0223, 0.0303, 0.0279], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0073, 0.0075, 0.0070, 0.0083, 0.0076, 0.0094, 0.0067], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 19:35:16,909 INFO [train.py:926] (2/4) Epoch 26, validation: loss=0.176, simple_loss=0.2485, pruned_loss=0.05179, over 2883724.00 frames. 
+2023-03-28 19:35:16,910 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 19:35:36,329 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4576, 4.2455, 4.3248, 4.0598, 4.4715, 3.1864, 3.7523, 2.1580], + device='cuda:2'), covar=tensor([0.0175, 0.0199, 0.0129, 0.0174, 0.0122, 0.0832, 0.0664, 0.1413], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0144, 0.0111, 0.0131, 0.0118, 0.0132, 0.0141, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 19:36:01,558 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5466, 3.6325, 2.2623, 3.8020, 3.8580, 1.8149, 3.2093, 3.0014], + device='cuda:2'), covar=tensor([0.0787, 0.0839, 0.2729, 0.0777, 0.0597, 0.2814, 0.1202, 0.0884], + device='cuda:2'), in_proj_covar=tensor([0.0229, 0.0251, 0.0229, 0.0267, 0.0246, 0.0203, 0.0237, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 19:36:29,076 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46406.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:37:07,521 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.563e+02 3.816e+02 4.430e+02 5.059e+02 8.683e+02, threshold=8.861e+02, percent-clipped=0.0 +2023-03-28 19:37:18,521 INFO [train.py:892] (2/4) Epoch 26, batch 50, loss[loss=0.2059, simple_loss=0.278, pruned_loss=0.06692, over 19760.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2461, pruned_loss=0.04831, over 889926.48 frames. ], batch size: 321, lr: 6.09e-03, grad_scale: 16.0 +2023-03-28 19:37:28,803 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46430.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:39:07,487 INFO [train.py:892] (2/4) Epoch 26, batch 100, loss[loss=0.1618, simple_loss=0.2399, pruned_loss=0.04186, over 19851.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2476, pruned_loss=0.04835, over 1568383.22 frames. ], batch size: 78, lr: 6.09e-03, grad_scale: 16.0 +2023-03-28 19:39:12,050 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46478.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:40:31,541 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46515.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:40:42,635 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 4.177e+02 4.877e+02 5.600e+02 1.186e+03, threshold=9.755e+02, percent-clipped=5.0 +2023-03-28 19:40:54,309 INFO [train.py:892] (2/4) Epoch 26, batch 150, loss[loss=0.1694, simple_loss=0.2455, pruned_loss=0.04669, over 19785.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2467, pruned_loss=0.0481, over 2096759.88 frames. ], batch size: 48, lr: 6.09e-03, grad_scale: 16.0 +2023-03-28 19:42:14,626 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46563.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:42:42,331 INFO [train.py:892] (2/4) Epoch 26, batch 200, loss[loss=0.1595, simple_loss=0.2389, pruned_loss=0.04003, over 19682.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2472, pruned_loss=0.04823, over 2508251.50 frames. 
], batch size: 75, lr: 6.08e-03, grad_scale: 16.0 +2023-03-28 19:43:25,422 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46595.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:43:42,612 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46602.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:44:24,550 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.062e+02 3.750e+02 4.526e+02 5.438e+02 1.075e+03, threshold=9.053e+02, percent-clipped=3.0 +2023-03-28 19:44:31,779 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2327, 4.0704, 4.1270, 4.3314, 4.1761, 4.5869, 4.3160, 4.4033], + device='cuda:2'), covar=tensor([0.0957, 0.0512, 0.0667, 0.0454, 0.0786, 0.0468, 0.0639, 0.0719], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0174, 0.0196, 0.0170, 0.0169, 0.0152, 0.0147, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 19:44:34,762 INFO [train.py:892] (2/4) Epoch 26, batch 250, loss[loss=0.1809, simple_loss=0.2548, pruned_loss=0.05344, over 19750.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2468, pruned_loss=0.04825, over 2828649.75 frames. ], batch size: 179, lr: 6.08e-03, grad_scale: 16.0 +2023-03-28 19:44:49,641 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46632.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:44:53,942 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8456, 3.7386, 3.7365, 3.5154, 3.8327, 2.8165, 3.1821, 1.7114], + device='cuda:2'), covar=tensor([0.0219, 0.0245, 0.0160, 0.0203, 0.0170, 0.1066, 0.0670, 0.1727], + device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0142, 0.0111, 0.0130, 0.0117, 0.0132, 0.0140, 0.0125], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 19:45:47,202 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46656.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 19:46:01,386 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46663.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:46:01,525 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9625, 2.8932, 3.0660, 2.7725, 3.2432, 3.1946, 3.8687, 4.2359], + device='cuda:2'), covar=tensor([0.0615, 0.1718, 0.1707, 0.2065, 0.1740, 0.1557, 0.0609, 0.0612], + device='cuda:2'), in_proj_covar=tensor([0.0246, 0.0237, 0.0261, 0.0251, 0.0290, 0.0251, 0.0225, 0.0245], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 19:46:16,904 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1833, 4.2839, 2.5746, 4.5964, 4.7769, 2.0394, 3.9410, 3.5263], + device='cuda:2'), covar=tensor([0.0657, 0.0771, 0.2622, 0.0629, 0.0440, 0.2816, 0.1009, 0.0820], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0249, 0.0226, 0.0265, 0.0244, 0.0201, 0.0236, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 19:46:31,119 INFO [train.py:892] (2/4) Epoch 26, batch 300, loss[loss=0.1462, simple_loss=0.2278, pruned_loss=0.03233, over 19891.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2473, pruned_loss=0.04793, over 3077589.04 frames. 
], batch size: 94, lr: 6.08e-03, grad_scale: 16.0 +2023-03-28 19:46:58,458 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-28 19:48:12,956 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.886e+02 3.778e+02 4.525e+02 5.535e+02 8.157e+02, threshold=9.049e+02, percent-clipped=0.0 +2023-03-28 19:48:23,028 INFO [train.py:892] (2/4) Epoch 26, batch 350, loss[loss=0.1485, simple_loss=0.2244, pruned_loss=0.03627, over 19759.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2471, pruned_loss=0.04782, over 3272166.86 frames. ], batch size: 97, lr: 6.07e-03, grad_scale: 16.0 +2023-03-28 19:50:21,468 INFO [train.py:892] (2/4) Epoch 26, batch 400, loss[loss=0.1754, simple_loss=0.2533, pruned_loss=0.04881, over 19885.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2489, pruned_loss=0.04872, over 3420967.63 frames. ], batch size: 62, lr: 6.07e-03, grad_scale: 16.0 +2023-03-28 19:50:45,581 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.8992, 6.1892, 6.1699, 6.0507, 5.9185, 6.2010, 5.4547, 5.4918], + device='cuda:2'), covar=tensor([0.0380, 0.0416, 0.0499, 0.0404, 0.0543, 0.0461, 0.0662, 0.1014], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0267, 0.0288, 0.0247, 0.0252, 0.0239, 0.0257, 0.0303], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 19:51:41,924 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46810.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:51:43,852 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9326, 3.9944, 4.4072, 3.9755, 3.8800, 4.3161, 4.1012, 4.5239], + device='cuda:2'), covar=tensor([0.1142, 0.0440, 0.0497, 0.0469, 0.0981, 0.0594, 0.0514, 0.0417], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0222, 0.0221, 0.0231, 0.0206, 0.0234, 0.0230, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 19:52:09,139 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.488e+02 3.851e+02 4.931e+02 6.030e+02 1.390e+03, threshold=9.862e+02, percent-clipped=3.0 +2023-03-28 19:52:19,760 INFO [train.py:892] (2/4) Epoch 26, batch 450, loss[loss=0.1841, simple_loss=0.2534, pruned_loss=0.05739, over 19866.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2495, pruned_loss=0.04893, over 3538576.34 frames. ], batch size: 154, lr: 6.07e-03, grad_scale: 16.0 +2023-03-28 19:52:54,442 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6442, 2.8155, 2.8672, 2.7913, 2.6656, 2.7158, 2.6731, 2.8048], + device='cuda:2'), covar=tensor([0.0284, 0.0292, 0.0272, 0.0260, 0.0378, 0.0296, 0.0374, 0.0321], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0072, 0.0074, 0.0069, 0.0082, 0.0076, 0.0093, 0.0067], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 19:54:06,062 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46871.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:54:16,469 INFO [train.py:892] (2/4) Epoch 26, batch 500, loss[loss=0.1752, simple_loss=0.2484, pruned_loss=0.05102, over 19871.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2487, pruned_loss=0.04871, over 3629906.43 frames. 
], batch size: 89, lr: 6.06e-03, grad_scale: 16.0
+2023-03-28 19:54:21,982 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0409, 2.8962, 1.6729, 3.4868, 3.1492, 3.4642, 3.4955, 2.8265],
+ device='cuda:2'), covar=tensor([0.0670, 0.0750, 0.1910, 0.0700, 0.0689, 0.0516, 0.0658, 0.0802],
+ device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0140, 0.0141, 0.0146, 0.0130, 0.0129, 0.0142, 0.0142],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 19:56:03,145 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.854e+02 4.348e+02 5.094e+02 6.034e+02 1.006e+03, threshold=1.019e+03, percent-clipped=1.0
+2023-03-28 19:56:14,643 INFO [train.py:892] (2/4) Epoch 26, batch 550, loss[loss=0.1728, simple_loss=0.2516, pruned_loss=0.04699, over 19866.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2503, pruned_loss=0.04962, over 3698461.57 frames. ], batch size: 104, lr: 6.06e-03, grad_scale: 16.0
+2023-03-28 19:56:27,456 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46932.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 19:57:13,265 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46951.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 19:57:28,657 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46958.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 19:57:28,924 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1139, 3.3752, 3.5830, 4.1581, 2.9353, 3.1769, 2.7683, 2.7285],
+ device='cuda:2'), covar=tensor([0.0486, 0.2128, 0.0915, 0.0354, 0.1754, 0.0895, 0.1201, 0.1540],
+ device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0334, 0.0244, 0.0196, 0.0242, 0.0203, 0.0212, 0.0214],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 19:58:10,257 INFO [train.py:892] (2/4) Epoch 26, batch 600, loss[loss=0.1735, simple_loss=0.2568, pruned_loss=0.04505, over 19787.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2516, pruned_loss=0.05021, over 3753621.12 frames. ], batch size: 73, lr: 6.06e-03, grad_scale: 16.0
+2023-03-28 19:58:20,934 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46980.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 19:58:51,165 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.21 vs. limit=5.0
+2023-03-28 19:59:54,797 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 3.904e+02 4.581e+02 5.783e+02 1.259e+03, threshold=9.163e+02, percent-clipped=1.0
+2023-03-28 20:00:06,808 INFO [train.py:892] (2/4) Epoch 26, batch 650, loss[loss=0.1692, simple_loss=0.2508, pruned_loss=0.04384, over 19642.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.251, pruned_loss=0.04991, over 3796259.31 frames. ], batch size: 68, lr: 6.05e-03, grad_scale: 16.0
+2023-03-28 20:02:09,484 INFO [train.py:892] (2/4) Epoch 26, batch 700, loss[loss=0.1786, simple_loss=0.2553, pruned_loss=0.051, over 19632.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2492, pruned_loss=0.0487, over 3830531.14 frames. ], batch size: 68, lr: 6.05e-03, grad_scale: 16.0
+2023-03-28 20:02:51,791 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0054, 2.3310, 2.0935, 1.4653, 2.1076, 2.2463, 2.1472, 2.2296],
+ device='cuda:2'), covar=tensor([0.0401, 0.0289, 0.0337, 0.0643, 0.0397, 0.0323, 0.0304, 0.0305],
+ device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0088, 0.0093, 0.0097, 0.0099, 0.0078, 0.0078, 0.0079],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 20:03:57,429 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.011e+02 3.881e+02 4.556e+02 5.406e+02 9.543e+02, threshold=9.112e+02, percent-clipped=1.0
+2023-03-28 20:04:09,095 INFO [train.py:892] (2/4) Epoch 26, batch 750, loss[loss=0.1523, simple_loss=0.2289, pruned_loss=0.03782, over 19791.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2484, pruned_loss=0.04826, over 3858290.88 frames. ], batch size: 40, lr: 6.05e-03, grad_scale: 16.0
+2023-03-28 20:05:49,775 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47166.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:05:50,013 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4080, 3.5045, 2.1748, 3.6122, 3.7027, 1.6987, 3.0576, 2.8699],
+ device='cuda:2'), covar=tensor([0.0787, 0.0899, 0.2705, 0.0802, 0.0579, 0.2664, 0.1122, 0.0897],
+ device='cuda:2'), in_proj_covar=tensor([0.0230, 0.0253, 0.0228, 0.0268, 0.0248, 0.0202, 0.0238, 0.0194],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-28 20:06:15,250 INFO [train.py:892] (2/4) Epoch 26, batch 800, loss[loss=0.1701, simple_loss=0.2546, pruned_loss=0.0428, over 19609.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2501, pruned_loss=0.04878, over 3876050.37 frames. ], batch size: 48, lr: 6.04e-03, grad_scale: 16.0
+2023-03-28 20:07:12,024 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4836, 5.9234, 6.0185, 5.8507, 5.7183, 5.5854, 5.6811, 5.5939],
+ device='cuda:2'), covar=tensor([0.1258, 0.1213, 0.0904, 0.1138, 0.0668, 0.0792, 0.2027, 0.1864],
+ device='cuda:2'), in_proj_covar=tensor([0.0291, 0.0320, 0.0363, 0.0290, 0.0270, 0.0271, 0.0347, 0.0381],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 20:08:13,343 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.013e+02 4.034e+02 4.451e+02 5.296e+02 1.110e+03, threshold=8.902e+02, percent-clipped=2.0
+2023-03-28 20:08:25,313 INFO [train.py:892] (2/4) Epoch 26, batch 850, loss[loss=0.1749, simple_loss=0.2588, pruned_loss=0.04547, over 19837.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2492, pruned_loss=0.04807, over 3891746.85 frames. ], batch size: 52, lr: 6.04e-03, grad_scale: 16.0
+2023-03-28 20:09:04,978 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47241.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:09:28,447 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47251.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:09:48,090 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47258.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:10:29,640 INFO [train.py:892] (2/4) Epoch 26, batch 900, loss[loss=0.1772, simple_loss=0.2506, pruned_loss=0.05187, over 19795.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2481, pruned_loss=0.04775, over 3906225.44 frames. ], batch size: 185, lr: 6.04e-03, grad_scale: 16.0
+2023-03-28 20:11:15,873 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0
+2023-03-28 20:11:30,981 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47299.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:11:38,950 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47302.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:11:47,723 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47306.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:12:26,924 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.526e+02 3.904e+02 4.611e+02 5.515e+02 1.040e+03, threshold=9.223e+02, percent-clipped=2.0
+2023-03-28 20:12:28,102 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2385, 5.4509, 5.7385, 5.5425, 5.4996, 5.1331, 5.4602, 5.2933],
+ device='cuda:2'), covar=tensor([0.1455, 0.1305, 0.0804, 0.1190, 0.0680, 0.0956, 0.1734, 0.2031],
+ device='cuda:2'), in_proj_covar=tensor([0.0289, 0.0318, 0.0362, 0.0289, 0.0268, 0.0269, 0.0346, 0.0379],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 20:12:38,722 INFO [train.py:892] (2/4) Epoch 26, batch 950, loss[loss=0.1773, simple_loss=0.2617, pruned_loss=0.04649, over 19856.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2477, pruned_loss=0.04719, over 3915521.94 frames. ], batch size: 112, lr: 6.03e-03, grad_scale: 16.0
+2023-03-28 20:14:46,335 INFO [train.py:892] (2/4) Epoch 26, batch 1000, loss[loss=0.1606, simple_loss=0.2377, pruned_loss=0.04172, over 19742.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.247, pruned_loss=0.04684, over 3923785.73 frames. ], batch size: 205, lr: 6.03e-03, grad_scale: 16.0
+2023-03-28 20:14:55,176 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47379.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:16:35,893 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2089, 3.8583, 3.9993, 4.2454, 3.9624, 4.2241, 4.2863, 4.4837],
+ device='cuda:2'), covar=tensor([0.0681, 0.0494, 0.0527, 0.0352, 0.0735, 0.0541, 0.0480, 0.0311],
+ device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0172, 0.0195, 0.0168, 0.0168, 0.0151, 0.0147, 0.0191],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-28 20:16:43,926 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.712e+02 3.612e+02 4.402e+02 5.356e+02 9.611e+02, threshold=8.803e+02, percent-clipped=1.0
+2023-03-28 20:16:51,905 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5999, 4.2433, 4.3370, 4.6133, 4.3614, 4.7031, 4.6735, 4.8638],
+ device='cuda:2'), covar=tensor([0.0642, 0.0374, 0.0512, 0.0317, 0.0639, 0.0407, 0.0464, 0.0288],
+ device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0172, 0.0195, 0.0168, 0.0168, 0.0151, 0.0147, 0.0191],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-28 20:16:55,238 INFO [train.py:892] (2/4) Epoch 26, batch 1050, loss[loss=0.1594, simple_loss=0.2384, pruned_loss=0.04015, over 19794.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2468, pruned_loss=0.04665, over 3930989.14 frames. ], batch size: 114, lr: 6.03e-03, grad_scale: 16.0
+2023-03-28 20:17:31,934 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47440.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:18:20,541 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3988, 3.3078, 4.7742, 3.6244, 3.9246, 3.8352, 2.6394, 2.9419],
+ device='cuda:2'), covar=tensor([0.0827, 0.2755, 0.0461, 0.0950, 0.1577, 0.1341, 0.2448, 0.2450],
+ device='cuda:2'), in_proj_covar=tensor([0.0345, 0.0378, 0.0339, 0.0276, 0.0368, 0.0363, 0.0361, 0.0329],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-28 20:18:33,776 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47466.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:18:57,882 INFO [train.py:892] (2/4) Epoch 26, batch 1100, loss[loss=0.1809, simple_loss=0.2421, pruned_loss=0.05981, over 19819.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2471, pruned_loss=0.04647, over 3935147.83 frames. ], batch size: 166, lr: 6.03e-03, grad_scale: 16.0
+2023-03-28 20:20:37,059 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47514.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:20:53,316 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.834e+02 3.692e+02 4.407e+02 5.502e+02 8.056e+02, threshold=8.815e+02, percent-clipped=0.0
+2023-03-28 20:21:05,816 INFO [train.py:892] (2/4) Epoch 26, batch 1150, loss[loss=0.161, simple_loss=0.2318, pruned_loss=0.04507, over 19605.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2481, pruned_loss=0.04742, over 3937762.89 frames. ], batch size: 46, lr: 6.02e-03, grad_scale: 16.0
+2023-03-28 20:21:53,935 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8093, 3.1918, 3.3083, 3.7991, 2.7381, 3.1565, 2.4791, 2.4539],
+ device='cuda:2'), covar=tensor([0.0513, 0.1811, 0.0966, 0.0369, 0.1786, 0.0738, 0.1286, 0.1549],
+ device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0332, 0.0244, 0.0193, 0.0241, 0.0202, 0.0211, 0.0214],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 20:23:12,031 INFO [train.py:892] (2/4) Epoch 26, batch 1200, loss[loss=0.1428, simple_loss=0.2177, pruned_loss=0.03398, over 19777.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2488, pruned_loss=0.0479, over 3938879.72 frames. ], batch size: 116, lr: 6.02e-03, grad_scale: 16.0
+2023-03-28 20:24:05,382 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47597.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:25:05,923 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.537e+02 4.111e+02 4.755e+02 5.836e+02 8.359e+02, threshold=9.510e+02, percent-clipped=0.0
+2023-03-28 20:25:19,224 INFO [train.py:892] (2/4) Epoch 26, batch 1250, loss[loss=0.1764, simple_loss=0.2514, pruned_loss=0.05071, over 19828.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2472, pruned_loss=0.0473, over 3943154.00 frames. ], batch size: 208, lr: 6.02e-03, grad_scale: 16.0
+2023-03-28 20:27:24,899 INFO [train.py:892] (2/4) Epoch 26, batch 1300, loss[loss=0.1786, simple_loss=0.2401, pruned_loss=0.05853, over 19812.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2475, pruned_loss=0.04728, over 3943906.75 frames. ], batch size: 168, lr: 6.01e-03, grad_scale: 16.0
+2023-03-28 20:27:28,336 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7081, 2.6007, 4.3756, 3.2022, 3.3808, 3.1115, 2.3109, 2.4095],
+ device='cuda:2'), covar=tensor([0.1215, 0.3679, 0.0530, 0.1150, 0.2242, 0.1764, 0.2751, 0.3066],
+ device='cuda:2'), in_proj_covar=tensor([0.0348, 0.0380, 0.0339, 0.0277, 0.0370, 0.0365, 0.0362, 0.0331],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-28 20:28:19,890 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8536, 3.3096, 3.7367, 3.2558, 4.0184, 4.0499, 4.7538, 5.1822],
+ device='cuda:2'), covar=tensor([0.0529, 0.1659, 0.1441, 0.2147, 0.1768, 0.1343, 0.0506, 0.0534],
+ device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0238, 0.0263, 0.0250, 0.0292, 0.0252, 0.0226, 0.0248],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 20:29:07,142 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5227, 4.2687, 4.2759, 3.9975, 4.4430, 3.0522, 3.7013, 2.0675],
+ device='cuda:2'), covar=tensor([0.0191, 0.0222, 0.0147, 0.0221, 0.0155, 0.0980, 0.0798, 0.1626],
+ device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0143, 0.0112, 0.0133, 0.0118, 0.0134, 0.0141, 0.0127],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 20:29:07,925 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0
+2023-03-28 20:29:19,681 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.334e+02 3.779e+02 4.339e+02 5.421e+02 8.530e+02, threshold=8.679e+02, percent-clipped=0.0
+2023-03-28 20:29:34,004 INFO [train.py:892] (2/4) Epoch 26, batch 1350, loss[loss=0.1722, simple_loss=0.2533, pruned_loss=0.04551, over 19660.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2472, pruned_loss=0.04726, over 3946251.71 frames. ], batch size: 50, lr: 6.01e-03, grad_scale: 16.0
+2023-03-28 20:29:58,940 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47735.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:30:06,630 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47738.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:31:42,324 INFO [train.py:892] (2/4) Epoch 26, batch 1400, loss[loss=0.204, simple_loss=0.2754, pruned_loss=0.06628, over 19954.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2479, pruned_loss=0.04742, over 3947784.74 frames. ], batch size: 53, lr: 6.01e-03, grad_scale: 16.0
+2023-03-28 20:32:13,956 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47788.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:32:43,736 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47799.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:32:59,852 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8904, 3.8006, 4.1775, 3.8013, 3.5638, 4.0492, 3.8539, 4.2156],
+ device='cuda:2'), covar=tensor([0.0856, 0.0390, 0.0406, 0.0407, 0.1140, 0.0554, 0.0535, 0.0387],
+ device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0219, 0.0220, 0.0231, 0.0205, 0.0233, 0.0230, 0.0212],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 20:33:05,681 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47809.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 20:33:29,897 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 4.160e+02 4.762e+02 6.051e+02 1.589e+03, threshold=9.525e+02, percent-clipped=3.0
+2023-03-28 20:33:39,293 INFO [train.py:892] (2/4) Epoch 26, batch 1450, loss[loss=0.2193, simple_loss=0.2885, pruned_loss=0.07507, over 19702.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2489, pruned_loss=0.04779, over 3948664.02 frames. ], batch size: 325, lr: 6.00e-03, grad_scale: 16.0
+2023-03-28 20:34:39,392 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47849.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:35:29,932 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47870.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 20:35:46,878 INFO [train.py:892] (2/4) Epoch 26, batch 1500, loss[loss=0.1971, simple_loss=0.2758, pruned_loss=0.05923, over 19638.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2485, pruned_loss=0.04773, over 3949116.50 frames. ], batch size: 343, lr: 6.00e-03, grad_scale: 16.0
+2023-03-28 20:36:42,306 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47897.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:37:43,344 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.594e+02 4.057e+02 4.781e+02 5.680e+02 8.130e+02, threshold=9.563e+02, percent-clipped=0.0
+2023-03-28 20:37:54,767 INFO [train.py:892] (2/4) Epoch 26, batch 1550, loss[loss=0.1514, simple_loss=0.2323, pruned_loss=0.03528, over 19882.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2478, pruned_loss=0.04723, over 3948676.38 frames. ], batch size: 97, lr: 6.00e-03, grad_scale: 32.0
+2023-03-28 20:38:45,540 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47945.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:39:59,941 INFO [train.py:892] (2/4) Epoch 26, batch 1600, loss[loss=0.1618, simple_loss=0.2243, pruned_loss=0.04968, over 19810.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.248, pruned_loss=0.04748, over 3948859.59 frames. ], batch size: 149, lr: 5.99e-03, grad_scale: 32.0
+2023-03-28 20:41:53,733 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.602e+02 3.888e+02 4.549e+02 5.335e+02 7.774e+02, threshold=9.097e+02, percent-clipped=0.0
+2023-03-28 20:42:08,504 INFO [train.py:892] (2/4) Epoch 26, batch 1650, loss[loss=0.1817, simple_loss=0.2566, pruned_loss=0.05342, over 19716.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2463, pruned_loss=0.04717, over 3950390.38 frames. ], batch size: 109, lr: 5.99e-03, grad_scale: 32.0
+2023-03-28 20:42:31,654 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48035.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:44:05,483 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48072.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:44:15,506 INFO [train.py:892] (2/4) Epoch 26, batch 1700, loss[loss=0.1689, simple_loss=0.2515, pruned_loss=0.04312, over 19800.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2461, pruned_loss=0.04686, over 3949510.74 frames. ], batch size: 51, lr: 5.99e-03, grad_scale: 32.0
+2023-03-28 20:44:34,470 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48083.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:45:03,264 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48094.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:45:32,294 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2832, 5.6169, 5.6570, 5.5817, 5.3589, 5.6409, 5.0800, 5.1249],
+ device='cuda:2'), covar=tensor([0.0400, 0.0420, 0.0429, 0.0377, 0.0494, 0.0540, 0.0628, 0.0902],
+ device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0271, 0.0284, 0.0248, 0.0253, 0.0238, 0.0255, 0.0302],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 20:46:06,339 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 3.839e+02 4.571e+02 5.458e+02 1.264e+03, threshold=9.143e+02, percent-clipped=4.0
+2023-03-28 20:46:18,131 INFO [train.py:892] (2/4) Epoch 26, batch 1750, loss[loss=0.1622, simple_loss=0.2484, pruned_loss=0.03798, over 19776.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2458, pruned_loss=0.04671, over 3948508.89 frames. ], batch size: 66, lr: 5.98e-03, grad_scale: 32.0
+2023-03-28 20:46:35,018 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48133.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:46:58,797 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48144.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:47:09,943 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2246, 3.0995, 3.2579, 2.6759, 3.4958, 2.9274, 3.2090, 3.2525],
+ device='cuda:2'), covar=tensor([0.0713, 0.0427, 0.0811, 0.0802, 0.0387, 0.0434, 0.0419, 0.0434],
+ device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0081, 0.0079, 0.0107, 0.0076, 0.0077, 0.0075, 0.0068],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 20:47:42,222 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48165.0, num_to_drop=1, layers_to_drop={3}
+2023-03-28 20:48:01,805 INFO [train.py:892] (2/4) Epoch 26, batch 1800, loss[loss=0.2282, simple_loss=0.3313, pruned_loss=0.06254, over 18863.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2465, pruned_loss=0.04694, over 3947742.36 frames. ], batch size: 513, lr: 5.98e-03, grad_scale: 16.0
+2023-03-28 20:49:11,079 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48212.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 20:49:30,284 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.294e+02 3.982e+02 4.834e+02 5.879e+02 1.454e+03, threshold=9.667e+02, percent-clipped=6.0
+2023-03-28 20:49:38,199 INFO [train.py:892] (2/4) Epoch 26, batch 1850, loss[loss=0.1829, simple_loss=0.2734, pruned_loss=0.04613, over 19688.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2481, pruned_loss=0.04715, over 3947728.76 frames. ], batch size: 56, lr: 5.98e-03, grad_scale: 16.0
+2023-03-28 20:50:46,041 INFO [train.py:892] (2/4) Epoch 27, batch 0, loss[loss=0.1636, simple_loss=0.243, pruned_loss=0.04205, over 19715.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.243, pruned_loss=0.04205, over 19715.00 frames. ], batch size: 62, lr: 5.86e-03, grad_scale: 16.0
+2023-03-28 20:50:46,042 INFO [train.py:917] (2/4) Computing validation loss
+2023-03-28 20:51:17,585 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8583, 3.8877, 4.1089, 3.8603, 3.7500, 3.9675, 3.7449, 4.1290],
+ device='cuda:2'), covar=tensor([0.0668, 0.0313, 0.0341, 0.0352, 0.0714, 0.0500, 0.0533, 0.0328],
+ device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0220, 0.0221, 0.0232, 0.0205, 0.0233, 0.0230, 0.0212],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 20:51:19,617 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.7725, 1.5796, 1.7738, 1.6870, 1.7071, 1.7216, 1.7269, 1.7561],
+ device='cuda:2'), covar=tensor([0.0339, 0.0343, 0.0330, 0.0319, 0.0441, 0.0343, 0.0452, 0.0392],
+ device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0074, 0.0077, 0.0070, 0.0086, 0.0078, 0.0095, 0.0069],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 20:51:25,319 INFO [train.py:926] (2/4) Epoch 27, validation: loss=0.1767, simple_loss=0.2485, pruned_loss=0.05248, over 2883724.00 frames.
+2023-03-28 20:51:25,320 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB
+2023-03-28 20:51:34,223 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3034, 4.0021, 4.1535, 4.3328, 4.0073, 4.3821, 4.4771, 4.6423],
+ device='cuda:2'), covar=tensor([0.0672, 0.0411, 0.0481, 0.0362, 0.0732, 0.0513, 0.0408, 0.0299],
+ device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0174, 0.0197, 0.0172, 0.0170, 0.0155, 0.0147, 0.0193],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-28 20:52:16,809 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0
+2023-03-28 20:53:19,748 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48273.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 20:53:39,040 INFO [train.py:892] (2/4) Epoch 27, batch 50, loss[loss=0.1698, simple_loss=0.2503, pruned_loss=0.04458, over 19678.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2487, pruned_loss=0.04863, over 889732.89 frames. ], batch size: 52, lr: 5.86e-03, grad_scale: 16.0
+2023-03-28 20:54:11,152 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.9321, 6.2739, 6.2355, 6.1765, 5.9373, 6.2146, 5.5598, 5.5832],
+ device='cuda:2'), covar=tensor([0.0357, 0.0394, 0.0446, 0.0358, 0.0551, 0.0496, 0.0793, 0.1071],
+ device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0273, 0.0288, 0.0250, 0.0255, 0.0240, 0.0259, 0.0305],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 20:55:21,749 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.295e+02 3.761e+02 4.503e+02 5.543e+02 9.672e+02, threshold=9.006e+02, percent-clipped=1.0
+2023-03-28 20:55:27,378 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48324.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:55:44,764 INFO [train.py:892] (2/4) Epoch 27, batch 100, loss[loss=0.153, simple_loss=0.2311, pruned_loss=0.03741, over 19798.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2461, pruned_loss=0.04653, over 1569109.67 frames. ], batch size: 83, lr: 5.86e-03, grad_scale: 16.0
+2023-03-28 20:55:55,510 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0042, 2.9251, 1.8378, 3.5651, 3.2655, 3.4670, 3.5634, 2.8442],
+ device='cuda:2'), covar=tensor([0.0654, 0.0700, 0.1793, 0.0608, 0.0649, 0.0531, 0.0615, 0.0802],
+ device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0139, 0.0140, 0.0146, 0.0130, 0.0129, 0.0142, 0.0142],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 20:57:46,605 INFO [train.py:892] (2/4) Epoch 27, batch 150, loss[loss=0.1828, simple_loss=0.2568, pruned_loss=0.05436, over 19771.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2474, pruned_loss=0.04759, over 2096848.69 frames. ], batch size: 233, lr: 5.86e-03, grad_scale: 16.0
+2023-03-28 20:57:56,780 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48385.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:58:23,384 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48394.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:58:30,101 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48397.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:59:26,410 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.124e+02 3.642e+02 4.290e+02 5.743e+02 1.049e+03, threshold=8.581e+02, percent-clipped=1.0
+2023-03-28 20:59:30,122 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48423.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:59:40,799 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48428.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 20:59:49,347 INFO [train.py:892] (2/4) Epoch 27, batch 200, loss[loss=0.1388, simple_loss=0.2185, pruned_loss=0.02953, over 19640.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2458, pruned_loss=0.04679, over 2507046.55 frames. ], batch size: 47, lr: 5.85e-03, grad_scale: 16.0
+2023-03-28 21:00:15,426 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48442.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:00:19,996 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48444.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:00:53,395 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48458.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:01:08,515 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48465.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 21:01:09,087 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-03-28 21:01:49,442 INFO [train.py:892] (2/4) Epoch 27, batch 250, loss[loss=0.1572, simple_loss=0.2394, pruned_loss=0.03749, over 19885.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2447, pruned_loss=0.04577, over 2827866.47 frames. ], batch size: 92, lr: 5.85e-03, grad_scale: 16.0
+2023-03-28 21:01:58,412 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48484.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:02:17,785 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48492.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:03:11,040 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48513.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 21:03:17,532 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6730, 2.4781, 2.8527, 2.4867, 2.9194, 2.9059, 3.5582, 3.8023],
+ device='cuda:2'), covar=tensor([0.0652, 0.1888, 0.1629, 0.2357, 0.1765, 0.1674, 0.0627, 0.0594],
+ device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0235, 0.0259, 0.0249, 0.0289, 0.0251, 0.0224, 0.0246],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:03:32,759 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 3.668e+02 4.339e+02 5.007e+02 7.630e+02, threshold=8.678e+02, percent-clipped=0.0
+2023-03-28 21:03:53,493 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3259, 2.9631, 3.3709, 2.9297, 3.5092, 3.5339, 4.1553, 4.5577],
+ device='cuda:2'), covar=tensor([0.0518, 0.1637, 0.1373, 0.2058, 0.1517, 0.1285, 0.0573, 0.0496],
+ device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0235, 0.0259, 0.0249, 0.0289, 0.0251, 0.0224, 0.0246],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:03:56,488 INFO [train.py:892] (2/4) Epoch 27, batch 300, loss[loss=0.155, simple_loss=0.2386, pruned_loss=0.03567, over 19782.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.247, pruned_loss=0.04672, over 3074189.79 frames. ], batch size: 94, lr: 5.85e-03, grad_scale: 16.0
+2023-03-28 21:04:47,307 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3528, 3.2426, 3.6596, 2.8437, 3.6686, 3.0030, 3.3380, 3.5883],
+ device='cuda:2'), covar=tensor([0.0723, 0.0421, 0.0396, 0.0736, 0.0367, 0.0479, 0.0437, 0.0349],
+ device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0081, 0.0078, 0.0106, 0.0076, 0.0078, 0.0075, 0.0068],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 21:05:21,952 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48568.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 21:05:49,782 INFO [train.py:892] (2/4) Epoch 27, batch 350, loss[loss=0.167, simple_loss=0.2403, pruned_loss=0.04683, over 19817.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2467, pruned_loss=0.04677, over 3269063.22 frames. ], batch size: 231, lr: 5.84e-03, grad_scale: 16.0
+2023-03-28 21:07:35,027 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.631e+02 3.784e+02 4.446e+02 5.402e+02 1.015e+03, threshold=8.893e+02, percent-clipped=1.0
+2023-03-28 21:07:58,134 INFO [train.py:892] (2/4) Epoch 27, batch 400, loss[loss=0.1723, simple_loss=0.2556, pruned_loss=0.04451, over 19782.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2492, pruned_loss=0.04753, over 3416393.36 frames. ], batch size: 91, lr: 5.84e-03, grad_scale: 16.0
+2023-03-28 21:10:02,606 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48680.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:10:04,246 INFO [train.py:892] (2/4) Epoch 27, batch 450, loss[loss=0.148, simple_loss=0.2338, pruned_loss=0.03111, over 19732.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2477, pruned_loss=0.04668, over 3535038.65 frames. ], batch size: 118, lr: 5.84e-03, grad_scale: 16.0
+2023-03-28 21:10:31,838 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4069, 2.7566, 2.4285, 1.7964, 2.4573, 2.5261, 2.6114, 2.6120],
+ device='cuda:2'), covar=tensor([0.0368, 0.0266, 0.0299, 0.0660, 0.0370, 0.0286, 0.0251, 0.0267],
+ device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0091, 0.0095, 0.0099, 0.0102, 0.0081, 0.0081, 0.0081],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 21:12:00,737 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6220, 3.4991, 3.4711, 3.2762, 3.5713, 2.7666, 2.9329, 1.6082],
+ device='cuda:2'), covar=tensor([0.0228, 0.0262, 0.0169, 0.0198, 0.0183, 0.1179, 0.0695, 0.1850],
+ device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0142, 0.0111, 0.0132, 0.0118, 0.0133, 0.0140, 0.0126],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:12:01,742 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.486e+02 3.534e+02 4.421e+02 5.220e+02 9.459e+02, threshold=8.842e+02, percent-clipped=1.0
+2023-03-28 21:12:21,476 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48728.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:12:28,175 INFO [train.py:892] (2/4) Epoch 27, batch 500, loss[loss=0.1664, simple_loss=0.2476, pruned_loss=0.04263, over 19711.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2474, pruned_loss=0.04671, over 3627315.66 frames. ], batch size: 60, lr: 5.83e-03, grad_scale: 16.0
+2023-03-28 21:13:22,982 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48753.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:14:22,700 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48776.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:14:30,885 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48779.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:14:34,563 INFO [train.py:892] (2/4) Epoch 27, batch 550, loss[loss=0.1689, simple_loss=0.2428, pruned_loss=0.04749, over 19774.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2486, pruned_loss=0.04769, over 3698151.33 frames. ], batch size: 46, lr: 5.83e-03, grad_scale: 16.0
+2023-03-28 21:14:35,968 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7074, 4.7404, 2.7543, 4.9888, 5.1795, 2.3279, 4.4867, 3.6977],
+ device='cuda:2'), covar=tensor([0.0524, 0.0596, 0.2452, 0.0621, 0.0382, 0.2631, 0.0776, 0.0808],
+ device='cuda:2'), in_proj_covar=tensor([0.0229, 0.0251, 0.0226, 0.0265, 0.0245, 0.0199, 0.0236, 0.0192],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-28 21:15:14,859 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0
+2023-03-28 21:16:20,327 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48821.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:16:22,694 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.382e+02 4.009e+02 4.912e+02 5.862e+02 1.286e+03, threshold=9.824e+02, percent-clipped=2.0
+2023-03-28 21:16:44,187 INFO [train.py:892] (2/4) Epoch 27, batch 600, loss[loss=0.1627, simple_loss=0.2442, pruned_loss=0.04059, over 19751.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2484, pruned_loss=0.04776, over 3754610.48 frames. ], batch size: 110, lr: 5.83e-03, grad_scale: 16.0
+2023-03-28 21:17:02,382 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48838.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:17:37,402 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.42 vs. limit=5.0
+2023-03-28 21:18:12,797 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48868.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 21:18:42,159 INFO [train.py:892] (2/4) Epoch 27, batch 650, loss[loss=0.1557, simple_loss=0.2298, pruned_loss=0.04077, over 19729.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2485, pruned_loss=0.04792, over 3796918.17 frames. ], batch size: 95, lr: 5.83e-03, grad_scale: 16.0
+2023-03-28 21:18:47,000 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48882.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:19:27,548 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48899.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:20:07,024 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48916.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 21:20:16,756 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2624, 2.3609, 1.5463, 2.5527, 2.4125, 2.4765, 2.5574, 2.0684],
+ device='cuda:2'), covar=tensor([0.0682, 0.0739, 0.1310, 0.0639, 0.0656, 0.0553, 0.0591, 0.0929],
+ device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0141, 0.0140, 0.0147, 0.0130, 0.0129, 0.0142, 0.0142],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:20:19,903 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 3.831e+02 4.540e+02 6.098e+02 1.137e+03, threshold=9.081e+02, percent-clipped=2.0
+2023-03-28 21:20:42,900 INFO [train.py:892] (2/4) Epoch 27, batch 700, loss[loss=0.176, simple_loss=0.2532, pruned_loss=0.04938, over 19751.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2496, pruned_loss=0.04814, over 3829843.04 frames. ], batch size: 256, lr: 5.82e-03, grad_scale: 16.0
+2023-03-28 21:21:50,838 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4482, 2.5336, 2.7770, 2.4727, 2.9152, 2.9054, 3.3378, 3.6342],
+ device='cuda:2'), covar=tensor([0.0729, 0.1675, 0.1636, 0.2195, 0.1698, 0.1522, 0.0736, 0.0665],
+ device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0238, 0.0262, 0.0251, 0.0292, 0.0254, 0.0227, 0.0249],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:22:43,485 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9832, 2.8442, 3.1038, 2.4938, 3.1715, 2.7194, 2.9463, 2.9924],
+ device='cuda:2'), covar=tensor([0.0496, 0.0460, 0.0477, 0.0760, 0.0366, 0.0432, 0.0500, 0.0421],
+ device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0082, 0.0079, 0.0107, 0.0076, 0.0078, 0.0076, 0.0068],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 21:22:48,147 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48980.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:22:49,436 INFO [train.py:892] (2/4) Epoch 27, batch 750, loss[loss=0.1729, simple_loss=0.2608, pruned_loss=0.0425, over 19529.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.248, pruned_loss=0.04737, over 3857498.99 frames. ], batch size: 54, lr: 5.82e-03, grad_scale: 16.0
+2023-03-28 21:23:37,708 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.81 vs. limit=5.0
+2023-03-28 21:23:40,044 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0
+2023-03-28 21:24:30,753 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 3.742e+02 4.626e+02 5.475e+02 1.269e+03, threshold=9.252e+02, percent-clipped=2.0
+2023-03-28 21:24:49,079 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49028.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:24:56,450 INFO [train.py:892] (2/4) Epoch 27, batch 800, loss[loss=0.153, simple_loss=0.2362, pruned_loss=0.03484, over 19592.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2471, pruned_loss=0.04667, over 3878459.58 frames. ], batch size: 44, lr: 5.82e-03, grad_scale: 16.0
+2023-03-28 21:25:52,130 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49053.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:26:58,717 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49079.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:27:02,197 INFO [train.py:892] (2/4) Epoch 27, batch 850, loss[loss=0.1959, simple_loss=0.2697, pruned_loss=0.0611, over 19775.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2463, pruned_loss=0.04611, over 3894742.90 frames. ], batch size: 280, lr: 5.81e-03, grad_scale: 16.0
+2023-03-28 21:27:51,030 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49101.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:28:42,758 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.493e+02 3.766e+02 4.379e+02 5.390e+02 1.134e+03, threshold=8.757e+02, percent-clipped=1.0
+2023-03-28 21:28:56,253 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49127.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:29:01,318 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5126, 4.2464, 4.2790, 4.0884, 4.4652, 3.2405, 3.8128, 2.3708],
+ device='cuda:2'), covar=tensor([0.0179, 0.0206, 0.0143, 0.0172, 0.0139, 0.0835, 0.0656, 0.1361],
+ device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0142, 0.0111, 0.0131, 0.0117, 0.0132, 0.0139, 0.0126],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:29:04,462 INFO [train.py:892] (2/4) Epoch 27, batch 900, loss[loss=0.1691, simple_loss=0.2336, pruned_loss=0.0523, over 19764.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2449, pruned_loss=0.04561, over 3908250.26 frames. ], batch size: 122, lr: 5.81e-03, grad_scale: 16.0
+2023-03-28 21:31:02,107 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49177.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:31:10,473 INFO [train.py:892] (2/4) Epoch 27, batch 950, loss[loss=0.185, simple_loss=0.2579, pruned_loss=0.05603, over 19881.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2447, pruned_loss=0.04513, over 3918462.09 frames. ], batch size: 97, lr: 5.81e-03, grad_scale: 16.0
+2023-03-28 21:31:41,687 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49194.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:32:49,464 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.549e+02 3.706e+02 4.158e+02 5.021e+02 8.010e+02, threshold=8.316e+02, percent-clipped=0.0
+2023-03-28 21:33:13,379 INFO [train.py:892] (2/4) Epoch 27, batch 1000, loss[loss=0.1857, simple_loss=0.2612, pruned_loss=0.05509, over 19639.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2455, pruned_loss=0.04563, over 3923593.12 frames. ], batch size: 299, lr: 5.81e-03, grad_scale: 16.0
+2023-03-28 21:33:25,892 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49236.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:35:10,511 INFO [train.py:892] (2/4) Epoch 27, batch 1050, loss[loss=0.1695, simple_loss=0.2498, pruned_loss=0.04461, over 19821.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2459, pruned_loss=0.04564, over 3929011.10 frames. ], batch size: 57, lr: 5.80e-03, grad_scale: 16.0
+2023-03-28 21:35:54,855 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49297.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:36:14,162 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.76 vs. limit=5.0
+2023-03-28 21:36:15,816 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5043, 3.2515, 3.3200, 3.5166, 3.3566, 3.4058, 3.6144, 3.7676],
+ device='cuda:2'), covar=tensor([0.0719, 0.0509, 0.0607, 0.0450, 0.0781, 0.0719, 0.0519, 0.0367],
+ device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0172, 0.0196, 0.0170, 0.0169, 0.0153, 0.0146, 0.0192],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-28 21:36:54,128 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.170e+02 3.715e+02 4.308e+02 4.982e+02 8.833e+02, threshold=8.615e+02, percent-clipped=1.0
+2023-03-28 21:37:20,661 INFO [train.py:892] (2/4) Epoch 27, batch 1100, loss[loss=0.1706, simple_loss=0.2417, pruned_loss=0.04978, over 19764.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2465, pruned_loss=0.04614, over 3933978.48 frames. ], batch size: 244, lr: 5.80e-03, grad_scale: 16.0
+2023-03-28 21:38:27,615 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2768, 4.7857, 4.8685, 4.6565, 5.1604, 3.3095, 4.1934, 2.7171],
+ device='cuda:2'), covar=tensor([0.0149, 0.0197, 0.0124, 0.0169, 0.0130, 0.0905, 0.0874, 0.1300],
+ device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0141, 0.0110, 0.0130, 0.0116, 0.0131, 0.0138, 0.0124],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:39:01,509 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1073, 3.4461, 3.3937, 4.1157, 2.7777, 3.3743, 2.7879, 2.5105],
+ device='cuda:2'), covar=tensor([0.0546, 0.1994, 0.1172, 0.0462, 0.2096, 0.0830, 0.1383, 0.1842],
+ device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0329, 0.0244, 0.0195, 0.0242, 0.0203, 0.0212, 0.0213],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 21:39:23,290 INFO [train.py:892] (2/4) Epoch 27, batch 1150, loss[loss=0.181, simple_loss=0.2537, pruned_loss=0.05411, over 19836.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2471, pruned_loss=0.04679, over 3937328.39 frames. ], batch size: 204, lr: 5.80e-03, grad_scale: 16.0
+2023-03-28 21:40:57,342 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4861, 4.9012, 5.0502, 4.8249, 5.3600, 3.3347, 4.2019, 2.6608],
+ device='cuda:2'), covar=tensor([0.0151, 0.0179, 0.0126, 0.0156, 0.0126, 0.0886, 0.0919, 0.1389],
+ device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0142, 0.0111, 0.0130, 0.0116, 0.0132, 0.0138, 0.0124],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:41:04,927 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.904e+02 3.848e+02 4.568e+02 5.710e+02 9.674e+02, threshold=9.135e+02, percent-clipped=2.0
+2023-03-28 21:41:06,042 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49422.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:41:28,123 INFO [train.py:892] (2/4) Epoch 27, batch 1200, loss[loss=0.1625, simple_loss=0.2418, pruned_loss=0.04164, over 19886.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2469, pruned_loss=0.0469, over 3940869.52 frames. ], batch size: 52, lr: 5.79e-03, grad_scale: 16.0
+2023-03-28 21:41:31,359 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49432.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 21:41:45,789 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8834, 3.0692, 3.1174, 3.0793, 2.8518, 3.0883, 2.7815, 3.0766],
+ device='cuda:2'), covar=tensor([0.0259, 0.0341, 0.0258, 0.0220, 0.0364, 0.0283, 0.0340, 0.0355],
+ device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0074, 0.0077, 0.0071, 0.0085, 0.0079, 0.0095, 0.0068],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 21:43:28,871 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49477.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:43:35,934 INFO [train.py:892] (2/4) Epoch 27, batch 1250, loss[loss=0.1661, simple_loss=0.2558, pruned_loss=0.03816, over 19667.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2464, pruned_loss=0.04661, over 3942969.75 frames. ], batch size: 55, lr: 5.79e-03, grad_scale: 16.0
+2023-03-28 21:43:41,685 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49483.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:44:06,461 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49493.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 21:44:08,879 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49494.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:45:21,342 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.439e+02 3.880e+02 4.551e+02 5.249e+02 9.846e+02, threshold=9.103e+02, percent-clipped=1.0
+2023-03-28 21:45:29,393 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49525.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:45:44,585 INFO [train.py:892] (2/4) Epoch 27, batch 1300, loss[loss=0.1734, simple_loss=0.2655, pruned_loss=0.04066, over 19698.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2472, pruned_loss=0.04689, over 3945140.32 frames. ], batch size: 56, lr: 5.79e-03, grad_scale: 16.0
+2023-03-28 21:46:14,124 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49542.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:47:50,723 INFO [train.py:892] (2/4) Epoch 27, batch 1350, loss[loss=0.1489, simple_loss=0.2225, pruned_loss=0.0377, over 19866.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2469, pruned_loss=0.04644, over 3947134.55 frames. ], batch size: 154, lr: 5.78e-03, grad_scale: 16.0
+2023-03-28 21:48:20,099 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49592.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:49:30,804 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.075e+02 4.881e+02 5.911e+02 1.218e+03, threshold=9.761e+02, percent-clipped=5.0
+2023-03-28 21:49:51,502 INFO [train.py:892] (2/4) Epoch 27, batch 1400, loss[loss=0.1758, simple_loss=0.2581, pruned_loss=0.0467, over 19671.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2478, pruned_loss=0.04654, over 3944707.48 frames. ], batch size: 55, lr: 5.78e-03, grad_scale: 16.0
+2023-03-28 21:49:54,071 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4768, 5.8177, 5.8467, 5.6667, 5.5118, 5.7758, 5.1848, 5.2268],
+ device='cuda:2'), covar=tensor([0.0383, 0.0392, 0.0415, 0.0406, 0.0510, 0.0512, 0.0653, 0.0936],
+ device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0271, 0.0287, 0.0251, 0.0256, 0.0239, 0.0257, 0.0304],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:50:02,264 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0267, 4.6125, 4.7167, 4.4775, 4.9400, 3.1997, 4.0315, 2.7222],
+ device='cuda:2'), covar=tensor([0.0178, 0.0224, 0.0142, 0.0170, 0.0134, 0.0964, 0.0846, 0.1328],
+ device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0143, 0.0112, 0.0132, 0.0117, 0.0133, 0.0140, 0.0126],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:51:48,453 INFO [train.py:892] (2/4) Epoch 27, batch 1450, loss[loss=0.1589, simple_loss=0.229, pruned_loss=0.04435, over 19885.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2474, pruned_loss=0.04637, over 3947021.05 frames. ], batch size: 77, lr: 5.78e-03, grad_scale: 16.0
+2023-03-28 21:51:49,510 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2013, 3.0314, 3.0520, 3.2388, 3.1396, 3.0709, 3.3300, 3.5245],
+ device='cuda:2'), covar=tensor([0.0743, 0.0501, 0.0575, 0.0421, 0.0718, 0.0804, 0.0511, 0.0342],
+ device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0173, 0.0196, 0.0170, 0.0170, 0.0154, 0.0146, 0.0193],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-28 21:53:31,584 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.941e+02 4.673e+02 5.511e+02 7.393e+02, threshold=9.346e+02, percent-clipped=0.0
+2023-03-28 21:53:39,487 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0
+2023-03-28 21:53:54,196 INFO [train.py:892] (2/4) Epoch 27, batch 1500, loss[loss=0.1679, simple_loss=0.2392, pruned_loss=0.04832, over 19736.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2484, pruned_loss=0.04713, over 3947065.95 frames. ], batch size: 62, lr: 5.78e-03, grad_scale: 16.0
+2023-03-28 21:55:22,626 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49766.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:55:51,245 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49778.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:55:57,474 INFO [train.py:892] (2/4) Epoch 27, batch 1550, loss[loss=0.1606, simple_loss=0.2282, pruned_loss=0.04645, over 19743.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2471, pruned_loss=0.04696, over 3949383.63 frames. ], batch size: 134, lr: 5.77e-03, grad_scale: 16.0
+2023-03-28 21:56:16,164 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49788.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 21:56:51,582 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8045, 3.6865, 4.0727, 3.6980, 3.5220, 3.9628, 3.8187, 4.1315],
+ device='cuda:2'), covar=tensor([0.0812, 0.0378, 0.0374, 0.0439, 0.1179, 0.0571, 0.0469, 0.0365],
+ device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0221, 0.0219, 0.0233, 0.0207, 0.0236, 0.0228, 0.0213],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 21:57:39,880 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.487e+02 3.954e+02 4.680e+02 6.114e+02 1.039e+03, threshold=9.360e+02, percent-clipped=2.0
+2023-03-28 21:57:53,523 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49827.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 21:58:01,502 INFO [train.py:892] (2/4) Epoch 27, batch 1600, loss[loss=0.1654, simple_loss=0.2538, pruned_loss=0.0385, over 19717.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2467, pruned_loss=0.0465, over 3948786.38 frames. ], batch size: 61, lr: 5.77e-03, grad_scale: 16.0
+2023-03-28 22:00:08,564 INFO [train.py:892] (2/4) Epoch 27, batch 1650, loss[loss=0.1886, simple_loss=0.2693, pruned_loss=0.05401, over 19689.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2467, pruned_loss=0.04673, over 3949248.24 frames. ], batch size: 325, lr: 5.77e-03, grad_scale: 16.0
+2023-03-28 22:00:36,688 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49892.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:01:55,266 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.537e+02 3.614e+02 4.321e+02 5.513e+02 1.291e+03, threshold=8.641e+02, percent-clipped=1.0
+2023-03-28 22:02:02,116 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0
+2023-03-28 22:02:20,724 INFO [train.py:892] (2/4) Epoch 27, batch 1700, loss[loss=0.1668, simple_loss=0.2353, pruned_loss=0.04915, over 19846.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2463, pruned_loss=0.0465, over 3949686.27 frames. ], batch size: 137, lr: 5.76e-03, grad_scale: 16.0
+2023-03-28 22:02:44,466 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49940.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:04:24,949 INFO [train.py:892] (2/4) Epoch 27, batch 1750, loss[loss=0.1566, simple_loss=0.2409, pruned_loss=0.03618, over 19734.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2459, pruned_loss=0.04656, over 3950437.51 frames. ], batch size: 51, lr: 5.76e-03, grad_scale: 16.0
+2023-03-28 22:05:48,398 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0
+2023-03-28 22:05:54,957 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.637e+02 3.767e+02 4.699e+02 5.313e+02 7.903e+02, threshold=9.399e+02, percent-clipped=0.0
+2023-03-28 22:06:04,126 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6235, 2.7266, 4.0196, 3.0826, 3.4032, 3.1604, 2.3046, 2.3702],
+ device='cuda:2'), covar=tensor([0.1088, 0.3303, 0.0616, 0.1052, 0.1696, 0.1550, 0.2529, 0.2871],
+ device='cuda:2'), in_proj_covar=tensor([0.0346, 0.0379, 0.0340, 0.0277, 0.0366, 0.0363, 0.0362, 0.0332],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-28 22:06:12,889 INFO [train.py:892] (2/4) Epoch 27, batch 1800, loss[loss=0.1808, simple_loss=0.2523, pruned_loss=0.05467, over 19808.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2462, pruned_loss=0.04689, over 3951211.72 frames. ], batch size: 181, lr: 5.76e-03, grad_scale: 16.0
+2023-03-28 22:07:55,168 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50078.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:08:00,317 INFO [train.py:892] (2/4) Epoch 27, batch 1850, loss[loss=0.1749, simple_loss=0.2504, pruned_loss=0.04973, over 19818.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2488, pruned_loss=0.04715, over 3946608.68 frames. ], batch size: 57, lr: 5.76e-03, grad_scale: 16.0
+2023-03-28 22:09:11,381 INFO [train.py:892] (2/4) Epoch 28, batch 0, loss[loss=0.1495, simple_loss=0.2236, pruned_loss=0.0377, over 19857.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2236, pruned_loss=0.0377, over 19857.00 frames. ], batch size: 106, lr: 5.65e-03, grad_scale: 16.0
+2023-03-28 22:09:11,382 INFO [train.py:917] (2/4) Computing validation loss
+2023-03-28 22:09:45,364 INFO [train.py:926] (2/4) Epoch 28, validation: loss=0.1765, simple_loss=0.2481, pruned_loss=0.05251, over 2883724.00 frames.
+2023-03-28 22:09:45,365 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB
+2023-03-28 22:09:51,455 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50088.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 22:10:30,918 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1627, 2.4631, 3.3229, 2.7417, 2.9311, 2.8498, 2.0370, 2.2318],
+ device='cuda:2'), covar=tensor([0.1188, 0.2899, 0.0738, 0.1094, 0.1688, 0.1532, 0.2565, 0.2556],
+ device='cuda:2'), in_proj_covar=tensor([0.0347, 0.0380, 0.0341, 0.0277, 0.0366, 0.0364, 0.0363, 0.0333],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-28 22:11:19,962 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.385e+02 4.071e+02 4.813e+02 5.962e+02 1.168e+03, threshold=9.627e+02, percent-clipped=3.0
+2023-03-28 22:11:21,123 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50122.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:11:32,563 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50126.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:11:37,032 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50128.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:11:58,850 INFO [train.py:892] (2/4) Epoch 28, batch 50, loss[loss=0.1723, simple_loss=0.2425, pruned_loss=0.05105, over 19760.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2369, pruned_loss=0.0424, over 891970.92 frames. ], batch size: 253, lr: 5.65e-03, grad_scale: 16.0
+2023-03-28 22:11:59,929 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50136.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 22:14:00,295 INFO [train.py:892] (2/4) Epoch 28, batch 100, loss[loss=0.1544, simple_loss=0.2278, pruned_loss=0.04055, over 19796.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2411, pruned_loss=0.04488, over 1570077.98 frames. ], batch size: 45, lr: 5.64e-03, grad_scale: 32.0
+2023-03-28 22:14:01,361 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4092, 2.7677, 2.5114, 1.9512, 2.6142, 2.6889, 2.6763, 2.7278],
+ device='cuda:2'), covar=tensor([0.0365, 0.0345, 0.0321, 0.0628, 0.0341, 0.0304, 0.0312, 0.0272],
+ device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0092, 0.0096, 0.0100, 0.0102, 0.0083, 0.0081, 0.0083],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 22:14:07,927 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50189.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:14:18,322 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8736, 5.0660, 5.2843, 5.0670, 5.1865, 4.8931, 4.9849, 4.8303],
+ device='cuda:2'), covar=tensor([0.1408, 0.1291, 0.0978, 0.1209, 0.0698, 0.0838, 0.1852, 0.1928],
+ device='cuda:2'), in_proj_covar=tensor([0.0292, 0.0323, 0.0367, 0.0295, 0.0273, 0.0272, 0.0354, 0.0391],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-28 22:14:24,086 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50197.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 22:14:26,135 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6249, 3.3457, 3.8517, 2.9731, 3.9884, 3.2348, 3.3888, 3.8503],
+ device='cuda:2'), covar=tensor([0.0565, 0.0400, 0.0424, 0.0711, 0.0315, 0.0391, 0.0492, 0.0417],
+ device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0082, 0.0081, 0.0108, 0.0076, 0.0079, 0.0076, 0.0069],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 22:15:23,505 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.658e+02 4.112e+02 4.923e+02 6.330e+02 1.508e+03, threshold=9.845e+02, percent-clipped=2.0
+2023-03-28 22:15:54,990 INFO [train.py:892] (2/4) Epoch 28, batch 150, loss[loss=0.1675, simple_loss=0.2402, pruned_loss=0.04741, over 19774.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2415, pruned_loss=0.04459, over 2097708.63 frames. ], batch size: 163, lr: 5.64e-03, grad_scale: 32.0
+2023-03-28 22:16:49,997 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50258.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 22:17:57,297 INFO [train.py:892] (2/4) Epoch 28, batch 200, loss[loss=0.1503, simple_loss=0.2324, pruned_loss=0.03412, over 19829.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2464, pruned_loss=0.04671, over 2506936.95 frames. ], batch size: 43, lr: 5.64e-03, grad_scale: 32.0
+2023-03-28 22:18:22,361 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6704, 3.8964, 4.0391, 4.8106, 3.0330, 3.5540, 3.1499, 2.9639],
+ device='cuda:2'), covar=tensor([0.0457, 0.1987, 0.0894, 0.0348, 0.2227, 0.1032, 0.1193, 0.1672],
+ device='cuda:2'), in_proj_covar=tensor([0.0239, 0.0332, 0.0245, 0.0197, 0.0247, 0.0204, 0.0214, 0.0216],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 22:19:26,392 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.059e+02 4.750e+02 5.729e+02 1.085e+03, threshold=9.500e+02, percent-clipped=1.0
+2023-03-28 22:19:44,096 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7130, 2.0739, 2.1970, 1.9995, 2.2652, 2.3683, 2.5893, 2.7667],
+ device='cuda:2'), covar=tensor([0.0834, 0.1533, 0.1681, 0.2082, 0.1205, 0.1288, 0.0819, 0.0655],
+ device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0240, 0.0265, 0.0255, 0.0295, 0.0255, 0.0231, 0.0253],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 22:20:01,047 INFO [train.py:892] (2/4) Epoch 28, batch 250, loss[loss=0.1556, simple_loss=0.2308, pruned_loss=0.04017, over 19884.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2467, pruned_loss=0.04577, over 2825281.61 frames. ], batch size: 92, lr: 5.64e-03, grad_scale: 16.0
+2023-03-28 22:21:44,539 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7868, 2.6504, 4.6802, 4.1068, 4.4680, 4.6227, 4.5232, 4.2988],
+ device='cuda:2'), covar=tensor([0.0425, 0.0974, 0.0094, 0.0766, 0.0126, 0.0202, 0.0166, 0.0155],
+ device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0102, 0.0085, 0.0152, 0.0082, 0.0096, 0.0088, 0.0084],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 22:22:07,540 INFO [train.py:892] (2/4) Epoch 28, batch 300, loss[loss=0.1839, simple_loss=0.2371, pruned_loss=0.06534, over 19858.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2468, pruned_loss=0.04628, over 3074739.24 frames. ], batch size: 136, lr: 5.63e-03, grad_scale: 16.0
+2023-03-28 22:23:39,898 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50422.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:23:40,969 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.722e+02 4.064e+02 4.778e+02 6.064e+02 1.211e+03, threshold=9.555e+02, percent-clipped=3.0
+2023-03-28 22:24:15,478 INFO [train.py:892] (2/4) Epoch 28, batch 350, loss[loss=0.142, simple_loss=0.2183, pruned_loss=0.03281, over 19751.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.245, pruned_loss=0.04534, over 3269303.16 frames. ], batch size: 95, lr: 5.63e-03, grad_scale: 16.0
+2023-03-28 22:25:39,669 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50470.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:26:16,880 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50484.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:26:20,286 INFO [train.py:892] (2/4) Epoch 28, batch 400, loss[loss=0.1555, simple_loss=0.2293, pruned_loss=0.04081, over 19793.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2463, pruned_loss=0.04569, over 3418872.46 frames. ], batch size: 193, lr: 5.63e-03, grad_scale: 16.0
+2023-03-28 22:27:38,184 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8622, 4.7337, 5.2794, 4.7471, 4.2404, 4.9895, 4.8794, 5.3985],
+ device='cuda:2'), covar=tensor([0.0830, 0.0385, 0.0326, 0.0363, 0.0795, 0.0460, 0.0458, 0.0313],
+ device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0220, 0.0218, 0.0231, 0.0205, 0.0234, 0.0227, 0.0211],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-28 22:27:49,849 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.616e+02 4.023e+02 4.684e+02 5.835e+02 1.086e+03, threshold=9.368e+02, percent-clipped=2.0
+2023-03-28 22:28:16,113 INFO [train.py:892] (2/4) Epoch 28, batch 450, loss[loss=0.1681, simple_loss=0.2516, pruned_loss=0.0423, over 19600.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2467, pruned_loss=0.04612, over 3536585.74 frames. ], batch size: 50, lr: 5.63e-03, grad_scale: 16.0
+2023-03-28 22:28:42,888 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50546.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:29:01,614 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50553.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 22:30:25,695 INFO [train.py:892] (2/4) Epoch 28, batch 500, loss[loss=0.1788, simple_loss=0.2653, pruned_loss=0.04614, over 19715.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2454, pruned_loss=0.04588, over 3628781.22 frames. ], batch size: 78, lr: 5.62e-03, grad_scale: 16.0
+2023-03-28 22:31:18,892 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50607.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 22:31:21,550 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8615, 3.2052, 3.3869, 3.8130, 2.6123, 3.1694, 2.5750, 2.5003],
+ device='cuda:2'), covar=tensor([0.0544, 0.1800, 0.0973, 0.0419, 0.2005, 0.0811, 0.1272, 0.1688],
+ device='cuda:2'), in_proj_covar=tensor([0.0239, 0.0333, 0.0246, 0.0197, 0.0246, 0.0205, 0.0215, 0.0215],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-28 22:31:59,939 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.484e+02 3.842e+02 4.586e+02 5.688e+02 1.000e+03, threshold=9.173e+02, percent-clipped=1.0
+2023-03-28 22:32:30,996 INFO [train.py:892] (2/4) Epoch 28, batch 550, loss[loss=0.1538, simple_loss=0.2265, pruned_loss=0.04055, over 19825.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2459, pruned_loss=0.04625, over 3700242.68 frames. ], batch size: 177, lr: 5.62e-03, grad_scale: 16.0
+2023-03-28 22:34:36,327 INFO [train.py:892] (2/4) Epoch 28, batch 600, loss[loss=0.2032, simple_loss=0.2779, pruned_loss=0.06425, over 19711.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.246, pruned_loss=0.04669, over 3755171.12 frames.
], batch size: 315, lr: 5.62e-03, grad_scale: 16.0 +2023-03-28 22:34:42,514 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7843, 4.8117, 5.3782, 4.8627, 4.3730, 5.0713, 5.0456, 5.5492], + device='cuda:2'), covar=tensor([0.1151, 0.0439, 0.0448, 0.0419, 0.0719, 0.0474, 0.0455, 0.0377], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0220, 0.0218, 0.0232, 0.0205, 0.0235, 0.0228, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 22:36:08,582 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.507e+02 3.618e+02 4.238e+02 5.077e+02 1.172e+03, threshold=8.476e+02, percent-clipped=2.0 +2023-03-28 22:36:42,001 INFO [train.py:892] (2/4) Epoch 28, batch 650, loss[loss=0.1746, simple_loss=0.257, pruned_loss=0.04607, over 19726.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2469, pruned_loss=0.04696, over 3797481.01 frames. ], batch size: 76, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:38:38,684 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50782.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:38:43,243 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50784.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:38:46,699 INFO [train.py:892] (2/4) Epoch 28, batch 700, loss[loss=0.1739, simple_loss=0.2456, pruned_loss=0.0511, over 19800.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2459, pruned_loss=0.04654, over 3832599.42 frames. ], batch size: 200, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:39:45,227 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50808.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:40:20,259 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.436e+02 3.963e+02 4.473e+02 5.641e+02 1.140e+03, threshold=8.946e+02, percent-clipped=3.0 +2023-03-28 22:40:43,853 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50832.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:40:51,141 INFO [train.py:892] (2/4) Epoch 28, batch 750, loss[loss=0.1829, simple_loss=0.2629, pruned_loss=0.05146, over 19768.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2468, pruned_loss=0.04689, over 3857629.38 frames. ], batch size: 226, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:41:08,931 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50843.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:41:33,238 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50853.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:42:13,672 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50869.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:42:55,256 INFO [train.py:892] (2/4) Epoch 28, batch 800, loss[loss=0.1648, simple_loss=0.2298, pruned_loss=0.04989, over 19829.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2467, pruned_loss=0.04692, over 3877491.63 frames. 
], batch size: 147, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:43:31,307 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50901.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:43:33,379 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50902.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:44:24,105 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.705e+02 4.018e+02 4.822e+02 5.652e+02 1.134e+03, threshold=9.644e+02, percent-clipped=5.0 +2023-03-28 22:44:40,883 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-28 22:44:54,706 INFO [train.py:892] (2/4) Epoch 28, batch 850, loss[loss=0.1413, simple_loss=0.2138, pruned_loss=0.03438, over 19846.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2466, pruned_loss=0.04654, over 3894046.22 frames. ], batch size: 142, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:45:20,976 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50946.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:46:57,585 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4049, 3.0199, 3.3982, 2.9309, 3.6691, 3.6582, 4.2506, 4.7075], + device='cuda:2'), covar=tensor([0.0545, 0.1613, 0.1461, 0.2132, 0.1706, 0.1354, 0.0588, 0.0474], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0239, 0.0263, 0.0254, 0.0295, 0.0254, 0.0229, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 22:46:58,572 INFO [train.py:892] (2/4) Epoch 28, batch 900, loss[loss=0.1769, simple_loss=0.2538, pruned_loss=0.04997, over 19734.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2452, pruned_loss=0.04587, over 3905928.92 frames. ], batch size: 80, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:47:51,320 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51007.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:48:31,170 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 3.452e+02 4.322e+02 5.014e+02 9.389e+02, threshold=8.643e+02, percent-clipped=0.0 +2023-03-28 22:49:04,815 INFO [train.py:892] (2/4) Epoch 28, batch 950, loss[loss=0.1446, simple_loss=0.2236, pruned_loss=0.03286, over 19728.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2454, pruned_loss=0.04569, over 3915580.09 frames. ], batch size: 44, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:49:50,098 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1485, 2.2478, 2.2900, 2.1778, 2.2287, 2.2748, 2.1713, 2.1753], + device='cuda:2'), covar=tensor([0.0329, 0.0296, 0.0333, 0.0261, 0.0359, 0.0303, 0.0392, 0.0369], + device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0074, 0.0077, 0.0071, 0.0085, 0.0078, 0.0095, 0.0068], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 22:50:10,911 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51062.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:51:04,932 INFO [train.py:892] (2/4) Epoch 28, batch 1000, loss[loss=0.2127, simple_loss=0.2841, pruned_loss=0.07071, over 19649.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2453, pruned_loss=0.04588, over 3923447.16 frames. 
], batch size: 343, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:52:34,542 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 3.916e+02 4.760e+02 5.853e+02 1.174e+03, threshold=9.520e+02, percent-clipped=7.0 +2023-03-28 22:52:35,951 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51123.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:52:54,318 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8017, 3.2693, 3.7195, 3.2522, 3.9950, 4.0161, 4.7107, 5.1743], + device='cuda:2'), covar=tensor([0.0493, 0.1627, 0.1350, 0.2083, 0.1661, 0.1338, 0.0518, 0.0393], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0239, 0.0263, 0.0252, 0.0294, 0.0252, 0.0228, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 22:53:05,988 INFO [train.py:892] (2/4) Epoch 28, batch 1050, loss[loss=0.1861, simple_loss=0.2605, pruned_loss=0.0558, over 19648.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.246, pruned_loss=0.0461, over 3927640.95 frames. ], batch size: 79, lr: 5.59e-03, grad_scale: 16.0 +2023-03-28 22:53:13,362 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51138.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:54:08,779 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51164.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:54:25,346 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51172.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:55:00,672 INFO [train.py:892] (2/4) Epoch 28, batch 1100, loss[loss=0.1715, simple_loss=0.251, pruned_loss=0.04594, over 19837.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2461, pruned_loss=0.04604, over 3933825.48 frames. ], batch size: 75, lr: 5.59e-03, grad_scale: 16.0 +2023-03-28 22:55:40,107 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5373, 4.8057, 4.8889, 4.7571, 4.4632, 4.8329, 4.3777, 4.4091], + device='cuda:2'), covar=tensor([0.0521, 0.0494, 0.0496, 0.0460, 0.0649, 0.0531, 0.0711, 0.0979], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0281, 0.0295, 0.0257, 0.0261, 0.0246, 0.0263, 0.0312], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 22:55:40,144 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51202.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:55:45,463 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0459, 2.3929, 3.0633, 3.2737, 3.7022, 4.1817, 4.0325, 4.0525], + device='cuda:2'), covar=tensor([0.0912, 0.1912, 0.1386, 0.0661, 0.0442, 0.0264, 0.0348, 0.0421], + device='cuda:2'), in_proj_covar=tensor([0.0156, 0.0169, 0.0176, 0.0149, 0.0133, 0.0128, 0.0120, 0.0113], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 22:56:34,345 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.745e+02 3.704e+02 4.632e+02 5.921e+02 1.286e+03, threshold=9.265e+02, percent-clipped=1.0 +2023-03-28 22:56:57,492 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51233.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:57:02,680 INFO [train.py:892] (2/4) Epoch 28, batch 1150, loss[loss=0.232, simple_loss=0.3031, pruned_loss=0.08039, over 19622.00 frames. 
], tot_loss[loss=0.1715, simple_loss=0.2487, pruned_loss=0.04715, over 3933939.70 frames. ], batch size: 387, lr: 5.59e-03, grad_scale: 16.0 +2023-03-28 22:57:35,100 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3376, 2.6252, 4.3222, 3.8512, 4.2265, 4.3567, 4.2110, 4.1290], + device='cuda:2'), covar=tensor([0.0576, 0.1004, 0.0110, 0.0774, 0.0152, 0.0216, 0.0163, 0.0158], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0104, 0.0087, 0.0155, 0.0085, 0.0098, 0.0090, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 22:57:43,004 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51250.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:59:16,797 INFO [train.py:892] (2/4) Epoch 28, batch 1200, loss[loss=0.1521, simple_loss=0.2223, pruned_loss=0.04093, over 19800.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2481, pruned_loss=0.04663, over 3935739.05 frames. ], batch size: 150, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 22:59:56,095 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51302.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:00:45,072 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.551e+02 3.659e+02 4.255e+02 5.414e+02 8.940e+02, threshold=8.510e+02, percent-clipped=0.0 +2023-03-28 23:01:16,039 INFO [train.py:892] (2/4) Epoch 28, batch 1250, loss[loss=0.1497, simple_loss=0.2289, pruned_loss=0.03527, over 19846.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2469, pruned_loss=0.04628, over 3939299.75 frames. ], batch size: 118, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 23:02:00,743 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6226, 2.6023, 2.6820, 2.1647, 2.8167, 2.3680, 2.7721, 2.6929], + device='cuda:2'), covar=tensor([0.0451, 0.0495, 0.0545, 0.0793, 0.0375, 0.0445, 0.0421, 0.0341], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0082, 0.0081, 0.0108, 0.0076, 0.0079, 0.0076, 0.0069], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 23:03:06,428 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51382.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:03:13,705 INFO [train.py:892] (2/4) Epoch 28, batch 1300, loss[loss=0.1717, simple_loss=0.2587, pruned_loss=0.04239, over 19609.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2461, pruned_loss=0.04583, over 3942520.58 frames. 
], batch size: 51, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 23:03:31,649 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51392.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:03:31,741 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5185, 2.5471, 2.7613, 2.5445, 2.4883, 2.7357, 2.5102, 2.7140], + device='cuda:2'), covar=tensor([0.0292, 0.0314, 0.0276, 0.0313, 0.0410, 0.0269, 0.0394, 0.0272], + device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0075, 0.0078, 0.0072, 0.0086, 0.0078, 0.0095, 0.0068], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 23:04:35,498 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51418.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:04:44,783 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5479, 4.5653, 2.7594, 4.8892, 5.0561, 2.1685, 4.1533, 3.6606], + device='cuda:2'), covar=tensor([0.0523, 0.0600, 0.2596, 0.0523, 0.0385, 0.2751, 0.0899, 0.0796], + device='cuda:2'), in_proj_covar=tensor([0.0231, 0.0255, 0.0230, 0.0271, 0.0251, 0.0203, 0.0240, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 23:04:45,644 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.672e+02 4.378e+02 5.169e+02 6.239e+02 1.127e+03, threshold=1.034e+03, percent-clipped=3.0 +2023-03-28 23:05:20,063 INFO [train.py:892] (2/4) Epoch 28, batch 1350, loss[loss=0.1708, simple_loss=0.2449, pruned_loss=0.04839, over 19804.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2451, pruned_loss=0.04555, over 3944607.95 frames. ], batch size: 74, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 23:05:25,333 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51438.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:05:36,761 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51443.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:05:59,713 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51453.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:06:25,774 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51464.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 23:07:22,866 INFO [train.py:892] (2/4) Epoch 28, batch 1400, loss[loss=0.1588, simple_loss=0.2302, pruned_loss=0.04371, over 19863.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2438, pruned_loss=0.04504, over 3947483.33 frames. 
], batch size: 136, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:07:23,984 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51486.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:07:44,143 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51495.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:08:23,159 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51512.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 23:08:25,885 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6313, 3.0117, 3.0580, 3.5631, 2.5603, 3.0780, 2.3955, 2.3233], + device='cuda:2'), covar=tensor([0.0643, 0.1781, 0.1147, 0.0474, 0.2045, 0.0811, 0.1428, 0.1800], + device='cuda:2'), in_proj_covar=tensor([0.0241, 0.0332, 0.0246, 0.0198, 0.0246, 0.0205, 0.0215, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 23:08:48,561 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.195e+02 3.889e+02 4.620e+02 5.536e+02 9.901e+02, threshold=9.240e+02, percent-clipped=0.0 +2023-03-28 23:09:01,427 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51528.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 23:09:20,613 INFO [train.py:892] (2/4) Epoch 28, batch 1450, loss[loss=0.1543, simple_loss=0.2309, pruned_loss=0.03882, over 19873.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2445, pruned_loss=0.0452, over 3946195.61 frames. ], batch size: 134, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:09:30,171 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8239, 4.6537, 5.2610, 4.7070, 4.2874, 5.0333, 4.8802, 5.4106], + device='cuda:2'), covar=tensor([0.0910, 0.0413, 0.0376, 0.0418, 0.0758, 0.0513, 0.0446, 0.0333], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0220, 0.0220, 0.0233, 0.0205, 0.0238, 0.0231, 0.0213], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 23:10:02,916 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-28 23:10:13,921 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51556.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:10:53,453 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-28 23:11:27,580 INFO [train.py:892] (2/4) Epoch 28, batch 1500, loss[loss=0.1593, simple_loss=0.2307, pruned_loss=0.04397, over 19815.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.245, pruned_loss=0.04528, over 3946260.76 frames. ], batch size: 167, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:12:07,071 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51602.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:13:00,836 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.657e+02 3.781e+02 4.719e+02 5.439e+02 9.463e+02, threshold=9.439e+02, percent-clipped=1.0 +2023-03-28 23:13:30,307 INFO [train.py:892] (2/4) Epoch 28, batch 1550, loss[loss=0.1644, simple_loss=0.2523, pruned_loss=0.03828, over 19887.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2448, pruned_loss=0.04496, over 3947102.83 frames. 
], batch size: 52, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:14:07,639 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51650.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:14:38,662 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0251, 4.7379, 4.7047, 5.0536, 4.6930, 5.2509, 5.1633, 5.3539], + device='cuda:2'), covar=tensor([0.0637, 0.0384, 0.0472, 0.0347, 0.0645, 0.0386, 0.0383, 0.0290], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0172, 0.0198, 0.0171, 0.0168, 0.0153, 0.0146, 0.0194], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 23:15:34,970 INFO [train.py:892] (2/4) Epoch 28, batch 1600, loss[loss=0.1972, simple_loss=0.2741, pruned_loss=0.06014, over 19726.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2465, pruned_loss=0.04552, over 3945263.03 frames. ], batch size: 269, lr: 5.56e-03, grad_scale: 16.0 +2023-03-28 23:16:54,078 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51718.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:17:03,718 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.853e+02 3.747e+02 4.417e+02 5.191e+02 1.106e+03, threshold=8.834e+02, percent-clipped=1.0 +2023-03-28 23:17:34,756 INFO [train.py:892] (2/4) Epoch 28, batch 1650, loss[loss=0.1549, simple_loss=0.2287, pruned_loss=0.04057, over 19800.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.246, pruned_loss=0.04568, over 3946135.73 frames. ], batch size: 200, lr: 5.56e-03, grad_scale: 16.0 +2023-03-28 23:17:40,864 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51738.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:18:00,934 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51747.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:18:03,527 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51748.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:18:50,849 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51766.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:19:37,688 INFO [train.py:892] (2/4) Epoch 28, batch 1700, loss[loss=0.1748, simple_loss=0.2507, pruned_loss=0.04943, over 19735.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2468, pruned_loss=0.04599, over 3945906.39 frames. ], batch size: 80, lr: 5.56e-03, grad_scale: 16.0 +2023-03-28 23:20:14,114 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1857, 4.9411, 4.8762, 5.2373, 4.9384, 5.4711, 5.3687, 5.5256], + device='cuda:2'), covar=tensor([0.0557, 0.0297, 0.0414, 0.0305, 0.0609, 0.0265, 0.0353, 0.0256], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0173, 0.0199, 0.0172, 0.0170, 0.0155, 0.0147, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-28 23:20:27,313 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-03-28 23:20:36,147 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51808.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:20:55,407 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8076, 4.4992, 4.5553, 4.2836, 4.7165, 3.1384, 3.9035, 2.2588], + device='cuda:2'), covar=tensor([0.0169, 0.0194, 0.0129, 0.0187, 0.0140, 0.0948, 0.0846, 0.1623], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0144, 0.0113, 0.0134, 0.0119, 0.0135, 0.0142, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 23:21:06,661 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 3.915e+02 4.350e+02 5.709e+02 1.199e+03, threshold=8.700e+02, percent-clipped=1.0 +2023-03-28 23:21:17,090 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51828.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 23:21:29,706 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6461, 3.1459, 3.6124, 3.1455, 3.8301, 3.8133, 4.4483, 4.9194], + device='cuda:2'), covar=tensor([0.0470, 0.1665, 0.1363, 0.2081, 0.1704, 0.1355, 0.0547, 0.0435], + device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0237, 0.0264, 0.0251, 0.0293, 0.0252, 0.0227, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 23:21:33,057 INFO [train.py:892] (2/4) Epoch 28, batch 1750, loss[loss=0.1542, simple_loss=0.2346, pruned_loss=0.03686, over 19898.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2466, pruned_loss=0.04573, over 3945985.65 frames. ], batch size: 94, lr: 5.55e-03, grad_scale: 16.0 +2023-03-28 23:22:05,880 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51851.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:22:22,922 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4778, 3.3679, 4.9426, 3.5781, 3.9537, 3.7616, 2.6259, 2.8887], + device='cuda:2'), covar=tensor([0.0706, 0.2596, 0.0350, 0.0986, 0.1626, 0.1279, 0.2331, 0.2337], + device='cuda:2'), in_proj_covar=tensor([0.0351, 0.0385, 0.0344, 0.0283, 0.0371, 0.0370, 0.0369, 0.0338], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 23:22:53,506 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51876.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 23:23:13,398 INFO [train.py:892] (2/4) Epoch 28, batch 1800, loss[loss=0.1738, simple_loss=0.2479, pruned_loss=0.0499, over 19754.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2456, pruned_loss=0.04543, over 3948161.06 frames. ], batch size: 256, lr: 5.55e-03, grad_scale: 16.0 +2023-03-28 23:23:19,113 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. 
limit=5.0 +2023-03-28 23:23:41,713 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1912, 2.2888, 2.3338, 2.2589, 2.3250, 2.3936, 2.2780, 2.3155], + device='cuda:2'), covar=tensor([0.0374, 0.0311, 0.0308, 0.0300, 0.0388, 0.0273, 0.0408, 0.0313], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0076, 0.0079, 0.0073, 0.0087, 0.0079, 0.0096, 0.0069], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 23:23:50,483 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8718, 3.1895, 3.3622, 3.8509, 2.6446, 3.0977, 2.4477, 2.4208], + device='cuda:2'), covar=tensor([0.0626, 0.1918, 0.0977, 0.0431, 0.2032, 0.0909, 0.1406, 0.1676], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0332, 0.0246, 0.0199, 0.0246, 0.0206, 0.0215, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 23:24:26,510 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.458e+02 3.828e+02 4.368e+02 5.479e+02 1.522e+03, threshold=8.737e+02, percent-clipped=1.0 +2023-03-28 23:24:33,085 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2706, 4.7613, 4.8465, 4.6123, 5.1325, 3.2559, 4.1661, 2.7172], + device='cuda:2'), covar=tensor([0.0188, 0.0210, 0.0158, 0.0196, 0.0175, 0.0963, 0.0945, 0.1556], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0144, 0.0113, 0.0134, 0.0119, 0.0134, 0.0142, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 23:24:52,033 INFO [train.py:892] (2/4) Epoch 28, batch 1850, loss[loss=0.1668, simple_loss=0.2569, pruned_loss=0.03839, over 19672.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2465, pruned_loss=0.04476, over 3948642.84 frames. ], batch size: 55, lr: 5.55e-03, grad_scale: 16.0 +2023-03-28 23:25:59,222 INFO [train.py:892] (2/4) Epoch 29, batch 0, loss[loss=0.1574, simple_loss=0.2352, pruned_loss=0.03978, over 19461.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2352, pruned_loss=0.03978, over 19461.00 frames. ], batch size: 43, lr: 5.45e-03, grad_scale: 16.0 +2023-03-28 23:25:59,223 INFO [train.py:917] (2/4) Computing validation loss +2023-03-28 23:26:37,597 INFO [train.py:926] (2/4) Epoch 29, validation: loss=0.1782, simple_loss=0.2489, pruned_loss=0.05378, over 2883724.00 frames. +2023-03-28 23:26:37,598 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-28 23:28:41,179 INFO [train.py:892] (2/4) Epoch 29, batch 50, loss[loss=0.1452, simple_loss=0.2233, pruned_loss=0.03358, over 19777.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2372, pruned_loss=0.0422, over 891423.02 frames. ], batch size: 91, lr: 5.45e-03, grad_scale: 16.0 +2023-03-28 23:29:49,666 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. 
limit=5.0 +2023-03-28 23:30:05,629 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.398e+02 3.680e+02 4.499e+02 5.217e+02 8.656e+02, threshold=8.998e+02, percent-clipped=0.0 +2023-03-28 23:30:16,586 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4183, 2.0076, 2.2866, 2.7020, 3.0717, 3.0759, 3.0823, 3.1038], + device='cuda:2'), covar=tensor([0.1087, 0.1754, 0.1419, 0.0763, 0.0512, 0.0428, 0.0447, 0.0452], + device='cuda:2'), in_proj_covar=tensor([0.0159, 0.0171, 0.0178, 0.0151, 0.0135, 0.0130, 0.0122, 0.0115], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-28 23:30:44,556 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52038.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:30:49,122 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52040.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:30:50,314 INFO [train.py:892] (2/4) Epoch 29, batch 100, loss[loss=0.1454, simple_loss=0.22, pruned_loss=0.03536, over 19827.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2408, pruned_loss=0.04282, over 1569539.36 frames. ], batch size: 177, lr: 5.45e-03, grad_scale: 16.0 +2023-03-28 23:31:10,292 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52048.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:32:05,844 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8583, 2.8602, 4.8310, 4.0754, 4.6227, 4.7802, 4.7001, 4.3960], + device='cuda:2'), covar=tensor([0.0490, 0.0951, 0.0094, 0.0933, 0.0122, 0.0175, 0.0141, 0.0149], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0102, 0.0086, 0.0153, 0.0084, 0.0097, 0.0090, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 23:32:43,558 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52086.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:32:54,794 INFO [train.py:892] (2/4) Epoch 29, batch 150, loss[loss=0.1663, simple_loss=0.241, pruned_loss=0.0458, over 19800.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2431, pruned_loss=0.0441, over 2097793.19 frames. ], batch size: 200, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:33:10,123 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52096.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:33:21,730 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52101.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:33:27,841 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52103.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:34:15,955 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.333e+02 4.011e+02 4.815e+02 5.907e+02 9.716e+02, threshold=9.631e+02, percent-clipped=1.0 +2023-03-28 23:34:59,478 INFO [train.py:892] (2/4) Epoch 29, batch 200, loss[loss=0.2448, simple_loss=0.3172, pruned_loss=0.08619, over 19625.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2445, pruned_loss=0.04486, over 2507671.00 frames. ], batch size: 343, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:35:24,714 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52151.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:35:25,373 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.70 vs. 
limit=5.0 +2023-03-28 23:36:59,110 INFO [train.py:892] (2/4) Epoch 29, batch 250, loss[loss=0.2812, simple_loss=0.3483, pruned_loss=0.1071, over 19421.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.245, pruned_loss=0.04488, over 2826233.52 frames. ], batch size: 431, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:37:05,201 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52193.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:37:17,888 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52199.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:38:17,958 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.564e+02 3.825e+02 4.477e+02 5.151e+02 8.477e+02, threshold=8.954e+02, percent-clipped=0.0 +2023-03-28 23:39:01,941 INFO [train.py:892] (2/4) Epoch 29, batch 300, loss[loss=0.1683, simple_loss=0.2482, pruned_loss=0.04415, over 19575.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2435, pruned_loss=0.04417, over 3075908.94 frames. ], batch size: 53, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:39:35,688 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52254.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:40:39,203 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-28 23:41:06,896 INFO [train.py:892] (2/4) Epoch 29, batch 350, loss[loss=0.1552, simple_loss=0.2262, pruned_loss=0.04208, over 19793.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.243, pruned_loss=0.04423, over 3270169.75 frames. ], batch size: 173, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:42:02,656 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52312.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:42:30,744 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.445e+02 3.965e+02 4.571e+02 5.589e+02 1.011e+03, threshold=9.142e+02, percent-clipped=1.0 +2023-03-28 23:43:07,773 INFO [train.py:892] (2/4) Epoch 29, batch 400, loss[loss=0.1312, simple_loss=0.2072, pruned_loss=0.0276, over 19793.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.244, pruned_loss=0.0447, over 3420658.11 frames. ], batch size: 111, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:44:27,316 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52373.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:45:13,545 INFO [train.py:892] (2/4) Epoch 29, batch 450, loss[loss=0.1744, simple_loss=0.2495, pruned_loss=0.04968, over 19833.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2436, pruned_loss=0.04435, over 3537051.43 frames. 
], batch size: 204, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:45:28,339 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52396.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:45:46,516 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52403.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:46:29,410 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5990, 4.3378, 4.3879, 4.1461, 4.5355, 3.1977, 3.8953, 2.4342], + device='cuda:2'), covar=tensor([0.0161, 0.0204, 0.0121, 0.0184, 0.0140, 0.0896, 0.0646, 0.1313], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0144, 0.0113, 0.0133, 0.0119, 0.0134, 0.0142, 0.0127], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 23:46:38,513 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.695e+02 3.819e+02 4.668e+02 5.658e+02 1.148e+03, threshold=9.335e+02, percent-clipped=4.0 +2023-03-28 23:46:42,088 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-03-28 23:47:02,802 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6872, 2.2560, 3.6040, 3.0623, 3.5573, 3.6179, 3.4339, 3.4573], + device='cuda:2'), covar=tensor([0.0675, 0.1027, 0.0122, 0.0460, 0.0152, 0.0232, 0.0190, 0.0189], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0102, 0.0087, 0.0154, 0.0084, 0.0098, 0.0090, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 23:47:22,578 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6908, 3.7464, 2.2733, 3.9437, 4.0719, 1.8231, 3.3520, 3.1639], + device='cuda:2'), covar=tensor([0.0746, 0.0991, 0.2808, 0.0854, 0.0663, 0.2887, 0.1155, 0.0889], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0256, 0.0229, 0.0272, 0.0252, 0.0203, 0.0238, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 23:47:23,496 INFO [train.py:892] (2/4) Epoch 29, batch 500, loss[loss=0.1706, simple_loss=0.2474, pruned_loss=0.04688, over 19883.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2437, pruned_loss=0.04413, over 3629596.85 frames. ], batch size: 158, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:47:50,897 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52451.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:49:23,626 INFO [train.py:892] (2/4) Epoch 29, batch 550, loss[loss=0.2822, simple_loss=0.3577, pruned_loss=0.1033, over 19260.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2442, pruned_loss=0.04462, over 3700987.72 frames. 
], batch size: 483, lr: 5.42e-03, grad_scale: 8.0 +2023-03-28 23:50:29,858 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6541, 3.7118, 2.2652, 3.9064, 4.0229, 1.8094, 3.2652, 3.0574], + device='cuda:2'), covar=tensor([0.0764, 0.0810, 0.2835, 0.0859, 0.0567, 0.2919, 0.1146, 0.0941], + device='cuda:2'), in_proj_covar=tensor([0.0233, 0.0255, 0.0230, 0.0273, 0.0252, 0.0204, 0.0238, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-28 23:50:40,702 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9430, 4.7819, 5.3855, 4.8388, 4.3593, 5.0877, 4.9915, 5.5333], + device='cuda:2'), covar=tensor([0.0883, 0.0401, 0.0362, 0.0379, 0.0849, 0.0495, 0.0419, 0.0315], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0221, 0.0220, 0.0233, 0.0206, 0.0238, 0.0230, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-28 23:50:45,576 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.600e+02 4.376e+02 5.079e+02 6.009e+02 1.236e+03, threshold=1.016e+03, percent-clipped=4.0 +2023-03-28 23:51:25,982 INFO [train.py:892] (2/4) Epoch 29, batch 600, loss[loss=0.2033, simple_loss=0.3085, pruned_loss=0.04906, over 18963.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.244, pruned_loss=0.04487, over 3756597.21 frames. ], batch size: 514, lr: 5.42e-03, grad_scale: 8.0 +2023-03-28 23:51:36,640 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-03-28 23:51:48,582 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52549.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:52:05,047 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52555.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 23:53:25,785 INFO [train.py:892] (2/4) Epoch 29, batch 650, loss[loss=0.1724, simple_loss=0.2591, pruned_loss=0.04291, over 19849.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2442, pruned_loss=0.04524, over 3797445.55 frames. ], batch size: 56, lr: 5.42e-03, grad_scale: 8.0 +2023-03-28 23:54:27,284 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52616.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 23:54:45,455 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.294e+02 3.664e+02 4.679e+02 5.983e+02 1.020e+03, threshold=9.358e+02, percent-clipped=1.0 +2023-03-28 23:55:31,356 INFO [train.py:892] (2/4) Epoch 29, batch 700, loss[loss=0.1553, simple_loss=0.2392, pruned_loss=0.03571, over 19783.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2451, pruned_loss=0.04514, over 3830421.85 frames. ], batch size: 48, lr: 5.41e-03, grad_scale: 8.0 +2023-03-28 23:55:56,676 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.85 vs. limit=5.0 +2023-03-28 23:56:37,559 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-28 23:56:40,064 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52668.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:57:32,195 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.67 vs. limit=5.0 +2023-03-28 23:57:34,828 INFO [train.py:892] (2/4) Epoch 29, batch 750, loss[loss=0.1726, simple_loss=0.2619, pruned_loss=0.04172, over 19830.00 frames. 
], tot_loss[loss=0.166, simple_loss=0.2435, pruned_loss=0.0442, over 3858140.58 frames. ], batch size: 57, lr: 5.41e-03, grad_scale: 8.0 +2023-03-28 23:57:48,646 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52696.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:58:51,486 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.283e+02 3.891e+02 4.308e+02 5.320e+02 9.738e+02, threshold=8.617e+02, percent-clipped=1.0 +2023-03-28 23:59:17,051 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-28 23:59:30,939 INFO [train.py:892] (2/4) Epoch 29, batch 800, loss[loss=0.155, simple_loss=0.2395, pruned_loss=0.03524, over 19660.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2432, pruned_loss=0.04382, over 3878789.16 frames. ], batch size: 67, lr: 5.41e-03, grad_scale: 8.0 +2023-03-28 23:59:38,677 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52744.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:00:01,461 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52752.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:01:37,831 INFO [train.py:892] (2/4) Epoch 29, batch 850, loss[loss=0.1652, simple_loss=0.2357, pruned_loss=0.04736, over 19893.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2434, pruned_loss=0.04363, over 3893870.56 frames. ], batch size: 71, lr: 5.41e-03, grad_scale: 8.0 +2023-03-29 00:01:46,584 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9086, 2.3684, 2.8116, 3.2104, 3.6761, 3.9226, 3.8020, 3.9167], + device='cuda:2'), covar=tensor([0.0900, 0.1643, 0.1247, 0.0622, 0.0391, 0.0227, 0.0466, 0.0353], + device='cuda:2'), in_proj_covar=tensor([0.0156, 0.0168, 0.0175, 0.0148, 0.0131, 0.0128, 0.0120, 0.0112], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:02:06,002 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-29 00:02:27,651 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-29 00:02:31,999 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3899, 3.3557, 3.6882, 3.3415, 3.1545, 3.6125, 3.4871, 3.7463], + device='cuda:2'), covar=tensor([0.0914, 0.0395, 0.0413, 0.0470, 0.1649, 0.0608, 0.0505, 0.0400], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0220, 0.0220, 0.0232, 0.0206, 0.0239, 0.0230, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:02:32,128 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52813.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:02:58,926 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.946e+02 3.447e+02 4.095e+02 4.667e+02 7.447e+02, threshold=8.190e+02, percent-clipped=0.0 +2023-03-29 00:03:38,957 INFO [train.py:892] (2/4) Epoch 29, batch 900, loss[loss=0.2817, simple_loss=0.3504, pruned_loss=0.1065, over 19158.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2437, pruned_loss=0.04365, over 3905331.46 frames. 
], batch size: 452, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:03:58,533 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52849.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:04:43,362 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0284, 2.4803, 3.9851, 3.5280, 3.9461, 3.9952, 3.7612, 3.6869], + device='cuda:2'), covar=tensor([0.0571, 0.0962, 0.0111, 0.0516, 0.0127, 0.0214, 0.0175, 0.0187], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0103, 0.0087, 0.0153, 0.0084, 0.0098, 0.0090, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:05:32,661 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2887, 4.9034, 4.9618, 4.7128, 5.2608, 3.1484, 4.1526, 2.9476], + device='cuda:2'), covar=tensor([0.0172, 0.0188, 0.0137, 0.0180, 0.0127, 0.1008, 0.0938, 0.1212], + device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0145, 0.0113, 0.0134, 0.0119, 0.0134, 0.0143, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:05:35,925 INFO [train.py:892] (2/4) Epoch 29, batch 950, loss[loss=0.1725, simple_loss=0.2502, pruned_loss=0.04738, over 19914.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2443, pruned_loss=0.04372, over 3916016.07 frames. ], batch size: 53, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:05:52,703 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52897.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:06:28,127 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52911.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 00:06:58,635 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.525e+02 3.679e+02 4.624e+02 5.272e+02 1.115e+03, threshold=9.248e+02, percent-clipped=1.0 +2023-03-29 00:07:41,662 INFO [train.py:892] (2/4) Epoch 29, batch 1000, loss[loss=0.1595, simple_loss=0.2327, pruned_loss=0.04308, over 19731.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2449, pruned_loss=0.04397, over 3922522.51 frames. ], batch size: 71, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:07:58,606 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. 
limit=2.0 +2023-03-29 00:08:02,767 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0944, 2.6490, 3.1636, 3.3892, 3.8840, 4.3026, 4.1336, 4.2519], + device='cuda:2'), covar=tensor([0.0950, 0.1701, 0.1222, 0.0631, 0.0408, 0.0241, 0.0365, 0.0408], + device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0169, 0.0176, 0.0149, 0.0132, 0.0130, 0.0121, 0.0113], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:08:28,209 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9399, 3.1238, 3.2559, 3.1186, 3.0226, 3.0008, 3.0102, 3.1759], + device='cuda:2'), covar=tensor([0.0289, 0.0323, 0.0245, 0.0284, 0.0341, 0.0315, 0.0338, 0.0354], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0076, 0.0079, 0.0073, 0.0087, 0.0079, 0.0096, 0.0069], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:08:46,453 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52968.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:08:59,123 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 00:09:39,920 INFO [train.py:892] (2/4) Epoch 29, batch 1050, loss[loss=0.147, simple_loss=0.228, pruned_loss=0.03303, over 19640.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2442, pruned_loss=0.04379, over 3929825.43 frames. ], batch size: 66, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:10:42,610 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53016.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:10:47,246 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7759, 2.7566, 1.7545, 3.1936, 2.8819, 3.0555, 3.1978, 2.4810], + device='cuda:2'), covar=tensor([0.0612, 0.0691, 0.1517, 0.0575, 0.0627, 0.0498, 0.0562, 0.0913], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0142, 0.0141, 0.0150, 0.0132, 0.0133, 0.0146, 0.0143], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:10:59,360 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.507e+02 3.902e+02 4.763e+02 5.659e+02 9.296e+02, threshold=9.526e+02, percent-clipped=1.0 +2023-03-29 00:11:39,410 INFO [train.py:892] (2/4) Epoch 29, batch 1100, loss[loss=0.1916, simple_loss=0.257, pruned_loss=0.06316, over 19802.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2441, pruned_loss=0.04409, over 3935638.87 frames. ], batch size: 224, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:12:17,998 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53056.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:13:27,768 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7852, 2.2253, 2.7373, 3.0529, 3.4900, 3.6644, 3.5994, 3.6195], + device='cuda:2'), covar=tensor([0.0988, 0.1644, 0.1296, 0.0679, 0.0409, 0.0273, 0.0346, 0.0398], + device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0168, 0.0176, 0.0149, 0.0132, 0.0129, 0.0121, 0.0113], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:13:40,608 INFO [train.py:892] (2/4) Epoch 29, batch 1150, loss[loss=0.1559, simple_loss=0.2416, pruned_loss=0.0351, over 19810.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2447, pruned_loss=0.04466, over 3938585.83 frames. 
], batch size: 67, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:14:25,718 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53108.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:14:48,411 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53117.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:15:03,919 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.770e+02 4.130e+02 4.907e+02 5.867e+02 8.841e+02, threshold=9.813e+02, percent-clipped=0.0 +2023-03-29 00:15:41,712 INFO [train.py:892] (2/4) Epoch 29, batch 1200, loss[loss=0.1697, simple_loss=0.2391, pruned_loss=0.05017, over 19814.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2449, pruned_loss=0.04481, over 3940258.37 frames. ], batch size: 123, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:15:45,162 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2918, 2.6051, 2.4081, 1.7785, 2.4226, 2.6064, 2.4763, 2.5690], + device='cuda:2'), covar=tensor([0.0387, 0.0365, 0.0293, 0.0570, 0.0370, 0.0297, 0.0312, 0.0273], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0095, 0.0098, 0.0101, 0.0104, 0.0084, 0.0084, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:17:43,426 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-29 00:17:48,592 INFO [train.py:892] (2/4) Epoch 29, batch 1250, loss[loss=0.1477, simple_loss=0.2202, pruned_loss=0.03757, over 19797.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2434, pruned_loss=0.04432, over 3943512.20 frames. ], batch size: 107, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:18:19,743 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-29 00:18:38,641 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53211.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 00:18:44,082 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1160, 2.8305, 3.2159, 2.8835, 3.3690, 3.3204, 3.9654, 4.4216], + device='cuda:2'), covar=tensor([0.0572, 0.1769, 0.1500, 0.2117, 0.1637, 0.1607, 0.0662, 0.0479], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0237, 0.0263, 0.0251, 0.0292, 0.0253, 0.0228, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:19:09,954 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.614e+02 3.724e+02 4.127e+02 5.069e+02 9.146e+02, threshold=8.253e+02, percent-clipped=0.0 +2023-03-29 00:19:52,135 INFO [train.py:892] (2/4) Epoch 29, batch 1300, loss[loss=0.1577, simple_loss=0.232, pruned_loss=0.04171, over 19872.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2431, pruned_loss=0.04439, over 3945767.68 frames. ], batch size: 138, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:19:53,077 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53241.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:20:39,659 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53259.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 00:21:58,523 INFO [train.py:892] (2/4) Epoch 29, batch 1350, loss[loss=0.1681, simple_loss=0.2436, pruned_loss=0.0463, over 19806.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2446, pruned_loss=0.04474, over 3946235.40 frames. 
], batch size: 47, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:22:26,055 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53302.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:22:30,813 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53304.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:22:59,208 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53315.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:23:19,484 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 3.937e+02 4.567e+02 5.405e+02 8.872e+02, threshold=9.134e+02, percent-clipped=4.0 +2023-03-29 00:24:04,126 INFO [train.py:892] (2/4) Epoch 29, batch 1400, loss[loss=0.1478, simple_loss=0.2226, pruned_loss=0.03653, over 19787.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2443, pruned_loss=0.04458, over 3946764.24 frames. ], batch size: 178, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:24:37,186 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3049, 5.6347, 5.6680, 5.5800, 5.2704, 5.6477, 5.1145, 5.1117], + device='cuda:2'), covar=tensor([0.0447, 0.0403, 0.0479, 0.0409, 0.0574, 0.0467, 0.0670, 0.0945], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0283, 0.0292, 0.0257, 0.0260, 0.0246, 0.0264, 0.0309], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:25:04,809 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53365.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:25:09,289 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53367.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:25:30,921 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53376.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:26:04,692 INFO [train.py:892] (2/4) Epoch 29, batch 1450, loss[loss=0.2267, simple_loss=0.3092, pruned_loss=0.07214, over 19855.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2443, pruned_loss=0.04445, over 3948672.60 frames. ], batch size: 56, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:26:48,398 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53408.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:26:52,459 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3678, 3.1368, 3.2225, 3.3758, 3.2737, 3.2847, 3.4282, 3.6064], + device='cuda:2'), covar=tensor([0.0668, 0.0522, 0.0593, 0.0433, 0.0747, 0.0697, 0.0476, 0.0382], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0173, 0.0199, 0.0173, 0.0170, 0.0155, 0.0149, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 00:26:59,049 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53412.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:27:27,174 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.651e+02 3.715e+02 4.371e+02 5.322e+02 9.816e+02, threshold=8.743e+02, percent-clipped=2.0 +2023-03-29 00:27:38,843 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53428.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 00:28:09,449 INFO [train.py:892] (2/4) Epoch 29, batch 1500, loss[loss=0.1812, simple_loss=0.2748, pruned_loss=0.04385, over 19533.00 frames. 
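The zipformer.py:625 records track a per-stack layer-dropout schedule: each stack has a warmup window in batch counts (warmup_begin/warmup_end), and at batch_count around 5.3e4, far past warmup_end=4000.0, num_to_drop is almost always 0 with an occasional 1. A sketch of schedule logic consistent with that pattern; both probabilities are assumed values for illustration:

import random

def layers_to_drop(num_layers, batch_count, warmup_begin, warmup_end,
                   warmup_prob=0.5, residual_prob=0.05, rng=random):
    # Drop layers aggressively inside the warmup window, and only
    # rarely afterwards (hence num_to_drop=0 in most records here).
    in_warmup = warmup_begin <= batch_count < warmup_end
    p = warmup_prob if in_warmup else residual_prob
    return {i for i in range(num_layers) if rng.random() < p}

# e.g. the batch_count=53428.0 record above, which happened to drop layer 2:
print(layers_to_drop(4, 53428.0, warmup_begin=3333.3, warmup_end=4000.0))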
], tot_loss[loss=0.168, simple_loss=0.2462, pruned_loss=0.04488, over 3945276.25 frames. ], batch size: 54, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:28:49,111 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53456.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:29:35,138 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.51 vs. limit=5.0 +2023-03-29 00:30:14,157 INFO [train.py:892] (2/4) Epoch 29, batch 1550, loss[loss=0.1466, simple_loss=0.2272, pruned_loss=0.03298, over 19777.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2454, pruned_loss=0.044, over 3945778.94 frames. ], batch size: 191, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:31:28,849 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.518e+02 3.925e+02 4.450e+02 5.196e+02 1.158e+03, threshold=8.900e+02, percent-clipped=2.0 +2023-03-29 00:32:15,026 INFO [train.py:892] (2/4) Epoch 29, batch 1600, loss[loss=0.1655, simple_loss=0.24, pruned_loss=0.0455, over 19787.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2437, pruned_loss=0.04327, over 3947479.70 frames. ], batch size: 83, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:34:05,153 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0709, 2.8677, 3.1328, 2.8072, 3.3081, 3.3019, 3.9296, 4.3023], + device='cuda:2'), covar=tensor([0.0619, 0.1652, 0.1666, 0.2252, 0.1763, 0.1575, 0.0607, 0.0608], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0239, 0.0267, 0.0254, 0.0296, 0.0255, 0.0231, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:34:19,706 INFO [train.py:892] (2/4) Epoch 29, batch 1650, loss[loss=0.238, simple_loss=0.3441, pruned_loss=0.06597, over 17946.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2437, pruned_loss=0.04343, over 3945623.65 frames. ], batch size: 633, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:34:36,499 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53597.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:35:41,047 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.655e+02 3.870e+02 4.535e+02 5.487e+02 1.255e+03, threshold=9.070e+02, percent-clipped=2.0 +2023-03-29 00:36:23,483 INFO [train.py:892] (2/4) Epoch 29, batch 1700, loss[loss=0.1516, simple_loss=0.2322, pruned_loss=0.03544, over 19778.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2432, pruned_loss=0.04341, over 3946797.77 frames. ], batch size: 52, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:37:00,362 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3856, 3.7482, 3.1342, 2.6935, 3.1625, 3.6575, 3.5598, 3.6083], + device='cuda:2'), covar=tensor([0.0204, 0.0179, 0.0240, 0.0423, 0.0301, 0.0188, 0.0169, 0.0192], + device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0096, 0.0099, 0.0102, 0.0105, 0.0085, 0.0085, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:37:10,054 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.87 vs. 
limit=5.0 +2023-03-29 00:37:12,288 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53660.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:37:40,331 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53671.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:38:14,085 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-03-29 00:38:27,064 INFO [train.py:892] (2/4) Epoch 29, batch 1750, loss[loss=0.1437, simple_loss=0.2197, pruned_loss=0.03381, over 19880.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2429, pruned_loss=0.04311, over 3948649.26 frames. ], batch size: 47, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:39:14,851 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53712.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:39:37,578 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53723.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 00:39:38,984 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.549e+02 3.643e+02 4.593e+02 5.639e+02 1.111e+03, threshold=9.186e+02, percent-clipped=1.0 +2023-03-29 00:40:13,037 INFO [train.py:892] (2/4) Epoch 29, batch 1800, loss[loss=0.1821, simple_loss=0.2564, pruned_loss=0.05389, over 19815.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.243, pruned_loss=0.04355, over 3949100.15 frames. ], batch size: 202, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:40:35,281 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.9339, 6.2861, 6.3327, 6.1623, 5.9470, 6.2380, 5.6308, 5.5573], + device='cuda:2'), covar=tensor([0.0384, 0.0388, 0.0422, 0.0423, 0.0545, 0.0511, 0.0643, 0.0949], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0281, 0.0292, 0.0256, 0.0258, 0.0244, 0.0263, 0.0306], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:40:39,872 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-29 00:40:53,939 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53760.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:41:55,152 INFO [train.py:892] (2/4) Epoch 29, batch 1850, loss[loss=0.1782, simple_loss=0.2603, pruned_loss=0.0481, over 19832.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.244, pruned_loss=0.04287, over 3947742.62 frames. ], batch size: 57, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:42:59,934 INFO [train.py:892] (2/4) Epoch 30, batch 0, loss[loss=0.1565, simple_loss=0.2335, pruned_loss=0.03973, over 19801.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2335, pruned_loss=0.03973, over 19801.00 frames. 
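The zipformer.py:1454 dumps are periodic attention diagnostics: one entropy value per head, plus covariance statistics of the input and output projections. A sketch of the per-head entropy itself, with shapes assumed for illustration; low values mean sharply peaked attention, values near log(src_len) mean nearly uniform attention:

import torch

def attn_weights_entropy(attn: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    # attn: (num_heads, tgt_len, src_len), each row a softmax over src_len.
    ent = -(attn * (attn + eps).log()).sum(dim=-1)  # (num_heads, tgt_len)
    return ent.mean(dim=-1)                         # one value per head

attn = torch.softmax(torch.randn(8, 50, 50), dim=-1)
print(attn_weights_entropy(attn))  # 8 values, like the tensors logged here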
], batch size: 86, lr: 5.27e-03, grad_scale: 8.0 +2023-03-29 00:42:59,934 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 00:43:13,910 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0544, 2.9205, 4.5449, 3.3769, 3.7620, 3.3951, 2.4537, 2.5787], + device='cuda:2'), covar=tensor([0.1078, 0.3689, 0.0559, 0.1095, 0.1845, 0.1711, 0.2881, 0.2883], + device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0388, 0.0348, 0.0285, 0.0373, 0.0374, 0.0371, 0.0340], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:43:35,248 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2069, 2.8176, 2.9062, 3.0856, 3.0633, 2.8163, 4.2224, 4.5272], + device='cuda:2'), covar=tensor([0.1241, 0.1635, 0.1564, 0.2146, 0.2071, 0.2044, 0.0575, 0.0347], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0238, 0.0266, 0.0252, 0.0295, 0.0256, 0.0232, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:43:35,879 INFO [train.py:926] (2/4) Epoch 30, validation: loss=0.1794, simple_loss=0.2489, pruned_loss=0.05491, over 2883724.00 frames. +2023-03-29 00:43:35,881 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 00:43:50,293 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5104, 2.7140, 3.4870, 2.9690, 3.8670, 3.6845, 4.4168, 4.8879], + device='cuda:2'), covar=tensor([0.0594, 0.2050, 0.1527, 0.2342, 0.1644, 0.1452, 0.0632, 0.0428], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0238, 0.0266, 0.0252, 0.0295, 0.0256, 0.0232, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:44:13,466 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3374, 4.9322, 4.9205, 5.2810, 5.0575, 5.5450, 5.3428, 5.6442], + device='cuda:2'), covar=tensor([0.0561, 0.0349, 0.0412, 0.0292, 0.0542, 0.0317, 0.0413, 0.0251], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0175, 0.0202, 0.0174, 0.0172, 0.0157, 0.0150, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 00:44:26,893 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. 
limit=2.0 +2023-03-29 00:44:48,486 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.508e+02 3.762e+02 4.456e+02 5.146e+02 7.887e+02, threshold=8.911e+02, percent-clipped=0.0 +2023-03-29 00:45:19,633 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1795, 2.7543, 3.3624, 3.3773, 3.8892, 4.4908, 4.2182, 4.3816], + device='cuda:2'), covar=tensor([0.0831, 0.1560, 0.1151, 0.0624, 0.0330, 0.0193, 0.0326, 0.0411], + device='cuda:2'), in_proj_covar=tensor([0.0158, 0.0170, 0.0178, 0.0151, 0.0134, 0.0131, 0.0122, 0.0114], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:45:28,911 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6047, 3.6872, 2.2265, 3.8377, 3.9450, 1.8077, 3.2628, 3.0594], + device='cuda:2'), covar=tensor([0.0795, 0.0844, 0.2793, 0.0799, 0.0641, 0.2828, 0.1112, 0.0874], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0254, 0.0230, 0.0274, 0.0252, 0.0204, 0.0239, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 00:45:46,653 INFO [train.py:892] (2/4) Epoch 30, batch 50, loss[loss=0.1453, simple_loss=0.2239, pruned_loss=0.03337, over 19726.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2377, pruned_loss=0.04142, over 890991.90 frames. ], batch size: 99, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:47:42,765 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2386, 3.4744, 3.0188, 2.5703, 2.9093, 3.5247, 3.3264, 3.3972], + device='cuda:2'), covar=tensor([0.0265, 0.0325, 0.0272, 0.0465, 0.0357, 0.0196, 0.0212, 0.0204], + device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0096, 0.0099, 0.0101, 0.0105, 0.0085, 0.0085, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:47:46,001 INFO [train.py:892] (2/4) Epoch 30, batch 100, loss[loss=0.1719, simple_loss=0.253, pruned_loss=0.04533, over 19849.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2364, pruned_loss=0.04051, over 1570270.42 frames. 
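The learning rates in these records decay smoothly within an epoch and step down between epochs (5.40e-03 early in epoch 29, 5.26e-03 by epoch 30 batch 50), which is consistent with icefall's Eden schedule. A sketch of that formula; base_lr=0.05, lr_batches=5000 and lr_epochs=3.5 are assumed here, and the epoch index is taken as zero-based because that makes the numbers line up:

def eden_lr(base_lr: float, batch: float, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Inverse-quartic decay in both the batch index and the epoch index.
    b = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    e = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * b * e

print(f"{eden_lr(0.05, batch=52968, epoch=28):.2e}")  # 5.40e-03, cf. epoch 29 above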
], batch size: 56, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:47:49,923 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53897.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:48:56,409 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.692e+02 3.728e+02 4.256e+02 5.465e+02 1.327e+03, threshold=8.513e+02, percent-clipped=4.0 +2023-03-29 00:49:35,879 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6467, 2.7213, 2.8195, 2.8707, 2.7885, 2.7971, 2.7204, 3.0095], + device='cuda:2'), covar=tensor([0.0313, 0.0404, 0.0308, 0.0276, 0.0397, 0.0399, 0.0507, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0079, 0.0082, 0.0075, 0.0089, 0.0081, 0.0098, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:49:47,357 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9025, 3.8577, 2.3172, 4.0884, 4.2621, 1.9700, 3.5101, 3.2793], + device='cuda:2'), covar=tensor([0.0696, 0.0885, 0.3086, 0.0883, 0.0551, 0.3010, 0.1140, 0.0907], + device='cuda:2'), in_proj_covar=tensor([0.0234, 0.0257, 0.0232, 0.0276, 0.0254, 0.0205, 0.0241, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 00:49:50,692 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53945.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:49:51,905 INFO [train.py:892] (2/4) Epoch 30, batch 150, loss[loss=0.1454, simple_loss=0.2209, pruned_loss=0.03494, over 19636.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2365, pruned_loss=0.04083, over 2098238.91 frames. ], batch size: 68, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:50:27,101 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53960.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:50:54,570 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53971.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:51:57,037 INFO [train.py:892] (2/4) Epoch 30, batch 200, loss[loss=0.166, simple_loss=0.2317, pruned_loss=0.05013, over 19875.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2397, pruned_loss=0.04193, over 2508285.85 frames. 
], batch size: 136, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:52:32,052 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54008.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:52:59,031 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54019.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:53:09,761 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=54023.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 00:53:11,544 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.594e+02 3.930e+02 4.416e+02 5.090e+02 1.215e+03, threshold=8.832e+02, percent-clipped=2.0 +2023-03-29 00:53:26,060 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6039, 3.5530, 3.4822, 3.2448, 3.6065, 2.7290, 2.9128, 1.6858], + device='cuda:2'), covar=tensor([0.0269, 0.0269, 0.0184, 0.0246, 0.0200, 0.1189, 0.0778, 0.1939], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0146, 0.0114, 0.0134, 0.0120, 0.0135, 0.0144, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 00:54:01,463 INFO [train.py:892] (2/4) Epoch 30, batch 250, loss[loss=0.191, simple_loss=0.26, pruned_loss=0.06103, over 19767.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2395, pruned_loss=0.04224, over 2827909.13 frames. ], batch size: 247, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 00:54:14,098 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5350, 3.6111, 2.2336, 3.7478, 3.8490, 1.8630, 3.1996, 3.0064], + device='cuda:2'), covar=tensor([0.0814, 0.0792, 0.2826, 0.0767, 0.0632, 0.2649, 0.1083, 0.0932], + device='cuda:2'), in_proj_covar=tensor([0.0236, 0.0258, 0.0233, 0.0277, 0.0255, 0.0206, 0.0241, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 00:55:00,446 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3429, 2.4541, 2.5106, 2.5126, 2.4304, 2.5772, 2.4601, 2.6535], + device='cuda:2'), covar=tensor([0.0361, 0.0331, 0.0298, 0.0281, 0.0458, 0.0295, 0.0379, 0.0291], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0078, 0.0082, 0.0075, 0.0089, 0.0081, 0.0098, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:55:04,743 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54071.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:56:07,032 INFO [train.py:892] (2/4) Epoch 30, batch 300, loss[loss=0.1779, simple_loss=0.2528, pruned_loss=0.05153, over 19800.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2407, pruned_loss=0.04328, over 3076347.88 frames. ], batch size: 150, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 00:56:08,309 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8114, 2.0096, 1.8039, 1.1812, 1.8840, 1.9984, 1.8572, 1.9252], + device='cuda:2'), covar=tensor([0.0376, 0.0301, 0.0341, 0.0588, 0.0373, 0.0268, 0.0292, 0.0264], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0096, 0.0098, 0.0101, 0.0104, 0.0085, 0.0085, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:56:28,560 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-03-29 00:57:14,796 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.600e+02 3.558e+02 4.130e+02 5.342e+02 1.010e+03, threshold=8.261e+02, percent-clipped=1.0 +2023-03-29 00:58:16,905 INFO [train.py:892] (2/4) Epoch 30, batch 350, loss[loss=0.1686, simple_loss=0.2602, pruned_loss=0.03845, over 19818.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2412, pruned_loss=0.04333, over 3270539.32 frames. ], batch size: 72, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 00:58:53,231 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0108, 2.9460, 3.3230, 2.5293, 3.2669, 2.8117, 3.1036, 3.2176], + device='cuda:2'), covar=tensor([0.0689, 0.0482, 0.0375, 0.0758, 0.0424, 0.0465, 0.0477, 0.0332], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0084, 0.0082, 0.0109, 0.0078, 0.0080, 0.0078, 0.0070], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 00:59:07,371 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5261, 3.4430, 3.8145, 3.4934, 3.2993, 3.7195, 3.5866, 3.8623], + device='cuda:2'), covar=tensor([0.0879, 0.0392, 0.0402, 0.0445, 0.1462, 0.0625, 0.0517, 0.0429], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0221, 0.0223, 0.0234, 0.0208, 0.0241, 0.0230, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:00:25,815 INFO [train.py:892] (2/4) Epoch 30, batch 400, loss[loss=0.1517, simple_loss=0.2272, pruned_loss=0.03813, over 19868.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2414, pruned_loss=0.04347, over 3422266.70 frames. ], batch size: 158, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 01:00:35,788 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-29 01:00:41,405 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9730, 3.8947, 4.2930, 3.9234, 3.6708, 4.1927, 3.9919, 4.3830], + device='cuda:2'), covar=tensor([0.0878, 0.0384, 0.0407, 0.0418, 0.1178, 0.0530, 0.0508, 0.0366], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0221, 0.0223, 0.0234, 0.0208, 0.0241, 0.0230, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:01:25,378 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-03-29 01:01:38,161 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.477e+02 3.958e+02 4.510e+02 5.368e+02 1.015e+03, threshold=9.019e+02, percent-clipped=3.0 +2023-03-29 01:02:36,044 INFO [train.py:892] (2/4) Epoch 30, batch 450, loss[loss=0.1489, simple_loss=0.2257, pruned_loss=0.03602, over 19814.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2428, pruned_loss=0.04367, over 3539803.37 frames. 
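The scaling.py:679 records compare a whitening metric against a per-module limit (2.0 or 5.0): the metric measures how far the channel covariance within each group is from isotropic, and values above the limit trigger a corrective gradient. The exact definition is not shown in the log; one standard scale-invariant choice that equals 1.0 for perfectly white features is sketched below as an assumption:

import torch

def whitening_metric(x: torch.Tensor) -> torch.Tensor:
    # x: (num_frames, num_channels). Ratio E[l^2] / E[l]^2 over the
    # eigenvalues l of the channel covariance; 1.0 when x is white,
    # growing as the covariance becomes ill-conditioned.
    x = x - x.mean(dim=0, keepdim=True)
    cov = (x.t() @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)
    return (eigs ** 2).mean() / eigs.mean() ** 2

x = torch.randn(4000, 96)          # 96 channels, as in several records here
print(float(whitening_metric(x)))  # close to 1.0, well under limit=2.0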
], batch size: 98, lr: 5.24e-03, grad_scale: 8.0 +2023-03-29 01:04:09,324 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9516, 1.9484, 2.0742, 2.0454, 2.0132, 2.0769, 1.9188, 2.1039], + device='cuda:2'), covar=tensor([0.0381, 0.0367, 0.0330, 0.0339, 0.0497, 0.0342, 0.0486, 0.0340], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0078, 0.0082, 0.0076, 0.0089, 0.0081, 0.0098, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 01:04:31,210 INFO [train.py:892] (2/4) Epoch 30, batch 500, loss[loss=0.1669, simple_loss=0.2589, pruned_loss=0.03744, over 19716.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2436, pruned_loss=0.0439, over 3629216.51 frames. ], batch size: 54, lr: 5.24e-03, grad_scale: 16.0 +2023-03-29 01:05:41,307 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.522e+02 3.919e+02 4.339e+02 5.578e+02 1.318e+03, threshold=8.679e+02, percent-clipped=2.0 +2023-03-29 01:05:49,258 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.07 vs. limit=2.0 +2023-03-29 01:06:34,645 INFO [train.py:892] (2/4) Epoch 30, batch 550, loss[loss=0.1603, simple_loss=0.2416, pruned_loss=0.03956, over 19785.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.245, pruned_loss=0.04474, over 3699613.53 frames. ], batch size: 42, lr: 5.24e-03, grad_scale: 16.0 +2023-03-29 01:07:41,839 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3407, 4.2151, 4.6346, 4.2470, 3.8896, 4.4563, 4.2889, 4.7263], + device='cuda:2'), covar=tensor([0.0743, 0.0357, 0.0364, 0.0377, 0.1032, 0.0542, 0.0503, 0.0357], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0223, 0.0224, 0.0235, 0.0208, 0.0243, 0.0231, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:08:39,848 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7836, 3.4891, 3.5735, 3.7869, 3.5664, 3.7146, 3.8702, 4.0794], + device='cuda:2'), covar=tensor([0.0649, 0.0498, 0.0559, 0.0402, 0.0734, 0.0630, 0.0458, 0.0325], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0171, 0.0198, 0.0171, 0.0168, 0.0154, 0.0147, 0.0194], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 01:08:41,061 INFO [train.py:892] (2/4) Epoch 30, batch 600, loss[loss=0.1754, simple_loss=0.2547, pruned_loss=0.04805, over 19837.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.244, pruned_loss=0.04453, over 3756126.42 frames. ], batch size: 239, lr: 5.24e-03, grad_scale: 16.0 +2023-03-29 01:09:49,426 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.185e+02 3.804e+02 4.477e+02 5.601e+02 8.237e+02, threshold=8.953e+02, percent-clipped=0.0 +2023-03-29 01:09:53,026 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-29 01:10:46,707 INFO [train.py:892] (2/4) Epoch 30, batch 650, loss[loss=0.1835, simple_loss=0.2561, pruned_loss=0.05547, over 19747.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2437, pruned_loss=0.04447, over 3798840.76 frames. ], batch size: 276, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:11:10,765 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. 
limit=2.0 +2023-03-29 01:12:02,540 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8936, 4.1462, 4.2134, 5.1163, 3.2531, 3.6907, 3.1568, 2.9426], + device='cuda:2'), covar=tensor([0.0383, 0.1922, 0.0749, 0.0250, 0.1853, 0.0929, 0.1146, 0.1560], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0329, 0.0246, 0.0200, 0.0246, 0.0207, 0.0215, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 01:12:06,980 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8011, 3.7754, 2.3337, 3.9856, 4.1016, 1.9130, 3.3484, 3.2747], + device='cuda:2'), covar=tensor([0.0714, 0.0890, 0.2796, 0.0787, 0.0614, 0.3027, 0.1223, 0.0848], + device='cuda:2'), in_proj_covar=tensor([0.0230, 0.0252, 0.0228, 0.0271, 0.0250, 0.0201, 0.0236, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 01:12:28,607 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-29 01:12:36,214 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4904, 2.0316, 2.3337, 2.7435, 3.1994, 3.3388, 3.2111, 3.2280], + device='cuda:2'), covar=tensor([0.1089, 0.1781, 0.1397, 0.0796, 0.0468, 0.0343, 0.0441, 0.0488], + device='cuda:2'), in_proj_covar=tensor([0.0160, 0.0172, 0.0178, 0.0152, 0.0135, 0.0132, 0.0123, 0.0116], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 01:12:48,857 INFO [train.py:892] (2/4) Epoch 30, batch 700, loss[loss=0.1504, simple_loss=0.2296, pruned_loss=0.03558, over 19880.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2424, pruned_loss=0.04385, over 3833736.88 frames. ], batch size: 97, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:13:56,073 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.8818, 6.1406, 6.1454, 6.0452, 5.8587, 6.1273, 5.4726, 5.5484], + device='cuda:2'), covar=tensor([0.0318, 0.0385, 0.0433, 0.0337, 0.0448, 0.0451, 0.0639, 0.0875], + device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0283, 0.0293, 0.0257, 0.0261, 0.0247, 0.0266, 0.0310], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:13:59,621 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.972e+02 4.007e+02 4.588e+02 5.401e+02 1.198e+03, threshold=9.175e+02, percent-clipped=2.0 +2023-03-29 01:14:34,120 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5065, 2.7484, 3.9060, 3.1550, 3.2843, 3.1518, 2.2851, 2.4435], + device='cuda:2'), covar=tensor([0.1179, 0.3229, 0.0681, 0.1081, 0.1791, 0.1541, 0.2644, 0.2873], + device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0388, 0.0348, 0.0284, 0.0372, 0.0372, 0.0371, 0.0340], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:14:53,550 INFO [train.py:892] (2/4) Epoch 30, batch 750, loss[loss=0.1467, simple_loss=0.2306, pruned_loss=0.0314, over 19647.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2436, pruned_loss=0.04423, over 3856389.44 frames. ], batch size: 69, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:17:01,216 INFO [train.py:892] (2/4) Epoch 30, batch 800, loss[loss=0.1782, simple_loss=0.256, pruned_loss=0.05023, over 19750.00 frames. 
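grad_scale in the batch records is the dynamic fp16 loss scale: it sits at 8.0 for a long stretch, doubles to 16.0 around the epoch-30 batch-500 record, and doubles again to 32.0 later in the log. That is the usual policy of growing the scale after a fixed run of overflow-free steps and backing off on overflow; a sketch of that policy, with the interval and factors as assumed defaults:

class DynamicLossScale:
    def __init__(self, scale=8.0, growth_factor=2.0,
                 backoff_factor=0.5, growth_interval=2000):
        self.scale = scale
        self.growth_factor = growth_factor
        self.backoff_factor = backoff_factor
        self.growth_interval = growth_interval
        self._good_steps = 0

    def update(self, found_inf: bool) -> float:
        # Call once per optimizer step.
        if found_inf:
            self.scale *= self.backoff_factor  # back off on overflow
            self._good_steps = 0
        else:
            self._good_steps += 1
            if self._good_steps == self.growth_interval:
                self.scale *= self.growth_factor  # e.g. 8.0 -> 16.0
                self._good_steps = 0
        return self.scale

scaler = DynamicLossScale()
for _ in range(2000):
    scaler.update(found_inf=False)
print(scaler.scale)  # 16.0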
], tot_loss[loss=0.1644, simple_loss=0.2422, pruned_loss=0.04334, over 3877967.80 frames. ], batch size: 250, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:18:07,885 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.698e+02 3.768e+02 4.525e+02 5.531e+02 9.208e+02, threshold=9.049e+02, percent-clipped=1.0 +2023-03-29 01:19:02,295 INFO [train.py:892] (2/4) Epoch 30, batch 850, loss[loss=0.15, simple_loss=0.2405, pruned_loss=0.02973, over 19814.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2428, pruned_loss=0.04343, over 3894212.25 frames. ], batch size: 72, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:19:48,448 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54664.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:20:19,806 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0859, 2.7481, 3.3933, 3.2953, 3.8032, 4.3284, 4.1783, 4.3090], + device='cuda:2'), covar=tensor([0.0964, 0.1618, 0.1124, 0.0690, 0.0384, 0.0232, 0.0318, 0.0288], + device='cuda:2'), in_proj_covar=tensor([0.0158, 0.0169, 0.0176, 0.0150, 0.0134, 0.0130, 0.0121, 0.0114], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 01:21:04,089 INFO [train.py:892] (2/4) Epoch 30, batch 900, loss[loss=0.1643, simple_loss=0.2433, pruned_loss=0.04264, over 19769.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2428, pruned_loss=0.04359, over 3906630.77 frames. ], batch size: 70, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:21:23,673 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1184, 3.1015, 3.4250, 2.5450, 3.5366, 2.9203, 3.1160, 3.4250], + device='cuda:2'), covar=tensor([0.0797, 0.0485, 0.0570, 0.0830, 0.0331, 0.0471, 0.0600, 0.0318], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0085, 0.0083, 0.0111, 0.0079, 0.0082, 0.0080, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 01:22:16,864 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.427e+02 3.830e+02 4.433e+02 5.548e+02 1.086e+03, threshold=8.866e+02, percent-clipped=1.0 +2023-03-29 01:22:22,722 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54725.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 01:23:12,082 INFO [train.py:892] (2/4) Epoch 30, batch 950, loss[loss=0.1733, simple_loss=0.2487, pruned_loss=0.04893, over 19810.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2452, pruned_loss=0.04478, over 3913514.39 frames. ], batch size: 72, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:24:22,559 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4923, 3.5209, 3.9512, 2.9603, 4.0147, 3.2931, 3.5361, 3.7267], + device='cuda:2'), covar=tensor([0.0759, 0.0459, 0.0478, 0.0776, 0.0404, 0.0420, 0.0469, 0.0361], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0085, 0.0083, 0.0111, 0.0079, 0.0082, 0.0080, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 01:25:12,154 INFO [train.py:892] (2/4) Epoch 30, batch 1000, loss[loss=0.15, simple_loss=0.2213, pruned_loss=0.0394, over 19818.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2441, pruned_loss=0.04451, over 3920909.64 frames. 
], batch size: 148, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:26:22,849 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 3.838e+02 4.641e+02 5.585e+02 1.320e+03, threshold=9.281e+02, percent-clipped=3.0 +2023-03-29 01:27:18,743 INFO [train.py:892] (2/4) Epoch 30, batch 1050, loss[loss=0.1675, simple_loss=0.2652, pruned_loss=0.03487, over 19913.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2462, pruned_loss=0.04541, over 3925633.26 frames. ], batch size: 53, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:27:59,588 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54862.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:29:25,262 INFO [train.py:892] (2/4) Epoch 30, batch 1100, loss[loss=0.1865, simple_loss=0.264, pruned_loss=0.05453, over 19820.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.245, pruned_loss=0.04486, over 3931558.04 frames. ], batch size: 72, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:30:37,447 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54923.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:30:38,514 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.418e+02 3.807e+02 4.386e+02 5.197e+02 8.292e+02, threshold=8.772e+02, percent-clipped=0.0 +2023-03-29 01:31:31,545 INFO [train.py:892] (2/4) Epoch 30, batch 1150, loss[loss=0.1614, simple_loss=0.2371, pruned_loss=0.04282, over 19855.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2461, pruned_loss=0.04507, over 3934398.02 frames. ], batch size: 142, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:31:43,119 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4194, 2.6253, 3.7229, 3.0899, 3.1774, 3.0284, 2.1705, 2.3121], + device='cuda:2'), covar=tensor([0.1144, 0.2827, 0.0626, 0.1023, 0.1655, 0.1492, 0.2547, 0.2688], + device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0387, 0.0348, 0.0285, 0.0372, 0.0374, 0.0372, 0.0340], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:32:32,836 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3814, 2.7012, 2.4274, 1.8792, 2.4318, 2.6296, 2.6059, 2.6937], + device='cuda:2'), covar=tensor([0.0369, 0.0331, 0.0311, 0.0585, 0.0383, 0.0313, 0.0284, 0.0247], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0096, 0.0098, 0.0101, 0.0104, 0.0086, 0.0085, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 01:33:13,190 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7684, 3.4628, 3.5864, 3.7950, 3.6163, 3.8912, 3.7938, 4.0162], + device='cuda:2'), covar=tensor([0.0848, 0.0666, 0.0725, 0.0563, 0.0811, 0.0642, 0.0843, 0.0549], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0175, 0.0202, 0.0174, 0.0172, 0.0157, 0.0149, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 01:33:31,564 INFO [train.py:892] (2/4) Epoch 30, batch 1200, loss[loss=0.2501, simple_loss=0.3214, pruned_loss=0.08943, over 19494.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2458, pruned_loss=0.04511, over 3938771.49 frames. 
], batch size: 396, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:34:33,658 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55020.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 01:34:43,799 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.409e+02 3.740e+02 4.299e+02 5.341e+02 1.989e+03, threshold=8.597e+02, percent-clipped=2.0 +2023-03-29 01:35:37,513 INFO [train.py:892] (2/4) Epoch 30, batch 1250, loss[loss=0.1959, simple_loss=0.2719, pruned_loss=0.05999, over 19689.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2446, pruned_loss=0.04462, over 3942040.60 frames. ], batch size: 337, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:35:42,385 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4104, 4.2894, 4.7317, 4.3826, 3.9599, 4.5565, 4.3905, 4.8338], + device='cuda:2'), covar=tensor([0.0845, 0.0430, 0.0382, 0.0379, 0.1011, 0.0495, 0.0475, 0.0353], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0225, 0.0224, 0.0235, 0.0210, 0.0243, 0.0232, 0.0217], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:37:48,025 INFO [train.py:892] (2/4) Epoch 30, batch 1300, loss[loss=0.1628, simple_loss=0.2463, pruned_loss=0.03971, over 19776.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2434, pruned_loss=0.04398, over 3943309.25 frames. ], batch size: 152, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:38:08,754 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5281, 4.7514, 4.8104, 4.6886, 4.4340, 4.7734, 4.3009, 4.3337], + device='cuda:2'), covar=tensor([0.0481, 0.0488, 0.0505, 0.0471, 0.0659, 0.0519, 0.0766, 0.1017], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0285, 0.0296, 0.0259, 0.0263, 0.0248, 0.0267, 0.0311], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:38:12,818 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7207, 3.4584, 3.7204, 2.8188, 3.8660, 3.2485, 3.5015, 3.7029], + device='cuda:2'), covar=tensor([0.0648, 0.0433, 0.0611, 0.0834, 0.0456, 0.0458, 0.0428, 0.0656], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0086, 0.0083, 0.0111, 0.0079, 0.0081, 0.0079, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 01:38:55,046 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.487e+02 3.571e+02 4.407e+02 5.506e+02 1.177e+03, threshold=8.814e+02, percent-clipped=1.0 +2023-03-29 01:39:52,085 INFO [train.py:892] (2/4) Epoch 30, batch 1350, loss[loss=0.1792, simple_loss=0.2567, pruned_loss=0.05086, over 19775.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2444, pruned_loss=0.04417, over 3942753.20 frames. 
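Batch sizes in these records swing from under 50 to several hundred while the frame totals stay near 2e4 per batch: the sampler packs utterances to a roughly constant total duration, so batches of short cuts are large and batches of long cuts are small. A sketch of duration-capped packing; the greedy strategy and the max_duration value are simplifying assumptions, not the actual sampler:

def pack_by_duration(durations, max_duration=800.0):
    # Greedily pack utterance durations (seconds) into batches whose
    # total duration stays under max_duration.
    batches, cur, cur_dur = [], [], 0.0
    for d in durations:
        if cur and cur_dur + d > max_duration:
            batches.append(cur)
            cur, cur_dur = [], 0.0
        cur.append(d)
        cur_dur += d
    if cur:
        batches.append(cur)
    return batches

print(len(pack_by_duration([2.0] * 1000)[0]))   # 400 short cuts per batch
print(len(pack_by_duration([15.0] * 1000)[0]))  # 53 long cuts per batch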
], batch size: 247, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:39:58,492 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55148.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:41:00,297 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1745, 2.0930, 2.2504, 2.2142, 2.2800, 2.2777, 2.2636, 2.4040], + device='cuda:2'), covar=tensor([0.0412, 0.0368, 0.0348, 0.0363, 0.0473, 0.0387, 0.0433, 0.0304], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0079, 0.0082, 0.0076, 0.0089, 0.0082, 0.0099, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 01:41:51,241 INFO [train.py:892] (2/4) Epoch 30, batch 1400, loss[loss=0.1591, simple_loss=0.2369, pruned_loss=0.04062, over 19889.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2445, pruned_loss=0.04431, over 3943148.52 frames. ], batch size: 63, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:42:28,517 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55209.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:42:34,922 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55211.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:42:52,081 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55218.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:43:04,806 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.260e+02 3.753e+02 4.356e+02 5.536e+02 1.082e+03, threshold=8.713e+02, percent-clipped=2.0 +2023-03-29 01:43:37,355 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7733, 2.7927, 1.7245, 3.1954, 2.9458, 3.1216, 3.2009, 2.6028], + device='cuda:2'), covar=tensor([0.0706, 0.0700, 0.1788, 0.0683, 0.0657, 0.0564, 0.0647, 0.0932], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0143, 0.0143, 0.0152, 0.0133, 0.0135, 0.0147, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:43:53,715 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 01:44:02,191 INFO [train.py:892] (2/4) Epoch 30, batch 1450, loss[loss=0.1395, simple_loss=0.2154, pruned_loss=0.03178, over 19855.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2436, pruned_loss=0.04369, over 3945726.47 frames. 
], batch size: 85, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:44:12,099 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5885, 2.5975, 1.6581, 2.8891, 2.6937, 2.7576, 2.9108, 2.3972], + device='cuda:2'), covar=tensor([0.0710, 0.0767, 0.1576, 0.0683, 0.0718, 0.0608, 0.0637, 0.0911], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0142, 0.0143, 0.0152, 0.0133, 0.0134, 0.0147, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:44:57,708 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1299, 3.4614, 3.6868, 4.1998, 2.8481, 3.3775, 2.5845, 2.6145], + device='cuda:2'), covar=tensor([0.0554, 0.2002, 0.0860, 0.0374, 0.1993, 0.0902, 0.1435, 0.1641], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0328, 0.0245, 0.0200, 0.0244, 0.0206, 0.0214, 0.0216], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 01:45:06,738 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:46:06,666 INFO [train.py:892] (2/4) Epoch 30, batch 1500, loss[loss=0.2042, simple_loss=0.2748, pruned_loss=0.06677, over 19704.00 frames. ], tot_loss[loss=0.165, simple_loss=0.243, pruned_loss=0.04347, over 3947255.09 frames. ], batch size: 325, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:46:22,836 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9426, 3.3039, 3.4180, 3.9805, 2.6892, 3.2162, 2.7402, 2.4806], + device='cuda:2'), covar=tensor([0.0575, 0.1808, 0.1062, 0.0427, 0.2026, 0.0903, 0.1295, 0.1769], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0328, 0.0244, 0.0199, 0.0244, 0.0206, 0.0214, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 01:47:08,031 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55320.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:47:16,424 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.567e+02 3.622e+02 4.364e+02 5.637e+02 1.011e+03, threshold=8.727e+02, percent-clipped=2.0 +2023-03-29 01:47:32,637 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.60 vs. limit=5.0 +2023-03-29 01:47:50,389 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1603, 1.6148, 1.8321, 2.3502, 2.5059, 2.6279, 2.5449, 2.5689], + device='cuda:2'), covar=tensor([0.1119, 0.1986, 0.1677, 0.0881, 0.0651, 0.0494, 0.0514, 0.0573], + device='cuda:2'), in_proj_covar=tensor([0.0159, 0.0170, 0.0177, 0.0152, 0.0136, 0.0131, 0.0123, 0.0116], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 01:48:09,060 INFO [train.py:892] (2/4) Epoch 30, batch 1550, loss[loss=0.1526, simple_loss=0.2321, pruned_loss=0.03653, over 19848.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2428, pruned_loss=0.04326, over 3947928.86 frames. ], batch size: 49, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:48:14,142 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. 
limit=2.0 +2023-03-29 01:49:02,378 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55368.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:49:16,156 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-29 01:50:11,369 INFO [train.py:892] (2/4) Epoch 30, batch 1600, loss[loss=0.172, simple_loss=0.2428, pruned_loss=0.0506, over 19651.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2428, pruned_loss=0.04328, over 3948781.48 frames. ], batch size: 47, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:50:18,502 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0200, 2.6051, 4.0764, 3.6728, 3.9809, 4.0924, 3.8814, 3.8374], + device='cuda:2'), covar=tensor([0.0579, 0.0936, 0.0114, 0.0608, 0.0150, 0.0242, 0.0198, 0.0188], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0103, 0.0088, 0.0154, 0.0085, 0.0098, 0.0091, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 01:51:20,581 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.706e+02 3.762e+02 4.518e+02 5.538e+02 9.560e+02, threshold=9.036e+02, percent-clipped=2.0 +2023-03-29 01:52:18,740 INFO [train.py:892] (2/4) Epoch 30, batch 1650, loss[loss=0.1598, simple_loss=0.2296, pruned_loss=0.04498, over 19795.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2416, pruned_loss=0.0428, over 3950193.33 frames. ], batch size: 185, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:54:24,834 INFO [train.py:892] (2/4) Epoch 30, batch 1700, loss[loss=0.1528, simple_loss=0.2343, pruned_loss=0.03567, over 19552.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2422, pruned_loss=0.04333, over 3950002.81 frames. ], batch size: 47, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 01:54:44,193 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:55:19,225 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55518.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 01:55:22,718 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-29 01:55:32,300 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.784e+02 3.668e+02 4.195e+02 5.159e+02 8.554e+02, threshold=8.391e+02, percent-clipped=0.0 +2023-03-29 01:56:20,113 INFO [train.py:892] (2/4) Epoch 30, batch 1750, loss[loss=0.1465, simple_loss=0.2239, pruned_loss=0.03453, over 19836.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2414, pruned_loss=0.04339, over 3950974.95 frames. ], batch size: 75, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 01:57:01,868 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55566.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:57:04,544 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55567.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:58:06,240 INFO [train.py:892] (2/4) Epoch 30, batch 1800, loss[loss=0.2604, simple_loss=0.3643, pruned_loss=0.07823, over 18011.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2438, pruned_loss=0.04422, over 3947356.98 frames. 
], batch size: 633, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 01:59:02,411 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 4.015e+02 4.867e+02 6.116e+02 1.314e+03, threshold=9.734e+02, percent-clipped=8.0 +2023-03-29 01:59:44,973 INFO [train.py:892] (2/4) Epoch 30, batch 1850, loss[loss=0.1779, simple_loss=0.2698, pruned_loss=0.04298, over 19854.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2449, pruned_loss=0.04375, over 3946905.31 frames. ], batch size: 58, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 02:00:52,127 INFO [train.py:892] (2/4) Epoch 31, batch 0, loss[loss=0.1779, simple_loss=0.2537, pruned_loss=0.05105, over 19627.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2537, pruned_loss=0.05105, over 19627.00 frames. ], batch size: 330, lr: 5.09e-03, grad_scale: 16.0 +2023-03-29 02:00:52,128 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 02:01:29,196 INFO [train.py:926] (2/4) Epoch 31, validation: loss=0.1803, simple_loss=0.2493, pruned_loss=0.05567, over 2883724.00 frames. +2023-03-29 02:01:29,198 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 02:02:51,607 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1843, 4.8608, 4.9267, 5.2648, 4.8887, 5.4643, 5.3371, 5.5474], + device='cuda:2'), covar=tensor([0.0660, 0.0341, 0.0428, 0.0317, 0.0592, 0.0357, 0.0369, 0.0338], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0176, 0.0201, 0.0174, 0.0172, 0.0157, 0.0148, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 02:03:38,842 INFO [train.py:892] (2/4) Epoch 31, batch 50, loss[loss=0.1557, simple_loss=0.2332, pruned_loss=0.03904, over 19834.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2443, pruned_loss=0.04354, over 888354.80 frames. ], batch size: 208, lr: 5.09e-03, grad_scale: 16.0 +2023-03-29 02:04:41,987 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.130e+02 3.666e+02 4.158e+02 5.108e+02 9.085e+02, threshold=8.317e+02, percent-clipped=0.0 +2023-03-29 02:05:06,827 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9805, 2.2702, 4.2824, 3.7073, 4.1504, 4.1715, 4.0123, 4.1467], + device='cuda:2'), covar=tensor([0.0857, 0.1393, 0.0148, 0.0922, 0.0175, 0.0306, 0.0244, 0.0183], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0104, 0.0089, 0.0155, 0.0086, 0.0098, 0.0092, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:05:38,309 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55748.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:05:43,123 INFO [train.py:892] (2/4) Epoch 31, batch 100, loss[loss=0.1632, simple_loss=0.2479, pruned_loss=0.03921, over 19950.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2429, pruned_loss=0.04287, over 1567545.99 frames. 
], batch size: 52, lr: 5.09e-03, grad_scale: 16.0 +2023-03-29 02:06:40,819 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1905, 3.2129, 4.6660, 3.5283, 3.7927, 3.6958, 2.6092, 2.7956], + device='cuda:2'), covar=tensor([0.0906, 0.2966, 0.0463, 0.1017, 0.1604, 0.1398, 0.2440, 0.2696], + device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0388, 0.0347, 0.0285, 0.0372, 0.0375, 0.0373, 0.0340], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:06:48,275 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2289, 3.0268, 3.1063, 3.2541, 3.1172, 3.1302, 3.3198, 3.5073], + device='cuda:2'), covar=tensor([0.0775, 0.0524, 0.0609, 0.0485, 0.0755, 0.0751, 0.0488, 0.0405], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0176, 0.0202, 0.0175, 0.0173, 0.0158, 0.0149, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 02:07:47,103 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0397, 2.9961, 1.9102, 3.6052, 3.2312, 3.5289, 3.6278, 2.9089], + device='cuda:2'), covar=tensor([0.0681, 0.0708, 0.1812, 0.0672, 0.0630, 0.0523, 0.0610, 0.0818], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0144, 0.0144, 0.0152, 0.0133, 0.0135, 0.0147, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:07:54,492 INFO [train.py:892] (2/4) Epoch 31, batch 150, loss[loss=0.1682, simple_loss=0.2482, pruned_loss=0.0441, over 19889.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2427, pruned_loss=0.04326, over 2096615.51 frames. ], batch size: 92, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:08:04,200 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55804.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:08:17,807 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55809.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:08:54,022 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.568e+02 3.733e+02 4.638e+02 5.672e+02 1.192e+03, threshold=9.276e+02, percent-clipped=1.0 +2023-03-29 02:10:02,794 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:10:09,555 INFO [train.py:892] (2/4) Epoch 31, batch 200, loss[loss=0.2205, simple_loss=0.2911, pruned_loss=0.07497, over 19687.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2432, pruned_loss=0.04277, over 2506624.37 frames. 
], batch size: 325, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:10:13,104 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55852.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:10:39,670 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4223, 2.4046, 1.5158, 2.7105, 2.4946, 2.6015, 2.7370, 2.1386], + device='cuda:2'), covar=tensor([0.0757, 0.0882, 0.1646, 0.0723, 0.0786, 0.0611, 0.0656, 0.0985], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0145, 0.0144, 0.0153, 0.0133, 0.0135, 0.0148, 0.0146], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:10:53,720 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55867.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:11:20,245 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-29 02:12:23,367 INFO [train.py:892] (2/4) Epoch 31, batch 250, loss[loss=0.1443, simple_loss=0.2126, pruned_loss=0.03798, over 19866.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2416, pruned_loss=0.04218, over 2827039.03 frames. ], batch size: 129, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:12:44,200 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55909.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:12:59,714 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55915.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:13:03,675 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7395, 2.7814, 2.9213, 2.3512, 3.0283, 2.4997, 2.8492, 2.8780], + device='cuda:2'), covar=tensor([0.0590, 0.0530, 0.0562, 0.0839, 0.0359, 0.0544, 0.0512, 0.0367], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0085, 0.0083, 0.0111, 0.0079, 0.0081, 0.0080, 0.0072], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 02:13:23,201 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 3.622e+02 4.377e+02 5.144e+02 8.817e+02, threshold=8.755e+02, percent-clipped=0.0 +2023-03-29 02:14:32,046 INFO [train.py:892] (2/4) Epoch 31, batch 300, loss[loss=0.1575, simple_loss=0.2321, pruned_loss=0.04148, over 19846.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.241, pruned_loss=0.04187, over 3075812.02 frames. ], batch size: 190, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:16:45,165 INFO [train.py:892] (2/4) Epoch 31, batch 350, loss[loss=0.1416, simple_loss=0.2292, pruned_loss=0.02699, over 19796.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2396, pruned_loss=0.04121, over 3271025.32 frames. 
], batch size: 68, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:17:12,025 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4760, 4.9926, 5.0997, 4.8005, 5.4069, 3.3812, 4.2427, 2.5716], + device='cuda:2'), covar=tensor([0.0148, 0.0176, 0.0134, 0.0189, 0.0109, 0.0868, 0.0951, 0.1507], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0145, 0.0112, 0.0134, 0.0118, 0.0134, 0.0142, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:17:36,531 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5488, 2.5298, 1.6009, 2.8683, 2.6139, 2.7556, 2.8972, 2.3077], + device='cuda:2'), covar=tensor([0.0752, 0.0847, 0.1576, 0.0683, 0.0702, 0.0584, 0.0632, 0.0959], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0144, 0.0144, 0.0152, 0.0132, 0.0135, 0.0147, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:17:41,467 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.448e+02 3.652e+02 4.141e+02 5.135e+02 1.534e+03, threshold=8.281e+02, percent-clipped=2.0 +2023-03-29 02:17:46,494 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5890, 4.6813, 2.6857, 4.9752, 5.1617, 2.2449, 4.3936, 3.6441], + device='cuda:2'), covar=tensor([0.0600, 0.0682, 0.2495, 0.0611, 0.0439, 0.2604, 0.0828, 0.0848], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0255, 0.0228, 0.0272, 0.0252, 0.0203, 0.0239, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 02:18:47,936 INFO [train.py:892] (2/4) Epoch 31, batch 400, loss[loss=0.144, simple_loss=0.2253, pruned_loss=0.03132, over 19475.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.239, pruned_loss=0.04079, over 3422501.79 frames. ], batch size: 43, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:20:53,732 INFO [train.py:892] (2/4) Epoch 31, batch 450, loss[loss=0.1705, simple_loss=0.2473, pruned_loss=0.04689, over 19802.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2392, pruned_loss=0.04087, over 3537493.19 frames. ], batch size: 82, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:21:03,388 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56104.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:21:56,007 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.311e+02 3.637e+02 4.331e+02 5.341e+02 9.053e+02, threshold=8.662e+02, percent-clipped=1.0 +2023-03-29 02:22:47,341 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56145.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:22:54,316 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9281, 4.7658, 5.3068, 4.8952, 4.2908, 5.0820, 4.9601, 5.4618], + device='cuda:2'), covar=tensor([0.0799, 0.0353, 0.0329, 0.0365, 0.0716, 0.0469, 0.0429, 0.0281], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0221, 0.0221, 0.0232, 0.0206, 0.0241, 0.0230, 0.0216], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:23:00,004 INFO [train.py:892] (2/4) Epoch 31, batch 500, loss[loss=0.1661, simple_loss=0.2409, pruned_loss=0.0457, over 19817.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2401, pruned_loss=0.04136, over 3629385.22 frames. 
], batch size: 184, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:23:38,786 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2203, 2.5265, 2.2931, 1.7118, 2.2807, 2.4755, 2.3568, 2.4703], + device='cuda:2'), covar=tensor([0.0418, 0.0268, 0.0326, 0.0597, 0.0381, 0.0298, 0.0317, 0.0273], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0094, 0.0098, 0.0100, 0.0103, 0.0085, 0.0085, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 02:24:42,826 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9534, 5.1582, 5.4077, 5.1357, 5.2087, 4.9562, 5.1168, 4.9431], + device='cuda:2'), covar=tensor([0.1413, 0.1391, 0.0884, 0.1203, 0.0715, 0.0898, 0.1792, 0.1929], + device='cuda:2'), in_proj_covar=tensor([0.0295, 0.0331, 0.0367, 0.0300, 0.0274, 0.0281, 0.0358, 0.0387], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:25:07,710 INFO [train.py:892] (2/4) Epoch 31, batch 550, loss[loss=0.1486, simple_loss=0.2179, pruned_loss=0.03963, over 19767.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2403, pruned_loss=0.04188, over 3701455.08 frames. ], batch size: 163, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:25:15,892 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56204.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:25:20,616 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56206.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:26:05,155 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.698e+02 3.907e+02 4.783e+02 5.520e+02 2.240e+03, threshold=9.567e+02, percent-clipped=4.0 +2023-03-29 02:27:16,738 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6867, 2.6884, 1.7273, 3.1005, 2.8122, 3.0348, 3.1295, 2.5261], + device='cuda:2'), covar=tensor([0.0692, 0.0784, 0.1712, 0.0710, 0.0751, 0.0599, 0.0680, 0.0894], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0143, 0.0143, 0.0151, 0.0132, 0.0135, 0.0146, 0.0144], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:27:17,921 INFO [train.py:892] (2/4) Epoch 31, batch 600, loss[loss=0.1549, simple_loss=0.2316, pruned_loss=0.03912, over 19755.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2403, pruned_loss=0.04164, over 3756446.02 frames. ], batch size: 209, lr: 5.06e-03, grad_scale: 16.0 +2023-03-29 02:29:20,423 INFO [train.py:892] (2/4) Epoch 31, batch 650, loss[loss=0.2015, simple_loss=0.2859, pruned_loss=0.05853, over 19719.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2405, pruned_loss=0.042, over 3799802.63 frames. ], batch size: 54, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:30:21,239 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.457e+02 3.714e+02 4.450e+02 5.010e+02 8.599e+02, threshold=8.900e+02, percent-clipped=0.0 +2023-03-29 02:31:30,808 INFO [train.py:892] (2/4) Epoch 31, batch 700, loss[loss=0.1629, simple_loss=0.2487, pruned_loss=0.03851, over 19773.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2417, pruned_loss=0.04235, over 3834201.61 frames. 
], batch size: 46, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:32:44,022 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2196, 3.9466, 4.0477, 4.2645, 3.9022, 4.2606, 4.3323, 4.5363], + device='cuda:2'), covar=tensor([0.0684, 0.0429, 0.0502, 0.0391, 0.0756, 0.0522, 0.0396, 0.0301], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0177, 0.0201, 0.0175, 0.0173, 0.0158, 0.0151, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 02:33:34,587 INFO [train.py:892] (2/4) Epoch 31, batch 750, loss[loss=0.1796, simple_loss=0.2564, pruned_loss=0.05138, over 19804.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.242, pruned_loss=0.04243, over 3860787.02 frames. ], batch size: 202, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:33:43,721 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56404.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:34:25,287 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.58 vs. limit=5.0 +2023-03-29 02:34:36,545 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.075e+02 3.994e+02 4.748e+02 5.714e+02 1.014e+03, threshold=9.496e+02, percent-clipped=1.0 +2023-03-29 02:35:40,565 INFO [train.py:892] (2/4) Epoch 31, batch 800, loss[loss=0.1707, simple_loss=0.2497, pruned_loss=0.04583, over 19781.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2414, pruned_loss=0.04242, over 3881547.31 frames. ], batch size: 66, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:35:44,467 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56452.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:37:14,421 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-03-29 02:37:49,404 INFO [train.py:892] (2/4) Epoch 31, batch 850, loss[loss=0.1465, simple_loss=0.2251, pruned_loss=0.03394, over 19809.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2409, pruned_loss=0.04246, over 3897504.37 frames. ], batch size: 47, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:37:50,601 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56501.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:37:58,399 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:38:41,567 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56523.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 02:38:42,535 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.680e+02 3.782e+02 4.394e+02 5.442e+02 8.928e+02, threshold=8.788e+02, percent-clipped=0.0 +2023-03-29 02:39:38,243 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-29 02:39:52,793 INFO [train.py:892] (2/4) Epoch 31, batch 900, loss[loss=0.1871, simple_loss=0.2658, pruned_loss=0.05424, over 19670.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2415, pruned_loss=0.04261, over 3907901.71 frames. 
], batch size: 325, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:39:56,935 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56552.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:40:49,929 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9686, 5.0591, 5.3941, 5.1227, 5.2010, 4.8819, 5.1793, 4.8912], + device='cuda:2'), covar=tensor([0.1329, 0.1441, 0.0828, 0.1281, 0.0724, 0.0906, 0.1676, 0.1934], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0330, 0.0368, 0.0298, 0.0274, 0.0281, 0.0358, 0.0391], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:41:04,127 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.9622, 6.2385, 6.2638, 6.1413, 5.9301, 6.2055, 5.5199, 5.6323], + device='cuda:2'), covar=tensor([0.0390, 0.0427, 0.0393, 0.0388, 0.0543, 0.0481, 0.0679, 0.0905], + device='cuda:2'), in_proj_covar=tensor([0.0269, 0.0286, 0.0294, 0.0258, 0.0264, 0.0248, 0.0267, 0.0314], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:41:18,979 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56584.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 02:42:00,152 INFO [train.py:892] (2/4) Epoch 31, batch 950, loss[loss=0.1753, simple_loss=0.2477, pruned_loss=0.0515, over 19839.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2414, pruned_loss=0.04236, over 3917875.16 frames. ], batch size: 177, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:42:56,282 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-29 02:43:01,364 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.678e+02 3.807e+02 4.389e+02 5.044e+02 8.722e+02, threshold=8.778e+02, percent-clipped=0.0 +2023-03-29 02:44:14,708 INFO [train.py:892] (2/4) Epoch 31, batch 1000, loss[loss=0.1412, simple_loss=0.2198, pruned_loss=0.03132, over 19802.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2416, pruned_loss=0.04263, over 3925392.60 frames. ], batch size: 105, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:46:17,957 INFO [train.py:892] (2/4) Epoch 31, batch 1050, loss[loss=0.1724, simple_loss=0.2453, pruned_loss=0.04977, over 19779.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2439, pruned_loss=0.04372, over 3929548.55 frames. ], batch size: 213, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:46:19,342 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5160, 3.6823, 2.3763, 4.3083, 3.9190, 4.2695, 4.2859, 3.3246], + device='cuda:2'), covar=tensor([0.0574, 0.0519, 0.1398, 0.0567, 0.0514, 0.0385, 0.0587, 0.0733], + device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0142, 0.0141, 0.0149, 0.0130, 0.0133, 0.0144, 0.0142], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:47:17,649 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.497e+02 3.728e+02 4.373e+02 5.189e+02 1.039e+03, threshold=8.746e+02, percent-clipped=2.0 +2023-03-29 02:48:25,859 INFO [train.py:892] (2/4) Epoch 31, batch 1100, loss[loss=0.2522, simple_loss=0.353, pruned_loss=0.07568, over 17962.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2429, pruned_loss=0.0433, over 3933653.81 frames. 
], batch size: 633, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:50:26,123 INFO [train.py:892] (2/4) Epoch 31, batch 1150, loss[loss=0.1629, simple_loss=0.2356, pruned_loss=0.04511, over 19781.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.242, pruned_loss=0.04279, over 3937127.40 frames. ], batch size: 211, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:50:27,132 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56801.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:50:32,064 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56803.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:51:22,628 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.381e+02 3.820e+02 4.662e+02 5.416e+02 9.191e+02, threshold=9.324e+02, percent-clipped=1.0 +2023-03-29 02:52:25,366 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2453, 3.5347, 3.0837, 2.6530, 3.1544, 3.3342, 3.3590, 3.4133], + device='cuda:2'), covar=tensor([0.0253, 0.0290, 0.0278, 0.0458, 0.0318, 0.0348, 0.0228, 0.0223], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0097, 0.0100, 0.0102, 0.0106, 0.0088, 0.0087, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 02:52:27,698 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56849.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:52:31,087 INFO [train.py:892] (2/4) Epoch 31, batch 1200, loss[loss=0.1465, simple_loss=0.2274, pruned_loss=0.0328, over 19804.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.242, pruned_loss=0.04275, over 3940054.37 frames. ], batch size: 224, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:53:06,347 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56864.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:53:45,253 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56879.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 02:54:12,104 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2128, 4.0893, 4.5170, 4.1422, 3.7964, 4.3508, 4.1724, 4.6170], + device='cuda:2'), covar=tensor([0.0851, 0.0370, 0.0384, 0.0442, 0.1034, 0.0619, 0.0516, 0.0345], + device='cuda:2'), in_proj_covar=tensor([0.0287, 0.0223, 0.0223, 0.0236, 0.0208, 0.0245, 0.0233, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:54:12,211 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9576, 2.9080, 1.8522, 3.4998, 3.2477, 3.4011, 3.4665, 2.7735], + device='cuda:2'), covar=tensor([0.0727, 0.0741, 0.1744, 0.0604, 0.0612, 0.0573, 0.0667, 0.0846], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0143, 0.0143, 0.0151, 0.0132, 0.0134, 0.0146, 0.0144], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:54:41,990 INFO [train.py:892] (2/4) Epoch 31, batch 1250, loss[loss=0.1394, simple_loss=0.2084, pruned_loss=0.03524, over 19799.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2407, pruned_loss=0.04195, over 3942424.98 frames. 
], batch size: 174, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:54:54,321 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6653, 4.3454, 4.4563, 4.1864, 4.6419, 3.1471, 3.8428, 2.2253], + device='cuda:2'), covar=tensor([0.0174, 0.0226, 0.0140, 0.0192, 0.0142, 0.0973, 0.0769, 0.1520], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0148, 0.0114, 0.0135, 0.0120, 0.0136, 0.0144, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 02:55:03,259 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56908.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:55:16,987 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 02:55:43,184 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 3.644e+02 4.260e+02 5.333e+02 1.198e+03, threshold=8.519e+02, percent-clipped=0.0 +2023-03-29 02:56:45,886 INFO [train.py:892] (2/4) Epoch 31, batch 1300, loss[loss=0.1501, simple_loss=0.2331, pruned_loss=0.03348, over 19777.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2416, pruned_loss=0.04268, over 3944810.75 frames. ], batch size: 94, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 02:57:09,471 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8647, 3.2051, 3.0903, 3.1521, 2.8094, 3.0597, 2.7720, 3.1524], + device='cuda:2'), covar=tensor([0.0319, 0.0277, 0.0321, 0.0262, 0.0387, 0.0287, 0.0420, 0.0348], + device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0081, 0.0084, 0.0078, 0.0091, 0.0084, 0.0101, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 02:57:31,424 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56969.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:58:08,005 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7850, 3.8901, 2.2800, 4.0041, 4.1544, 1.8884, 3.3889, 3.2575], + device='cuda:2'), covar=tensor([0.0746, 0.0877, 0.2851, 0.0946, 0.0617, 0.2832, 0.1165, 0.0843], + device='cuda:2'), in_proj_covar=tensor([0.0233, 0.0257, 0.0230, 0.0276, 0.0254, 0.0204, 0.0240, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 02:58:27,081 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56993.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:58:31,596 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56995.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:58:45,961 INFO [train.py:892] (2/4) Epoch 31, batch 1350, loss[loss=0.1527, simple_loss=0.2252, pruned_loss=0.04015, over 19854.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2414, pruned_loss=0.04257, over 3947175.22 frames. 
], batch size: 78, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 02:59:37,592 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.342e+02 3.868e+02 4.711e+02 5.636e+02 9.457e+02, threshold=9.422e+02, percent-clipped=6.0 +2023-03-29 03:00:14,766 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8234, 2.8441, 1.7727, 2.8413, 2.9747, 1.4576, 2.5279, 2.3368], + device='cuda:2'), covar=tensor([0.1034, 0.0929, 0.2821, 0.0917, 0.0782, 0.2521, 0.1126, 0.1122], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0259, 0.0232, 0.0278, 0.0257, 0.0206, 0.0241, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 03:00:41,601 INFO [train.py:892] (2/4) Epoch 31, batch 1400, loss[loss=0.1617, simple_loss=0.2455, pruned_loss=0.03895, over 19894.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2426, pruned_loss=0.04268, over 3945841.06 frames. ], batch size: 63, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 03:00:50,405 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57054.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:00:54,551 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57056.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:02:21,126 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1503, 2.5316, 4.2361, 3.6720, 4.0603, 4.2477, 3.9780, 4.0350], + device='cuda:2'), covar=tensor([0.0606, 0.1021, 0.0116, 0.0748, 0.0161, 0.0224, 0.0205, 0.0168], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0104, 0.0089, 0.0154, 0.0085, 0.0098, 0.0091, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:02:44,876 INFO [train.py:892] (2/4) Epoch 31, batch 1450, loss[loss=0.1601, simple_loss=0.2437, pruned_loss=0.03827, over 19594.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2425, pruned_loss=0.0425, over 3944890.19 frames. ], batch size: 45, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 03:03:41,333 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.443e+02 3.544e+02 4.145e+02 5.196e+02 9.297e+02, threshold=8.291e+02, percent-clipped=0.0 +2023-03-29 03:03:51,245 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7787, 2.8822, 3.5947, 2.9303, 3.8922, 3.7636, 4.5771, 5.0869], + device='cuda:2'), covar=tensor([0.0433, 0.1976, 0.1409, 0.2374, 0.1601, 0.1475, 0.0522, 0.0387], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0240, 0.0268, 0.0254, 0.0295, 0.0258, 0.0232, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:04:03,926 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6753, 2.7291, 4.8383, 4.0488, 4.5075, 4.7747, 4.5272, 4.4269], + device='cuda:2'), covar=tensor([0.0566, 0.1009, 0.0098, 0.0915, 0.0162, 0.0209, 0.0178, 0.0151], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0104, 0.0089, 0.0153, 0.0085, 0.0098, 0.0090, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:04:44,697 INFO [train.py:892] (2/4) Epoch 31, batch 1500, loss[loss=0.1852, simple_loss=0.2604, pruned_loss=0.05504, over 19773.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2423, pruned_loss=0.04242, over 3945611.02 frames. 
], batch size: 247, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:05:07,834 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57159.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:05:51,087 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.64 vs. limit=5.0 +2023-03-29 03:05:55,194 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57179.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 03:06:12,956 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57186.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:06:22,301 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.65 vs. limit=2.0 +2023-03-29 03:06:41,345 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57197.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:06:49,030 INFO [train.py:892] (2/4) Epoch 31, batch 1550, loss[loss=0.1386, simple_loss=0.2225, pruned_loss=0.0274, over 19792.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2427, pruned_loss=0.04269, over 3946757.65 frames. ], batch size: 111, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:07:43,701 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.504e+02 3.735e+02 4.518e+02 5.755e+02 1.077e+03, threshold=9.035e+02, percent-clipped=5.0 +2023-03-29 03:07:52,670 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57227.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 03:08:30,488 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57244.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:08:38,650 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57247.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:08:46,367 INFO [train.py:892] (2/4) Epoch 31, batch 1600, loss[loss=0.1363, simple_loss=0.2129, pruned_loss=0.02983, over 19796.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2407, pruned_loss=0.04174, over 3948913.72 frames. ], batch size: 174, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:08:53,887 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9145, 5.0012, 5.3101, 5.0797, 5.1663, 4.7957, 5.0635, 4.8390], + device='cuda:2'), covar=tensor([0.1384, 0.1660, 0.0921, 0.1281, 0.0755, 0.1030, 0.1781, 0.2055], + device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0336, 0.0373, 0.0301, 0.0278, 0.0286, 0.0362, 0.0392], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 03:09:03,992 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57258.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 03:09:20,654 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57264.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:10:45,144 INFO [train.py:892] (2/4) Epoch 31, batch 1650, loss[loss=0.155, simple_loss=0.239, pruned_loss=0.03548, over 19722.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2405, pruned_loss=0.04166, over 3949123.36 frames. 
], batch size: 52, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:10:58,486 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57305.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:11:38,984 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.517e+02 3.692e+02 4.492e+02 5.232e+02 9.843e+02, threshold=8.984e+02, percent-clipped=1.0 +2023-03-29 03:12:37,165 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57349.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:12:40,554 INFO [train.py:892] (2/4) Epoch 31, batch 1700, loss[loss=0.1579, simple_loss=0.2362, pruned_loss=0.03984, over 19872.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2407, pruned_loss=0.04179, over 3950150.07 frames. ], batch size: 108, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:12:41,491 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57351.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:14:26,291 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57398.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:14:31,700 INFO [train.py:892] (2/4) Epoch 31, batch 1750, loss[loss=0.1627, simple_loss=0.2261, pruned_loss=0.04959, over 19833.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2404, pruned_loss=0.0419, over 3950221.69 frames. ], batch size: 144, lr: 5.01e-03, grad_scale: 32.0 +2023-03-29 03:15:05,693 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0187, 4.1329, 2.4833, 4.3433, 4.5404, 1.9813, 3.7559, 3.4819], + device='cuda:2'), covar=tensor([0.0690, 0.0820, 0.2709, 0.0768, 0.0561, 0.2889, 0.1030, 0.0827], + device='cuda:2'), in_proj_covar=tensor([0.0233, 0.0257, 0.0230, 0.0276, 0.0254, 0.0203, 0.0239, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 03:15:20,065 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.125e+02 3.770e+02 4.540e+02 5.122e+02 1.411e+03, threshold=9.080e+02, percent-clipped=2.0 +2023-03-29 03:16:11,414 INFO [train.py:892] (2/4) Epoch 31, batch 1800, loss[loss=0.1595, simple_loss=0.2249, pruned_loss=0.04708, over 19875.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2406, pruned_loss=0.04235, over 3949278.15 frames. 
], batch size: 159, lr: 5.01e-03, grad_scale: 16.0 +2023-03-29 03:16:27,880 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:16:27,992 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:16:33,427 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57462.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:17:02,541 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0944, 3.4210, 3.0397, 2.5445, 3.0392, 3.3155, 3.3219, 3.3436], + device='cuda:2'), covar=tensor([0.0342, 0.0269, 0.0267, 0.0547, 0.0321, 0.0281, 0.0208, 0.0229], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0097, 0.0100, 0.0103, 0.0106, 0.0087, 0.0087, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 03:17:33,824 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57495.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:17:39,248 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4415, 3.5328, 2.2085, 4.1669, 3.7190, 4.1748, 4.1678, 3.2572], + device='cuda:2'), covar=tensor([0.0617, 0.0614, 0.1403, 0.0600, 0.0612, 0.0350, 0.0553, 0.0770], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0145, 0.0144, 0.0154, 0.0134, 0.0136, 0.0148, 0.0146], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:17:44,842 INFO [train.py:892] (2/4) Epoch 31, batch 1850, loss[loss=0.1646, simple_loss=0.2517, pruned_loss=0.03876, over 19830.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2434, pruned_loss=0.04254, over 3948655.62 frames. ], batch size: 57, lr: 5.01e-03, grad_scale: 16.0 +2023-03-29 03:17:48,974 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-03-29 03:18:57,349 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1828, 3.9961, 3.9813, 3.7744, 4.1922, 3.0162, 3.4998, 1.9221], + device='cuda:2'), covar=tensor([0.0199, 0.0225, 0.0146, 0.0189, 0.0133, 0.0967, 0.0635, 0.1601], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0147, 0.0114, 0.0135, 0.0119, 0.0136, 0.0143, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:18:58,451 INFO [train.py:892] (2/4) Epoch 32, batch 0, loss[loss=0.1608, simple_loss=0.2359, pruned_loss=0.04286, over 19764.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2359, pruned_loss=0.04286, over 19764.00 frames. ], batch size: 198, lr: 4.93e-03, grad_scale: 16.0 +2023-03-29 03:18:58,451 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 03:19:34,344 INFO [train.py:926] (2/4) Epoch 32, validation: loss=0.1821, simple_loss=0.2499, pruned_loss=0.05717, over 2883724.00 frames. 
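A note on reading the per-batch tot_loss records above: in this section the three logged terms are consistent with loss = 0.5 * simple_loss + pruned_loss; for the Epoch 32, batch 0 record just above, 0.5 * 0.2359 + 0.04286 = 0.16081, matching the logged loss=0.1608 to rounding. The snippet below is a minimal sketch of that check; the regex and variable names are illustrative assumptions, not taken from train.py.

import re

# One tot_loss record, copied from the Epoch 32, batch 0 line above.
record = ("tot_loss[loss=0.1608, simple_loss=0.2359, "
          "pruned_loss=0.04286, over 19764.00 frames. ]")

# Pull the three loss terms out of the tot_loss[...] bracket.
m = re.search(r"loss=([\d.]+), simple_loss=([\d.]+), pruned_loss=([\d.]+)",
              record)
loss, simple_loss, pruned_loss = map(float, m.groups())

# Assumed combination: total = 0.5 * simple + 1.0 * pruned. The weights are
# inferred from the logged numbers themselves, which are rounded to ~4 digits,
# hence the loose tolerance.
assert abs(loss - (0.5 * simple_loss + pruned_loss)) < 1e-3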
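The same arithmetic holds for the running tot_loss values elsewhere in this section (for example 0.5 * 0.2416 + 0.04218 = 0.16298 against the logged 0.163 near the top), so the relation reflects how these lines are composed rather than a property of a single batch.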
+2023-03-29 03:19:34,345 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 03:19:37,623 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57507.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:20:17,658 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57523.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:20:21,035 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.254e+02 3.434e+02 4.255e+02 5.001e+02 8.278e+02, threshold=8.509e+02, percent-clipped=0.0 +2023-03-29 03:21:01,568 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57542.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:21:28,469 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57553.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 03:21:33,949 INFO [train.py:892] (2/4) Epoch 32, batch 50, loss[loss=0.1387, simple_loss=0.2088, pruned_loss=0.03423, over 19842.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2401, pruned_loss=0.04263, over 890325.17 frames. ], batch size: 124, lr: 4.93e-03, grad_scale: 16.0 +2023-03-29 03:21:35,132 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57556.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:21:53,737 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57564.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:21:58,206 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4269, 2.4534, 1.7028, 2.6414, 2.4965, 2.5495, 2.6671, 2.2036], + device='cuda:2'), covar=tensor([0.0677, 0.0720, 0.1352, 0.0662, 0.0664, 0.0557, 0.0592, 0.0891], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0144, 0.0143, 0.0153, 0.0133, 0.0135, 0.0147, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:23:18,867 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57600.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:23:31,174 INFO [train.py:892] (2/4) Epoch 32, batch 100, loss[loss=0.1448, simple_loss=0.2165, pruned_loss=0.03656, over 19750.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2418, pruned_loss=0.04306, over 1566393.09 frames. ], batch size: 155, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:23:47,402 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57612.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:24:17,748 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.599e+02 3.672e+02 4.484e+02 5.551e+02 1.135e+03, threshold=8.969e+02, percent-clipped=1.0 +2023-03-29 03:25:12,276 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57649.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:25:16,289 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57651.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:25:27,036 INFO [train.py:892] (2/4) Epoch 32, batch 150, loss[loss=0.161, simple_loss=0.2375, pruned_loss=0.04221, over 19780.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2398, pruned_loss=0.04184, over 2094826.03 frames. ], batch size: 193, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:25:34,912 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. 
limit=2.0 +2023-03-29 03:25:41,440 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2603, 4.7839, 4.9197, 4.6756, 5.2047, 3.2970, 4.2282, 2.8000], + device='cuda:2'), covar=tensor([0.0171, 0.0189, 0.0131, 0.0173, 0.0119, 0.0868, 0.0770, 0.1286], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0148, 0.0114, 0.0136, 0.0120, 0.0136, 0.0144, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:26:32,180 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-29 03:27:07,498 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57697.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:27:11,773 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57699.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:27:27,787 INFO [train.py:892] (2/4) Epoch 32, batch 200, loss[loss=0.1721, simple_loss=0.2528, pruned_loss=0.0457, over 19771.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2416, pruned_loss=0.04181, over 2504294.98 frames. ], batch size: 69, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:28:10,866 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.596e+02 3.546e+02 4.264e+02 5.379e+02 8.670e+02, threshold=8.529e+02, percent-clipped=0.0 +2023-03-29 03:28:22,023 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8803, 4.6040, 4.6790, 4.9582, 4.6564, 5.0928, 5.0636, 5.2680], + device='cuda:2'), covar=tensor([0.0677, 0.0427, 0.0488, 0.0374, 0.0682, 0.0449, 0.0414, 0.0291], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0176, 0.0201, 0.0174, 0.0174, 0.0158, 0.0150, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 03:28:30,904 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57733.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:29:16,452 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57754.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:29:19,481 INFO [train.py:892] (2/4) Epoch 32, batch 250, loss[loss=0.1535, simple_loss=0.2326, pruned_loss=0.03719, over 19871.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2409, pruned_loss=0.0412, over 2824438.72 frames. ], batch size: 64, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:30:49,981 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57794.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:31:19,666 INFO [train.py:892] (2/4) Epoch 32, batch 300, loss[loss=0.1782, simple_loss=0.2451, pruned_loss=0.05561, over 19785.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.242, pruned_loss=0.04176, over 3072391.89 frames. 
], batch size: 178, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:31:50,689 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57818.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:32:05,152 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.255e+02 3.451e+02 4.259e+02 5.251e+02 1.158e+03, threshold=8.517e+02, percent-clipped=3.0 +2023-03-29 03:32:48,089 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57842.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:33:08,954 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57851.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:33:13,431 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57853.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 03:33:19,345 INFO [train.py:892] (2/4) Epoch 32, batch 350, loss[loss=0.1535, simple_loss=0.2364, pruned_loss=0.03532, over 19647.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2427, pruned_loss=0.04201, over 3262941.82 frames. ], batch size: 69, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:34:27,265 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6029, 3.7358, 2.2959, 3.8671, 4.0274, 1.8344, 3.2812, 3.1843], + device='cuda:2'), covar=tensor([0.0796, 0.0906, 0.2758, 0.0812, 0.0669, 0.2701, 0.1131, 0.0838], + device='cuda:2'), in_proj_covar=tensor([0.0234, 0.0257, 0.0230, 0.0275, 0.0254, 0.0203, 0.0240, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 03:34:39,374 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57890.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:35:01,133 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57900.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:35:03,047 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57901.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:35:14,089 INFO [train.py:892] (2/4) Epoch 32, batch 400, loss[loss=0.1389, simple_loss=0.2173, pruned_loss=0.03023, over 19776.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2413, pruned_loss=0.04139, over 3415430.26 frames. ], batch size: 152, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:35:59,241 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.499e+02 3.927e+02 4.614e+02 5.637e+02 8.834e+02, threshold=9.228e+02, percent-clipped=1.0 +2023-03-29 03:36:53,752 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57948.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:37:11,501 INFO [train.py:892] (2/4) Epoch 32, batch 450, loss[loss=0.1688, simple_loss=0.24, pruned_loss=0.04879, over 19820.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2403, pruned_loss=0.04103, over 3534131.54 frames. ], batch size: 128, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:37:17,537 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.76 vs. 
limit=5.0 +2023-03-29 03:39:10,414 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7806, 3.3175, 3.6418, 3.1686, 3.9776, 4.0054, 4.5140, 5.0972], + device='cuda:2'), covar=tensor([0.0429, 0.1529, 0.1351, 0.2137, 0.1494, 0.1210, 0.0564, 0.0375], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0240, 0.0268, 0.0255, 0.0297, 0.0258, 0.0232, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:39:11,307 INFO [train.py:892] (2/4) Epoch 32, batch 500, loss[loss=0.1591, simple_loss=0.2485, pruned_loss=0.03487, over 19524.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2408, pruned_loss=0.04133, over 3625968.28 frames. ], batch size: 54, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:39:55,699 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.669e+02 3.901e+02 4.425e+02 5.266e+02 9.447e+02, threshold=8.850e+02, percent-clipped=1.0 +2023-03-29 03:40:09,134 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1244, 4.6532, 4.7892, 4.5630, 5.0623, 3.2831, 4.0821, 2.5539], + device='cuda:2'), covar=tensor([0.0156, 0.0223, 0.0130, 0.0173, 0.0124, 0.0916, 0.0868, 0.1455], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0147, 0.0113, 0.0134, 0.0119, 0.0135, 0.0142, 0.0127], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:41:02,895 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58054.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:41:07,807 INFO [train.py:892] (2/4) Epoch 32, batch 550, loss[loss=0.1426, simple_loss=0.2289, pruned_loss=0.02813, over 19769.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2413, pruned_loss=0.0416, over 3697138.78 frames. ], batch size: 49, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:42:24,158 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58089.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:42:56,943 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58102.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:43:04,376 INFO [train.py:892] (2/4) Epoch 32, batch 600, loss[loss=0.1427, simple_loss=0.2199, pruned_loss=0.03281, over 19735.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2401, pruned_loss=0.04099, over 3753926.12 frames. ], batch size: 118, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:43:30,632 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. 
limit=2.0 +2023-03-29 03:43:34,269 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58118.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:43:48,667 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.610e+02 3.618e+02 4.223e+02 5.011e+02 6.745e+02, threshold=8.447e+02, percent-clipped=0.0 +2023-03-29 03:44:15,950 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3531, 3.2136, 3.3933, 2.7187, 3.6158, 2.9958, 3.2893, 3.4293], + device='cuda:2'), covar=tensor([0.0523, 0.0432, 0.0571, 0.0778, 0.0311, 0.0454, 0.0455, 0.0277], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0085, 0.0084, 0.0110, 0.0079, 0.0082, 0.0080, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 03:44:27,769 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58141.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:44:33,789 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58144.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:44:49,584 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58151.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:44:49,612 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7229, 2.2864, 2.4819, 2.9440, 3.3060, 3.4367, 3.3030, 3.4454], + device='cuda:2'), covar=tensor([0.1020, 0.1612, 0.1453, 0.0712, 0.0477, 0.0358, 0.0495, 0.0393], + device='cuda:2'), in_proj_covar=tensor([0.0160, 0.0169, 0.0179, 0.0152, 0.0138, 0.0133, 0.0125, 0.0116], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 03:45:00,219 INFO [train.py:892] (2/4) Epoch 32, batch 650, loss[loss=0.1379, simple_loss=0.2147, pruned_loss=0.03056, over 19900.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.239, pruned_loss=0.0411, over 3798133.68 frames. 
], batch size: 113, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:45:19,289 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6412, 2.9654, 3.1158, 3.5261, 2.5023, 3.0643, 2.3857, 2.3276], + device='cuda:2'), covar=tensor([0.0650, 0.1649, 0.1146, 0.0507, 0.2143, 0.0878, 0.1491, 0.1719], + device='cuda:2'), in_proj_covar=tensor([0.0246, 0.0331, 0.0250, 0.0205, 0.0249, 0.0210, 0.0220, 0.0216], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 03:45:21,013 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58166.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:45:21,257 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1988, 2.2571, 2.2365, 2.3337, 2.2899, 2.3023, 2.2554, 2.4350], + device='cuda:2'), covar=tensor([0.0408, 0.0373, 0.0387, 0.0284, 0.0445, 0.0380, 0.0459, 0.0286], + device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0081, 0.0083, 0.0078, 0.0090, 0.0083, 0.0100, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 03:46:36,834 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58199.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:46:43,684 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58202.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:46:51,984 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58205.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:46:52,984 INFO [train.py:892] (2/4) Epoch 32, batch 700, loss[loss=0.155, simple_loss=0.2334, pruned_loss=0.03825, over 19794.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2392, pruned_loss=0.04121, over 3832445.47 frames. ], batch size: 73, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:47:15,651 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6157, 4.9368, 4.9730, 4.8804, 4.5319, 4.9362, 4.4462, 4.5000], + device='cuda:2'), covar=tensor([0.0481, 0.0480, 0.0487, 0.0448, 0.0671, 0.0502, 0.0715, 0.0986], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0285, 0.0299, 0.0260, 0.0263, 0.0249, 0.0267, 0.0311], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:47:39,125 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.613e+02 3.735e+02 4.349e+02 5.105e+02 9.277e+02, threshold=8.697e+02, percent-clipped=1.0 +2023-03-29 03:47:47,339 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-03-29 03:48:03,242 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9742, 1.8496, 1.9899, 1.9815, 1.9593, 2.0196, 1.9434, 2.0533], + device='cuda:2'), covar=tensor([0.0363, 0.0407, 0.0341, 0.0320, 0.0439, 0.0333, 0.0468, 0.0311], + device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0080, 0.0083, 0.0078, 0.0090, 0.0083, 0.0100, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 03:48:54,085 INFO [train.py:892] (2/4) Epoch 32, batch 750, loss[loss=0.1404, simple_loss=0.224, pruned_loss=0.02842, over 19759.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2392, pruned_loss=0.04094, over 3859694.44 frames. 
], batch size: 102, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:49:48,777 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0799, 2.8004, 3.1476, 3.3008, 3.8986, 4.3206, 4.1665, 4.2687], + device='cuda:2'), covar=tensor([0.0934, 0.1513, 0.1248, 0.0673, 0.0364, 0.0227, 0.0313, 0.0355], + device='cuda:2'), in_proj_covar=tensor([0.0160, 0.0169, 0.0178, 0.0151, 0.0138, 0.0133, 0.0125, 0.0117], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 03:49:52,861 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0452, 2.9200, 5.0169, 4.1191, 4.6030, 4.9613, 4.8288, 4.6343], + device='cuda:2'), covar=tensor([0.0447, 0.0937, 0.0087, 0.0968, 0.0150, 0.0174, 0.0133, 0.0135], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0103, 0.0088, 0.0153, 0.0086, 0.0098, 0.0090, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:50:47,176 INFO [train.py:892] (2/4) Epoch 32, batch 800, loss[loss=0.1748, simple_loss=0.2544, pruned_loss=0.04762, over 19733.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2396, pruned_loss=0.04068, over 3881026.27 frames. ], batch size: 51, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:51:10,049 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0251, 3.7325, 3.8658, 4.0472, 3.7558, 4.0258, 4.1026, 4.2972], + device='cuda:2'), covar=tensor([0.0703, 0.0459, 0.0548, 0.0397, 0.0725, 0.0573, 0.0460, 0.0329], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0177, 0.0202, 0.0176, 0.0174, 0.0159, 0.0150, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 03:51:31,606 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.700e+02 3.818e+02 4.452e+02 5.202e+02 1.002e+03, threshold=8.904e+02, percent-clipped=2.0 +2023-03-29 03:51:59,006 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2522, 3.2448, 4.8334, 3.6486, 3.8502, 3.7315, 2.6346, 2.8398], + device='cuda:2'), covar=tensor([0.0844, 0.2688, 0.0429, 0.0979, 0.1642, 0.1347, 0.2523, 0.2576], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0385, 0.0347, 0.0285, 0.0370, 0.0374, 0.0371, 0.0343], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:52:43,141 INFO [train.py:892] (2/4) Epoch 32, batch 850, loss[loss=0.1374, simple_loss=0.2105, pruned_loss=0.03214, over 19829.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2397, pruned_loss=0.04084, over 3896935.71 frames. ], batch size: 127, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:53:31,817 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2324, 4.1050, 4.5084, 4.1632, 3.8164, 4.3583, 4.2390, 4.5731], + device='cuda:2'), covar=tensor([0.0756, 0.0372, 0.0332, 0.0369, 0.1050, 0.0526, 0.0421, 0.0341], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0223, 0.0222, 0.0234, 0.0208, 0.0245, 0.0232, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:53:59,998 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58389.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:54:39,281 INFO [train.py:892] (2/4) Epoch 32, batch 900, loss[loss=0.1604, simple_loss=0.2352, pruned_loss=0.04282, over 19631.00 frames. 
], tot_loss[loss=0.1597, simple_loss=0.2385, pruned_loss=0.04044, over 3909355.88 frames. ], batch size: 65, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:55:21,924 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 3.700e+02 4.222e+02 4.872e+02 9.847e+02, threshold=8.445e+02, percent-clipped=1.0 +2023-03-29 03:55:23,380 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.07 vs. limit=2.0 +2023-03-29 03:55:51,441 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58437.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:56:34,992 INFO [train.py:892] (2/4) Epoch 32, batch 950, loss[loss=0.1532, simple_loss=0.2348, pruned_loss=0.03584, over 19803.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2394, pruned_loss=0.04073, over 3918509.97 frames. ], batch size: 148, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:56:44,625 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4097, 3.4631, 2.2141, 4.0623, 3.7015, 4.0309, 4.1270, 3.2587], + device='cuda:2'), covar=tensor([0.0600, 0.0624, 0.1576, 0.0618, 0.0586, 0.0449, 0.0556, 0.0736], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0145, 0.0144, 0.0153, 0.0135, 0.0136, 0.0149, 0.0146], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 03:58:05,419 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58497.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:58:11,659 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58500.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:58:24,164 INFO [train.py:892] (2/4) Epoch 32, batch 1000, loss[loss=0.1335, simple_loss=0.2187, pruned_loss=0.02415, over 19740.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.239, pruned_loss=0.04024, over 3926427.07 frames. ], batch size: 118, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:59:09,105 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.374e+02 3.686e+02 4.298e+02 5.253e+02 9.198e+02, threshold=8.596e+02, percent-clipped=1.0 +2023-03-29 04:00:20,497 INFO [train.py:892] (2/4) Epoch 32, batch 1050, loss[loss=0.1482, simple_loss=0.2237, pruned_loss=0.03638, over 19759.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2396, pruned_loss=0.04033, over 3932146.42 frames. ], batch size: 44, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:00:28,066 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4000, 3.6598, 3.1736, 2.7209, 3.2707, 3.6126, 3.5697, 3.6180], + device='cuda:2'), covar=tensor([0.0236, 0.0259, 0.0260, 0.0513, 0.0282, 0.0234, 0.0179, 0.0178], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0098, 0.0100, 0.0103, 0.0106, 0.0088, 0.0088, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 04:00:43,859 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-29 04:02:18,290 INFO [train.py:892] (2/4) Epoch 32, batch 1100, loss[loss=0.1502, simple_loss=0.2256, pruned_loss=0.03742, over 19763.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2402, pruned_loss=0.04091, over 3935484.54 frames. 
], batch size: 100, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:02:58,432 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2953, 3.3956, 2.1207, 3.5156, 3.6276, 1.7519, 2.9849, 2.8279], + device='cuda:2'), covar=tensor([0.0921, 0.0942, 0.2832, 0.0847, 0.0709, 0.2638, 0.1220, 0.0983], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0258, 0.0231, 0.0277, 0.0257, 0.0205, 0.0241, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 04:03:01,462 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 3.808e+02 4.430e+02 5.351e+02 1.082e+03, threshold=8.860e+02, percent-clipped=2.0 +2023-03-29 04:03:13,279 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0493, 1.9660, 1.3964, 2.0540, 2.0270, 1.9719, 2.0101, 1.6856], + device='cuda:2'), covar=tensor([0.0716, 0.0857, 0.1199, 0.0593, 0.0762, 0.0634, 0.0637, 0.1056], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0145, 0.0144, 0.0153, 0.0135, 0.0137, 0.0149, 0.0146], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 04:03:15,729 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.93 vs. limit=5.0 +2023-03-29 04:04:08,053 INFO [train.py:892] (2/4) Epoch 32, batch 1150, loss[loss=0.1431, simple_loss=0.2247, pruned_loss=0.03073, over 19881.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2403, pruned_loss=0.04109, over 3938105.72 frames. ], batch size: 88, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:05:05,308 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58681.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:05:35,086 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6160, 2.7604, 4.0036, 3.1353, 3.2691, 3.1563, 2.3502, 2.4702], + device='cuda:2'), covar=tensor([0.1173, 0.3154, 0.0619, 0.1126, 0.1868, 0.1555, 0.2588, 0.2856], + device='cuda:2'), in_proj_covar=tensor([0.0353, 0.0389, 0.0350, 0.0287, 0.0375, 0.0377, 0.0374, 0.0344], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 04:06:02,799 INFO [train.py:892] (2/4) Epoch 32, batch 1200, loss[loss=0.1419, simple_loss=0.2198, pruned_loss=0.03205, over 19678.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2408, pruned_loss=0.04135, over 3941230.98 frames. ], batch size: 52, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:06:46,129 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 3.575e+02 4.310e+02 5.528e+02 1.202e+03, threshold=8.619e+02, percent-clipped=2.0 +2023-03-29 04:06:47,027 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0257, 3.7395, 3.8523, 4.0455, 3.8341, 4.0006, 4.0905, 4.3167], + device='cuda:2'), covar=tensor([0.0705, 0.0484, 0.0546, 0.0419, 0.0694, 0.0584, 0.0461, 0.0297], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0179, 0.0203, 0.0178, 0.0176, 0.0161, 0.0152, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 04:07:29,324 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58742.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:07:57,930 INFO [train.py:892] (2/4) Epoch 32, batch 1250, loss[loss=0.1459, simple_loss=0.2307, pruned_loss=0.03058, over 19846.00 frames. 
], tot_loss[loss=0.161, simple_loss=0.2398, pruned_loss=0.04109, over 3943536.37 frames. ], batch size: 118, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:08:45,745 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58775.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:09:34,852 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58797.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:09:41,903 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58800.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:09:55,325 INFO [train.py:892] (2/4) Epoch 32, batch 1300, loss[loss=0.1616, simple_loss=0.2389, pruned_loss=0.04213, over 19740.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.241, pruned_loss=0.04157, over 3943705.65 frames. ], batch size: 219, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:10:37,437 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.259e+02 3.414e+02 4.354e+02 4.908e+02 8.205e+02, threshold=8.708e+02, percent-clipped=0.0 +2023-03-29 04:10:59,852 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58836.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:11:07,427 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3744, 4.2713, 4.6727, 4.3024, 3.9749, 4.5073, 4.3595, 4.7714], + device='cuda:2'), covar=tensor([0.0758, 0.0373, 0.0340, 0.0386, 0.0987, 0.0517, 0.0466, 0.0309], + device='cuda:2'), in_proj_covar=tensor([0.0287, 0.0225, 0.0225, 0.0237, 0.0210, 0.0249, 0.0235, 0.0221], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 04:11:18,802 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.19 vs. limit=5.0 +2023-03-29 04:11:20,207 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58845.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:11:28,498 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:11:34,809 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.57 vs. limit=5.0 +2023-03-29 04:11:46,373 INFO [train.py:892] (2/4) Epoch 32, batch 1350, loss[loss=0.1458, simple_loss=0.2259, pruned_loss=0.03284, over 19800.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2405, pruned_loss=0.04109, over 3944967.89 frames. ], batch size: 65, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:13:32,110 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4329, 2.1175, 3.4971, 2.9552, 3.4852, 3.5587, 3.3359, 3.3839], + device='cuda:2'), covar=tensor([0.0821, 0.1118, 0.0125, 0.0497, 0.0155, 0.0243, 0.0225, 0.0198], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0104, 0.0090, 0.0155, 0.0087, 0.0099, 0.0091, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 04:13:39,839 INFO [train.py:892] (2/4) Epoch 32, batch 1400, loss[loss=0.1498, simple_loss=0.2263, pruned_loss=0.03664, over 19589.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2412, pruned_loss=0.04155, over 3946182.04 frames. 
], batch size: 44, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:13:40,759 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58906.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:14:27,793 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.427e+02 3.533e+02 4.416e+02 5.664e+02 1.235e+03, threshold=8.833e+02, percent-clipped=5.0 +2023-03-29 04:14:59,085 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6295, 3.5348, 3.7911, 2.8842, 3.9352, 3.3157, 3.5842, 3.8006], + device='cuda:2'), covar=tensor([0.0729, 0.0368, 0.0531, 0.0763, 0.0407, 0.0389, 0.0443, 0.0313], + device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0087, 0.0085, 0.0112, 0.0080, 0.0083, 0.0081, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 04:15:40,828 INFO [train.py:892] (2/4) Epoch 32, batch 1450, loss[loss=0.1488, simple_loss=0.227, pruned_loss=0.03531, over 19785.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2406, pruned_loss=0.04116, over 3946919.54 frames. ], batch size: 211, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:16:06,737 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58967.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:16:59,238 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8893, 2.4525, 3.0407, 3.1621, 3.6904, 4.0626, 3.9179, 3.9401], + device='cuda:2'), covar=tensor([0.1022, 0.1774, 0.1301, 0.0729, 0.0450, 0.0261, 0.0367, 0.0482], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0169, 0.0178, 0.0153, 0.0137, 0.0134, 0.0126, 0.0117], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 04:17:08,503 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7402, 2.8314, 4.1681, 3.2317, 3.4797, 3.2096, 2.3581, 2.5113], + device='cuda:2'), covar=tensor([0.1112, 0.3195, 0.0576, 0.1199, 0.1804, 0.1661, 0.2770, 0.2867], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0390, 0.0351, 0.0288, 0.0376, 0.0377, 0.0374, 0.0346], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 04:17:32,721 INFO [train.py:892] (2/4) Epoch 32, batch 1500, loss[loss=0.164, simple_loss=0.2381, pruned_loss=0.04499, over 19644.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2405, pruned_loss=0.04105, over 3946949.89 frames. ], batch size: 69, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:18:18,936 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.783e+02 3.777e+02 4.436e+02 5.379e+02 1.000e+03, threshold=8.872e+02, percent-clipped=1.0 +2023-03-29 04:18:48,034 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59037.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:19:27,079 INFO [train.py:892] (2/4) Epoch 32, batch 1550, loss[loss=0.1444, simple_loss=0.2329, pruned_loss=0.02795, over 19737.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2419, pruned_loss=0.04168, over 3945602.70 frames. 
], batch size: 77, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:20:42,556 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3874, 4.5030, 2.6654, 4.6854, 4.9413, 2.1195, 4.2467, 3.5360], + device='cuda:2'), covar=tensor([0.0645, 0.0623, 0.2579, 0.0632, 0.0473, 0.2729, 0.0818, 0.0833], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0259, 0.0231, 0.0278, 0.0257, 0.0205, 0.0242, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 04:21:25,987 INFO [train.py:892] (2/4) Epoch 32, batch 1600, loss[loss=0.1737, simple_loss=0.2588, pruned_loss=0.04435, over 19527.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.241, pruned_loss=0.04134, over 3948082.12 frames. ], batch size: 54, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:22:08,641 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.456e+02 3.560e+02 4.342e+02 5.121e+02 7.778e+02, threshold=8.685e+02, percent-clipped=0.0 +2023-03-29 04:22:23,520 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59131.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:23:16,319 INFO [train.py:892] (2/4) Epoch 32, batch 1650, loss[loss=0.1462, simple_loss=0.2335, pruned_loss=0.02951, over 19874.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2394, pruned_loss=0.04058, over 3950502.92 frames. ], batch size: 77, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:23:33,844 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59164.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:25:14,208 INFO [train.py:892] (2/4) Epoch 32, batch 1700, loss[loss=0.1372, simple_loss=0.2113, pruned_loss=0.03153, over 19820.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2393, pruned_loss=0.04077, over 3950641.02 frames. ], batch size: 128, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:26:00,331 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.757e+02 3.824e+02 4.316e+02 5.380e+02 8.214e+02, threshold=8.632e+02, percent-clipped=0.0 +2023-03-29 04:26:01,327 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59225.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:27:06,698 INFO [train.py:892] (2/4) Epoch 32, batch 1750, loss[loss=0.1605, simple_loss=0.239, pruned_loss=0.04097, over 19659.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2393, pruned_loss=0.04057, over 3950602.90 frames. ], batch size: 67, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:27:18,860 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59262.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:28:46,843 INFO [train.py:892] (2/4) Epoch 32, batch 1800, loss[loss=0.1309, simple_loss=0.214, pruned_loss=0.02392, over 19747.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2403, pruned_loss=0.041, over 3949448.89 frames. ], batch size: 110, lr: 4.85e-03, grad_scale: 16.0 +2023-03-29 04:29:23,505 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.122e+02 3.692e+02 4.509e+02 5.287e+02 9.435e+02, threshold=9.017e+02, percent-clipped=2.0 +2023-03-29 04:29:40,819 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.37 vs. 
limit=5.0 +2023-03-29 04:29:45,385 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59337.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:30:19,882 INFO [train.py:892] (2/4) Epoch 32, batch 1850, loss[loss=0.1573, simple_loss=0.2367, pruned_loss=0.03893, over 19818.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2413, pruned_loss=0.04095, over 3946667.40 frames. ], batch size: 57, lr: 4.85e-03, grad_scale: 16.0 +2023-03-29 04:31:27,305 INFO [train.py:892] (2/4) Epoch 33, batch 0, loss[loss=0.1579, simple_loss=0.2335, pruned_loss=0.0412, over 19792.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2335, pruned_loss=0.0412, over 19792.00 frames. ], batch size: 173, lr: 4.78e-03, grad_scale: 16.0 +2023-03-29 04:31:27,305 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 04:32:02,114 INFO [train.py:926] (2/4) Epoch 33, validation: loss=0.1828, simple_loss=0.2501, pruned_loss=0.05775, over 2883724.00 frames. +2023-03-29 04:32:02,115 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 04:32:58,842 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59385.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:34:00,957 INFO [train.py:892] (2/4) Epoch 33, batch 50, loss[loss=0.2288, simple_loss=0.3063, pruned_loss=0.07562, over 19463.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2355, pruned_loss=0.03817, over 888366.83 frames. ], batch size: 396, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:34:31,972 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.011e+02 3.734e+02 4.113e+02 4.881e+02 1.279e+03, threshold=8.226e+02, percent-clipped=1.0 +2023-03-29 04:34:45,604 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59431.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:35:56,489 INFO [train.py:892] (2/4) Epoch 33, batch 100, loss[loss=0.1488, simple_loss=0.2284, pruned_loss=0.03459, over 19835.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2357, pruned_loss=0.0386, over 1567479.74 frames. 
], batch size: 75, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:36:10,170 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7410, 2.7019, 2.8466, 2.2824, 2.9608, 2.5326, 2.8211, 2.7855], + device='cuda:2'), covar=tensor([0.0645, 0.0530, 0.0480, 0.0910, 0.0388, 0.0508, 0.0538, 0.0412], + device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0088, 0.0086, 0.0114, 0.0081, 0.0084, 0.0082, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 04:36:36,995 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59479.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:37:08,465 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5647, 2.8908, 3.0343, 3.4064, 2.4300, 3.0492, 2.2483, 2.2578], + device='cuda:2'), covar=tensor([0.0609, 0.1746, 0.1125, 0.0510, 0.2132, 0.0843, 0.1465, 0.1719], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0331, 0.0251, 0.0206, 0.0250, 0.0210, 0.0220, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 04:37:31,895 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59502.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:37:51,792 INFO [train.py:892] (2/4) Epoch 33, batch 150, loss[loss=0.1512, simple_loss=0.233, pruned_loss=0.03468, over 19825.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2372, pruned_loss=0.03925, over 2095741.60 frames. ], batch size: 103, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:38:07,339 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-29 04:38:12,583 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59520.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:38:22,515 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.374e+02 3.870e+02 4.435e+02 5.412e+02 1.132e+03, threshold=8.870e+02, percent-clipped=1.0 +2023-03-29 04:39:44,901 INFO [train.py:892] (2/4) Epoch 33, batch 200, loss[loss=0.177, simple_loss=0.265, pruned_loss=0.04449, over 19695.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2378, pruned_loss=0.03957, over 2507375.77 frames. ], batch size: 315, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:39:48,037 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59562.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:39:50,069 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59563.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:41:35,578 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59610.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:41:36,996 INFO [train.py:892] (2/4) Epoch 33, batch 250, loss[loss=0.1611, simple_loss=0.2394, pruned_loss=0.04144, over 19800.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2366, pruned_loss=0.03898, over 2828437.63 frames. ], batch size: 200, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:41:48,543 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. 
limit=2.0 +2023-03-29 04:42:08,473 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.374e+02 3.603e+02 4.122e+02 4.867e+02 8.945e+02, threshold=8.243e+02, percent-clipped=1.0 +2023-03-29 04:43:31,404 INFO [train.py:892] (2/4) Epoch 33, batch 300, loss[loss=0.1597, simple_loss=0.2403, pruned_loss=0.03958, over 19826.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2375, pruned_loss=0.03913, over 3075964.97 frames. ], batch size: 204, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:44:18,280 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5063, 3.0769, 3.4001, 3.0347, 3.7153, 3.6985, 4.2843, 4.7900], + device='cuda:2'), covar=tensor([0.0462, 0.1540, 0.1638, 0.2094, 0.1510, 0.1340, 0.0572, 0.0373], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0241, 0.0269, 0.0254, 0.0298, 0.0257, 0.0233, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 04:45:17,700 INFO [train.py:892] (2/4) Epoch 33, batch 350, loss[loss=0.1564, simple_loss=0.2223, pruned_loss=0.04521, over 19838.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2383, pruned_loss=0.0398, over 3269929.94 frames. ], batch size: 145, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:45:51,028 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.193e+02 3.458e+02 4.083e+02 4.831e+02 8.519e+02, threshold=8.167e+02, percent-clipped=1.0 +2023-03-29 04:47:10,220 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.16 vs. limit=5.0 +2023-03-29 04:47:13,245 INFO [train.py:892] (2/4) Epoch 33, batch 400, loss[loss=0.1335, simple_loss=0.2128, pruned_loss=0.02716, over 19902.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2373, pruned_loss=0.03949, over 3422375.81 frames. ], batch size: 116, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:47:27,449 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8584, 3.7034, 3.6787, 3.4378, 3.8360, 2.7599, 3.1317, 1.7714], + device='cuda:2'), covar=tensor([0.0208, 0.0248, 0.0169, 0.0222, 0.0159, 0.1144, 0.0673, 0.1722], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0146, 0.0114, 0.0134, 0.0119, 0.0134, 0.0142, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 04:47:37,943 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5351, 3.7759, 2.2143, 3.8987, 3.9975, 1.9381, 3.1995, 2.8654], + device='cuda:2'), covar=tensor([0.0795, 0.0722, 0.2761, 0.0710, 0.0549, 0.2585, 0.1271, 0.0961], + device='cuda:2'), in_proj_covar=tensor([0.0234, 0.0259, 0.0231, 0.0276, 0.0256, 0.0204, 0.0241, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 04:48:21,595 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2966, 3.9976, 4.0801, 4.2819, 4.0574, 4.3590, 4.4376, 4.6134], + device='cuda:2'), covar=tensor([0.0749, 0.0471, 0.0544, 0.0419, 0.0707, 0.0544, 0.0465, 0.0303], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0176, 0.0200, 0.0176, 0.0174, 0.0159, 0.0150, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 04:49:08,559 INFO [train.py:892] (2/4) Epoch 33, batch 450, loss[loss=0.1926, simple_loss=0.275, pruned_loss=0.05514, over 19649.00 frames. 
], tot_loss[loss=0.1594, simple_loss=0.2384, pruned_loss=0.04021, over 3539482.09 frames. ], batch size: 330, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:49:28,971 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59820.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:49:39,970 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.419e+02 3.507e+02 4.283e+02 5.159e+02 1.029e+03, threshold=8.565e+02, percent-clipped=3.0 +2023-03-29 04:50:19,119 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-29 04:50:33,074 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:50:50,630 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.62 vs. limit=5.0 +2023-03-29 04:50:53,694 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59858.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:50:59,520 INFO [train.py:892] (2/4) Epoch 33, batch 500, loss[loss=0.1564, simple_loss=0.2353, pruned_loss=0.03871, over 19885.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.239, pruned_loss=0.04015, over 3628238.66 frames. ], batch size: 63, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:51:19,597 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59868.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:52:00,929 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.8707, 6.1391, 6.1779, 6.0696, 5.9083, 6.1740, 5.4922, 5.6001], + device='cuda:2'), covar=tensor([0.0337, 0.0398, 0.0447, 0.0409, 0.0467, 0.0440, 0.0614, 0.0821], + device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0290, 0.0303, 0.0265, 0.0270, 0.0255, 0.0271, 0.0318], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 04:52:06,917 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59889.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:52:38,203 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5161, 2.0684, 2.3881, 2.7656, 3.1404, 3.2666, 3.1375, 3.2302], + device='cuda:2'), covar=tensor([0.1141, 0.1805, 0.1417, 0.0856, 0.0576, 0.0389, 0.0507, 0.0498], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0171, 0.0179, 0.0154, 0.0138, 0.0134, 0.0126, 0.0117], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 04:52:50,671 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8598, 4.6511, 5.1961, 4.7809, 4.1923, 4.9615, 4.8222, 5.3886], + device='cuda:2'), covar=tensor([0.0838, 0.0392, 0.0379, 0.0363, 0.0929, 0.0442, 0.0452, 0.0302], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0224, 0.0225, 0.0236, 0.0210, 0.0249, 0.0234, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 04:52:52,783 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59909.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:52:55,894 INFO [train.py:892] (2/4) Epoch 33, batch 550, loss[loss=0.1515, simple_loss=0.2299, pruned_loss=0.03657, over 19779.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2384, pruned_loss=0.04039, over 3701372.34 frames. 
], batch size: 46, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:53:25,347 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.97 vs. limit=5.0 +2023-03-29 04:53:28,218 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.466e+02 3.637e+02 4.346e+02 5.121e+02 1.028e+03, threshold=8.693e+02, percent-clipped=1.0 +2023-03-29 04:54:26,002 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59950.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:54:50,040 INFO [train.py:892] (2/4) Epoch 33, batch 600, loss[loss=0.1488, simple_loss=0.2242, pruned_loss=0.03664, over 19822.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2382, pruned_loss=0.04033, over 3757673.44 frames. ], batch size: 187, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:56:47,911 INFO [train.py:892] (2/4) Epoch 33, batch 650, loss[loss=0.1328, simple_loss=0.203, pruned_loss=0.03124, over 19773.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2373, pruned_loss=0.03999, over 3801226.11 frames. ], batch size: 116, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:57:20,323 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.343e+02 3.751e+02 4.539e+02 5.312e+02 8.817e+02, threshold=9.077e+02, percent-clipped=1.0 +2023-03-29 04:57:54,980 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60040.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:57:57,004 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9666, 4.6648, 4.7443, 4.9966, 4.6817, 5.1877, 5.1025, 5.3320], + device='cuda:2'), covar=tensor([0.0641, 0.0360, 0.0387, 0.0279, 0.0591, 0.0342, 0.0383, 0.0219], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0177, 0.0201, 0.0176, 0.0176, 0.0161, 0.0151, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 04:58:13,817 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9827, 3.7273, 3.8436, 4.0325, 3.8174, 3.9391, 4.0764, 4.2946], + device='cuda:2'), covar=tensor([0.0691, 0.0464, 0.0506, 0.0387, 0.0751, 0.0604, 0.0457, 0.0316], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0177, 0.0201, 0.0176, 0.0175, 0.0160, 0.0151, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 04:58:22,691 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60052.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:58:38,901 INFO [train.py:892] (2/4) Epoch 33, batch 700, loss[loss=0.1559, simple_loss=0.2391, pruned_loss=0.03632, over 19891.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2377, pruned_loss=0.04008, over 3834769.45 frames. 
], batch size: 61, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:58:48,364 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60065.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:59:18,390 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60078.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:59:51,159 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5927, 4.4601, 4.9970, 4.5847, 4.1391, 4.8270, 4.6065, 5.1893], + device='cuda:2'), covar=tensor([0.0957, 0.0434, 0.0422, 0.0413, 0.0827, 0.0490, 0.0509, 0.0350], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0223, 0.0223, 0.0235, 0.0209, 0.0246, 0.0233, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 04:59:59,421 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0717, 3.7533, 3.8810, 4.0613, 3.7587, 4.0029, 4.1170, 4.3253], + device='cuda:2'), covar=tensor([0.0634, 0.0415, 0.0513, 0.0390, 0.0775, 0.0571, 0.0439, 0.0306], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0178, 0.0202, 0.0177, 0.0176, 0.0160, 0.0151, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 05:00:08,310 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60101.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 05:00:28,995 INFO [train.py:892] (2/4) Epoch 33, batch 750, loss[loss=0.1481, simple_loss=0.2285, pruned_loss=0.03383, over 19700.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2383, pruned_loss=0.04041, over 3860550.89 frames. ], batch size: 78, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 05:00:33,782 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60113.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 05:00:59,951 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.667e+02 4.352e+02 5.271e+02 8.551e+02, threshold=8.703e+02, percent-clipped=0.0 +2023-03-29 05:01:03,233 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60126.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:01:13,963 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 05:01:34,642 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60139.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:02:17,873 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60158.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:02:23,085 INFO [train.py:892] (2/4) Epoch 33, batch 800, loss[loss=0.1286, simple_loss=0.2045, pruned_loss=0.0263, over 19784.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2397, pruned_loss=0.04077, over 3877971.38 frames. 
], batch size: 94, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:03:47,840 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60198.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:04:03,564 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60204.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:04:08,047 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60206.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:04:17,807 INFO [train.py:892] (2/4) Epoch 33, batch 850, loss[loss=0.1518, simple_loss=0.2239, pruned_loss=0.03982, over 19791.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2393, pruned_loss=0.04037, over 3894407.08 frames. ], batch size: 172, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:04:48,205 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.61 vs. limit=5.0 +2023-03-29 05:04:48,783 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.588e+02 3.634e+02 4.278e+02 5.134e+02 8.109e+02, threshold=8.556e+02, percent-clipped=0.0 +2023-03-29 05:05:36,993 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60245.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:06:02,372 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7710, 3.3041, 3.6659, 3.2425, 4.0193, 4.0254, 4.4599, 5.0765], + device='cuda:2'), covar=tensor([0.0495, 0.1613, 0.1299, 0.2073, 0.1384, 0.1180, 0.0625, 0.0450], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0244, 0.0272, 0.0256, 0.0301, 0.0261, 0.0236, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 05:06:08,083 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60259.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:06:11,098 INFO [train.py:892] (2/4) Epoch 33, batch 900, loss[loss=0.1534, simple_loss=0.2338, pruned_loss=0.03647, over 19829.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2406, pruned_loss=0.04081, over 3905542.08 frames. ], batch size: 76, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:08:06,811 INFO [train.py:892] (2/4) Epoch 33, batch 950, loss[loss=0.186, simple_loss=0.268, pruned_loss=0.05199, over 19771.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2414, pruned_loss=0.04119, over 3914613.57 frames. ], batch size: 70, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:08:37,041 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.481e+02 3.823e+02 4.629e+02 5.710e+02 9.734e+02, threshold=9.258e+02, percent-clipped=1.0 +2023-03-29 05:08:48,610 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9507, 4.6105, 4.6703, 4.4077, 4.9220, 3.1530, 4.0119, 2.4742], + device='cuda:2'), covar=tensor([0.0178, 0.0195, 0.0148, 0.0187, 0.0131, 0.0986, 0.0828, 0.1501], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0147, 0.0115, 0.0135, 0.0120, 0.0136, 0.0142, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 05:09:56,800 INFO [train.py:892] (2/4) Epoch 33, batch 1000, loss[loss=0.1515, simple_loss=0.239, pruned_loss=0.032, over 19660.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2408, pruned_loss=0.04064, over 3923310.41 frames. 
], batch size: 50, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:11:13,847 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60396.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 05:11:41,609 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60408.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 05:11:48,653 INFO [train.py:892] (2/4) Epoch 33, batch 1050, loss[loss=0.1582, simple_loss=0.2333, pruned_loss=0.04159, over 19797.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2401, pruned_loss=0.04031, over 3929735.19 frames. ], batch size: 126, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:12:11,024 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60421.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:12:21,533 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.510e+02 3.870e+02 4.484e+02 5.211e+02 8.559e+02, threshold=8.968e+02, percent-clipped=0.0 +2023-03-29 05:12:26,696 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.6368, 5.9999, 6.0897, 5.8860, 5.7811, 5.7400, 5.7410, 5.5921], + device='cuda:2'), covar=tensor([0.1231, 0.1328, 0.0815, 0.1097, 0.0610, 0.0638, 0.1951, 0.1780], + device='cuda:2'), in_proj_covar=tensor([0.0295, 0.0336, 0.0370, 0.0300, 0.0277, 0.0284, 0.0365, 0.0391], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 05:12:41,226 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60434.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:13:40,374 INFO [train.py:892] (2/4) Epoch 33, batch 1100, loss[loss=0.1502, simple_loss=0.2313, pruned_loss=0.03453, over 19812.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2412, pruned_loss=0.04144, over 3933856.71 frames. ], batch size: 82, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:13:43,140 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0498, 4.7134, 4.8275, 5.0768, 4.8216, 5.2903, 5.2136, 5.4005], + device='cuda:2'), covar=tensor([0.0671, 0.0378, 0.0465, 0.0315, 0.0576, 0.0337, 0.0402, 0.0264], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0177, 0.0201, 0.0176, 0.0174, 0.0159, 0.0151, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 05:15:21,722 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:15:35,154 INFO [train.py:892] (2/4) Epoch 33, batch 1150, loss[loss=0.1293, simple_loss=0.2049, pruned_loss=0.0269, over 19744.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2406, pruned_loss=0.041, over 3936244.28 frames. ], batch size: 106, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:15:40,663 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.55 vs. 
limit=5.0 +2023-03-29 05:16:08,381 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 3.806e+02 4.491e+02 5.149e+02 7.976e+02, threshold=8.981e+02, percent-clipped=0.0 +2023-03-29 05:16:13,420 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9383, 2.5187, 2.9778, 3.2792, 3.7620, 4.1179, 4.0398, 4.0099], + device='cuda:2'), covar=tensor([0.0968, 0.1642, 0.1320, 0.0652, 0.0389, 0.0257, 0.0345, 0.0517], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0169, 0.0177, 0.0152, 0.0136, 0.0134, 0.0125, 0.0116], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 05:16:54,284 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60545.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:16:54,885 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0 +2023-03-29 05:17:08,808 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60552.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:17:13,033 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60554.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:17:29,247 INFO [train.py:892] (2/4) Epoch 33, batch 1200, loss[loss=0.1638, simple_loss=0.2461, pruned_loss=0.0408, over 19637.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2404, pruned_loss=0.04119, over 3939506.81 frames. ], batch size: 68, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:18:27,283 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2331, 2.5940, 2.3560, 1.8036, 2.3864, 2.5599, 2.4962, 2.5459], + device='cuda:2'), covar=tensor([0.0425, 0.0290, 0.0333, 0.0564, 0.0389, 0.0308, 0.0292, 0.0251], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0100, 0.0102, 0.0104, 0.0107, 0.0090, 0.0090, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 05:18:42,753 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60593.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:19:21,759 INFO [train.py:892] (2/4) Epoch 33, batch 1250, loss[loss=0.1528, simple_loss=0.234, pruned_loss=0.03584, over 19892.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2391, pruned_loss=0.04082, over 3942825.07 frames. ], batch size: 62, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:19:52,026 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.888e+02 3.769e+02 4.430e+02 5.364e+02 1.022e+03, threshold=8.861e+02, percent-clipped=3.0 +2023-03-29 05:21:14,267 INFO [train.py:892] (2/4) Epoch 33, batch 1300, loss[loss=0.152, simple_loss=0.2314, pruned_loss=0.03635, over 19772.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2378, pruned_loss=0.0402, over 3945292.42 frames. ], batch size: 66, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:22:34,643 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60696.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:03,433 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60708.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:08,876 INFO [train.py:892] (2/4) Epoch 33, batch 1350, loss[loss=0.1577, simple_loss=0.2376, pruned_loss=0.03887, over 19770.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2381, pruned_loss=0.04017, over 3946831.34 frames. 
], batch size: 130, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:23:20,256 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8100, 3.0260, 3.0037, 2.9904, 2.8892, 3.0606, 2.8171, 3.0948], + device='cuda:2'), covar=tensor([0.0400, 0.0314, 0.0376, 0.0317, 0.0439, 0.0322, 0.0435, 0.0366], + device='cuda:2'), in_proj_covar=tensor([0.0089, 0.0083, 0.0086, 0.0080, 0.0093, 0.0086, 0.0103, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 05:23:27,917 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60718.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:33,913 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60721.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:36,101 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0363, 3.0598, 1.9325, 3.6538, 3.3658, 3.5917, 3.6581, 2.8971], + device='cuda:2'), covar=tensor([0.0694, 0.0715, 0.1701, 0.0588, 0.0684, 0.0494, 0.0658, 0.0840], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0145, 0.0144, 0.0154, 0.0135, 0.0138, 0.0149, 0.0148], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 05:23:44,304 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.407e+02 3.398e+02 4.212e+02 5.272e+02 1.001e+03, threshold=8.423e+02, percent-clipped=0.0 +2023-03-29 05:23:51,449 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.6648, 5.9677, 6.0049, 5.8856, 5.6942, 5.9940, 5.3234, 5.3730], + device='cuda:2'), covar=tensor([0.0446, 0.0452, 0.0457, 0.0425, 0.0514, 0.0512, 0.0695, 0.1012], + device='cuda:2'), in_proj_covar=tensor([0.0272, 0.0285, 0.0299, 0.0262, 0.0267, 0.0253, 0.0269, 0.0316], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 05:24:01,955 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60734.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:24:24,759 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60744.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:24:47,423 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9479, 1.9592, 2.4412, 2.7836, 1.7910, 2.5601, 2.0765, 1.8582], + device='cuda:2'), covar=tensor([0.0858, 0.0975, 0.1381, 0.0718, 0.2633, 0.0976, 0.1402, 0.1826], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0332, 0.0252, 0.0207, 0.0252, 0.0212, 0.0222, 0.0221], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 05:24:53,327 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60756.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:25:04,411 INFO [train.py:892] (2/4) Epoch 33, batch 1400, loss[loss=0.1515, simple_loss=0.2256, pruned_loss=0.03868, over 19792.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2383, pruned_loss=0.04021, over 3947004.63 frames. 
], batch size: 151, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:25:09,694 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5103, 2.5109, 2.4871, 2.6115, 2.6088, 2.5745, 2.5054, 2.5927], + device='cuda:2'), covar=tensor([0.0370, 0.0370, 0.0447, 0.0311, 0.0462, 0.0354, 0.0455, 0.0425], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0084, 0.0087, 0.0081, 0.0094, 0.0087, 0.0104, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 05:25:21,886 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60769.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:25:42,968 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-29 05:25:44,882 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60779.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:25:50,851 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60782.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:26:54,134 INFO [train.py:892] (2/4) Epoch 33, batch 1450, loss[loss=0.1525, simple_loss=0.2376, pruned_loss=0.03375, over 19794.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2398, pruned_loss=0.04086, over 3947894.95 frames. ], batch size: 79, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:27:25,501 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.764e+02 3.944e+02 4.679e+02 5.484e+02 1.088e+03, threshold=9.358e+02, percent-clipped=4.0 +2023-03-29 05:28:30,903 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60854.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:28:46,111 INFO [train.py:892] (2/4) Epoch 33, batch 1500, loss[loss=0.1479, simple_loss=0.2181, pruned_loss=0.03887, over 19794.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2404, pruned_loss=0.04093, over 3946517.99 frames. ], batch size: 162, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:29:31,767 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7385, 3.4785, 3.6163, 3.7613, 3.5900, 3.7352, 3.8089, 4.0200], + device='cuda:2'), covar=tensor([0.0705, 0.0447, 0.0507, 0.0414, 0.0701, 0.0575, 0.0452, 0.0321], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0179, 0.0203, 0.0179, 0.0176, 0.0161, 0.0154, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 05:30:06,314 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1603, 3.8718, 3.9935, 4.1606, 3.9507, 4.2025, 4.2221, 4.4420], + device='cuda:2'), covar=tensor([0.0680, 0.0443, 0.0532, 0.0447, 0.0723, 0.0544, 0.0506, 0.0337], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0180, 0.0204, 0.0180, 0.0177, 0.0162, 0.0155, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 05:30:20,984 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60902.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:30:40,146 INFO [train.py:892] (2/4) Epoch 33, batch 1550, loss[loss=0.1552, simple_loss=0.2325, pruned_loss=0.03893, over 19789.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2396, pruned_loss=0.04045, over 3948551.52 frames. 
], batch size: 236, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:30:45,556 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0374, 3.0114, 4.7059, 3.3473, 3.6088, 3.4174, 2.5349, 2.6897], + device='cuda:2'), covar=tensor([0.1018, 0.3219, 0.0497, 0.1195, 0.1905, 0.1664, 0.2552, 0.2747], + device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0390, 0.0351, 0.0288, 0.0374, 0.0377, 0.0376, 0.0347], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 05:31:12,865 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.819e+02 3.799e+02 4.581e+02 5.362e+02 9.999e+02, threshold=9.162e+02, percent-clipped=1.0 +2023-03-29 05:32:34,465 INFO [train.py:892] (2/4) Epoch 33, batch 1600, loss[loss=0.1345, simple_loss=0.2101, pruned_loss=0.02945, over 19692.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2392, pruned_loss=0.04032, over 3948856.47 frames. ], batch size: 46, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:32:55,642 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2293, 4.9231, 4.9823, 5.2556, 4.9346, 5.4800, 5.3688, 5.5517], + device='cuda:2'), covar=tensor([0.0642, 0.0402, 0.0447, 0.0347, 0.0730, 0.0362, 0.0398, 0.0346], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0180, 0.0205, 0.0179, 0.0178, 0.0162, 0.0154, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 05:33:31,501 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-29 05:34:10,739 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9830, 2.9959, 5.1837, 4.2850, 4.8072, 5.1324, 4.9696, 4.7017], + device='cuda:2'), covar=tensor([0.0473, 0.0890, 0.0082, 0.0892, 0.0141, 0.0142, 0.0130, 0.0136], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0104, 0.0088, 0.0153, 0.0086, 0.0098, 0.0090, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 05:34:30,662 INFO [train.py:892] (2/4) Epoch 33, batch 1650, loss[loss=0.1324, simple_loss=0.2142, pruned_loss=0.0253, over 19698.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2397, pruned_loss=0.04047, over 3948198.35 frames. ], batch size: 85, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:34:34,027 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3584, 3.6205, 3.1357, 2.7763, 3.2812, 3.7003, 3.5506, 3.5685], + device='cuda:2'), covar=tensor([0.0248, 0.0289, 0.0262, 0.0437, 0.0293, 0.0206, 0.0176, 0.0156], + device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0099, 0.0101, 0.0103, 0.0106, 0.0089, 0.0090, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 05:35:04,279 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 3.785e+02 4.446e+02 5.538e+02 8.034e+02, threshold=8.891e+02, percent-clipped=0.0 +2023-03-29 05:36:28,678 INFO [train.py:892] (2/4) Epoch 33, batch 1700, loss[loss=0.1519, simple_loss=0.2245, pruned_loss=0.03968, over 19777.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2413, pruned_loss=0.04103, over 3945419.86 frames. 
], batch size: 154, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:36:29,909 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61061.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:37:08,436 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61074.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:38:23,939 INFO [train.py:892] (2/4) Epoch 33, batch 1750, loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02809, over 19913.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2403, pruned_loss=0.04068, over 3946513.69 frames. ], batch size: 45, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:38:45,043 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61122.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:38:52,712 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.343e+02 3.701e+02 4.217e+02 4.986e+02 8.389e+02, threshold=8.433e+02, percent-clipped=0.0 +2023-03-29 05:39:59,806 INFO [train.py:892] (2/4) Epoch 33, batch 1800, loss[loss=0.1689, simple_loss=0.2504, pruned_loss=0.04368, over 19578.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2396, pruned_loss=0.04034, over 3947814.06 frames. ], batch size: 42, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:41:31,939 INFO [train.py:892] (2/4) Epoch 33, batch 1850, loss[loss=0.1635, simple_loss=0.2507, pruned_loss=0.03809, over 19850.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2408, pruned_loss=0.04049, over 3947347.78 frames. ], batch size: 58, lr: 4.70e-03, grad_scale: 16.0 +2023-03-29 05:42:34,503 INFO [train.py:892] (2/4) Epoch 34, batch 0, loss[loss=0.1435, simple_loss=0.2175, pruned_loss=0.03476, over 19842.00 frames. ], tot_loss[loss=0.1435, simple_loss=0.2175, pruned_loss=0.03476, over 19842.00 frames. ], batch size: 109, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:42:34,504 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 05:42:56,975 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5798, 4.0458, 3.8831, 3.8800, 4.0894, 3.9295, 3.8618, 3.6635], + device='cuda:2'), covar=tensor([0.2137, 0.1354, 0.1625, 0.1437, 0.0852, 0.0975, 0.1983, 0.2252], + device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0340, 0.0372, 0.0303, 0.0279, 0.0289, 0.0368, 0.0397], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 05:43:04,140 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7871, 3.0407, 3.3518, 3.6092, 2.7965, 3.1211, 2.5221, 2.6204], + device='cuda:2'), covar=tensor([0.0527, 0.1610, 0.0884, 0.0503, 0.1847, 0.0762, 0.1353, 0.1506], + device='cuda:2'), in_proj_covar=tensor([0.0246, 0.0329, 0.0251, 0.0205, 0.0249, 0.0211, 0.0220, 0.0218], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 05:43:07,404 INFO [train.py:926] (2/4) Epoch 34, validation: loss=0.1816, simple_loss=0.2491, pruned_loss=0.05706, over 2883724.00 frames. 
+2023-03-29 05:43:07,405 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 05:43:30,409 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.548e+02 3.493e+02 4.214e+02 5.020e+02 1.069e+03, threshold=8.428e+02, percent-clipped=3.0 +2023-03-29 05:44:23,187 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1805, 3.8814, 4.0120, 4.2031, 3.9442, 4.2318, 4.2764, 4.4995], + device='cuda:2'), covar=tensor([0.0702, 0.0485, 0.0576, 0.0439, 0.0756, 0.0578, 0.0499, 0.0327], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0180, 0.0204, 0.0178, 0.0177, 0.0161, 0.0153, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 05:45:06,393 INFO [train.py:892] (2/4) Epoch 34, batch 50, loss[loss=0.1725, simple_loss=0.2512, pruned_loss=0.04688, over 19751.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2342, pruned_loss=0.03968, over 892441.47 frames. ], batch size: 276, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:47:01,411 INFO [train.py:892] (2/4) Epoch 34, batch 100, loss[loss=0.174, simple_loss=0.2614, pruned_loss=0.04334, over 19678.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.238, pruned_loss=0.03928, over 1566711.82 frames. ], batch size: 337, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:47:24,784 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.578e+02 3.716e+02 4.447e+02 5.513e+02 1.175e+03, threshold=8.893e+02, percent-clipped=3.0 +2023-03-29 05:48:57,021 INFO [train.py:892] (2/4) Epoch 34, batch 150, loss[loss=0.1778, simple_loss=0.249, pruned_loss=0.05333, over 19705.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2368, pruned_loss=0.03907, over 2095244.29 frames. ], batch size: 85, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:49:18,359 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61374.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:50:14,631 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0400, 4.7423, 4.8101, 5.0928, 4.7730, 5.2732, 5.1481, 5.4172], + device='cuda:2'), covar=tensor([0.0691, 0.0401, 0.0421, 0.0348, 0.0590, 0.0393, 0.0424, 0.0254], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0180, 0.0205, 0.0179, 0.0178, 0.0162, 0.0154, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 05:50:56,751 INFO [train.py:892] (2/4) Epoch 34, batch 200, loss[loss=0.1894, simple_loss=0.2979, pruned_loss=0.0405, over 18753.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2378, pruned_loss=0.03884, over 2505853.18 frames. ], batch size: 564, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:50:59,673 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61417.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:51:10,215 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61422.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:51:10,941 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. 
limit=2.0 +2023-03-29 05:51:17,537 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4192, 3.7934, 3.9439, 4.5430, 2.9752, 3.4216, 2.7954, 2.8588], + device='cuda:2'), covar=tensor([0.0526, 0.1858, 0.0835, 0.0352, 0.2010, 0.0937, 0.1327, 0.1595], + device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0328, 0.0251, 0.0205, 0.0250, 0.0212, 0.0220, 0.0218], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 05:51:18,481 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.390e+02 3.871e+02 4.515e+02 5.267e+02 1.071e+03, threshold=9.030e+02, percent-clipped=3.0 +2023-03-29 05:52:53,111 INFO [train.py:892] (2/4) Epoch 34, batch 250, loss[loss=0.225, simple_loss=0.3048, pruned_loss=0.07266, over 19608.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.238, pruned_loss=0.0392, over 2824838.51 frames. ], batch size: 376, lr: 4.62e-03, grad_scale: 16.0 +2023-03-29 05:54:06,428 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-29 05:54:47,111 INFO [train.py:892] (2/4) Epoch 34, batch 300, loss[loss=0.1677, simple_loss=0.2551, pruned_loss=0.04017, over 19903.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2391, pruned_loss=0.03954, over 3074546.96 frames. ], batch size: 50, lr: 4.62e-03, grad_scale: 16.0 +2023-03-29 05:55:09,936 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 3.531e+02 4.342e+02 5.324e+02 1.066e+03, threshold=8.684e+02, percent-clipped=3.0 +2023-03-29 05:55:47,649 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 05:56:23,748 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2515, 2.4044, 2.6530, 2.3833, 2.8161, 2.8017, 3.1399, 3.3739], + device='cuda:2'), covar=tensor([0.0749, 0.1625, 0.1564, 0.2139, 0.1473, 0.1380, 0.0755, 0.0730], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0241, 0.0268, 0.0255, 0.0299, 0.0257, 0.0235, 0.0258], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 05:56:44,417 INFO [train.py:892] (2/4) Epoch 34, batch 350, loss[loss=0.1559, simple_loss=0.2384, pruned_loss=0.03672, over 19683.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2394, pruned_loss=0.0397, over 3268494.61 frames. 
], batch size: 45, lr: 4.62e-03, grad_scale: 16.0
+2023-03-29 05:58:21,607 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3013, 3.4414, 2.2343, 4.0192, 3.6822, 3.9707, 4.0327, 3.1833],
+ device='cuda:2'), covar=tensor([0.0628, 0.0669, 0.1467, 0.0635, 0.0635, 0.0421, 0.0596, 0.0830],
+ device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0145, 0.0144, 0.0154, 0.0135, 0.0137, 0.0150, 0.0147],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 05:58:27,842 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7011, 4.0025, 4.1810, 4.7704, 3.2436, 3.6492, 2.9869, 3.0200],
+ device='cuda:2'), covar=tensor([0.0441, 0.1722, 0.0797, 0.0395, 0.1971, 0.1019, 0.1267, 0.1617],
+ device='cuda:2'), in_proj_covar=tensor([0.0247, 0.0328, 0.0251, 0.0206, 0.0249, 0.0212, 0.0221, 0.0218],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 05:58:37,547 INFO [train.py:892] (2/4) Epoch 34, batch 400, loss[loss=0.1478, simple_loss=0.2298, pruned_loss=0.03293, over 19760.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.239, pruned_loss=0.03942, over 3418553.31 frames. ], batch size: 49, lr: 4.62e-03, grad_scale: 16.0
+2023-03-29 05:58:58,227 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1004, 2.5938, 4.3735, 3.7926, 4.2271, 4.2895, 4.1344, 4.0855],
+ device='cuda:2'), covar=tensor([0.0742, 0.1092, 0.0145, 0.0837, 0.0197, 0.0250, 0.0208, 0.0200],
+ device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0103, 0.0088, 0.0152, 0.0086, 0.0098, 0.0090, 0.0086],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 05:59:04,289 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.292e+02 3.497e+02 4.355e+02 5.264e+02 1.050e+03, threshold=8.709e+02, percent-clipped=2.0
+2023-03-29 06:00:32,497 INFO [train.py:892] (2/4) Epoch 34, batch 450, loss[loss=0.1886, simple_loss=0.2691, pruned_loss=0.05402, over 19710.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2385, pruned_loss=0.03957, over 3537924.46 frames. ], batch size: 295, lr: 4.62e-03, grad_scale: 16.0
+2023-03-29 06:00:48,320 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3778, 2.6119, 3.6848, 2.8810, 3.1159, 2.9850, 2.1930, 2.3230],
+ device='cuda:2'), covar=tensor([0.1183, 0.2883, 0.0649, 0.1266, 0.1816, 0.1541, 0.2646, 0.2801],
+ device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0390, 0.0349, 0.0287, 0.0374, 0.0377, 0.0377, 0.0346],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:01:48,394 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8206, 2.4672, 2.7898, 3.1006, 3.5000, 3.7723, 3.7086, 3.6812],
+ device='cuda:2'), covar=tensor([0.1037, 0.1563, 0.1237, 0.0695, 0.0472, 0.0302, 0.0383, 0.0446],
+ device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0171, 0.0180, 0.0152, 0.0137, 0.0134, 0.0125, 0.0118],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 06:02:27,846 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8311, 2.2462, 2.8664, 2.4972, 2.5095, 2.5804, 1.8852, 2.0646],
+ device='cuda:2'), covar=tensor([0.1214, 0.2318, 0.0910, 0.1118, 0.1935, 0.1400, 0.2691, 0.2387],
+ device='cuda:2'), in_proj_covar=tensor([0.0349, 0.0388, 0.0347, 0.0286, 0.0372, 0.0375, 0.0374, 0.0344],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:02:28,755 INFO [train.py:892] (2/4) Epoch 34, batch 500, loss[loss=0.2003, simple_loss=0.3047, pruned_loss=0.04798, over 18950.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2384, pruned_loss=0.03973, over 3628614.55 frames. ], batch size: 514, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:02:32,327 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61717.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:02:51,071 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.296e+02 3.666e+02 4.435e+02 5.198e+02 9.761e+02, threshold=8.870e+02, percent-clipped=3.0
+2023-03-29 06:03:07,848 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61732.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:03:32,898 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61743.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:03:56,231 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-03-29 06:04:06,799 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0
+2023-03-29 06:04:20,987 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:04:22,375 INFO [train.py:892] (2/4) Epoch 34, batch 550, loss[loss=0.176, simple_loss=0.2506, pruned_loss=0.05069, over 19872.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2384, pruned_loss=0.0396, over 3699937.29 frames. ], batch size: 48, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:04:44,775 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2161, 2.5807, 2.3108, 1.7665, 2.3578, 2.5366, 2.4578, 2.5380],
+ device='cuda:2'), covar=tensor([0.0441, 0.0306, 0.0312, 0.0583, 0.0386, 0.0296, 0.0312, 0.0239],
+ device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0098, 0.0100, 0.0103, 0.0106, 0.0089, 0.0089, 0.0089],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 06:05:28,155 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61793.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:05:55,422 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61804.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:06:24,060 INFO [train.py:892] (2/4) Epoch 34, batch 600, loss[loss=0.1441, simple_loss=0.2212, pruned_loss=0.0335, over 19825.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2378, pruned_loss=0.03995, over 3755826.85 frames. ], batch size: 167, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:06:46,762 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0
+2023-03-29 06:06:47,556 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.635e+02 3.514e+02 4.079e+02 4.976e+02 9.879e+02, threshold=8.158e+02, percent-clipped=2.0
+2023-03-29 06:08:22,953 INFO [train.py:892] (2/4) Epoch 34, batch 650, loss[loss=0.1523, simple_loss=0.2314, pruned_loss=0.03663, over 19813.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2394, pruned_loss=0.04088, over 3798476.87 frames. ], batch size: 132, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:10:09,744 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61914.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:10:14,468 INFO [train.py:892] (2/4) Epoch 34, batch 700, loss[loss=0.1465, simple_loss=0.2279, pruned_loss=0.03251, over 19722.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2393, pruned_loss=0.04084, over 3833277.90 frames. ], batch size: 62, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:10:38,126 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.620e+02 3.763e+02 4.447e+02 5.196e+02 1.125e+03, threshold=8.894e+02, percent-clipped=5.0
+2023-03-29 06:11:28,728 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2567, 2.2724, 2.3808, 2.3233, 2.3257, 2.3907, 2.3306, 2.4545],
+ device='cuda:2'), covar=tensor([0.0408, 0.0365, 0.0367, 0.0309, 0.0455, 0.0341, 0.0438, 0.0318],
+ device='cuda:2'), in_proj_covar=tensor([0.0089, 0.0083, 0.0087, 0.0080, 0.0093, 0.0086, 0.0103, 0.0075],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 06:12:12,045 INFO [train.py:892] (2/4) Epoch 34, batch 750, loss[loss=0.1605, simple_loss=0.2464, pruned_loss=0.03726, over 19635.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2384, pruned_loss=0.04012, over 3859429.16 frames. ], batch size: 68, lr: 4.61e-03, grad_scale: 16.0
+2023-03-29 06:12:34,552 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61975.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:13:51,153 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7285, 2.8196, 4.4695, 3.2529, 3.5205, 3.2754, 2.4193, 2.5598],
+ device='cuda:2'), covar=tensor([0.1194, 0.3536, 0.0516, 0.1140, 0.1933, 0.1615, 0.2719, 0.2876],
+ device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0388, 0.0348, 0.0286, 0.0372, 0.0376, 0.0374, 0.0345],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:14:10,220 INFO [train.py:892] (2/4) Epoch 34, batch 800, loss[loss=0.1551, simple_loss=0.2346, pruned_loss=0.03778, over 19810.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2375, pruned_loss=0.03951, over 3880220.81 frames. ], batch size: 82, lr: 4.60e-03, grad_scale: 16.0
+2023-03-29 06:14:31,338 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 3.764e+02 4.280e+02 5.112e+02 1.282e+03, threshold=8.560e+02, percent-clipped=3.0
+2023-03-29 06:15:08,291 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6950, 4.3707, 4.4504, 4.6604, 4.3615, 4.7590, 4.7423, 4.9651],
+ device='cuda:2'), covar=tensor([0.0570, 0.0387, 0.0467, 0.0342, 0.0698, 0.0437, 0.0415, 0.0264],
+ device='cuda:2'), in_proj_covar=tensor([0.0156, 0.0181, 0.0206, 0.0179, 0.0179, 0.0163, 0.0155, 0.0202],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-29 06:16:03,792 INFO [train.py:892] (2/4) Epoch 34, batch 850, loss[loss=0.1529, simple_loss=0.2361, pruned_loss=0.03478, over 19791.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2371, pruned_loss=0.03926, over 3896474.32 frames. ], batch size: 68, lr: 4.60e-03, grad_scale: 16.0
+2023-03-29 06:16:53,208 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62088.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:17:17,395 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62099.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:17:54,509 INFO [train.py:892] (2/4) Epoch 34, batch 900, loss[loss=0.15, simple_loss=0.2293, pruned_loss=0.03537, over 19710.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2372, pruned_loss=0.03922, over 3907549.24 frames. ], batch size: 61, lr: 4.60e-03, grad_scale: 16.0
+2023-03-29 06:18:09,232 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62122.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:18:10,878 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4985, 4.3179, 4.7974, 4.3687, 4.1120, 4.7067, 4.4776, 4.9651],
+ device='cuda:2'), covar=tensor([0.0821, 0.0399, 0.0371, 0.0416, 0.0862, 0.0484, 0.0497, 0.0294],
+ device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0225, 0.0224, 0.0238, 0.0210, 0.0248, 0.0238, 0.0220],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:18:16,037 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.740e+02 3.509e+02 4.216e+02 5.041e+02 1.543e+03, threshold=8.432e+02, percent-clipped=2.0
+2023-03-29 06:18:38,920 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5622, 4.4066, 4.8833, 4.4649, 4.1419, 4.7490, 4.5267, 5.0138],
+ device='cuda:2'), covar=tensor([0.0835, 0.0403, 0.0385, 0.0424, 0.0825, 0.0537, 0.0510, 0.0363],
+ device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0225, 0.0223, 0.0237, 0.0209, 0.0247, 0.0237, 0.0219],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:19:48,577 INFO [train.py:892] (2/4) Epoch 34, batch 950, loss[loss=0.1439, simple_loss=0.2243, pruned_loss=0.03176, over 19848.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2375, pruned_loss=0.03919, over 3916557.61 frames. ], batch size: 115, lr: 4.60e-03, grad_scale: 16.0
+2023-03-29 06:20:28,562 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62183.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:21:25,620 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7320, 4.9869, 5.0391, 4.9145, 4.7396, 4.9941, 4.5274, 4.5406],
+ device='cuda:2'), covar=tensor([0.0487, 0.0494, 0.0486, 0.0417, 0.0613, 0.0488, 0.0673, 0.0850],
+ device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0292, 0.0306, 0.0267, 0.0271, 0.0258, 0.0273, 0.0320],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:21:45,066 INFO [train.py:892] (2/4) Epoch 34, batch 1000, loss[loss=0.1779, simple_loss=0.2614, pruned_loss=0.04715, over 19780.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.238, pruned_loss=0.03926, over 3922862.90 frames. ], batch size: 215, lr: 4.60e-03, grad_scale: 16.0
+2023-03-29 06:22:04,006 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-03-29 06:22:08,035 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.587e+02 3.779e+02 4.502e+02 5.626e+02 1.320e+03, threshold=9.004e+02, percent-clipped=5.0
+2023-03-29 06:22:57,984 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0140, 2.6324, 3.2621, 3.1786, 3.6979, 4.1357, 3.8873, 3.9943],
+ device='cuda:2'), covar=tensor([0.0987, 0.1533, 0.1139, 0.0752, 0.0442, 0.0231, 0.0429, 0.0410],
+ device='cuda:2'), in_proj_covar=tensor([0.0164, 0.0172, 0.0182, 0.0154, 0.0139, 0.0135, 0.0127, 0.0119],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 06:23:39,541 INFO [train.py:892] (2/4) Epoch 34, batch 1050, loss[loss=0.1534, simple_loss=0.2313, pruned_loss=0.03778, over 19811.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2393, pruned_loss=0.03981, over 3926769.29 frames. ], batch size: 181, lr: 4.59e-03, grad_scale: 16.0
+2023-03-29 06:23:49,550 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62270.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:23:55,856 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62273.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:25:31,613 INFO [train.py:892] (2/4) Epoch 34, batch 1100, loss[loss=0.1575, simple_loss=0.2381, pruned_loss=0.03841, over 19852.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2381, pruned_loss=0.03951, over 3932981.37 frames. ], batch size: 56, lr: 4.59e-03, grad_scale: 16.0
+2023-03-29 06:25:50,518 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4574, 4.4544, 4.8493, 4.5891, 4.7609, 4.3185, 4.5546, 4.3662],
+ device='cuda:2'), covar=tensor([0.1494, 0.1631, 0.0935, 0.1301, 0.0817, 0.0941, 0.1987, 0.2038],
+ device='cuda:2'), in_proj_covar=tensor([0.0296, 0.0337, 0.0369, 0.0302, 0.0277, 0.0286, 0.0365, 0.0390],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:25:55,922 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 3.591e+02 4.112e+02 5.051e+02 8.825e+02, threshold=8.223e+02, percent-clipped=0.0
+2023-03-29 06:26:15,548 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62334.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:27:28,362 INFO [train.py:892] (2/4) Epoch 34, batch 1150, loss[loss=0.1572, simple_loss=0.232, pruned_loss=0.0412, over 19773.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2375, pruned_loss=0.03933, over 3935279.06 frames. ], batch size: 108, lr: 4.59e-03, grad_scale: 16.0
+2023-03-29 06:28:19,835 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62388.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:28:26,260 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62391.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:28:46,481 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62399.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:29:25,710 INFO [train.py:892] (2/4) Epoch 34, batch 1200, loss[loss=0.2141, simple_loss=0.3135, pruned_loss=0.05731, over 18875.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.239, pruned_loss=0.03972, over 3937217.23 frames. ], batch size: 514, lr: 4.59e-03, grad_scale: 16.0
+2023-03-29 06:29:49,453 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.662e+02 4.259e+02 5.186e+02 8.409e+02, threshold=8.517e+02, percent-clipped=1.0
+2023-03-29 06:30:12,883 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62436.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:30:27,575 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2772, 5.5359, 5.5866, 5.4439, 5.2460, 5.5537, 5.0151, 4.9929],
+ device='cuda:2'), covar=tensor([0.0424, 0.0476, 0.0489, 0.0459, 0.0584, 0.0503, 0.0684, 0.0968],
+ device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0294, 0.0308, 0.0271, 0.0275, 0.0260, 0.0276, 0.0324],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:30:40,844 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62447.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:30:51,291 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62452.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:31:20,399 INFO [train.py:892] (2/4) Epoch 34, batch 1250, loss[loss=0.1491, simple_loss=0.2233, pruned_loss=0.0374, over 19836.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2395, pruned_loss=0.03991, over 3937577.82 frames. ], batch size: 101, lr: 4.59e-03, grad_scale: 16.0
+2023-03-29 06:31:46,039 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62478.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:33:12,364 INFO [train.py:892] (2/4) Epoch 34, batch 1300, loss[loss=0.1757, simple_loss=0.2492, pruned_loss=0.05105, over 19835.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2397, pruned_loss=0.04007, over 3940449.06 frames. ], batch size: 208, lr: 4.58e-03, grad_scale: 16.0
+2023-03-29 06:33:37,412 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.393e+02 3.741e+02 4.359e+02 5.306e+02 1.139e+03, threshold=8.717e+02, percent-clipped=4.0
+2023-03-29 06:34:21,122 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62545.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:35:08,978 INFO [train.py:892] (2/4) Epoch 34, batch 1350, loss[loss=0.1453, simple_loss=0.2147, pruned_loss=0.03795, over 19846.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2393, pruned_loss=0.0401, over 3942624.56 frames. ], batch size: 124, lr: 4.58e-03, grad_scale: 16.0
+2023-03-29 06:35:17,992 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62570.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:35:50,176 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62585.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:36:41,063 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62606.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:37:03,307 INFO [train.py:892] (2/4) Epoch 34, batch 1400, loss[loss=0.1549, simple_loss=0.2367, pruned_loss=0.03661, over 19694.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2391, pruned_loss=0.04002, over 3944170.55 frames. ], batch size: 74, lr: 4.58e-03, grad_scale: 16.0
+2023-03-29 06:37:08,501 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62618.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:37:26,794 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.572e+02 3.612e+02 4.260e+02 5.220e+02 9.254e+02, threshold=8.520e+02, percent-clipped=0.0
+2023-03-29 06:37:35,455 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62629.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:38:13,469 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62646.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:38:58,245 INFO [train.py:892] (2/4) Epoch 34, batch 1450, loss[loss=0.15, simple_loss=0.229, pruned_loss=0.03545, over 19881.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2392, pruned_loss=0.03973, over 3945141.08 frames. ], batch size: 97, lr: 4.58e-03, grad_scale: 16.0
+2023-03-29 06:39:00,670 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62666.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:39:35,726 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-03-29 06:39:40,211 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3118, 3.2015, 3.3738, 2.6794, 3.5054, 2.9284, 3.2329, 3.3805],
+ device='cuda:2'), covar=tensor([0.0746, 0.0415, 0.0665, 0.0777, 0.0385, 0.0505, 0.0570, 0.0356],
+ device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0087, 0.0084, 0.0111, 0.0080, 0.0083, 0.0081, 0.0075],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-29 06:40:56,701 INFO [train.py:892] (2/4) Epoch 34, batch 1500, loss[loss=0.1363, simple_loss=0.2122, pruned_loss=0.03018, over 19744.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2381, pruned_loss=0.03926, over 3947201.24 frames. ], batch size: 44, lr: 4.58e-03, grad_scale: 32.0
+2023-03-29 06:41:19,189 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.451e+02 3.877e+02 4.374e+02 5.206e+02 9.071e+02, threshold=8.749e+02, percent-clipped=3.0
+2023-03-29 06:41:22,501 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62727.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:42:11,236 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62747.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:42:20,109 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7951, 2.8887, 4.3973, 3.2640, 3.4047, 3.3428, 2.4099, 2.6275],
+ device='cuda:2'), covar=tensor([0.1128, 0.3260, 0.0496, 0.1137, 0.1950, 0.1568, 0.2566, 0.2730],
+ device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0391, 0.0351, 0.0288, 0.0373, 0.0381, 0.0378, 0.0348],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:42:53,794 INFO [train.py:892] (2/4) Epoch 34, batch 1550, loss[loss=0.1829, simple_loss=0.2626, pruned_loss=0.05164, over 19816.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2396, pruned_loss=0.04016, over 3946750.69 frames. ], batch size: 231, lr: 4.58e-03, grad_scale: 32.0
+2023-03-29 06:43:11,302 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62773.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:43:13,320 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8320, 2.5894, 4.0425, 3.5143, 3.8769, 4.0203, 3.8250, 3.7159],
+ device='cuda:2'), covar=tensor([0.0676, 0.0910, 0.0112, 0.0547, 0.0167, 0.0215, 0.0173, 0.0210],
+ device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0105, 0.0090, 0.0153, 0.0087, 0.0099, 0.0091, 0.0087],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:43:22,538 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62778.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:44:53,495 INFO [train.py:892] (2/4) Epoch 34, batch 1600, loss[loss=0.1476, simple_loss=0.2269, pruned_loss=0.03419, over 19767.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2388, pruned_loss=0.03972, over 3948116.14 frames. ], batch size: 100, lr: 4.57e-03, grad_scale: 32.0
+2023-03-29 06:45:16,007 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.873e+02 3.915e+02 4.396e+02 5.347e+02 9.230e+02, threshold=8.791e+02, percent-clipped=1.0
+2023-03-29 06:45:17,036 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62826.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:45:17,246 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6864, 2.6090, 2.7523, 2.1960, 2.8366, 2.3393, 2.7749, 2.6488],
+ device='cuda:2'), covar=tensor([0.0561, 0.0518, 0.0488, 0.0895, 0.0374, 0.0561, 0.0449, 0.0451],
+ device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0088, 0.0084, 0.0112, 0.0081, 0.0084, 0.0082, 0.0076],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-29 06:45:34,213 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62834.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:46:47,351 INFO [train.py:892] (2/4) Epoch 34, batch 1650, loss[loss=0.1306, simple_loss=0.2059, pruned_loss=0.02768, over 19843.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2374, pruned_loss=0.0394, over 3949922.25 frames. ], batch size: 109, lr: 4.57e-03, grad_scale: 32.0
+2023-03-29 06:48:09,078 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62901.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:48:42,512 INFO [train.py:892] (2/4) Epoch 34, batch 1700, loss[loss=0.1434, simple_loss=0.2116, pruned_loss=0.03766, over 19793.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2385, pruned_loss=0.03967, over 3948905.59 frames. ], batch size: 105, lr: 4.57e-03, grad_scale: 32.0
+2023-03-29 06:49:05,131 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.243e+02 3.824e+02 4.369e+02 5.372e+02 9.431e+02, threshold=8.739e+02, percent-clipped=1.0
+2023-03-29 06:49:10,949 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0
+2023-03-29 06:49:14,426 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62929.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:49:41,253 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62941.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:50:33,633 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5749, 2.5089, 2.6488, 2.1586, 2.6994, 2.2764, 2.6414, 2.5693],
+ device='cuda:2'), covar=tensor([0.0448, 0.0502, 0.0522, 0.0831, 0.0365, 0.0553, 0.0535, 0.0387],
+ device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0089, 0.0086, 0.0113, 0.0082, 0.0085, 0.0083, 0.0076],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-29 06:50:34,875 INFO [train.py:892] (2/4) Epoch 34, batch 1750, loss[loss=0.1925, simple_loss=0.2673, pruned_loss=0.05883, over 19707.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2384, pruned_loss=0.03978, over 3949737.68 frames. ], batch size: 283, lr: 4.57e-03, grad_scale: 32.0
+2023-03-29 06:50:56,697 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62977.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:52:08,783 INFO [train.py:892] (2/4) Epoch 34, batch 1800, loss[loss=0.1393, simple_loss=0.2219, pruned_loss=0.02832, over 19858.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2377, pruned_loss=0.03973, over 3950571.91 frames. ], batch size: 78, lr: 4.57e-03, grad_scale: 32.0
+2023-03-29 06:52:20,225 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63022.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:52:26,910 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.273e+02 3.774e+02 4.619e+02 5.631e+02 1.047e+03, threshold=9.238e+02, percent-clipped=1.0
+2023-03-29 06:53:01,957 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3503, 4.4742, 4.5748, 4.5717, 4.2057, 4.5223, 4.1318, 3.7998],
+ device='cuda:2'), covar=tensor([0.1013, 0.1187, 0.0973, 0.0743, 0.1195, 0.1001, 0.1315, 0.2192],
+ device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0295, 0.0308, 0.0271, 0.0277, 0.0261, 0.0276, 0.0324],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 06:53:07,219 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63047.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:53:40,516 INFO [train.py:892] (2/4) Epoch 34, batch 1850, loss[loss=0.1393, simple_loss=0.2283, pruned_loss=0.02515, over 19836.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2387, pruned_loss=0.03926, over 3950098.92 frames. ], batch size: 57, lr: 4.56e-03, grad_scale: 32.0
+2023-03-29 06:54:44,126 INFO [train.py:892] (2/4) Epoch 35, batch 0, loss[loss=0.197, simple_loss=0.2705, pruned_loss=0.06176, over 19705.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2705, pruned_loss=0.06176, over 19705.00 frames. ], batch size: 337, lr: 4.50e-03, grad_scale: 32.0
+2023-03-29 06:54:44,127 INFO [train.py:917] (2/4) Computing validation loss
+2023-03-29 06:55:00,094 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0206, 2.7345, 3.1762, 3.2666, 3.6577, 4.0715, 3.8411, 3.9261],
+ device='cuda:2'), covar=tensor([0.0964, 0.1479, 0.1203, 0.0718, 0.0501, 0.0253, 0.0412, 0.0385],
+ device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0169, 0.0179, 0.0152, 0.0138, 0.0134, 0.0125, 0.0118],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 06:55:18,886 INFO [train.py:926] (2/4) Epoch 35, validation: loss=0.1837, simple_loss=0.2499, pruned_loss=0.05876, over 2883724.00 frames.
+2023-03-29 06:55:18,887 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB
+2023-03-29 06:56:18,677 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63095.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:57:16,274 INFO [train.py:892] (2/4) Epoch 35, batch 50, loss[loss=0.1792, simple_loss=0.2649, pruned_loss=0.04678, over 19750.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2291, pruned_loss=0.03601, over 892830.06 frames. ], batch size: 250, lr: 4.50e-03, grad_scale: 32.0
+2023-03-29 06:57:28,117 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.370e+02 3.277e+02 3.899e+02 4.647e+02 1.054e+03, threshold=7.797e+02, percent-clipped=1.0
+2023-03-29 06:57:36,761 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63129.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 06:57:55,920 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0
+2023-03-29 06:59:14,194 INFO [train.py:892] (2/4) Epoch 35, batch 100, loss[loss=0.1611, simple_loss=0.2463, pruned_loss=0.03798, over 19756.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2339, pruned_loss=0.0374, over 1571851.48 frames. ], batch size: 276, lr: 4.49e-03, grad_scale: 32.0
+2023-03-29 07:00:25,398 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63201.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:01:09,273 INFO [train.py:892] (2/4) Epoch 35, batch 150, loss[loss=0.1335, simple_loss=0.2103, pruned_loss=0.02837, over 19888.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2364, pruned_loss=0.03877, over 2097456.18 frames. ], batch size: 47, lr: 4.49e-03, grad_scale: 16.0
+2023-03-29 07:01:10,376 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63221.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:01:22,852 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.863e+02 3.708e+02 4.219e+02 5.359e+02 8.315e+02, threshold=8.439e+02, percent-clipped=1.0
+2023-03-29 07:01:57,385 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63241.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:02:14,848 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63249.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:03:02,045 INFO [train.py:892] (2/4) Epoch 35, batch 200, loss[loss=0.138, simple_loss=0.2187, pruned_loss=0.02864, over 19649.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2395, pruned_loss=0.03999, over 2507780.74 frames. ], batch size: 69, lr: 4.49e-03, grad_scale: 16.0
+2023-03-29 07:03:28,866 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63282.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:03:45,912 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63289.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:04:09,670 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4099, 3.2136, 3.4860, 2.6909, 3.7210, 3.0135, 3.2003, 3.4058],
+ device='cuda:2'), covar=tensor([0.0694, 0.0481, 0.0522, 0.0841, 0.0309, 0.0493, 0.0499, 0.0429],
+ device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0089, 0.0086, 0.0114, 0.0082, 0.0085, 0.0083, 0.0076],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-29 07:04:56,636 INFO [train.py:892] (2/4) Epoch 35, batch 250, loss[loss=0.1322, simple_loss=0.2113, pruned_loss=0.02657, over 19860.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2377, pruned_loss=0.03942, over 2828814.37 frames. ], batch size: 118, lr: 4.49e-03, grad_scale: 16.0
+2023-03-29 07:05:00,055 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63322.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:05:09,782 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.149e+02 3.669e+02 4.457e+02 5.249e+02 9.820e+02, threshold=8.914e+02, percent-clipped=1.0
+2023-03-29 07:06:48,043 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63370.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:06:49,434 INFO [train.py:892] (2/4) Epoch 35, batch 300, loss[loss=0.1578, simple_loss=0.2341, pruned_loss=0.0407, over 19945.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.237, pruned_loss=0.03908, over 3077844.15 frames. ], batch size: 46, lr: 4.49e-03, grad_scale: 16.0
+2023-03-29 07:08:02,665 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8222, 3.2171, 3.3249, 3.7401, 2.7051, 3.1380, 2.4540, 2.5478],
+ device='cuda:2'), covar=tensor([0.0593, 0.1962, 0.1059, 0.0495, 0.2056, 0.0908, 0.1490, 0.1686],
+ device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0334, 0.0252, 0.0209, 0.0252, 0.0214, 0.0224, 0.0220],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 07:08:45,603 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63420.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:08:46,582 INFO [train.py:892] (2/4) Epoch 35, batch 350, loss[loss=0.1425, simple_loss=0.2199, pruned_loss=0.03256, over 19753.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2381, pruned_loss=0.03955, over 3270746.58 frames. ], batch size: 97, lr: 4.49e-03, grad_scale: 16.0
+2023-03-29 07:08:49,607 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63422.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:09:00,249 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.282e+02 3.589e+02 4.143e+02 4.777e+02 8.790e+02, threshold=8.287e+02, percent-clipped=0.0
+2023-03-29 07:09:08,386 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63429.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:10:37,171 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1743, 3.2076, 2.0642, 3.7038, 3.4522, 3.7059, 3.7704, 3.0275],
+ device='cuda:2'), covar=tensor([0.0669, 0.0755, 0.1634, 0.0694, 0.0626, 0.0489, 0.0663, 0.0833],
+ device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0146, 0.0145, 0.0155, 0.0136, 0.0138, 0.0150, 0.0148],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 07:10:44,424 INFO [train.py:892] (2/4) Epoch 35, batch 400, loss[loss=0.1536, simple_loss=0.2414, pruned_loss=0.03291, over 19610.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.238, pruned_loss=0.03915, over 3422094.05 frames. ], batch size: 48, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:10:58,991 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63477.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:10:59,527 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0
+2023-03-29 07:11:07,785 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63481.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:11:13,404 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63483.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:12:42,324 INFO [train.py:892] (2/4) Epoch 35, batch 450, loss[loss=0.1461, simple_loss=0.2343, pruned_loss=0.029, over 19898.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2393, pruned_loss=0.03964, over 3537418.05 frames. ], batch size: 116, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:12:56,129 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.456e+02 3.739e+02 4.362e+02 5.360e+02 8.901e+02, threshold=8.724e+02, percent-clipped=1.0
+2023-03-29 07:14:28,035 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5275, 2.8161, 2.5740, 2.0642, 2.5837, 2.7434, 2.7883, 2.7112],
+ device='cuda:2'), covar=tensor([0.0380, 0.0341, 0.0309, 0.0541, 0.0368, 0.0343, 0.0268, 0.0307],
+ device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0101, 0.0104, 0.0104, 0.0108, 0.0091, 0.0091, 0.0090],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 07:14:32,241 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63569.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:14:35,580 INFO [train.py:892] (2/4) Epoch 35, batch 500, loss[loss=0.1552, simple_loss=0.2399, pruned_loss=0.03519, over 19770.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2391, pruned_loss=0.03983, over 3628196.76 frames. ], batch size: 233, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:14:52,560 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63577.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:15:10,953 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63586.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:16:32,814 INFO [train.py:892] (2/4) Epoch 35, batch 550, loss[loss=0.1664, simple_loss=0.2442, pruned_loss=0.04433, over 19764.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.239, pruned_loss=0.04002, over 3699075.62 frames. ], batch size: 244, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:16:47,193 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.269e+02 3.713e+02 4.298e+02 5.040e+02 8.851e+02, threshold=8.596e+02, percent-clipped=1.0
+2023-03-29 07:16:54,516 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63630.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:17:34,934 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63647.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:18:08,703 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1842, 2.2138, 2.2730, 2.2643, 2.3942, 2.3874, 2.3182, 2.4144],
+ device='cuda:2'), covar=tensor([0.0526, 0.0444, 0.0397, 0.0366, 0.0460, 0.0348, 0.0488, 0.0316],
+ device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0085, 0.0087, 0.0081, 0.0094, 0.0087, 0.0104, 0.0076],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 07:18:28,408 INFO [train.py:892] (2/4) Epoch 35, batch 600, loss[loss=0.1753, simple_loss=0.2545, pruned_loss=0.04811, over 19785.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2378, pruned_loss=0.03947, over 3756498.48 frames. ], batch size: 280, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:19:04,346 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1132, 2.3802, 2.2501, 1.6861, 2.2742, 2.3640, 2.2478, 2.3464],
+ device='cuda:2'), covar=tensor([0.0433, 0.0303, 0.0308, 0.0573, 0.0351, 0.0311, 0.0298, 0.0281],
+ device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0101, 0.0103, 0.0104, 0.0107, 0.0091, 0.0091, 0.0090],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 07:20:27,003 INFO [train.py:892] (2/4) Epoch 35, batch 650, loss[loss=0.1779, simple_loss=0.2521, pruned_loss=0.05182, over 19775.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2369, pruned_loss=0.03928, over 3799703.25 frames. ], batch size: 263, lr: 4.48e-03, grad_scale: 16.0
+2023-03-29 07:20:30,581 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0699, 3.2009, 3.1514, 3.2207, 3.0696, 3.2252, 3.0825, 3.3257],
+ device='cuda:2'), covar=tensor([0.0301, 0.0315, 0.0388, 0.0288, 0.0383, 0.0347, 0.0396, 0.0362],
+ device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0085, 0.0087, 0.0081, 0.0095, 0.0087, 0.0104, 0.0076],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 07:20:40,709 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.573e+02 3.756e+02 4.198e+02 4.970e+02 9.612e+02, threshold=8.396e+02, percent-clipped=1.0
+2023-03-29 07:21:04,104 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6685, 2.7244, 2.8027, 2.7871, 2.7223, 2.8038, 2.6426, 2.8925],
+ device='cuda:2'), covar=tensor([0.0325, 0.0368, 0.0367, 0.0334, 0.0449, 0.0327, 0.0456, 0.0361],
+ device='cuda:2'), in_proj_covar=tensor([0.0091, 0.0085, 0.0088, 0.0082, 0.0095, 0.0087, 0.0104, 0.0076],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 07:21:06,063 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6811, 3.8799, 4.1004, 4.7446, 3.0525, 3.5546, 2.7099, 2.8673],
+ device='cuda:2'), covar=tensor([0.0434, 0.1985, 0.0838, 0.0346, 0.2072, 0.1080, 0.1429, 0.1720],
+ device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0332, 0.0250, 0.0207, 0.0249, 0.0212, 0.0223, 0.0219],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 07:22:16,880 INFO [train.py:892] (2/4) Epoch 35, batch 700, loss[loss=0.1392, simple_loss=0.229, pruned_loss=0.02472, over 19746.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2378, pruned_loss=0.03892, over 3831458.42 frames. ], batch size: 84, lr: 4.47e-03, grad_scale: 16.0
+2023-03-29 07:22:29,930 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63776.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:22:34,084 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63778.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:23:50,915 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63810.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:23:53,172 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0135, 4.6809, 4.7173, 4.4457, 5.0161, 3.1931, 4.0867, 2.6149],
+ device='cuda:2'), covar=tensor([0.0164, 0.0187, 0.0148, 0.0187, 0.0131, 0.1003, 0.0724, 0.1353],
+ device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0138, 0.0122, 0.0137, 0.0144, 0.0130],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 07:24:14,503 INFO [train.py:892] (2/4) Epoch 35, batch 750, loss[loss=0.1669, simple_loss=0.2484, pruned_loss=0.04272, over 19685.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2381, pruned_loss=0.03944, over 3856763.44 frames. ], batch size: 265, lr: 4.47e-03, grad_scale: 16.0
+2023-03-29 07:24:28,550 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.314e+02 3.905e+02 4.508e+02 5.292e+02 1.021e+03, threshold=9.015e+02, percent-clipped=3.0
+2023-03-29 07:25:13,117 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63845.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:26:13,516 INFO [train.py:892] (2/4) Epoch 35, batch 800, loss[loss=0.1498, simple_loss=0.2215, pruned_loss=0.03909, over 19867.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.237, pruned_loss=0.03895, over 3878086.97 frames. ], batch size: 136, lr: 4.47e-03, grad_scale: 16.0
+2023-03-29 07:26:14,595 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63871.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:26:28,878 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63877.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:27:35,430 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-03-29 07:27:37,166 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63906.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:28:01,419 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63916.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:28:11,334 INFO [train.py:892] (2/4) Epoch 35, batch 850, loss[loss=0.1458, simple_loss=0.2282, pruned_loss=0.03171, over 19891.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2378, pruned_loss=0.03885, over 3892267.38 frames. ], batch size: 71, lr: 4.47e-03, grad_scale: 16.0
+2023-03-29 07:28:21,657 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63925.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:28:21,680 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63925.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:28:25,052 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 3.650e+02 4.447e+02 5.355e+02 8.426e+02, threshold=8.894e+02, percent-clipped=0.0
+2023-03-29 07:29:02,890 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63942.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:29:50,706 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9520, 2.4072, 2.8959, 3.0439, 3.5630, 3.8282, 3.6973, 3.7652],
+ device='cuda:2'), covar=tensor([0.0933, 0.1583, 0.1214, 0.0763, 0.0491, 0.0295, 0.0447, 0.0347],
+ device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0170, 0.0180, 0.0152, 0.0138, 0.0134, 0.0126, 0.0118],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 07:30:06,799 INFO [train.py:892] (2/4) Epoch 35, batch 900, loss[loss=0.1552, simple_loss=0.228, pruned_loss=0.04126, over 19813.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2388, pruned_loss=0.0394, over 3904377.34 frames. ], batch size: 173, lr: 4.47e-03, grad_scale: 16.0
+2023-03-29 07:30:14,103 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7087, 4.7333, 5.0834, 4.8383, 5.0088, 4.5811, 4.8009, 4.6003],
+ device='cuda:2'), covar=tensor([0.1630, 0.1589, 0.0882, 0.1254, 0.0826, 0.0954, 0.1882, 0.2030],
+ device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0341, 0.0379, 0.0308, 0.0282, 0.0292, 0.0370, 0.0400],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-29 07:30:21,906 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63977.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:30:37,810 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5273, 4.7680, 4.8574, 4.7327, 4.4867, 4.8102, 4.3790, 4.3570],
+ device='cuda:2'), covar=tensor([0.0455, 0.0481, 0.0448, 0.0419, 0.0616, 0.0492, 0.0658, 0.0952],
+ device='cuda:2'), in_proj_covar=tensor([0.0277, 0.0291, 0.0304, 0.0267, 0.0273, 0.0257, 0.0273, 0.0319],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 07:32:03,690 INFO [train.py:892] (2/4) Epoch 35, batch 950, loss[loss=0.1937, simple_loss=0.2737, pruned_loss=0.05689, over 19661.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2385, pruned_loss=0.03946, over 3915119.41 frames. ], batch size: 299, lr: 4.46e-03, grad_scale: 16.0
+2023-03-29 07:32:08,601 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64023.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:32:16,289 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.345e+02 3.650e+02 4.248e+02 4.892e+02 8.257e+02, threshold=8.496e+02, percent-clipped=0.0
+2023-03-29 07:33:58,706 INFO [train.py:892] (2/4) Epoch 35, batch 1000, loss[loss=0.1559, simple_loss=0.2389, pruned_loss=0.03647, over 19855.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2378, pruned_loss=0.03931, over 3923773.00 frames. ], batch size: 56, lr: 4.46e-03, grad_scale: 16.0
+2023-03-29 07:34:09,941 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64076.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:34:14,114 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64078.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:34:27,968 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64084.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:35:51,141 INFO [train.py:892] (2/4) Epoch 35, batch 1050, loss[loss=0.153, simple_loss=0.2335, pruned_loss=0.03623, over 19727.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2381, pruned_loss=0.03924, over 3929238.21 frames. ], batch size: 95, lr: 4.46e-03, grad_scale: 16.0
+2023-03-29 07:35:59,004 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64124.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:36:04,345 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64126.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:36:05,624 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.257e+02 3.875e+02 4.468e+02 5.208e+02 1.393e+03, threshold=8.936e+02, percent-clipped=2.0
+2023-03-29 07:36:09,126 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6720, 4.4249, 4.4561, 4.7018, 4.3907, 4.8625, 4.7875, 4.9960],
+ device='cuda:2'), covar=tensor([0.0700, 0.0460, 0.0479, 0.0380, 0.0753, 0.0448, 0.0408, 0.0305],
+ device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0183, 0.0206, 0.0182, 0.0181, 0.0164, 0.0156, 0.0204],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-29 07:36:16,896 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1786, 2.2873, 1.5124, 2.3617, 2.2330, 2.2831, 2.3111, 1.9056],
+ device='cuda:2'), covar=tensor([0.0774, 0.0787, 0.1376, 0.0679, 0.0736, 0.0655, 0.0667, 0.1109],
+ device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0147, 0.0146, 0.0155, 0.0137, 0.0139, 0.0151, 0.0149],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 07:37:37,675 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64166.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:37:45,617 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7559, 4.1147, 4.2096, 4.9019, 3.2108, 3.5883, 3.0884, 2.9160],
+ device='cuda:2'), covar=tensor([0.0427, 0.1618, 0.0831, 0.0303, 0.1902, 0.1004, 0.1184, 0.1466],
+ device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0332, 0.0252, 0.0208, 0.0250, 0.0212, 0.0223, 0.0218],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 07:37:51,234 INFO [train.py:892] (2/4) Epoch 35, batch 1100, loss[loss=0.146, simple_loss=0.2249, pruned_loss=0.03358, over 19881.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2384, pruned_loss=0.03956, over 3933451.53 frames. ], batch size: 63, lr: 4.46e-03, grad_scale: 16.0
+2023-03-29 07:38:09,699 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-03-29 07:39:00,202 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64201.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:39:34,600 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0
+2023-03-29 07:39:48,445 INFO [train.py:892] (2/4) Epoch 35, batch 1150, loss[loss=0.1515, simple_loss=0.2301, pruned_loss=0.0364, over 19672.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.237, pruned_loss=0.03909, over 3937316.49 frames. ], batch size: 73, lr: 4.46e-03, grad_scale: 16.0
+2023-03-29 07:39:57,720 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64225.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:40:00,940 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.058e+02 3.821e+02 4.310e+02 5.075e+02 9.198e+02, threshold=8.620e+02, percent-clipped=1.0
+2023-03-29 07:40:13,241 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64232.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:40:37,919 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64242.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:40:40,759 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0
+2023-03-29 07:41:41,966 INFO [train.py:892] (2/4) Epoch 35, batch 1200, loss[loss=0.1729, simple_loss=0.2432, pruned_loss=0.05136, over 19801.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2381, pruned_loss=0.03966, over 3939588.42 frames. ], batch size: 168, lr: 4.46e-03, grad_scale: 16.0
+2023-03-29 07:41:46,071 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64272.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:41:48,212 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64273.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:41:48,618 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8782, 3.5057, 3.6965, 3.1880, 4.1123, 4.0676, 4.5946, 5.2136],
+ device='cuda:2'), covar=tensor([0.0429, 0.1398, 0.1361, 0.2231, 0.1319, 0.1186, 0.0573, 0.0422],
+ device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0244, 0.0269, 0.0257, 0.0302, 0.0261, 0.0236, 0.0262],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 07:42:25,752 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0
+2023-03-29 07:42:26,910 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64290.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:42:31,529 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3037, 3.4018, 2.1707, 3.4636, 3.5658, 1.7551, 3.0035, 2.8242],
+ device='cuda:2'), covar=tensor([0.0834, 0.0851, 0.2593, 0.0875, 0.0704, 0.2592, 0.1118, 0.0925],
+ device='cuda:2'), in_proj_covar=tensor([0.0238, 0.0261, 0.0233, 0.0282, 0.0261, 0.0207, 0.0241, 0.0201],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-29 07:42:33,683 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64293.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:43:27,541 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3662, 3.6196, 2.3052, 4.1243, 3.7381, 4.1312, 4.1425, 3.2451],
+ device='cuda:2'), covar=tensor([0.0619, 0.0584, 0.1501, 0.0562, 0.0600, 0.0379, 0.0569, 0.0780],
+ device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0147, 0.0146, 0.0155, 0.0137, 0.0139, 0.0152, 0.0150],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 07:43:37,210 INFO [train.py:892] (2/4) Epoch 35, batch 1250, loss[loss=0.1549, simple_loss=0.2323, pruned_loss=0.03875, over 19801.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2373, pruned_loss=0.03918, over 3942479.69 frames. ], batch size: 172, lr: 4.45e-03, grad_scale: 16.0
+2023-03-29 07:43:50,329 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0
+2023-03-29 07:43:50,725 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.911e+02 3.890e+02 4.539e+02 5.516e+02 1.564e+03, threshold=9.078e+02, percent-clipped=1.0
+2023-03-29 07:44:36,172 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64345.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:44:40,918 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-03-29 07:45:36,295 INFO [train.py:892] (2/4) Epoch 35, batch 1300, loss[loss=0.1544, simple_loss=0.239, pruned_loss=0.03489, over 19565.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2372, pruned_loss=0.03913, over 3943078.89 frames. ], batch size: 47, lr: 4.45e-03, grad_scale: 16.0
+2023-03-29 07:45:55,074 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64379.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:46:59,478 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64406.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:47:31,793 INFO [train.py:892] (2/4) Epoch 35, batch 1350, loss[loss=0.147, simple_loss=0.2244, pruned_loss=0.03474, over 19798.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2376, pruned_loss=0.03912, over 3945586.53 frames. ], batch size: 120, lr: 4.45e-03, grad_scale: 16.0
+2023-03-29 07:47:42,999 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1126, 3.3326, 2.9899, 2.5239, 2.9658, 3.3465, 3.2142, 3.3304],
+ device='cuda:2'), covar=tensor([0.0284, 0.0309, 0.0290, 0.0511, 0.0359, 0.0226, 0.0272, 0.0208],
+ device='cuda:2'), in_proj_covar=tensor([0.0108, 0.0101, 0.0104, 0.0104, 0.0108, 0.0091, 0.0092, 0.0090],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 07:47:46,399 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.065e+02 3.591e+02 4.239e+02 5.222e+02 9.468e+02, threshold=8.478e+02, percent-clipped=2.0
+2023-03-29 07:49:14,361 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64465.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:49:14,789 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0
+2023-03-29 07:49:16,605 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64466.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:49:25,914 INFO [train.py:892] (2/4) Epoch 35, batch 1400, loss[loss=0.1805, simple_loss=0.253, pruned_loss=0.05399, over 19785.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2376, pruned_loss=0.03957, over 3947327.86 frames. ], batch size: 236, lr: 4.45e-03, grad_scale: 16.0
+2023-03-29 07:50:37,447 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64501.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:51:09,401 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64514.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:51:23,333 INFO [train.py:892] (2/4) Epoch 35, batch 1450, loss[loss=0.1387, simple_loss=0.2139, pruned_loss=0.0318, over 19827.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2379, pruned_loss=0.03917, over 3947920.21 frames. ], batch size: 101, lr: 4.45e-03, grad_scale: 16.0
+2023-03-29 07:51:35,145 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64526.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:51:36,073 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.625e+02 3.695e+02 4.182e+02 5.095e+02 9.377e+02, threshold=8.363e+02, percent-clipped=1.0
+2023-03-29 07:52:31,724 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64549.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:52:57,002 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8721, 3.5802, 3.7576, 3.0589, 4.1044, 3.4627, 3.5611, 4.0737],
+ device='cuda:2'), covar=tensor([0.0574, 0.0446, 0.0863, 0.0720, 0.0322, 0.0380, 0.0515, 0.0279],
+ device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0089, 0.0085, 0.0112, 0.0082, 0.0085, 0.0083, 0.0076],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-29 07:53:21,046 INFO [train.py:892] (2/4) Epoch 35, batch 1500, loss[loss=0.1674, simple_loss=0.2457, pruned_loss=0.0445, over 19816.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2381, pruned_loss=0.03913, over 3947148.58 frames. ], batch size: 166, lr: 4.45e-03, grad_scale: 16.0
+2023-03-29 07:53:24,928 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64572.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:54:00,626 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64588.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:54:02,543 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64589.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:55:13,370 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64620.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:55:15,304 INFO [train.py:892] (2/4) Epoch 35, batch 1550, loss[loss=0.1369, simple_loss=0.216, pruned_loss=0.02891, over 19753.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2381, pruned_loss=0.03864, over 3945142.02 frames. ], batch size: 102, lr: 4.44e-03, grad_scale: 16.0
+2023-03-29 07:55:27,762 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.542e+02 3.591e+02 4.055e+02 5.054e+02 8.662e+02, threshold=8.110e+02, percent-clipped=1.0
+2023-03-29 07:56:18,123 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64648.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:56:22,714 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64650.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:57:10,761 INFO [train.py:892] (2/4) Epoch 35, batch 1600, loss[loss=0.1528, simple_loss=0.2305, pruned_loss=0.03755, over 19732.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2378, pruned_loss=0.03833, over 3947163.40 frames. ], batch size: 62, lr: 4.44e-03, grad_scale: 16.0
+2023-03-29 07:57:31,547 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64679.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:57:35,538 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0
+2023-03-29 07:57:40,095 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0
+2023-03-29 07:58:22,494 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64701.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:58:26,729 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64703.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:58:40,134 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64709.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:59:08,797 INFO [train.py:892] (2/4) Epoch 35, batch 1650, loss[loss=0.1736, simple_loss=0.2561, pruned_loss=0.04552, over 19754.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.238, pruned_loss=0.0384, over 3947404.04 frames. ], batch size: 253, lr: 4.44e-03, grad_scale: 16.0
+2023-03-29 07:59:21,887 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.745e+02 3.702e+02 4.321e+02 4.955e+02 1.270e+03, threshold=8.641e+02, percent-clipped=4.0
+2023-03-29 07:59:22,821 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64727.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 07:59:43,766 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3755, 5.8687, 5.9473, 5.7754, 5.6423, 5.6398, 5.6114, 5.5195],
+ device='cuda:2'), covar=tensor([0.1447, 0.1246, 0.0767, 0.1081, 0.0665, 0.0720, 0.1737, 0.1712],
+ device='cuda:2'), in_proj_covar=tensor([0.0300, 0.0340, 0.0376, 0.0304, 0.0278, 0.0288, 0.0366, 0.0394],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-29 08:00:03,335 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64745.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 08:00:10,988 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.8452, 6.1818, 6.1840, 6.1023, 5.9534, 6.1632, 5.5243, 5.5748],
+ device='cuda:2'), covar=tensor([0.0383, 0.0401, 0.0483, 0.0367, 0.0531, 0.0478, 0.0689, 0.0995],
+ device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0294, 0.0306, 0.0266, 0.0275, 0.0259, 0.0273, 0.0321],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-29 08:00:20,017 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64752.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 08:00:46,826 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64764.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 08:01:02,068 INFO [train.py:892] (2/4) Epoch 35, batch 1700, loss[loss=0.1694, simple_loss=0.2341, pruned_loss=0.05236, over 19812.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2394, pruned_loss=0.0391, over 3946902.60 frames. ], batch size: 123, lr: 4.44e-03, grad_scale: 16.0
+2023-03-29 08:02:25,961 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64806.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 08:02:39,378 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64813.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 08:02:55,108 INFO [train.py:892] (2/4) Epoch 35, batch 1750, loss[loss=0.1347, simple_loss=0.2157, pruned_loss=0.02685, over 19763.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2388, pruned_loss=0.03928, over 3948051.41 frames. ], batch size: 100, lr: 4.44e-03, grad_scale: 16.0
+2023-03-29 08:02:55,753 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64821.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 08:03:07,044 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.308e+02 3.852e+02 4.447e+02 5.753e+02 1.014e+03, threshold=8.894e+02, percent-clipped=4.0
+2023-03-29 08:03:21,958 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3350, 3.6092, 3.7826, 4.4175, 2.9977, 3.3934, 2.7050, 2.7514],
+ device='cuda:2'), covar=tensor([0.0501, 0.1913, 0.0905, 0.0358, 0.1939, 0.1018, 0.1345, 0.1571],
+ device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0331, 0.0253, 0.0208, 0.0251, 0.0213, 0.0223, 0.0219],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 08:04:10,026 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.5242, 1.3065, 1.5187, 1.4859, 1.4045, 1.4679, 1.2526, 1.4932],
+ device='cuda:2'), covar=tensor([0.0408, 0.0425, 0.0362, 0.0365, 0.0495, 0.0360, 0.0581, 0.0383],
+ device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0085, 0.0087, 0.0081, 0.0094, 0.0086, 0.0103, 0.0076],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-29 08:04:35,088 INFO [train.py:892] (2/4) Epoch 35, batch 1800, loss[loss=0.1618, simple_loss=0.2402, pruned_loss=0.0417, over 19748.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2405, pruned_loss=0.03973, over 3944555.94 frames. ], batch size: 221, lr: 4.44e-03, grad_scale: 16.0
+2023-03-29 08:05:06,249 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64888.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 08:06:07,614 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0690, 4.7834, 4.8017, 5.0999, 4.7287, 5.3021, 5.2161, 5.4219],
+ device='cuda:2'), covar=tensor([0.0714, 0.0426, 0.0498, 0.0387, 0.0794, 0.0385, 0.0395, 0.0304],
+ device='cuda:2'), in_proj_covar=tensor([0.0158, 0.0182, 0.0206, 0.0183, 0.0180, 0.0164, 0.0156, 0.0203],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-29 08:06:09,178 INFO [train.py:892] (2/4) Epoch 35, batch 1850, loss[loss=0.1544, simple_loss=0.2422, pruned_loss=0.0333, over 19822.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2405, pruned_loss=0.03909, over 3947002.84 frames. ], batch size: 57, lr: 4.43e-03, grad_scale: 16.0
+2023-03-29 08:07:11,818 INFO [train.py:892] (2/4) Epoch 36, batch 0, loss[loss=0.1435, simple_loss=0.2222, pruned_loss=0.03242, over 19837.00 frames. ], tot_loss[loss=0.1435, simple_loss=0.2222, pruned_loss=0.03242, over 19837.00 frames. ], batch size: 184, lr: 4.37e-03, grad_scale: 16.0
+2023-03-29 08:07:11,819 INFO [train.py:917] (2/4) Computing validation loss
+2023-03-29 08:07:46,030 INFO [train.py:926] (2/4) Epoch 36, validation: loss=0.183, simple_loss=0.249, pruned_loss=0.05846, over 2883724.00 frames.
+2023-03-29 08:07:46,031 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 08:07:48,052 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.083e+02 3.485e+02 4.267e+02 5.108e+02 8.561e+02, threshold=8.534e+02, percent-clipped=0.0 +2023-03-29 08:08:10,070 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:08:19,529 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0078, 2.6167, 3.0522, 3.1424, 3.8156, 4.1823, 4.1607, 4.1971], + device='cuda:2'), covar=tensor([0.0992, 0.1577, 0.1353, 0.0769, 0.0413, 0.0306, 0.0319, 0.0364], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0168, 0.0180, 0.0152, 0.0137, 0.0133, 0.0126, 0.0117], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 08:08:30,563 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5799, 2.1510, 2.3988, 2.8347, 3.3097, 3.4164, 3.3357, 3.3325], + device='cuda:2'), covar=tensor([0.1118, 0.1694, 0.1450, 0.0801, 0.0474, 0.0371, 0.0417, 0.0527], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0168, 0.0179, 0.0152, 0.0137, 0.0133, 0.0126, 0.0117], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 08:08:32,655 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64945.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:09:41,969 INFO [train.py:892] (2/4) Epoch 36, batch 50, loss[loss=0.1507, simple_loss=0.2302, pruned_loss=0.03558, over 19890.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2323, pruned_loss=0.03742, over 891572.90 frames. ], batch size: 87, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:09:42,934 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64976.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:10:42,203 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65001.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:10:49,938 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65004.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:11:33,500 INFO [train.py:892] (2/4) Epoch 36, batch 100, loss[loss=0.1602, simple_loss=0.2471, pruned_loss=0.03662, over 19638.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2338, pruned_loss=0.03824, over 1569862.93 frames. ], batch size: 299, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:11:35,850 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.481e+02 3.835e+02 4.535e+02 5.519e+02 9.407e+02, threshold=9.071e+02, percent-clipped=1.0 +2023-03-29 08:11:59,347 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65037.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:12:26,454 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65049.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:12:42,352 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65055.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:12:50,284 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65059.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:13:25,762 INFO [train.py:892] (2/4) Epoch 36, batch 150, loss[loss=0.1708, simple_loss=0.2525, pruned_loss=0.04456, over 19738.00 frames. 
], tot_loss[loss=0.156, simple_loss=0.2354, pruned_loss=0.03829, over 2098015.59 frames. ], batch size: 80, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:14:25,097 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65101.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:14:39,305 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65108.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:14:45,181 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1441, 2.0916, 2.1832, 2.1817, 2.1097, 2.2907, 2.0991, 2.2352], + device='cuda:2'), covar=tensor([0.0420, 0.0344, 0.0382, 0.0385, 0.0545, 0.0353, 0.0532, 0.0369], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0085, 0.0087, 0.0082, 0.0094, 0.0087, 0.0103, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 08:14:58,851 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:15:10,881 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65121.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:15:13,319 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4022, 4.6003, 2.7364, 4.8945, 5.0708, 2.2454, 4.3484, 3.6465], + device='cuda:2'), covar=tensor([0.0667, 0.0663, 0.2506, 0.0537, 0.0457, 0.2658, 0.0835, 0.0837], + device='cuda:2'), in_proj_covar=tensor([0.0238, 0.0260, 0.0233, 0.0280, 0.0260, 0.0206, 0.0242, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 08:15:22,296 INFO [train.py:892] (2/4) Epoch 36, batch 200, loss[loss=0.1696, simple_loss=0.2519, pruned_loss=0.04367, over 19778.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2358, pruned_loss=0.03791, over 2510211.08 frames. ], batch size: 66, lr: 4.36e-03, grad_scale: 16.0 +2023-03-29 08:15:24,691 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.519e+02 3.588e+02 4.285e+02 5.132e+02 1.208e+03, threshold=8.571e+02, percent-clipped=3.0 +2023-03-29 08:15:57,638 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1409, 1.9231, 3.2745, 2.6387, 3.2495, 3.3062, 3.0228, 3.1280], + device='cuda:2'), covar=tensor([0.1080, 0.1306, 0.0150, 0.0476, 0.0186, 0.0265, 0.0277, 0.0238], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0106, 0.0091, 0.0154, 0.0088, 0.0101, 0.0092, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 08:16:29,069 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-29 08:17:01,275 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65169.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:17:15,442 INFO [train.py:892] (2/4) Epoch 36, batch 250, loss[loss=0.1625, simple_loss=0.2411, pruned_loss=0.04196, over 19827.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.238, pruned_loss=0.03856, over 2826655.71 frames. ], batch size: 229, lr: 4.36e-03, grad_scale: 16.0 +2023-03-29 08:19:08,897 INFO [train.py:892] (2/4) Epoch 36, batch 300, loss[loss=0.1613, simple_loss=0.241, pruned_loss=0.04084, over 19827.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2374, pruned_loss=0.03868, over 3075916.50 frames. 
], batch size: 147, lr: 4.36e-03, grad_scale: 32.0 +2023-03-29 08:19:12,292 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.658e+02 3.728e+02 4.313e+02 5.514e+02 9.328e+02, threshold=8.626e+02, percent-clipped=1.0 +2023-03-29 08:19:53,972 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65245.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:20:47,079 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6452, 2.7171, 4.0268, 3.1408, 3.3594, 3.1340, 2.3641, 2.4668], + device='cuda:2'), covar=tensor([0.1314, 0.3428, 0.0632, 0.1171, 0.1856, 0.1663, 0.2891, 0.2973], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0393, 0.0351, 0.0289, 0.0377, 0.0384, 0.0380, 0.0353], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 08:21:04,016 INFO [train.py:892] (2/4) Epoch 36, batch 350, loss[loss=0.1483, simple_loss=0.2195, pruned_loss=0.03859, over 19740.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2359, pruned_loss=0.03815, over 3270566.32 frames. ], batch size: 179, lr: 4.36e-03, grad_scale: 32.0 +2023-03-29 08:21:23,755 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65285.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:21:42,252 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65293.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:22:08,689 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65304.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:22:56,429 INFO [train.py:892] (2/4) Epoch 36, batch 400, loss[loss=0.1404, simple_loss=0.2235, pruned_loss=0.02871, over 19754.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2345, pruned_loss=0.03735, over 3421664.43 frames. ], batch size: 44, lr: 4.36e-03, grad_scale: 32.0 +2023-03-29 08:22:58,380 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.673e+02 3.890e+02 4.309e+02 5.103e+02 7.724e+02, threshold=8.618e+02, percent-clipped=0.0 +2023-03-29 08:23:09,794 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65332.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:23:45,266 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65346.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:23:58,028 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65352.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:24:14,580 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65359.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:24:54,093 INFO [train.py:892] (2/4) Epoch 36, batch 450, loss[loss=0.1453, simple_loss=0.2298, pruned_loss=0.03042, over 19865.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2351, pruned_loss=0.03757, over 3539976.32 frames. 
], batch size: 48, lr: 4.36e-03, grad_scale: 16.0 +2023-03-29 08:25:49,881 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65401.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:03,622 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65407.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:07,359 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65408.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:14,970 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65411.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:46,791 INFO [train.py:892] (2/4) Epoch 36, batch 500, loss[loss=0.1614, simple_loss=0.234, pruned_loss=0.04439, over 19732.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2355, pruned_loss=0.03766, over 3631436.49 frames. ], batch size: 179, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:26:52,435 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.389e+02 3.662e+02 4.205e+02 4.670e+02 6.884e+02, threshold=8.409e+02, percent-clipped=0.0 +2023-03-29 08:27:37,907 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65449.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:27:55,789 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65456.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:28:38,039 INFO [train.py:892] (2/4) Epoch 36, batch 550, loss[loss=0.1575, simple_loss=0.2427, pruned_loss=0.03619, over 19712.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2354, pruned_loss=0.03785, over 3701215.24 frames. ], batch size: 101, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:30:22,201 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4202, 4.3039, 4.7372, 4.3126, 3.9935, 4.5135, 4.3740, 4.8187], + device='cuda:2'), covar=tensor([0.0747, 0.0343, 0.0318, 0.0391, 0.0901, 0.0544, 0.0429, 0.0292], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0226, 0.0226, 0.0238, 0.0208, 0.0249, 0.0238, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 08:30:27,362 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4937, 5.0531, 5.1253, 4.8595, 5.3963, 3.3285, 4.2857, 2.6000], + device='cuda:2'), covar=tensor([0.0151, 0.0187, 0.0134, 0.0198, 0.0143, 0.0931, 0.0950, 0.1582], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0137, 0.0121, 0.0138, 0.0145, 0.0130], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 08:30:30,663 INFO [train.py:892] (2/4) Epoch 36, batch 600, loss[loss=0.1452, simple_loss=0.2229, pruned_loss=0.03375, over 19801.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2361, pruned_loss=0.03867, over 3756817.24 frames. ], batch size: 47, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:30:34,423 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.355e+02 3.593e+02 4.441e+02 5.313e+02 1.329e+03, threshold=8.882e+02, percent-clipped=4.0 +2023-03-29 08:30:35,763 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.02 vs. 
limit=5.0 +2023-03-29 08:30:42,671 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65532.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:32:21,962 INFO [train.py:892] (2/4) Epoch 36, batch 650, loss[loss=0.1636, simple_loss=0.2429, pruned_loss=0.04212, over 19898.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.236, pruned_loss=0.03873, over 3800173.69 frames. ], batch size: 113, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:33:02,641 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65593.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 08:34:14,088 INFO [train.py:892] (2/4) Epoch 36, batch 700, loss[loss=0.1498, simple_loss=0.2309, pruned_loss=0.03437, over 19756.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2352, pruned_loss=0.03791, over 3832969.63 frames. ], batch size: 188, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:34:18,133 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.459e+02 3.698e+02 4.189e+02 4.947e+02 1.521e+03, threshold=8.379e+02, percent-clipped=3.0 +2023-03-29 08:34:30,928 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65632.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:34:52,627 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65641.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:35:39,605 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2696, 3.5639, 3.7008, 4.2220, 2.9273, 3.3995, 2.6771, 2.5993], + device='cuda:2'), covar=tensor([0.0441, 0.1703, 0.0882, 0.0395, 0.1867, 0.0911, 0.1324, 0.1547], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0330, 0.0253, 0.0208, 0.0251, 0.0213, 0.0222, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 08:36:12,641 INFO [train.py:892] (2/4) Epoch 36, batch 750, loss[loss=0.134, simple_loss=0.2124, pruned_loss=0.02783, over 19736.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2351, pruned_loss=0.03796, over 3859298.98 frames. 
], batch size: 47, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:36:22,141 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65680.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:37:08,155 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3177, 3.2209, 4.9788, 3.5696, 3.8876, 3.7315, 2.6872, 2.9282], + device='cuda:2'), covar=tensor([0.0953, 0.3022, 0.0427, 0.1107, 0.1783, 0.1452, 0.2551, 0.2685], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0393, 0.0350, 0.0289, 0.0376, 0.0384, 0.0380, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 08:37:18,513 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2561, 2.6443, 3.1714, 3.4212, 3.9484, 4.4429, 4.3359, 4.3685], + device='cuda:2'), covar=tensor([0.0867, 0.1707, 0.1318, 0.0624, 0.0392, 0.0201, 0.0270, 0.0331], + device='cuda:2'), in_proj_covar=tensor([0.0164, 0.0171, 0.0181, 0.0154, 0.0140, 0.0135, 0.0127, 0.0120], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 08:37:34,551 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65711.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:38:07,166 INFO [train.py:892] (2/4) Epoch 36, batch 800, loss[loss=0.1473, simple_loss=0.228, pruned_loss=0.0333, over 19845.00 frames. ], tot_loss[loss=0.156, simple_loss=0.236, pruned_loss=0.03797, over 3879488.89 frames. ], batch size: 109, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:38:11,737 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 3.709e+02 4.446e+02 5.789e+02 1.138e+03, threshold=8.893e+02, percent-clipped=2.0 +2023-03-29 08:39:04,716 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4900, 4.7127, 4.7484, 4.6120, 4.3910, 4.7571, 4.2581, 4.3316], + device='cuda:2'), covar=tensor([0.0550, 0.0548, 0.0534, 0.0458, 0.0718, 0.0499, 0.0746, 0.0949], + device='cuda:2'), in_proj_covar=tensor([0.0281, 0.0297, 0.0307, 0.0270, 0.0277, 0.0260, 0.0276, 0.0323], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 08:39:24,888 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65759.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:40:02,198 INFO [train.py:892] (2/4) Epoch 36, batch 850, loss[loss=0.1709, simple_loss=0.2545, pruned_loss=0.04369, over 19750.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2359, pruned_loss=0.03788, over 3895631.41 frames. ], batch size: 250, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:41:57,340 INFO [train.py:892] (2/4) Epoch 36, batch 900, loss[loss=0.1224, simple_loss=0.2034, pruned_loss=0.02065, over 19804.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2365, pruned_loss=0.03804, over 3906672.89 frames. ], batch size: 47, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:42:01,149 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.659e+02 3.793e+02 4.465e+02 5.730e+02 1.109e+03, threshold=8.930e+02, percent-clipped=1.0 +2023-03-29 08:42:24,458 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65838.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:42:51,945 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.67 vs. 
limit=5.0 +2023-03-29 08:43:50,241 INFO [train.py:892] (2/4) Epoch 36, batch 950, loss[loss=0.177, simple_loss=0.2509, pruned_loss=0.05155, over 19762.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2365, pruned_loss=0.03803, over 3916713.13 frames. ], batch size: 233, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:44:19,526 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65888.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:44:45,465 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65899.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:45:42,706 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3596, 3.4522, 2.0293, 4.0249, 3.6238, 4.0803, 4.0153, 3.1544], + device='cuda:2'), covar=tensor([0.0661, 0.0644, 0.1788, 0.0706, 0.0690, 0.0413, 0.0728, 0.0827], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0148, 0.0145, 0.0157, 0.0136, 0.0140, 0.0152, 0.0149], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 08:45:43,722 INFO [train.py:892] (2/4) Epoch 36, batch 1000, loss[loss=0.1567, simple_loss=0.2395, pruned_loss=0.0369, over 19888.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2365, pruned_loss=0.03797, over 3922597.23 frames. ], batch size: 62, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:45:47,943 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.017e+02 3.590e+02 4.101e+02 4.810e+02 1.037e+03, threshold=8.201e+02, percent-clipped=3.0 +2023-03-29 08:46:20,456 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65941.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:47:38,741 INFO [train.py:892] (2/4) Epoch 36, batch 1050, loss[loss=0.156, simple_loss=0.2368, pruned_loss=0.0376, over 19841.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2373, pruned_loss=0.03825, over 3929964.21 frames. ], batch size: 90, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:48:10,608 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65989.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:48:56,391 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7601, 3.4944, 3.8275, 3.0106, 3.9557, 3.3354, 3.5267, 3.9682], + device='cuda:2'), covar=tensor([0.0640, 0.0410, 0.0507, 0.0720, 0.0436, 0.0381, 0.0489, 0.0289], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0091, 0.0087, 0.0114, 0.0083, 0.0086, 0.0084, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 08:49:06,134 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5281, 4.3385, 4.3515, 4.1280, 4.5588, 2.9867, 3.8776, 2.0864], + device='cuda:2'), covar=tensor([0.0212, 0.0224, 0.0165, 0.0209, 0.0154, 0.1128, 0.0691, 0.1616], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0149, 0.0115, 0.0137, 0.0121, 0.0137, 0.0145, 0.0129], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 08:49:35,978 INFO [train.py:892] (2/4) Epoch 36, batch 1100, loss[loss=0.1806, simple_loss=0.266, pruned_loss=0.04763, over 19552.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2384, pruned_loss=0.03882, over 3933869.08 frames. 
], batch size: 60, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:49:39,604 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.898e+02 3.611e+02 4.375e+02 5.188e+02 7.346e+02, threshold=8.750e+02, percent-clipped=0.0 +2023-03-29 08:51:23,863 INFO [train.py:892] (2/4) Epoch 36, batch 1150, loss[loss=0.1576, simple_loss=0.2348, pruned_loss=0.04019, over 19806.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2377, pruned_loss=0.03894, over 3938397.69 frames. ], batch size: 126, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:51:32,861 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.56 vs. limit=5.0 +2023-03-29 08:53:15,605 INFO [train.py:892] (2/4) Epoch 36, batch 1200, loss[loss=0.1382, simple_loss=0.2156, pruned_loss=0.03036, over 19587.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2376, pruned_loss=0.03856, over 3941452.71 frames. ], batch size: 42, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:53:19,998 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.230e+02 3.760e+02 4.454e+02 5.058e+02 1.051e+03, threshold=8.908e+02, percent-clipped=2.0 +2023-03-29 08:53:58,145 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66144.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:54:33,343 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66160.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:55:09,247 INFO [train.py:892] (2/4) Epoch 36, batch 1250, loss[loss=0.1324, simple_loss=0.2106, pruned_loss=0.02708, over 19654.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2366, pruned_loss=0.03815, over 3941973.00 frames. ], batch size: 47, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:55:38,288 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66188.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:55:51,123 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66194.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 08:56:16,224 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66205.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 08:56:52,685 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66221.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 08:57:03,463 INFO [train.py:892] (2/4) Epoch 36, batch 1300, loss[loss=0.1423, simple_loss=0.2273, pruned_loss=0.02865, over 19637.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2369, pruned_loss=0.03853, over 3943589.66 frames. 
], batch size: 68, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:57:07,571 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 3.837e+02 4.506e+02 5.422e+02 1.103e+03, threshold=9.011e+02, percent-clipped=2.0 +2023-03-29 08:57:25,563 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66236.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:57:29,781 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9999, 4.6566, 4.7370, 4.4478, 4.9765, 3.1321, 4.0322, 2.4018], + device='cuda:2'), covar=tensor([0.0182, 0.0221, 0.0145, 0.0217, 0.0141, 0.1025, 0.0929, 0.1628], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0137, 0.0121, 0.0137, 0.0145, 0.0130], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 08:57:42,064 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5903, 3.9566, 4.1206, 4.6850, 3.2354, 3.4221, 2.9371, 2.9201], + device='cuda:2'), covar=tensor([0.0486, 0.1890, 0.0815, 0.0381, 0.1869, 0.1080, 0.1371, 0.1644], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0330, 0.0252, 0.0207, 0.0251, 0.0214, 0.0224, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 08:58:16,506 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66258.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:58:30,408 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9705, 3.8482, 4.2203, 3.8622, 3.6776, 4.1119, 3.9086, 4.3057], + device='cuda:2'), covar=tensor([0.0708, 0.0352, 0.0363, 0.0377, 0.1032, 0.0524, 0.0495, 0.0319], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0225, 0.0224, 0.0236, 0.0206, 0.0247, 0.0237, 0.0221], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 08:58:57,020 INFO [train.py:892] (2/4) Epoch 36, batch 1350, loss[loss=0.152, simple_loss=0.2256, pruned_loss=0.03918, over 19755.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2367, pruned_loss=0.03824, over 3945537.75 frames. ], batch size: 179, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 09:00:26,700 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6248, 3.2157, 3.5282, 3.1046, 3.8144, 3.8456, 4.3847, 4.8699], + device='cuda:2'), covar=tensor([0.0438, 0.1569, 0.1375, 0.2056, 0.1489, 0.1223, 0.0583, 0.0445], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0244, 0.0272, 0.0258, 0.0304, 0.0261, 0.0237, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 09:00:37,105 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66319.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:00:49,665 INFO [train.py:892] (2/4) Epoch 36, batch 1400, loss[loss=0.1633, simple_loss=0.2437, pruned_loss=0.04147, over 19687.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2359, pruned_loss=0.03797, over 3947700.28 frames. 
], batch size: 265, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:00:54,077 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.446e+02 3.517e+02 4.188e+02 4.908e+02 8.356e+02, threshold=8.377e+02, percent-clipped=0.0 +2023-03-29 09:02:06,189 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4324, 3.4530, 2.3338, 4.1136, 3.7455, 4.0426, 4.1633, 3.2088], + device='cuda:2'), covar=tensor([0.0614, 0.0619, 0.1535, 0.0633, 0.0638, 0.0448, 0.0554, 0.0827], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0148, 0.0145, 0.0157, 0.0137, 0.0140, 0.0152, 0.0149], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 09:02:43,633 INFO [train.py:892] (2/4) Epoch 36, batch 1450, loss[loss=0.1753, simple_loss=0.2547, pruned_loss=0.04801, over 19738.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2368, pruned_loss=0.03848, over 3947168.52 frames. ], batch size: 140, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:04:27,608 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 09:04:34,690 INFO [train.py:892] (2/4) Epoch 36, batch 1500, loss[loss=0.172, simple_loss=0.2429, pruned_loss=0.0505, over 19801.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2364, pruned_loss=0.03824, over 3948396.72 frames. ], batch size: 126, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:04:40,373 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.359e+02 3.684e+02 4.435e+02 5.309e+02 1.081e+03, threshold=8.870e+02, percent-clipped=2.0 +2023-03-29 09:04:55,501 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3008, 2.5696, 2.7679, 3.1405, 2.0328, 2.8776, 1.9942, 2.0559], + device='cuda:2'), covar=tensor([0.0677, 0.1455, 0.1146, 0.0583, 0.2452, 0.0877, 0.1579, 0.1681], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0332, 0.0253, 0.0208, 0.0253, 0.0214, 0.0224, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 09:06:32,052 INFO [train.py:892] (2/4) Epoch 36, batch 1550, loss[loss=0.1481, simple_loss=0.2271, pruned_loss=0.03454, over 19846.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2368, pruned_loss=0.03817, over 3948670.53 frames. ], batch size: 137, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:07:11,651 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66494.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:07:25,585 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66500.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:08:01,251 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66516.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 09:08:11,925 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66521.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:08:20,347 INFO [train.py:892] (2/4) Epoch 36, batch 1600, loss[loss=0.1379, simple_loss=0.2171, pruned_loss=0.02936, over 19709.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2364, pruned_loss=0.03789, over 3948542.93 frames. 
], batch size: 85, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:08:24,090 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.296e+02 3.591e+02 4.311e+02 5.202e+02 1.053e+03, threshold=8.622e+02, percent-clipped=1.0 +2023-03-29 09:08:35,647 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7571, 2.7839, 1.8010, 3.1652, 2.9437, 3.0798, 3.2017, 2.6032], + device='cuda:2'), covar=tensor([0.0767, 0.0834, 0.1705, 0.0782, 0.0726, 0.0668, 0.0679, 0.0964], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0149, 0.0146, 0.0158, 0.0138, 0.0141, 0.0152, 0.0150], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 09:08:49,484 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7900, 2.8618, 2.9321, 2.9558, 2.8947, 2.8270, 2.8050, 2.8708], + device='cuda:2'), covar=tensor([0.0357, 0.0371, 0.0342, 0.0315, 0.0357, 0.0385, 0.0414, 0.0419], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0087, 0.0090, 0.0083, 0.0096, 0.0089, 0.0107, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 09:08:57,599 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66542.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:10:14,431 INFO [train.py:892] (2/4) Epoch 36, batch 1650, loss[loss=0.2009, simple_loss=0.2799, pruned_loss=0.06088, over 19597.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2376, pruned_loss=0.03819, over 3948970.93 frames. ], batch size: 367, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:10:20,377 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.25 vs. limit=5.0 +2023-03-29 09:10:29,320 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66582.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:11:42,689 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66614.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:12:09,543 INFO [train.py:892] (2/4) Epoch 36, batch 1700, loss[loss=0.1424, simple_loss=0.2249, pruned_loss=0.02993, over 19872.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2366, pruned_loss=0.03791, over 3950054.10 frames. ], batch size: 108, lr: 4.31e-03, grad_scale: 16.0 +2023-03-29 09:12:13,638 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.609e+02 4.438e+02 5.504e+02 8.717e+02, threshold=8.877e+02, percent-clipped=1.0 +2023-03-29 09:13:35,340 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-29 09:13:57,345 INFO [train.py:892] (2/4) Epoch 36, batch 1750, loss[loss=0.1289, simple_loss=0.2056, pruned_loss=0.02614, over 19768.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2357, pruned_loss=0.03762, over 3949627.02 frames. ], batch size: 119, lr: 4.31e-03, grad_scale: 16.0 +2023-03-29 09:14:57,758 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-29 09:14:59,254 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.88 vs. limit=5.0 +2023-03-29 09:15:32,721 INFO [train.py:892] (2/4) Epoch 36, batch 1800, loss[loss=0.1327, simple_loss=0.2116, pruned_loss=0.0269, over 19847.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.235, pruned_loss=0.03726, over 3950343.34 frames. 
], batch size: 112, lr: 4.31e-03, grad_scale: 16.0 +2023-03-29 09:15:36,380 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.260e+02 3.634e+02 4.363e+02 5.062e+02 8.323e+02, threshold=8.726e+02, percent-clipped=0.0 +2023-03-29 09:15:42,725 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66731.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:17:03,387 INFO [train.py:892] (2/4) Epoch 36, batch 1850, loss[loss=0.1873, simple_loss=0.2776, pruned_loss=0.04846, over 19852.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2396, pruned_loss=0.03856, over 3946761.95 frames. ], batch size: 58, lr: 4.31e-03, grad_scale: 16.0 +2023-03-29 09:18:08,604 INFO [train.py:892] (2/4) Epoch 37, batch 0, loss[loss=0.2004, simple_loss=0.2833, pruned_loss=0.05877, over 19637.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2833, pruned_loss=0.05877, over 19637.00 frames. ], batch size: 367, lr: 4.25e-03, grad_scale: 16.0 +2023-03-29 09:18:08,604 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 09:18:41,826 INFO [train.py:926] (2/4) Epoch 37, validation: loss=0.1834, simple_loss=0.2492, pruned_loss=0.05881, over 2883724.00 frames. +2023-03-29 09:18:41,828 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 09:18:58,456 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5573, 3.2314, 3.5240, 2.7835, 3.7260, 3.0938, 3.3412, 3.5711], + device='cuda:2'), covar=tensor([0.0548, 0.0503, 0.0618, 0.0768, 0.0338, 0.0415, 0.0473, 0.0342], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0091, 0.0087, 0.0114, 0.0083, 0.0086, 0.0084, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 09:19:11,111 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66792.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 09:19:27,974 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66800.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 09:20:03,783 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66816.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:20:31,170 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.683e+02 3.599e+02 4.155e+02 4.773e+02 9.045e+02, threshold=8.309e+02, percent-clipped=1.0 +2023-03-29 09:20:39,148 INFO [train.py:892] (2/4) Epoch 37, batch 50, loss[loss=0.1369, simple_loss=0.2192, pruned_loss=0.02731, over 19839.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2328, pruned_loss=0.03694, over 892078.61 frames. 
], batch size: 90, lr: 4.25e-03, grad_scale: 16.0 +2023-03-29 09:21:04,074 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7699, 4.5659, 5.1283, 4.6216, 4.2019, 4.8937, 4.6700, 5.2756], + device='cuda:2'), covar=tensor([0.0802, 0.0406, 0.0359, 0.0376, 0.0842, 0.0457, 0.0485, 0.0321], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0228, 0.0226, 0.0239, 0.0209, 0.0247, 0.0239, 0.0222], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 09:21:15,286 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:21:51,432 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66864.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:22:19,768 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66877.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:22:30,257 INFO [train.py:892] (2/4) Epoch 37, batch 100, loss[loss=0.1578, simple_loss=0.2503, pruned_loss=0.03261, over 19526.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.234, pruned_loss=0.03735, over 1569876.54 frames. ], batch size: 54, lr: 4.25e-03, grad_scale: 16.0 +2023-03-29 09:23:45,914 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66914.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:24:15,780 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.134e+02 3.769e+02 4.346e+02 5.384e+02 1.182e+03, threshold=8.692e+02, percent-clipped=4.0 +2023-03-29 09:24:22,075 INFO [train.py:892] (2/4) Epoch 37, batch 150, loss[loss=0.1337, simple_loss=0.2153, pruned_loss=0.02602, over 19695.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2337, pruned_loss=0.03697, over 2097806.41 frames. ], batch size: 82, lr: 4.25e-03, grad_scale: 16.0 +2023-03-29 09:24:45,944 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0 +2023-03-29 09:25:24,915 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66957.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:25:27,161 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8831, 2.4368, 2.8072, 3.1062, 3.5747, 3.8345, 3.7284, 3.6593], + device='cuda:2'), covar=tensor([0.0984, 0.1621, 0.1336, 0.0744, 0.0486, 0.0296, 0.0437, 0.0493], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0168, 0.0178, 0.0153, 0.0138, 0.0134, 0.0127, 0.0119], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 09:25:35,488 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66962.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:26:13,788 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0589, 4.1880, 4.4387, 4.2248, 4.3908, 3.9963, 4.1565, 3.9883], + device='cuda:2'), covar=tensor([0.1540, 0.1629, 0.0976, 0.1249, 0.0969, 0.1018, 0.2061, 0.2129], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0339, 0.0374, 0.0304, 0.0280, 0.0287, 0.0368, 0.0392], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 09:26:19,106 INFO [train.py:892] (2/4) Epoch 37, batch 200, loss[loss=0.1702, simple_loss=0.2517, pruned_loss=0.04434, over 19802.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2349, pruned_loss=0.03758, over 2508355.78 frames. 
], batch size: 288, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:27:15,573 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-29 09:27:45,869 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67018.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 09:28:07,315 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.541e+02 3.516e+02 4.008e+02 4.634e+02 9.207e+02, threshold=8.015e+02, percent-clipped=1.0 +2023-03-29 09:28:13,845 INFO [train.py:892] (2/4) Epoch 37, batch 250, loss[loss=0.164, simple_loss=0.2425, pruned_loss=0.04276, over 19694.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2354, pruned_loss=0.03703, over 2826145.24 frames. ], batch size: 75, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:30:07,560 INFO [train.py:892] (2/4) Epoch 37, batch 300, loss[loss=0.1374, simple_loss=0.2182, pruned_loss=0.02827, over 19894.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2359, pruned_loss=0.03758, over 3074550.22 frames. ], batch size: 113, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:30:14,823 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67083.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:30:22,918 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67087.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 09:31:58,039 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.283e+02 3.982e+02 4.417e+02 5.136e+02 1.288e+03, threshold=8.835e+02, percent-clipped=3.0 +2023-03-29 09:32:05,856 INFO [train.py:892] (2/4) Epoch 37, batch 350, loss[loss=0.1481, simple_loss=0.225, pruned_loss=0.03562, over 19789.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2362, pruned_loss=0.03829, over 3268454.33 frames. ], batch size: 154, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:32:34,408 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67144.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:32:34,555 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8184, 3.3814, 3.7790, 3.1621, 4.0234, 4.0079, 4.6212, 5.1666], + device='cuda:2'), covar=tensor([0.0513, 0.1573, 0.1360, 0.2356, 0.1581, 0.1335, 0.0570, 0.0466], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0243, 0.0271, 0.0257, 0.0303, 0.0262, 0.0237, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 09:32:44,843 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8225, 3.1495, 3.1324, 3.1358, 2.9609, 3.0230, 2.9047, 3.1066], + device='cuda:2'), covar=tensor([0.0395, 0.0278, 0.0298, 0.0300, 0.0385, 0.0362, 0.0356, 0.0363], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0086, 0.0089, 0.0083, 0.0096, 0.0089, 0.0105, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 09:33:49,605 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67177.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 09:33:57,500 INFO [train.py:892] (2/4) Epoch 37, batch 400, loss[loss=0.1837, simple_loss=0.2653, pruned_loss=0.05101, over 19728.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2365, pruned_loss=0.0379, over 3420405.67 frames. 
], batch size: 269, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:34:42,345 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7902, 4.5040, 4.6005, 4.8321, 4.5519, 4.9581, 4.9035, 5.1061], + device='cuda:2'), covar=tensor([0.0626, 0.0389, 0.0455, 0.0353, 0.0661, 0.0479, 0.0394, 0.0270], + device='cuda:2'), in_proj_covar=tensor([0.0159, 0.0183, 0.0206, 0.0182, 0.0181, 0.0164, 0.0156, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 09:34:50,829 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9617, 3.2865, 3.4651, 3.9080, 2.7553, 3.1028, 2.6412, 2.5472], + device='cuda:2'), covar=tensor([0.0560, 0.1918, 0.0978, 0.0478, 0.2017, 0.0997, 0.1453, 0.1711], + device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0328, 0.0250, 0.0207, 0.0250, 0.0212, 0.0223, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 09:35:34,615 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67225.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:35:41,237 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.424e+02 3.741e+02 4.233e+02 5.097e+02 1.151e+03, threshold=8.466e+02, percent-clipped=1.0 +2023-03-29 09:35:49,527 INFO [train.py:892] (2/4) Epoch 37, batch 450, loss[loss=0.1505, simple_loss=0.225, pruned_loss=0.038, over 19877.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2368, pruned_loss=0.03833, over 3538880.15 frames. ], batch size: 139, lr: 4.24e-03, grad_scale: 16.0 +2023-03-29 09:36:17,238 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5210, 3.8940, 4.1084, 4.6340, 3.0205, 3.5330, 2.8019, 2.8235], + device='cuda:2'), covar=tensor([0.0455, 0.1521, 0.0705, 0.0350, 0.1837, 0.0921, 0.1263, 0.1495], + device='cuda:2'), in_proj_covar=tensor([0.0248, 0.0328, 0.0250, 0.0207, 0.0250, 0.0212, 0.0223, 0.0218], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 09:37:36,835 INFO [train.py:892] (2/4) Epoch 37, batch 500, loss[loss=0.154, simple_loss=0.2366, pruned_loss=0.03565, over 19750.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2363, pruned_loss=0.03835, over 3631590.18 frames. ], batch size: 250, lr: 4.23e-03, grad_scale: 16.0 +2023-03-29 09:38:51,673 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67313.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:39:24,435 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.430e+02 3.820e+02 4.510e+02 5.565e+02 1.076e+03, threshold=9.020e+02, percent-clipped=2.0 +2023-03-29 09:39:30,773 INFO [train.py:892] (2/4) Epoch 37, batch 550, loss[loss=0.159, simple_loss=0.2463, pruned_loss=0.03586, over 19725.00 frames. ], tot_loss[loss=0.157, simple_loss=0.237, pruned_loss=0.03855, over 3700847.28 frames. ], batch size: 50, lr: 4.23e-03, grad_scale: 16.0 +2023-03-29 09:41:25,519 INFO [train.py:892] (2/4) Epoch 37, batch 600, loss[loss=0.1778, simple_loss=0.2656, pruned_loss=0.04496, over 19848.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2374, pruned_loss=0.03903, over 3756171.43 frames. ], batch size: 58, lr: 4.23e-03, grad_scale: 32.0 +2023-03-29 09:41:37,210 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-03-29 09:41:42,964 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67387.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 09:42:48,665 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1590, 4.2986, 2.5411, 4.4400, 4.6402, 2.0276, 3.8803, 3.4034], + device='cuda:2'), covar=tensor([0.0660, 0.0641, 0.2705, 0.0794, 0.0488, 0.2834, 0.1031, 0.0900], + device='cuda:2'), in_proj_covar=tensor([0.0243, 0.0267, 0.0238, 0.0286, 0.0265, 0.0210, 0.0248, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 09:43:17,842 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.189e+02 3.534e+02 4.252e+02 5.287e+02 1.120e+03, threshold=8.504e+02, percent-clipped=1.0 +2023-03-29 09:43:23,995 INFO [train.py:892] (2/4) Epoch 37, batch 650, loss[loss=0.1534, simple_loss=0.2245, pruned_loss=0.04115, over 19785.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2375, pruned_loss=0.03881, over 3797486.45 frames. ], batch size: 178, lr: 4.23e-03, grad_scale: 32.0 +2023-03-29 09:43:33,722 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67435.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:43:42,608 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67439.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:44:16,791 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7051, 2.6790, 4.8289, 4.0964, 4.4985, 4.7608, 4.5952, 4.3828], + device='cuda:2'), covar=tensor([0.0565, 0.1051, 0.0101, 0.0783, 0.0171, 0.0192, 0.0154, 0.0178], + device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0105, 0.0091, 0.0153, 0.0088, 0.0100, 0.0091, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 09:45:16,672 INFO [train.py:892] (2/4) Epoch 37, batch 700, loss[loss=0.1783, simple_loss=0.2445, pruned_loss=0.05611, over 19801.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2366, pruned_loss=0.0383, over 3832193.75 frames. ], batch size: 181, lr: 4.23e-03, grad_scale: 32.0 +2023-03-29 09:45:52,488 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6704, 3.8385, 3.8345, 3.6950, 3.6856, 3.8006, 3.4011, 3.3444], + device='cuda:2'), covar=tensor([0.0560, 0.0568, 0.0582, 0.0544, 0.0687, 0.0562, 0.0721, 0.1109], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0296, 0.0304, 0.0266, 0.0278, 0.0258, 0.0273, 0.0319], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 09:46:31,464 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67513.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:46:36,139 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. 
limit=2.0 +2023-03-29 09:47:00,455 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9484, 4.0414, 2.4623, 4.1883, 4.3273, 1.9601, 3.5776, 3.3362], + device='cuda:2'), covar=tensor([0.0692, 0.0789, 0.2677, 0.0805, 0.0584, 0.2874, 0.1032, 0.0853], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0266, 0.0237, 0.0285, 0.0264, 0.0209, 0.0247, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 09:47:05,443 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.696e+02 4.491e+02 5.364e+02 8.859e+02, threshold=8.982e+02, percent-clipped=1.0 +2023-03-29 09:47:12,728 INFO [train.py:892] (2/4) Epoch 37, batch 750, loss[loss=0.2051, simple_loss=0.2847, pruned_loss=0.06275, over 19566.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2368, pruned_loss=0.03816, over 3857144.61 frames. ], batch size: 376, lr: 4.23e-03, grad_scale: 32.0 +2023-03-29 09:47:45,101 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.74 vs. limit=5.0 +2023-03-29 09:48:32,724 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3564, 2.6371, 2.4105, 1.9100, 2.4708, 2.6675, 2.6130, 2.5790], + device='cuda:2'), covar=tensor([0.0400, 0.0363, 0.0354, 0.0652, 0.0402, 0.0275, 0.0279, 0.0296], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0103, 0.0104, 0.0105, 0.0109, 0.0092, 0.0093, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 09:48:51,595 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67574.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 09:49:07,700 INFO [train.py:892] (2/4) Epoch 37, batch 800, loss[loss=0.1379, simple_loss=0.2199, pruned_loss=0.02793, over 19854.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2374, pruned_loss=0.03814, over 3878363.96 frames. ], batch size: 85, lr: 4.22e-03, grad_scale: 32.0 +2023-03-29 09:49:18,226 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7130, 4.4477, 4.4496, 4.7194, 4.4869, 4.8048, 4.8210, 4.9790], + device='cuda:2'), covar=tensor([0.0719, 0.0488, 0.0592, 0.0418, 0.0709, 0.0527, 0.0471, 0.0370], + device='cuda:2'), in_proj_covar=tensor([0.0159, 0.0184, 0.0208, 0.0183, 0.0182, 0.0165, 0.0157, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 09:50:18,575 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67613.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:50:51,537 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.125e+02 3.592e+02 4.129e+02 4.799e+02 7.012e+02, threshold=8.258e+02, percent-clipped=0.0 +2023-03-29 09:50:55,281 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.15 vs. limit=5.0 +2023-03-29 09:50:58,155 INFO [train.py:892] (2/4) Epoch 37, batch 850, loss[loss=0.1761, simple_loss=0.2502, pruned_loss=0.05103, over 19750.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.237, pruned_loss=0.03768, over 3892540.32 frames. 
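Note that tot_loss is not a whole-epoch average: the "over N frames" count grows from roughly 0.9M frames at batch 50 of an epoch toward a plateau near 3.9M, which is the signature of a decayed running tally. A tiny frame-weighted accumulator in that spirit, assuming a decay-and-add update with reset_interval=200 steps (the exact bookkeeping in icefall's MetricsTracker handling is an assumption here):

def update_tot_loss(tot, batch_stats, reset_interval=200):
    # Shrink the old tally a little each step, then add the new batch; the
    # tally then covers roughly the last reset_interval batches of frames.
    decay = 1.0 - 1.0 / reset_interval
    return {k: tot.get(k, 0.0) * decay + batch_stats.get(k, 0.0)
            for k in set(tot) | set(batch_stats)}

tot = {}
for _ in range(2000):  # steady state after many batches
    tot = update_tot_loss(tot, {"frames": 19700.0, "loss": 19700.0 * 0.156})
print(tot["frames"])                # ~3.9e6, like the frame counts above
print(tot["loss"] / tot["frames"])  # 0.156, the value printed as tot_loss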
], batch size: 250, lr: 4.22e-03, grad_scale: 32.0 +2023-03-29 09:51:45,650 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3253, 4.4584, 2.6548, 4.6925, 4.9188, 2.2021, 4.1579, 3.6311], + device='cuda:2'), covar=tensor([0.0678, 0.0764, 0.2718, 0.0682, 0.0536, 0.2884, 0.1027, 0.0976], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0266, 0.0237, 0.0286, 0.0265, 0.0209, 0.0247, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 09:52:10,324 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67661.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:52:53,838 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67680.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:52:54,892 INFO [train.py:892] (2/4) Epoch 37, batch 900, loss[loss=0.1302, simple_loss=0.1999, pruned_loss=0.03021, over 19875.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2367, pruned_loss=0.03753, over 3904592.91 frames. ], batch size: 125, lr: 4.22e-03, grad_scale: 16.0 +2023-03-29 09:53:21,620 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67692.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:54:45,463 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.385e+02 3.765e+02 4.292e+02 5.023e+02 7.542e+02, threshold=8.583e+02, percent-clipped=0.0 +2023-03-29 09:54:49,553 INFO [train.py:892] (2/4) Epoch 37, batch 950, loss[loss=0.1604, simple_loss=0.2449, pruned_loss=0.03799, over 19766.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2377, pruned_loss=0.03765, over 3913243.39 frames. ], batch size: 88, lr: 4.22e-03, grad_scale: 16.0 +2023-03-29 09:55:09,550 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67739.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:55:15,594 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67741.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:55:41,470 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67753.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 09:56:23,551 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7940, 3.4733, 3.7453, 2.7824, 4.0440, 3.2580, 3.5982, 3.9092], + device='cuda:2'), covar=tensor([0.0717, 0.0413, 0.0663, 0.0876, 0.0346, 0.0439, 0.0485, 0.0370], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0091, 0.0088, 0.0115, 0.0084, 0.0087, 0.0085, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 09:56:41,361 INFO [train.py:892] (2/4) Epoch 37, batch 1000, loss[loss=0.1871, simple_loss=0.2601, pruned_loss=0.05708, over 19648.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2378, pruned_loss=0.03778, over 3920876.32 frames. 
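The slow lr decay across these entries (4.24e-03 early in epoch 37, 4.22e-03 here, 4.08e-03 by late epoch 38) is consistent with icefall's Eden schedule. A sketch of the formula, assuming base_lr=0.05, lr_batches=5000 and lr_epochs=3.5 as configured for this run, and assuming the epoch argument counts completed epochs (36 while epoch 37 trains):

def eden_lr(batch, epoch, base_lr=0.05, lr_batches=5000.0, lr_epochs=3.5):
    # Two inverse-fourth-root decay factors: one driven by the global batch
    # count, one by the number of completed epochs.
    batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(eden_lr(batch=67800, epoch=36))  # ~4.22e-03, matching the entries above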
], batch size: 69, lr: 4.22e-03, grad_scale: 16.0 +2023-03-29 09:56:54,988 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67787.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:58:28,696 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.309e+02 3.606e+02 4.244e+02 4.990e+02 9.256e+02, threshold=8.487e+02, percent-clipped=3.0 +2023-03-29 09:58:31,975 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2620, 3.2258, 4.8990, 3.6055, 3.8980, 3.6833, 2.6481, 2.8515], + device='cuda:2'), covar=tensor([0.0853, 0.2898, 0.0386, 0.1016, 0.1680, 0.1474, 0.2495, 0.2479], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0393, 0.0352, 0.0290, 0.0377, 0.0386, 0.0381, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 09:58:34,529 INFO [train.py:892] (2/4) Epoch 37, batch 1050, loss[loss=0.152, simple_loss=0.2493, pruned_loss=0.02732, over 19661.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2376, pruned_loss=0.03783, over 3926832.25 frames. ], batch size: 55, lr: 4.22e-03, grad_scale: 16.0 +2023-03-29 09:58:41,929 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7787, 3.4760, 3.8249, 2.9374, 3.9308, 3.2306, 3.5533, 3.8761], + device='cuda:2'), covar=tensor([0.0559, 0.0423, 0.0536, 0.0746, 0.0401, 0.0402, 0.0419, 0.0290], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0091, 0.0087, 0.0114, 0.0084, 0.0086, 0.0084, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 09:59:13,826 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2693, 1.6905, 1.8748, 2.4567, 2.6838, 2.8334, 2.6714, 2.7329], + device='cuda:2'), covar=tensor([0.1091, 0.1965, 0.1674, 0.0841, 0.0625, 0.0452, 0.0513, 0.0554], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0171, 0.0181, 0.0155, 0.0140, 0.0136, 0.0129, 0.0120], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 10:00:00,866 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67869.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 10:00:25,300 INFO [train.py:892] (2/4) Epoch 37, batch 1100, loss[loss=0.1428, simple_loss=0.2265, pruned_loss=0.02958, over 19654.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2374, pruned_loss=0.03808, over 3931504.72 frames. ], batch size: 67, lr: 4.22e-03, grad_scale: 16.0 +2023-03-29 10:01:30,493 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8670, 2.8080, 2.9506, 2.5049, 3.0387, 2.4893, 2.9279, 2.9505], + device='cuda:2'), covar=tensor([0.0649, 0.0545, 0.0523, 0.0767, 0.0391, 0.0559, 0.0570, 0.0427], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0091, 0.0088, 0.0114, 0.0084, 0.0087, 0.0085, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 10:02:13,707 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.252e+02 3.748e+02 4.417e+02 5.113e+02 1.132e+03, threshold=8.834e+02, percent-clipped=3.0 +2023-03-29 10:02:17,995 INFO [train.py:892] (2/4) Epoch 37, batch 1150, loss[loss=0.1481, simple_loss=0.2314, pruned_loss=0.03239, over 19799.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2381, pruned_loss=0.03819, over 3931567.58 frames. 
], batch size: 65, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:04:11,709 INFO [train.py:892] (2/4) Epoch 37, batch 1200, loss[loss=0.1577, simple_loss=0.2478, pruned_loss=0.03387, over 19728.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2373, pruned_loss=0.03778, over 3936577.75 frames. ], batch size: 50, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:04:12,716 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67981.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:06:05,466 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.592e+02 3.677e+02 4.252e+02 5.217e+02 1.129e+03, threshold=8.505e+02, percent-clipped=2.0 +2023-03-29 10:06:09,671 INFO [train.py:892] (2/4) Epoch 37, batch 1250, loss[loss=0.1635, simple_loss=0.2487, pruned_loss=0.03912, over 19839.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2375, pruned_loss=0.03774, over 3937448.98 frames. ], batch size: 90, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:06:20,216 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68036.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:06:35,647 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68042.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:06:51,018 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68048.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 10:08:04,311 INFO [train.py:892] (2/4) Epoch 37, batch 1300, loss[loss=0.1753, simple_loss=0.2581, pruned_loss=0.04629, over 19550.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2367, pruned_loss=0.03784, over 3941109.03 frames. ], batch size: 60, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:08:18,882 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68087.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 10:08:28,705 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9967, 2.2268, 2.0106, 1.4302, 2.0514, 2.1765, 2.0427, 2.1123], + device='cuda:2'), covar=tensor([0.0436, 0.0313, 0.0363, 0.0612, 0.0407, 0.0335, 0.0342, 0.0321], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0102, 0.0104, 0.0104, 0.0108, 0.0092, 0.0093, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 10:09:54,432 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.204e+02 3.685e+02 4.439e+02 5.105e+02 9.466e+02, threshold=8.879e+02, percent-clipped=1.0 +2023-03-29 10:09:58,222 INFO [train.py:892] (2/4) Epoch 37, batch 1350, loss[loss=0.1448, simple_loss=0.219, pruned_loss=0.03527, over 19858.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2361, pruned_loss=0.03771, over 3944267.47 frames. 
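The zipformer.py:625 entries trace stochastic layer skipping inside each encoder stack: every stack has its own warm-up window (warmup_begin/warmup_end staggered from 666.7 to 4000.0), and on each step zero or a few layers are bypassed, with a small residual probability persisting long after warm-up, which is why num_to_drop is still occasionally 1 here at batch_count ~ 68000. A simplified sketch of the selection logic (both probabilities below are illustrative assumptions):

import random

def layers_to_drop(batch_count, num_layers, warmup_begin, warmup_end,
                   warmup_prob=0.5, residual_prob=0.05, rng=random):
    # Inside a stack's warm-up window layers are skipped aggressively; after
    # it, a small per-layer probability keeps occasional skipping as
    # regularization. Long past warm-up the result is usually empty,
    # matching the frequent num_to_drop=0, layers_to_drop=set() above.
    p = warmup_prob if warmup_begin <= batch_count < warmup_end else residual_prob
    return {i for i in range(num_layers) if rng.random() < p}

random.seed(0)
print(layers_to_drop(68036.0, num_layers=4,
                     warmup_begin=1333.3, warmup_end=2000.0))  # -> set()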
], batch size: 104, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:10:35,138 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68148.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 10:10:47,274 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1431, 2.5462, 4.2037, 3.7177, 4.0093, 4.1322, 3.9783, 3.8584], + device='cuda:2'), covar=tensor([0.0566, 0.0948, 0.0108, 0.0519, 0.0159, 0.0213, 0.0169, 0.0200], + device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0105, 0.0090, 0.0153, 0.0088, 0.0100, 0.0091, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 10:11:23,979 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68169.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:11:48,285 INFO [train.py:892] (2/4) Epoch 37, batch 1400, loss[loss=0.1547, simple_loss=0.2246, pruned_loss=0.04243, over 19725.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2369, pruned_loss=0.03766, over 3942479.13 frames. ], batch size: 219, lr: 4.21e-03, grad_scale: 16.0 +2023-03-29 10:13:09,655 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68217.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:13:35,454 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.616e+02 3.628e+02 4.259e+02 5.104e+02 1.150e+03, threshold=8.519e+02, percent-clipped=2.0 +2023-03-29 10:13:41,105 INFO [train.py:892] (2/4) Epoch 37, batch 1450, loss[loss=0.1534, simple_loss=0.238, pruned_loss=0.03439, over 19595.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2378, pruned_loss=0.03778, over 3944027.57 frames. ], batch size: 45, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:14:33,364 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3960, 3.1354, 3.2568, 3.3547, 3.2758, 3.3036, 3.4707, 3.6305], + device='cuda:2'), covar=tensor([0.0693, 0.0560, 0.0589, 0.0516, 0.0794, 0.0718, 0.0474, 0.0373], + device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0182, 0.0205, 0.0180, 0.0179, 0.0164, 0.0155, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 10:15:37,197 INFO [train.py:892] (2/4) Epoch 37, batch 1500, loss[loss=0.1564, simple_loss=0.2398, pruned_loss=0.03649, over 19780.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2382, pruned_loss=0.03819, over 3945549.63 frames. ], batch size: 87, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:17:23,947 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.350e+02 3.594e+02 4.207e+02 5.059e+02 8.265e+02, threshold=8.414e+02, percent-clipped=0.0 +2023-03-29 10:17:29,429 INFO [train.py:892] (2/4) Epoch 37, batch 1550, loss[loss=0.1466, simple_loss=0.225, pruned_loss=0.03412, over 19833.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2384, pruned_loss=0.03843, over 3943950.42 frames. 
], batch size: 208, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:17:40,398 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68336.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:17:42,245 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68337.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:17:44,569 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6814, 3.8392, 2.2724, 4.0086, 4.1170, 1.8461, 3.4297, 3.1848], + device='cuda:2'), covar=tensor([0.0843, 0.0781, 0.2924, 0.0768, 0.0570, 0.2840, 0.1067, 0.0888], + device='cuda:2'), in_proj_covar=tensor([0.0241, 0.0264, 0.0235, 0.0283, 0.0262, 0.0207, 0.0244, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 10:17:44,766 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.53 vs. limit=5.0 +2023-03-29 10:18:06,655 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68348.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:19:17,873 INFO [train.py:892] (2/4) Epoch 37, batch 1600, loss[loss=0.1391, simple_loss=0.2263, pruned_loss=0.02594, over 19737.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2394, pruned_loss=0.03862, over 3943135.05 frames. ], batch size: 118, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:19:26,506 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68384.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:19:54,637 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68396.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:20:01,401 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68399.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:21:05,116 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0879, 2.3334, 2.1591, 1.5986, 2.2220, 2.3656, 2.2750, 2.3047], + device='cuda:2'), covar=tensor([0.0459, 0.0356, 0.0381, 0.0646, 0.0440, 0.0328, 0.0314, 0.0308], + device='cuda:2'), in_proj_covar=tensor([0.0111, 0.0103, 0.0105, 0.0106, 0.0109, 0.0093, 0.0093, 0.0093], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 10:21:10,234 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.336e+02 3.362e+02 4.148e+02 5.414e+02 8.370e+02, threshold=8.296e+02, percent-clipped=0.0 +2023-03-29 10:21:15,841 INFO [train.py:892] (2/4) Epoch 37, batch 1650, loss[loss=0.1528, simple_loss=0.235, pruned_loss=0.03534, over 19724.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2385, pruned_loss=0.03845, over 3944958.78 frames. 
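The scaling.py:679 entries come from a whitening diagnostic: channels are split into num_groups groups, and the metric measures how far each group's covariance is from having equal eigenvalues (1.0 = fully white); a corrective gradient is applied only when the metric exceeds the printed limit, so an entry like metric=2.53 vs. limit=5.0 indicates no intervention. A plausible reconstruction of that metric, inferred from the logged fields rather than copied from the icefall source:

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    # x: (num_frames, num_channels). Split channels into groups, estimate
    # each group's covariance, and report mean(eig^2) / mean(eig)^2, which
    # is 1.0 exactly when all eigenvalues are equal (fully whitened).
    num_frames, num_channels = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    metrics = []
    for g in range(num_groups):
        cov = x[:, g, :].T @ x[:, g, :] / num_frames
        eigs = torch.linalg.eigvalsh(cov)
        metrics.append((eigs ** 2).mean() / eigs.mean() ** 2)
    return torch.stack(metrics).mean().item()

x = torch.randn(4000, 96)  # i.i.d. input scores close to the ideal 1.0
print(whitening_metric(x, num_groups=8))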
], batch size: 71, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:21:23,247 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4978, 3.6185, 2.2224, 3.7326, 3.8511, 1.7849, 3.1950, 2.9700], + device='cuda:2'), covar=tensor([0.0857, 0.0873, 0.2767, 0.0897, 0.0660, 0.2754, 0.1204, 0.0998], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0265, 0.0236, 0.0284, 0.0263, 0.0208, 0.0244, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 10:21:44,772 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68443.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 10:22:24,149 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68460.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:23:09,511 INFO [train.py:892] (2/4) Epoch 37, batch 1700, loss[loss=0.1568, simple_loss=0.2374, pruned_loss=0.03813, over 19787.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2373, pruned_loss=0.03817, over 3947458.75 frames. ], batch size: 65, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:24:53,969 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.100e+02 3.371e+02 4.024e+02 4.531e+02 9.295e+02, threshold=8.049e+02, percent-clipped=2.0 +2023-03-29 10:24:57,881 INFO [train.py:892] (2/4) Epoch 37, batch 1750, loss[loss=0.1494, simple_loss=0.225, pruned_loss=0.03693, over 19770.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2363, pruned_loss=0.03774, over 3948724.08 frames. ], batch size: 191, lr: 4.20e-03, grad_scale: 16.0 +2023-03-29 10:25:02,591 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9013, 3.2867, 3.3731, 3.8117, 2.6199, 3.1466, 2.4856, 2.5077], + device='cuda:2'), covar=tensor([0.0566, 0.1632, 0.1038, 0.0468, 0.2104, 0.0951, 0.1420, 0.1706], + device='cuda:2'), in_proj_covar=tensor([0.0249, 0.0328, 0.0251, 0.0208, 0.0249, 0.0213, 0.0223, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 10:25:37,722 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0142, 3.3122, 2.9824, 2.5018, 2.9683, 3.2333, 3.2578, 3.2321], + device='cuda:2'), covar=tensor([0.0313, 0.0326, 0.0298, 0.0545, 0.0349, 0.0425, 0.0284, 0.0231], + device='cuda:2'), in_proj_covar=tensor([0.0110, 0.0103, 0.0105, 0.0106, 0.0109, 0.0093, 0.0093, 0.0093], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 10:26:31,227 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68578.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:26:35,983 INFO [train.py:892] (2/4) Epoch 37, batch 1800, loss[loss=0.1453, simple_loss=0.226, pruned_loss=0.03227, over 19669.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2359, pruned_loss=0.03753, over 3947116.48 frames. ], batch size: 73, lr: 4.19e-03, grad_scale: 16.0 +2023-03-29 10:28:03,179 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.060e+02 3.457e+02 4.221e+02 5.089e+02 1.107e+03, threshold=8.442e+02, percent-clipped=2.0 +2023-03-29 10:28:06,908 INFO [train.py:892] (2/4) Epoch 37, batch 1850, loss[loss=0.1827, simple_loss=0.2674, pruned_loss=0.04904, over 19829.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2379, pruned_loss=0.03769, over 3946818.64 frames. 
], batch size: 57, lr: 4.19e-03, grad_scale: 16.0 +2023-03-29 10:29:10,988 INFO [train.py:892] (2/4) Epoch 38, batch 0, loss[loss=0.1792, simple_loss=0.261, pruned_loss=0.0487, over 19694.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.261, pruned_loss=0.0487, over 19694.00 frames. ], batch size: 315, lr: 4.14e-03, grad_scale: 16.0 +2023-03-29 10:29:10,989 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 10:29:46,257 INFO [train.py:926] (2/4) Epoch 38, validation: loss=0.1847, simple_loss=0.2497, pruned_loss=0.05979, over 2883724.00 frames. +2023-03-29 10:29:46,258 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 10:29:49,759 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68637.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:29:54,535 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68639.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:31:45,405 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:31:45,584 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:31:46,735 INFO [train.py:892] (2/4) Epoch 38, batch 50, loss[loss=0.143, simple_loss=0.223, pruned_loss=0.03153, over 19570.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2303, pruned_loss=0.03516, over 892109.38 frames. ], batch size: 42, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:33:14,584 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4243, 4.2454, 4.2185, 3.9242, 4.4226, 3.0049, 3.7303, 2.0732], + device='cuda:2'), covar=tensor([0.0230, 0.0234, 0.0170, 0.0220, 0.0166, 0.1107, 0.0765, 0.1711], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0138, 0.0122, 0.0138, 0.0145, 0.0131], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 10:33:21,835 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.214e+02 3.660e+02 4.318e+02 5.204e+02 7.882e+02, threshold=8.636e+02, percent-clipped=0.0 +2023-03-29 10:33:36,554 INFO [train.py:892] (2/4) Epoch 38, batch 100, loss[loss=0.16, simple_loss=0.253, pruned_loss=0.03355, over 19815.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.236, pruned_loss=0.03747, over 1568731.22 frames. ], batch size: 50, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:33:54,291 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68743.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 10:34:01,159 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68746.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:34:22,495 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68755.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:35:24,032 INFO [train.py:892] (2/4) Epoch 38, batch 150, loss[loss=0.1403, simple_loss=0.2145, pruned_loss=0.03303, over 19882.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2336, pruned_loss=0.036, over 2095650.61 frames. 
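The grad_scale field is the mixed-precision loss-scaling factor (the run trains in fp16): it is halved when a step overflows and grown back after a run of clean steps, hence the oscillation between 16.0 and 32.0 across this section. A minimal sketch of the surrounding training step, assuming a stock torch.cuda.amp.GradScaler rather than icefall's exact wrapper:

import torch

scaler = torch.cuda.amp.GradScaler(init_scale=16.0, growth_interval=2000)

def train_step(model, optimizer, batch, compute_loss):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():       # fp16 forward pass
        loss = compute_loss(model, batch)
    scaler.scale(loss).backward()         # scale to avoid fp16 underflow
    scaler.step(optimizer)                # unscales; skips the step on inf/nan
    scaler.update()                       # halves on overflow, grows when clean
    return loss.detach(), scaler.get_scale()  # the value logged as grad_scale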
], batch size: 136, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:35:35,850 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7681, 2.7700, 2.8827, 2.4053, 2.9390, 2.5159, 2.8455, 2.8141], + device='cuda:2'), covar=tensor([0.0571, 0.0558, 0.0497, 0.0858, 0.0440, 0.0549, 0.0501, 0.0443], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0092, 0.0088, 0.0115, 0.0084, 0.0087, 0.0085, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 10:35:38,037 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68791.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 10:37:03,405 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.249e+02 3.607e+02 4.338e+02 5.110e+02 1.083e+03, threshold=8.676e+02, percent-clipped=2.0 +2023-03-29 10:37:19,964 INFO [train.py:892] (2/4) Epoch 38, batch 200, loss[loss=0.1872, simple_loss=0.2719, pruned_loss=0.05128, over 19712.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2357, pruned_loss=0.03693, over 2505713.80 frames. ], batch size: 337, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:37:20,863 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3686, 4.5103, 4.7757, 4.5431, 4.6865, 4.2978, 4.5242, 4.2591], + device='cuda:2'), covar=tensor([0.1525, 0.1573, 0.0842, 0.1290, 0.0846, 0.0983, 0.1782, 0.2165], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0341, 0.0377, 0.0309, 0.0284, 0.0288, 0.0367, 0.0397], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 10:37:55,501 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5554, 3.7333, 3.9278, 3.6653, 3.9500, 3.5377, 3.6508, 3.4111], + device='cuda:2'), covar=tensor([0.1726, 0.1661, 0.1090, 0.1495, 0.0979, 0.1163, 0.2038, 0.2488], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0341, 0.0377, 0.0309, 0.0284, 0.0289, 0.0368, 0.0398], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 10:39:12,962 INFO [train.py:892] (2/4) Epoch 38, batch 250, loss[loss=0.1516, simple_loss=0.2391, pruned_loss=0.0321, over 19881.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2354, pruned_loss=0.03708, over 2824302.46 frames. ], batch size: 77, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:40:13,397 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 10:40:53,773 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.221e+02 3.576e+02 4.212e+02 4.903e+02 9.274e+02, threshold=8.425e+02, percent-clipped=1.0 +2023-03-29 10:41:06,847 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68934.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:41:10,043 INFO [train.py:892] (2/4) Epoch 38, batch 300, loss[loss=0.1582, simple_loss=0.2491, pruned_loss=0.0336, over 19719.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2368, pruned_loss=0.03742, over 3072104.23 frames. ], batch size: 54, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:42:28,310 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.59 vs. limit=5.0 +2023-03-29 10:43:04,367 INFO [train.py:892] (2/4) Epoch 38, batch 350, loss[loss=0.1411, simple_loss=0.2135, pruned_loss=0.03431, over 19877.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2373, pruned_loss=0.03799, over 3266790.00 frames. 
], batch size: 139, lr: 4.13e-03, grad_scale: 16.0 +2023-03-29 10:43:19,807 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0626, 5.2184, 5.4917, 5.1983, 5.2760, 5.0278, 5.2166, 4.9328], + device='cuda:2'), covar=tensor([0.1443, 0.1456, 0.0857, 0.1295, 0.0727, 0.0872, 0.1848, 0.2100], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0345, 0.0380, 0.0312, 0.0286, 0.0291, 0.0370, 0.0401], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 10:44:39,915 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.628e+02 4.401e+02 5.124e+02 1.005e+03, threshold=8.801e+02, percent-clipped=1.0 +2023-03-29 10:44:56,358 INFO [train.py:892] (2/4) Epoch 38, batch 400, loss[loss=0.1635, simple_loss=0.2475, pruned_loss=0.03973, over 19883.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2363, pruned_loss=0.03776, over 3419092.16 frames. ], batch size: 84, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:45:08,732 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69041.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:45:13,693 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69043.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:45:27,817 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4260, 4.2954, 4.7307, 4.3196, 3.9638, 4.5447, 4.3598, 4.8178], + device='cuda:2'), covar=tensor([0.0786, 0.0385, 0.0354, 0.0381, 0.1014, 0.0540, 0.0510, 0.0345], + device='cuda:2'), in_proj_covar=tensor([0.0288, 0.0229, 0.0228, 0.0240, 0.0209, 0.0251, 0.0242, 0.0226], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 10:45:34,694 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.70 vs. limit=5.0 +2023-03-29 10:45:41,746 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69055.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:46:43,590 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-29 10:46:50,030 INFO [train.py:892] (2/4) Epoch 38, batch 450, loss[loss=0.1535, simple_loss=0.2306, pruned_loss=0.03817, over 19744.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2371, pruned_loss=0.03804, over 3537772.99 frames. ], batch size: 209, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:47:28,752 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69103.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:47:31,063 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69104.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:48:23,826 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.342e+02 3.756e+02 4.232e+02 5.064e+02 8.468e+02, threshold=8.465e+02, percent-clipped=0.0 +2023-03-29 10:48:39,777 INFO [train.py:892] (2/4) Epoch 38, batch 500, loss[loss=0.1666, simple_loss=0.2582, pruned_loss=0.03753, over 19672.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2363, pruned_loss=0.03748, over 3629608.05 frames. 
], batch size: 58, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:49:38,301 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69162.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:49:52,837 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69168.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:50:32,782 INFO [train.py:892] (2/4) Epoch 38, batch 550, loss[loss=0.1508, simple_loss=0.2374, pruned_loss=0.03205, over 19647.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2353, pruned_loss=0.0373, over 3701685.64 frames. ], batch size: 68, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:51:20,525 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6606, 2.7744, 5.0745, 4.3547, 4.8055, 4.8065, 4.7646, 4.5801], + device='cuda:2'), covar=tensor([0.0621, 0.1092, 0.0108, 0.0863, 0.0174, 0.0236, 0.0153, 0.0173], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0106, 0.0091, 0.0154, 0.0089, 0.0101, 0.0092, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 10:51:29,326 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5750, 2.8173, 4.8737, 4.0880, 4.6296, 4.7096, 4.5965, 4.4648], + device='cuda:2'), covar=tensor([0.0589, 0.0979, 0.0100, 0.0855, 0.0137, 0.0190, 0.0154, 0.0155], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0106, 0.0091, 0.0154, 0.0089, 0.0101, 0.0092, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 10:51:57,152 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69223.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:52:02,637 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69225.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:52:11,494 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.978e+02 3.709e+02 4.324e+02 5.706e+02 8.766e+02, threshold=8.649e+02, percent-clipped=1.0 +2023-03-29 10:52:12,565 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69229.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:52:23,868 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69234.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:52:27,231 INFO [train.py:892] (2/4) Epoch 38, batch 600, loss[loss=0.2015, simple_loss=0.272, pruned_loss=0.0655, over 19650.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2348, pruned_loss=0.0369, over 3756761.20 frames. ], batch size: 343, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:52:49,628 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69245.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 10:53:13,415 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-03-29 10:53:53,934 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:54:14,119 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69282.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:54:22,212 INFO [train.py:892] (2/4) Epoch 38, batch 650, loss[loss=0.1563, simple_loss=0.2327, pruned_loss=0.03996, over 19747.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2339, pruned_loss=0.03663, over 3799781.19 frames. 
], batch size: 139, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:54:23,356 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69286.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:55:11,564 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69306.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 10:55:42,135 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:55:59,563 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.644e+02 3.609e+02 4.195e+02 5.280e+02 9.691e+02, threshold=8.389e+02, percent-clipped=2.0 +2023-03-29 10:56:15,717 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69334.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 10:56:18,450 INFO [train.py:892] (2/4) Epoch 38, batch 700, loss[loss=0.1549, simple_loss=0.2389, pruned_loss=0.03551, over 19811.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.236, pruned_loss=0.0371, over 3829817.93 frames. ], batch size: 50, lr: 4.12e-03, grad_scale: 16.0 +2023-03-29 10:56:30,267 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69341.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:56:43,028 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4247, 4.5493, 2.8015, 4.8636, 5.0564, 2.1929, 4.3097, 3.5278], + device='cuda:2'), covar=tensor([0.0745, 0.0737, 0.2722, 0.0718, 0.0440, 0.2854, 0.0923, 0.1000], + device='cuda:2'), in_proj_covar=tensor([0.0240, 0.0263, 0.0236, 0.0283, 0.0262, 0.0206, 0.0244, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 10:57:30,655 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69368.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:58:00,767 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69381.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:58:11,798 INFO [train.py:892] (2/4) Epoch 38, batch 750, loss[loss=0.1545, simple_loss=0.2385, pruned_loss=0.03519, over 19809.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2351, pruned_loss=0.03662, over 3856994.65 frames. ], batch size: 82, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 10:58:19,034 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69389.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:58:29,524 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69394.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:58:40,587 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69399.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 10:59:51,962 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.082e+02 3.634e+02 4.546e+02 5.374e+02 9.782e+02, threshold=9.091e+02, percent-clipped=2.0 +2023-03-29 10:59:53,007 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69429.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:00:07,013 INFO [train.py:892] (2/4) Epoch 38, batch 800, loss[loss=0.162, simple_loss=0.2506, pruned_loss=0.03674, over 19843.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2351, pruned_loss=0.03654, over 3878180.13 frames. 
], batch size: 58, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 11:00:47,402 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9116, 3.8231, 3.7498, 3.5292, 3.9324, 2.8743, 3.2416, 1.9098], + device='cuda:2'), covar=tensor([0.0207, 0.0232, 0.0160, 0.0199, 0.0149, 0.1085, 0.0660, 0.1703], + device='cuda:2'), in_proj_covar=tensor([0.0108, 0.0150, 0.0116, 0.0139, 0.0122, 0.0138, 0.0146, 0.0131], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 11:00:53,713 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69455.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:01:42,870 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5695, 3.4577, 3.5297, 3.7064, 3.5743, 3.7228, 3.6477, 3.7251], + device='cuda:2'), covar=tensor([0.0999, 0.0691, 0.0703, 0.0538, 0.0862, 0.0722, 0.0676, 0.0696], + device='cuda:2'), in_proj_covar=tensor([0.0159, 0.0183, 0.0206, 0.0181, 0.0180, 0.0165, 0.0157, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 11:02:01,821 INFO [train.py:892] (2/4) Epoch 38, batch 850, loss[loss=0.1515, simple_loss=0.2343, pruned_loss=0.03432, over 19856.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2346, pruned_loss=0.03644, over 3895591.13 frames. ], batch size: 78, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 11:03:16,126 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69518.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:03:16,254 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1334, 4.9578, 5.5267, 4.9950, 4.3558, 5.2056, 5.1327, 5.6417], + device='cuda:2'), covar=tensor([0.0769, 0.0393, 0.0317, 0.0384, 0.0772, 0.0457, 0.0427, 0.0267], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0231, 0.0229, 0.0242, 0.0212, 0.0254, 0.0243, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 11:03:30,087 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69524.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:03:39,265 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.651e+02 4.238e+02 5.380e+02 9.885e+02, threshold=8.476e+02, percent-clipped=3.0 +2023-03-29 11:03:54,828 INFO [train.py:892] (2/4) Epoch 38, batch 900, loss[loss=0.1632, simple_loss=0.2523, pruned_loss=0.03701, over 19529.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.235, pruned_loss=0.03678, over 3906777.09 frames. ], batch size: 54, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 11:05:35,691 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69581.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:05:45,043 INFO [train.py:892] (2/4) Epoch 38, batch 950, loss[loss=0.1478, simple_loss=0.2311, pruned_loss=0.03219, over 19695.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2348, pruned_loss=0.03642, over 3916312.29 frames. 
], batch size: 101, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 11:06:21,724 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69601.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:07:25,055 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.229e+02 3.525e+02 4.148e+02 4.911e+02 8.887e+02, threshold=8.297e+02, percent-clipped=1.0 +2023-03-29 11:07:25,880 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69629.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:07:41,348 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0733, 3.0514, 4.6926, 3.5682, 3.7951, 3.5323, 2.6122, 2.7347], + device='cuda:2'), covar=tensor([0.0937, 0.2939, 0.0416, 0.0998, 0.1659, 0.1440, 0.2501, 0.2619], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0396, 0.0354, 0.0293, 0.0379, 0.0391, 0.0385, 0.0355], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 11:07:42,211 INFO [train.py:892] (2/4) Epoch 38, batch 1000, loss[loss=0.1436, simple_loss=0.223, pruned_loss=0.03213, over 19773.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2345, pruned_loss=0.03659, over 3923125.18 frames. ], batch size: 69, lr: 4.11e-03, grad_scale: 16.0 +2023-03-29 11:09:09,859 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69674.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 11:09:14,192 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69676.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:09:41,227 INFO [train.py:892] (2/4) Epoch 38, batch 1050, loss[loss=0.1314, simple_loss=0.2101, pruned_loss=0.02629, over 19761.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2346, pruned_loss=0.03656, over 3928795.77 frames. ], batch size: 88, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:10:12,275 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69699.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:11:13,819 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69724.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:11:23,832 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.661e+02 3.738e+02 4.279e+02 5.183e+02 8.031e+02, threshold=8.559e+02, percent-clipped=0.0 +2023-03-29 11:11:41,145 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69735.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 11:11:42,248 INFO [train.py:892] (2/4) Epoch 38, batch 1100, loss[loss=0.1614, simple_loss=0.2467, pruned_loss=0.03802, over 19794.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2353, pruned_loss=0.03698, over 3933874.90 frames. ], batch size: 224, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:12:07,677 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69747.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:12:15,117 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69750.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:13:36,903 INFO [train.py:892] (2/4) Epoch 38, batch 1150, loss[loss=0.1515, simple_loss=0.2301, pruned_loss=0.0365, over 19782.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2358, pruned_loss=0.03743, over 3935773.96 frames. 
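The zipformer.py:1454 dumps are a periodic self-attention diagnostic: the first tensor holds one mean attention-entropy value per head (near 0 = sharply peaked heads, near log(key_len) = nearly uniform), followed by covariance statistics of the associated projections. A hedged sketch of the entropy part only (the covariance bookkeeping is omitted):

import torch

def attention_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    # attn_weights: (num_heads, query_len, key_len), rows already softmaxed.
    # Returns one mean entropy per head, like the first tensor in the dumps.
    eps = 1.0e-20
    h = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return h.mean(dim=-1)

w = torch.softmax(torch.randn(8, 50, 50), dim=-1)
print(attention_entropy(w))  # uniform attention over 50 keys would give ~3.91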
], batch size: 193, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:14:51,200 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69818.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:15:03,456 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69824.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:15:13,290 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.448e+02 3.531e+02 4.116e+02 4.879e+02 7.988e+02, threshold=8.232e+02, percent-clipped=0.0 +2023-03-29 11:15:27,947 INFO [train.py:892] (2/4) Epoch 38, batch 1200, loss[loss=0.1386, simple_loss=0.2214, pruned_loss=0.02786, over 19863.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.235, pruned_loss=0.03711, over 3938943.11 frames. ], batch size: 85, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:16:15,864 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1850, 3.1838, 4.8709, 3.6446, 3.7548, 3.5211, 2.6507, 2.7809], + device='cuda:2'), covar=tensor([0.0944, 0.2972, 0.0378, 0.0956, 0.1845, 0.1494, 0.2535, 0.2702], + device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0397, 0.0355, 0.0294, 0.0379, 0.0390, 0.0384, 0.0356], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 11:16:35,295 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6704, 4.4882, 5.0446, 4.5360, 4.1280, 4.7813, 4.6257, 5.1342], + device='cuda:2'), covar=tensor([0.0840, 0.0402, 0.0327, 0.0388, 0.0905, 0.0500, 0.0464, 0.0330], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0231, 0.0230, 0.0243, 0.0212, 0.0254, 0.0243, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 11:16:39,727 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69866.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:16:53,624 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69872.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:17:15,088 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69881.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:17:25,245 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1131, 2.6924, 4.4064, 3.8282, 4.3067, 4.3568, 4.2257, 4.0782], + device='cuda:2'), covar=tensor([0.0667, 0.0981, 0.0114, 0.0673, 0.0140, 0.0232, 0.0166, 0.0189], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0106, 0.0091, 0.0154, 0.0089, 0.0101, 0.0092, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 11:17:26,324 INFO [train.py:892] (2/4) Epoch 38, batch 1250, loss[loss=0.1383, simple_loss=0.2198, pruned_loss=0.02837, over 19850.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.235, pruned_loss=0.03698, over 3941595.43 frames. 
], batch size: 106, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:18:00,559 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69901.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:18:19,805 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3570, 1.7736, 2.0059, 2.6437, 2.9220, 3.0361, 2.8947, 2.9583], + device='cuda:2'), covar=tensor([0.1160, 0.1985, 0.1711, 0.0814, 0.0643, 0.0432, 0.0506, 0.0524], + device='cuda:2'), in_proj_covar=tensor([0.0164, 0.0171, 0.0180, 0.0155, 0.0140, 0.0136, 0.0130, 0.0120], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 11:19:06,497 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69929.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:19:06,667 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69929.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 11:19:08,017 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.501e+02 3.622e+02 4.531e+02 5.693e+02 1.011e+03, threshold=9.061e+02, percent-clipped=2.0 +2023-03-29 11:19:20,443 INFO [train.py:892] (2/4) Epoch 38, batch 1300, loss[loss=0.1303, simple_loss=0.2099, pruned_loss=0.02532, over 19896.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.234, pruned_loss=0.03657, over 3944105.94 frames. ], batch size: 116, lr: 4.10e-03, grad_scale: 16.0 +2023-03-29 11:19:46,461 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69949.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:20:47,581 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69976.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:20:49,292 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69977.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:21:07,869 INFO [train.py:892] (2/4) Epoch 38, batch 1350, loss[loss=0.146, simple_loss=0.2212, pruned_loss=0.0354, over 19872.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2334, pruned_loss=0.03628, over 3945891.85 frames. ], batch size: 138, lr: 4.10e-03, grad_scale: 16.0 +2023-03-29 11:22:03,720 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1159, 4.2138, 4.4472, 4.2004, 4.4098, 3.9505, 4.2223, 3.9672], + device='cuda:2'), covar=tensor([0.1544, 0.1534, 0.1004, 0.1330, 0.1116, 0.1118, 0.1899, 0.2167], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0345, 0.0379, 0.0311, 0.0288, 0.0291, 0.0370, 0.0400], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 11:22:40,229 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70024.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:22:40,306 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70024.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:22:51,493 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.339e+02 3.460e+02 3.863e+02 4.574e+02 6.887e+02, threshold=7.727e+02, percent-clipped=0.0 +2023-03-29 11:22:52,386 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70030.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 11:23:05,941 INFO [train.py:892] (2/4) Epoch 38, batch 1400, loss[loss=0.1523, simple_loss=0.2307, pruned_loss=0.03692, over 19736.00 frames. 
], tot_loss[loss=0.1531, simple_loss=0.2335, pruned_loss=0.03632, over 3946564.22 frames. ], batch size: 118, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:23:41,222 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70050.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:24:30,351 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70072.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:25:00,853 INFO [train.py:892] (2/4) Epoch 38, batch 1450, loss[loss=0.1483, simple_loss=0.2247, pruned_loss=0.03595, over 19786.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2347, pruned_loss=0.03669, over 3946974.93 frames. ], batch size: 105, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:25:26,169 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70098.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:25:49,718 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-29 11:25:53,012 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70110.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:26:38,226 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.553e+02 3.599e+02 4.475e+02 5.242e+02 1.040e+03, threshold=8.950e+02, percent-clipped=4.0 +2023-03-29 11:26:50,801 INFO [train.py:892] (2/4) Epoch 38, batch 1500, loss[loss=0.1744, simple_loss=0.2582, pruned_loss=0.04532, over 19700.00 frames. ], tot_loss[loss=0.1525, simple_loss=0.2328, pruned_loss=0.03612, over 3948397.67 frames. ], batch size: 283, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:28:12,810 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70171.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:28:47,254 INFO [train.py:892] (2/4) Epoch 38, batch 1550, loss[loss=0.1322, simple_loss=0.2155, pruned_loss=0.0244, over 19794.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2333, pruned_loss=0.0361, over 3949454.14 frames. ], batch size: 126, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:30:25,194 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.741e+02 3.595e+02 4.117e+02 4.902e+02 9.648e+02, threshold=8.233e+02, percent-clipped=1.0 +2023-03-29 11:30:39,821 INFO [train.py:892] (2/4) Epoch 38, batch 1600, loss[loss=0.1669, simple_loss=0.2436, pruned_loss=0.04507, over 19651.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2338, pruned_loss=0.03625, over 3948952.55 frames. ], batch size: 47, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:31:34,420 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70260.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:32:29,336 INFO [train.py:892] (2/4) Epoch 38, batch 1650, loss[loss=0.1431, simple_loss=0.2232, pruned_loss=0.03151, over 19733.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2316, pruned_loss=0.03532, over 3950593.48 frames. 
], batch size: 77, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:33:52,863 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70321.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:34:12,141 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.483e+02 3.682e+02 4.356e+02 5.206e+02 1.027e+03, threshold=8.712e+02, percent-clipped=2.0 +2023-03-29 11:34:13,188 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70330.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:34:27,846 INFO [train.py:892] (2/4) Epoch 38, batch 1700, loss[loss=0.1848, simple_loss=0.2635, pruned_loss=0.05302, over 19676.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2332, pruned_loss=0.03606, over 3949932.13 frames. ], batch size: 64, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:34:45,292 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1269, 3.0554, 4.6463, 3.5547, 3.6703, 3.5059, 2.5294, 2.7320], + device='cuda:2'), covar=tensor([0.0951, 0.3157, 0.0458, 0.1113, 0.1927, 0.1468, 0.2765, 0.2806], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0398, 0.0356, 0.0295, 0.0381, 0.0391, 0.0386, 0.0357], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 11:35:59,711 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70378.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:36:12,511 INFO [train.py:892] (2/4) Epoch 38, batch 1750, loss[loss=0.1379, simple_loss=0.2219, pruned_loss=0.02691, over 19780.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.233, pruned_loss=0.03613, over 3950547.86 frames. ], batch size: 87, lr: 4.08e-03, grad_scale: 16.0 +2023-03-29 11:37:39,920 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.846e+02 3.811e+02 4.551e+02 5.592e+02 1.733e+03, threshold=9.102e+02, percent-clipped=3.0 +2023-03-29 11:37:51,274 INFO [train.py:892] (2/4) Epoch 38, batch 1800, loss[loss=0.1627, simple_loss=0.2404, pruned_loss=0.04248, over 19804.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2341, pruned_loss=0.0365, over 3949696.81 frames. ], batch size: 224, lr: 4.08e-03, grad_scale: 16.0 +2023-03-29 11:38:48,038 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70466.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:38:53,640 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5931, 2.7140, 4.9218, 3.9876, 4.6906, 4.7913, 4.6222, 4.5107], + device='cuda:2'), covar=tensor([0.0628, 0.1101, 0.0103, 0.0877, 0.0147, 0.0182, 0.0164, 0.0158], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0106, 0.0092, 0.0155, 0.0089, 0.0101, 0.0092, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 11:39:26,451 INFO [train.py:892] (2/4) Epoch 38, batch 1850, loss[loss=0.1547, simple_loss=0.2481, pruned_loss=0.03068, over 19843.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2362, pruned_loss=0.03635, over 3945732.09 frames. ], batch size: 58, lr: 4.08e-03, grad_scale: 16.0 +2023-03-29 11:40:29,589 INFO [train.py:892] (2/4) Epoch 39, batch 0, loss[loss=0.1396, simple_loss=0.2179, pruned_loss=0.03066, over 19738.00 frames. ], tot_loss[loss=0.1396, simple_loss=0.2179, pruned_loss=0.03066, over 19738.00 frames. 
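Each epoch rollover (Epoch 38 -> 39 just below, as at 38's start above) opens with a validation pass over the fixed dev set, always the same 2883724.00 frames, in addition to periodic validation during the epoch. A sketch of the trigger, assuming icefall's convention of validating whenever batch_idx is a multiple of the configured valid_interval, which makes batch 0 of every epoch a validation step:

def should_validate(batch_idx: int, valid_interval: int = 3000) -> bool:
    # batch_idx restarts at 0 each epoch, so every epoch opens with a
    # validation pass as well as one every valid_interval batches
    # (3000 assumed here, per this run's startup configuration).
    return batch_idx % valid_interval == 0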
], batch size: 209, lr: 4.03e-03, grad_scale: 16.0 +2023-03-29 11:40:29,589 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 11:40:48,939 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9712, 2.8874, 1.9944, 3.3107, 3.1290, 3.2059, 3.3415, 2.8238], + device='cuda:2'), covar=tensor([0.0717, 0.0798, 0.1676, 0.0761, 0.0687, 0.0619, 0.0701, 0.0860], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0150, 0.0147, 0.0160, 0.0138, 0.0143, 0.0154, 0.0152], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 11:41:05,156 INFO [train.py:926] (2/4) Epoch 39, validation: loss=0.1858, simple_loss=0.25, pruned_loss=0.06079, over 2883724.00 frames. +2023-03-29 11:41:05,157 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 11:42:14,806 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9562, 3.1682, 3.2916, 3.2460, 2.9646, 3.1591, 2.9575, 3.2269], + device='cuda:2'), covar=tensor([0.0360, 0.0339, 0.0347, 0.0251, 0.0441, 0.0339, 0.0446, 0.0350], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0087, 0.0090, 0.0085, 0.0097, 0.0090, 0.0106, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 11:42:37,523 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.196e+02 3.428e+02 3.863e+02 4.713e+02 6.861e+02, threshold=7.726e+02, percent-clipped=0.0 +2023-03-29 11:42:45,952 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2642, 2.2670, 2.3375, 2.3428, 2.3380, 2.3846, 2.3039, 2.4479], + device='cuda:2'), covar=tensor([0.0432, 0.0417, 0.0434, 0.0364, 0.0489, 0.0398, 0.0525, 0.0358], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0087, 0.0090, 0.0085, 0.0097, 0.0090, 0.0106, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 11:43:01,757 INFO [train.py:892] (2/4) Epoch 39, batch 50, loss[loss=0.1359, simple_loss=0.2074, pruned_loss=0.03218, over 19792.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2298, pruned_loss=0.03436, over 890458.87 frames. ], batch size: 151, lr: 4.03e-03, grad_scale: 16.0 +2023-03-29 11:44:19,783 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-29 11:44:59,629 INFO [train.py:892] (2/4) Epoch 39, batch 100, loss[loss=0.1553, simple_loss=0.2456, pruned_loss=0.0325, over 19813.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2313, pruned_loss=0.03452, over 1568101.79 frames. 
], batch size: 82, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:45:14,798 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7142, 4.5584, 5.0864, 4.5822, 4.1560, 4.8768, 4.6843, 5.2101], + device='cuda:2'), covar=tensor([0.0833, 0.0394, 0.0353, 0.0407, 0.0871, 0.0504, 0.0470, 0.0341], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0232, 0.0231, 0.0244, 0.0214, 0.0255, 0.0245, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 11:45:57,279 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70616.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:45:59,222 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9857, 5.1818, 5.4614, 5.1758, 5.3168, 4.9761, 5.1359, 4.9378], + device='cuda:2'), covar=tensor([0.1344, 0.1358, 0.0773, 0.1114, 0.0665, 0.0833, 0.1705, 0.1957], + device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0342, 0.0374, 0.0306, 0.0283, 0.0286, 0.0367, 0.0396], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 11:46:29,510 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.491e+02 3.857e+02 4.460e+02 4.985e+02 1.010e+03, threshold=8.919e+02, percent-clipped=5.0 +2023-03-29 11:46:43,679 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3878, 2.6292, 2.8103, 3.2630, 2.2239, 2.8645, 2.2455, 2.1618], + device='cuda:2'), covar=tensor([0.0689, 0.1495, 0.1220, 0.0577, 0.2231, 0.0927, 0.1404, 0.1723], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0328, 0.0250, 0.0208, 0.0249, 0.0212, 0.0222, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 11:46:53,612 INFO [train.py:892] (2/4) Epoch 39, batch 150, loss[loss=0.2112, simple_loss=0.2891, pruned_loss=0.06661, over 19625.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2334, pruned_loss=0.03573, over 2096235.68 frames. ], batch size: 359, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:48:51,185 INFO [train.py:892] (2/4) Epoch 39, batch 200, loss[loss=0.1432, simple_loss=0.223, pruned_loss=0.03166, over 19772.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2335, pruned_loss=0.03614, over 2507496.07 frames. ], batch size: 163, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:49:57,897 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0681, 3.2401, 3.2308, 3.2741, 3.1154, 2.9771, 3.0847, 3.3895], + device='cuda:2'), covar=tensor([0.0300, 0.0371, 0.0315, 0.0256, 0.0336, 0.0388, 0.0394, 0.0303], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0088, 0.0090, 0.0085, 0.0097, 0.0090, 0.0106, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 11:50:22,143 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.699e+02 3.379e+02 4.143e+02 5.094e+02 9.478e+02, threshold=8.286e+02, percent-clipped=1.0 +2023-03-29 11:50:49,019 INFO [train.py:892] (2/4) Epoch 39, batch 250, loss[loss=0.1397, simple_loss=0.2257, pruned_loss=0.0268, over 19835.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.2323, pruned_loss=0.03562, over 2827783.01 frames. 
], batch size: 43, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:51:48,008 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70766.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:52:29,801 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1165, 2.1299, 2.2349, 2.1531, 2.1607, 2.2255, 2.1410, 2.2720], + device='cuda:2'), covar=tensor([0.0421, 0.0365, 0.0359, 0.0370, 0.0444, 0.0383, 0.0473, 0.0328], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0087, 0.0090, 0.0084, 0.0097, 0.0090, 0.0106, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 11:52:42,431 INFO [train.py:892] (2/4) Epoch 39, batch 300, loss[loss=0.1562, simple_loss=0.2303, pruned_loss=0.041, over 19572.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2331, pruned_loss=0.03576, over 3076820.11 frames. ], batch size: 42, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:52:55,474 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.5332, 5.0170, 5.0832, 4.7737, 5.3809, 3.3814, 4.3073, 2.7322], + device='cuda:2'), covar=tensor([0.0130, 0.0176, 0.0127, 0.0183, 0.0134, 0.0920, 0.0893, 0.1413], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0138, 0.0122, 0.0137, 0.0145, 0.0130], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 11:53:37,738 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70814.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:53:44,319 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-29 11:54:14,741 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.476e+02 3.994e+02 4.897e+02 8.420e+02, threshold=7.989e+02, percent-clipped=1.0 +2023-03-29 11:54:43,441 INFO [train.py:892] (2/4) Epoch 39, batch 350, loss[loss=0.1745, simple_loss=0.2502, pruned_loss=0.04943, over 19884.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2343, pruned_loss=0.03651, over 3269973.03 frames. ], batch size: 62, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:56:39,299 INFO [train.py:892] (2/4) Epoch 39, batch 400, loss[loss=0.1337, simple_loss=0.2148, pruned_loss=0.0263, over 19721.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2347, pruned_loss=0.0366, over 3420833.77 frames. ], batch size: 104, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:57:38,307 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70916.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:58:08,800 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 3.611e+02 4.279e+02 5.158e+02 1.128e+03, threshold=8.559e+02, percent-clipped=5.0 +2023-03-29 11:58:31,800 INFO [train.py:892] (2/4) Epoch 39, batch 450, loss[loss=0.1727, simple_loss=0.2471, pruned_loss=0.04911, over 19690.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.234, pruned_loss=0.03616, over 3539391.62 frames. ], batch size: 45, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:59:27,833 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70964.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:00:29,099 INFO [train.py:892] (2/4) Epoch 39, batch 500, loss[loss=0.1308, simple_loss=0.2077, pruned_loss=0.0269, over 19811.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2348, pruned_loss=0.03653, over 3629708.47 frames. 
], batch size: 117, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:00:32,122 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70992.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:02:01,502 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.739e+02 3.809e+02 4.349e+02 5.400e+02 1.199e+03, threshold=8.697e+02, percent-clipped=2.0 +2023-03-29 12:02:02,613 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71030.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:02:27,669 INFO [train.py:892] (2/4) Epoch 39, batch 550, loss[loss=0.1395, simple_loss=0.2203, pruned_loss=0.02938, over 19800.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2367, pruned_loss=0.03801, over 3700353.08 frames. ], batch size: 195, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:02:54,019 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71053.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:04:21,953 INFO [train.py:892] (2/4) Epoch 39, batch 600, loss[loss=0.1668, simple_loss=0.2398, pruned_loss=0.04693, over 19805.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.236, pruned_loss=0.03783, over 3756704.41 frames. ], batch size: 67, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:04:22,904 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71091.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:04:30,227 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.86 vs. limit=5.0 +2023-03-29 12:05:55,698 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.519e+02 3.574e+02 4.295e+02 5.387e+02 1.048e+03, threshold=8.591e+02, percent-clipped=4.0 +2023-03-29 12:05:59,052 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5136, 2.5327, 2.8029, 2.5250, 2.9887, 2.8773, 3.3861, 3.6991], + device='cuda:2'), covar=tensor([0.0632, 0.1675, 0.1520, 0.2073, 0.1473, 0.1485, 0.0650, 0.0585], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0244, 0.0271, 0.0258, 0.0305, 0.0262, 0.0236, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:06:26,048 INFO [train.py:892] (2/4) Epoch 39, batch 650, loss[loss=0.1311, simple_loss=0.2165, pruned_loss=0.02284, over 19795.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2355, pruned_loss=0.03743, over 3798765.99 frames. ], batch size: 83, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:07:21,502 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-29 12:07:51,151 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4945, 3.0954, 3.4230, 2.9296, 3.6848, 3.6777, 4.2425, 4.7361], + device='cuda:2'), covar=tensor([0.0583, 0.1597, 0.1538, 0.2250, 0.1771, 0.1321, 0.0640, 0.0522], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0243, 0.0270, 0.0257, 0.0303, 0.0261, 0.0235, 0.0265], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:07:51,534 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.55 vs. limit=5.0 +2023-03-29 12:08:25,799 INFO [train.py:892] (2/4) Epoch 39, batch 700, loss[loss=0.152, simple_loss=0.2312, pruned_loss=0.03637, over 19830.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2364, pruned_loss=0.03717, over 3831561.90 frames. 
], batch size: 143, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:08:29,821 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.50 vs. limit=5.0 +2023-03-29 12:09:56,520 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.218e+02 3.479e+02 4.128e+02 5.224e+02 8.144e+02, threshold=8.256e+02, percent-clipped=0.0 +2023-03-29 12:10:19,501 INFO [train.py:892] (2/4) Epoch 39, batch 750, loss[loss=0.1473, simple_loss=0.2156, pruned_loss=0.03954, over 19875.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2357, pruned_loss=0.0369, over 3858801.27 frames. ], batch size: 125, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:12:12,847 INFO [train.py:892] (2/4) Epoch 39, batch 800, loss[loss=0.1554, simple_loss=0.2339, pruned_loss=0.03845, over 19776.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2351, pruned_loss=0.03668, over 3879815.65 frames. ], batch size: 233, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:13:43,659 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.446e+02 3.401e+02 4.193e+02 5.158e+02 9.890e+02, threshold=8.386e+02, percent-clipped=3.0 +2023-03-29 12:14:08,929 INFO [train.py:892] (2/4) Epoch 39, batch 850, loss[loss=0.1347, simple_loss=0.2203, pruned_loss=0.02452, over 19945.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2342, pruned_loss=0.03643, over 3896694.03 frames. ], batch size: 46, lr: 4.00e-03, grad_scale: 16.0 +2023-03-29 12:14:25,040 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71348.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:15:12,547 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2172, 1.6986, 1.8505, 2.4041, 2.6243, 2.7672, 2.6126, 2.6705], + device='cuda:2'), covar=tensor([0.1206, 0.1947, 0.1802, 0.0872, 0.0680, 0.0467, 0.0558, 0.0594], + device='cuda:2'), in_proj_covar=tensor([0.0164, 0.0169, 0.0182, 0.0155, 0.0140, 0.0136, 0.0130, 0.0121], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 12:15:33,192 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5745, 2.1338, 2.3312, 2.7365, 3.1396, 3.2528, 3.1567, 3.1529], + device='cuda:2'), covar=tensor([0.1121, 0.1678, 0.1579, 0.0841, 0.0555, 0.0416, 0.0576, 0.0606], + device='cuda:2'), in_proj_covar=tensor([0.0164, 0.0169, 0.0181, 0.0155, 0.0140, 0.0136, 0.0130, 0.0121], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 12:15:53,690 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71386.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:16:03,891 INFO [train.py:892] (2/4) Epoch 39, batch 900, loss[loss=0.1248, simple_loss=0.2064, pruned_loss=0.02161, over 19824.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2338, pruned_loss=0.03615, over 3907904.00 frames. ], batch size: 103, lr: 4.00e-03, grad_scale: 16.0 +2023-03-29 12:17:36,394 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.339e+02 3.295e+02 3.866e+02 4.930e+02 9.721e+02, threshold=7.731e+02, percent-clipped=3.0 +2023-03-29 12:18:00,860 INFO [train.py:892] (2/4) Epoch 39, batch 950, loss[loss=0.1365, simple_loss=0.2182, pruned_loss=0.02738, over 19781.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2338, pruned_loss=0.03619, over 3916772.99 frames. 
], batch size: 94, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:19:56,182 INFO [train.py:892] (2/4) Epoch 39, batch 1000, loss[loss=0.1829, simple_loss=0.2516, pruned_loss=0.0571, over 19840.00 frames. ], tot_loss[loss=0.1525, simple_loss=0.2332, pruned_loss=0.03587, over 3923815.47 frames. ], batch size: 177, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:21:17,910 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-29 12:21:28,780 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.429e+02 3.687e+02 4.367e+02 5.594e+02 8.534e+02, threshold=8.735e+02, percent-clipped=3.0 +2023-03-29 12:21:52,906 INFO [train.py:892] (2/4) Epoch 39, batch 1050, loss[loss=0.1522, simple_loss=0.2347, pruned_loss=0.03489, over 19749.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.234, pruned_loss=0.03613, over 3928920.47 frames. ], batch size: 259, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:23:09,896 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 12:23:33,656 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3534, 2.5385, 3.7002, 2.9646, 3.1629, 2.9272, 2.2156, 2.3120], + device='cuda:2'), covar=tensor([0.1338, 0.3381, 0.0713, 0.1249, 0.1932, 0.1883, 0.2926, 0.3052], + device='cuda:2'), in_proj_covar=tensor([0.0353, 0.0394, 0.0352, 0.0293, 0.0377, 0.0387, 0.0382, 0.0355], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:23:45,766 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0894, 3.1106, 3.0697, 3.1316, 3.0257, 3.1255, 2.9206, 3.1031], + device='cuda:2'), covar=tensor([0.0258, 0.0320, 0.0367, 0.0364, 0.0389, 0.0346, 0.0386, 0.0448], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0088, 0.0091, 0.0086, 0.0098, 0.0090, 0.0106, 0.0080], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 12:23:51,075 INFO [train.py:892] (2/4) Epoch 39, batch 1100, loss[loss=0.16, simple_loss=0.2355, pruned_loss=0.04226, over 19836.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2344, pruned_loss=0.03613, over 3932756.52 frames. ], batch size: 171, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:24:28,078 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0270, 4.0778, 2.4938, 4.2691, 4.4363, 1.9889, 3.6866, 3.3482], + device='cuda:2'), covar=tensor([0.0711, 0.0835, 0.2640, 0.0766, 0.0644, 0.2837, 0.1124, 0.0924], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0267, 0.0237, 0.0286, 0.0267, 0.0208, 0.0246, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 12:24:43,420 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.48 vs. limit=5.0 +2023-03-29 12:24:48,547 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71616.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:25:24,241 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.877e+02 3.477e+02 4.222e+02 5.368e+02 8.846e+02, threshold=8.443e+02, percent-clipped=1.0 +2023-03-29 12:25:46,066 INFO [train.py:892] (2/4) Epoch 39, batch 1150, loss[loss=0.1604, simple_loss=0.2416, pruned_loss=0.03963, over 19771.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2347, pruned_loss=0.0364, over 3935711.44 frames. 
], batch size: 70, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:26:02,273 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71648.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:26:28,976 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5125, 3.6700, 2.4037, 4.3009, 3.8489, 4.2008, 4.3055, 3.3574], + device='cuda:2'), covar=tensor([0.0599, 0.0580, 0.1448, 0.0521, 0.0545, 0.0410, 0.0512, 0.0784], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0148, 0.0146, 0.0158, 0.0137, 0.0142, 0.0152, 0.0151], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:27:08,367 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71677.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:27:26,115 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71686.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:27:38,338 INFO [train.py:892] (2/4) Epoch 39, batch 1200, loss[loss=0.1532, simple_loss=0.2267, pruned_loss=0.03986, over 19816.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.236, pruned_loss=0.0369, over 3938063.68 frames. ], batch size: 202, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:27:53,255 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71696.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:29:12,329 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.421e+02 3.452e+02 4.248e+02 4.889e+02 8.041e+02, threshold=8.496e+02, percent-clipped=0.0 +2023-03-29 12:29:21,151 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71734.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:29:35,327 INFO [train.py:892] (2/4) Epoch 39, batch 1250, loss[loss=0.138, simple_loss=0.2202, pruned_loss=0.02789, over 19837.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2365, pruned_loss=0.03727, over 3940357.89 frames. ], batch size: 90, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:31:32,673 INFO [train.py:892] (2/4) Epoch 39, batch 1300, loss[loss=0.1969, simple_loss=0.2624, pruned_loss=0.06571, over 19799.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.235, pruned_loss=0.03668, over 3943383.03 frames. ], batch size: 126, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:33:04,690 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 3.787e+02 4.345e+02 5.084e+02 1.281e+03, threshold=8.689e+02, percent-clipped=1.0 +2023-03-29 12:33:29,419 INFO [train.py:892] (2/4) Epoch 39, batch 1350, loss[loss=0.1277, simple_loss=0.2093, pruned_loss=0.02311, over 19780.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2354, pruned_loss=0.03641, over 3944560.15 frames. 
], batch size: 215, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:34:22,420 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0377, 2.8835, 3.2040, 2.7801, 3.3172, 3.2494, 3.8410, 4.2379], + device='cuda:2'), covar=tensor([0.0633, 0.1783, 0.1617, 0.2325, 0.1731, 0.1591, 0.0612, 0.0592], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0246, 0.0274, 0.0262, 0.0308, 0.0266, 0.0239, 0.0269], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:34:30,916 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4408, 3.3188, 3.6897, 3.3557, 3.1985, 3.6337, 3.5039, 3.7397], + device='cuda:2'), covar=tensor([0.0834, 0.0418, 0.0413, 0.0447, 0.1545, 0.0578, 0.0481, 0.0417], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0227, 0.0227, 0.0239, 0.0210, 0.0251, 0.0240, 0.0225], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:35:29,332 INFO [train.py:892] (2/4) Epoch 39, batch 1400, loss[loss=0.141, simple_loss=0.2247, pruned_loss=0.02859, over 19774.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2355, pruned_loss=0.03604, over 3943716.08 frames. ], batch size: 108, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:36:38,148 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71919.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:37:03,103 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.539e+02 3.590e+02 4.169e+02 5.056e+02 8.405e+02, threshold=8.337e+02, percent-clipped=0.0 +2023-03-29 12:37:24,629 INFO [train.py:892] (2/4) Epoch 39, batch 1450, loss[loss=0.1678, simple_loss=0.2539, pruned_loss=0.04079, over 19755.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2353, pruned_loss=0.03611, over 3943791.47 frames. ], batch size: 44, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:37:28,983 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7392, 2.7029, 2.8916, 2.9375, 2.7122, 2.8901, 2.7602, 2.9243], + device='cuda:2'), covar=tensor([0.0378, 0.0380, 0.0363, 0.0286, 0.0468, 0.0343, 0.0421, 0.0357], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0089, 0.0091, 0.0085, 0.0098, 0.0091, 0.0106, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 12:38:39,232 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71972.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:38:58,475 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71980.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:39:24,728 INFO [train.py:892] (2/4) Epoch 39, batch 1500, loss[loss=0.1463, simple_loss=0.2313, pruned_loss=0.03066, over 19754.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2353, pruned_loss=0.03601, over 3944948.39 frames. 
], batch size: 179, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:40:25,763 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8869, 3.3575, 3.7088, 3.1586, 3.9688, 3.9042, 4.5752, 5.1317], + device='cuda:2'), covar=tensor([0.0428, 0.1526, 0.1384, 0.2189, 0.1627, 0.1371, 0.0574, 0.0491], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0246, 0.0274, 0.0261, 0.0307, 0.0266, 0.0240, 0.0268], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:40:36,642 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 12:40:49,423 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1635, 4.0472, 3.9854, 3.7281, 4.1799, 2.9729, 3.3654, 1.9191], + device='cuda:2'), covar=tensor([0.0278, 0.0273, 0.0218, 0.0269, 0.0227, 0.1221, 0.0964, 0.2055], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0137, 0.0122, 0.0136, 0.0145, 0.0129], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:41:04,305 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.372e+02 3.694e+02 4.331e+02 4.992e+02 8.331e+02, threshold=8.661e+02, percent-clipped=0.0 +2023-03-29 12:41:29,069 INFO [train.py:892] (2/4) Epoch 39, batch 1550, loss[loss=0.192, simple_loss=0.2822, pruned_loss=0.05096, over 19786.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2347, pruned_loss=0.03557, over 3946748.19 frames. ], batch size: 48, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:42:10,440 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2192, 5.4302, 5.6398, 5.3974, 5.4565, 5.1616, 5.3572, 5.1870], + device='cuda:2'), covar=tensor([0.1539, 0.1583, 0.0950, 0.1360, 0.0744, 0.0819, 0.1935, 0.2048], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0347, 0.0383, 0.0311, 0.0287, 0.0294, 0.0373, 0.0401], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 12:42:40,658 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5966, 2.9329, 2.6265, 2.1089, 2.6693, 2.9077, 2.8482, 2.8564], + device='cuda:2'), covar=tensor([0.0401, 0.0321, 0.0349, 0.0631, 0.0392, 0.0308, 0.0324, 0.0318], + device='cuda:2'), in_proj_covar=tensor([0.0113, 0.0106, 0.0107, 0.0108, 0.0111, 0.0095, 0.0096, 0.0095], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 12:43:25,411 INFO [train.py:892] (2/4) Epoch 39, batch 1600, loss[loss=0.1844, simple_loss=0.2687, pruned_loss=0.05, over 19694.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2337, pruned_loss=0.03521, over 3948752.40 frames. ], batch size: 325, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:44:50,114 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72129.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:44:53,623 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.272e+02 3.496e+02 4.151e+02 4.701e+02 9.677e+02, threshold=8.302e+02, percent-clipped=1.0 +2023-03-29 12:45:19,112 INFO [train.py:892] (2/4) Epoch 39, batch 1650, loss[loss=0.1482, simple_loss=0.2276, pruned_loss=0.03447, over 19836.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2335, pruned_loss=0.03556, over 3949682.74 frames. 
], batch size: 171, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:46:19,464 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2657, 3.2009, 4.8331, 3.6852, 3.7684, 3.6325, 2.5794, 2.8243], + device='cuda:2'), covar=tensor([0.0858, 0.2867, 0.0384, 0.0953, 0.1880, 0.1369, 0.2689, 0.2445], + device='cuda:2'), in_proj_covar=tensor([0.0353, 0.0393, 0.0352, 0.0293, 0.0376, 0.0387, 0.0383, 0.0356], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:46:38,770 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1654, 3.1127, 3.4098, 3.1050, 3.0304, 3.3932, 3.2611, 3.4686], + device='cuda:2'), covar=tensor([0.0849, 0.0409, 0.0427, 0.0472, 0.1725, 0.0603, 0.0499, 0.0420], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0228, 0.0228, 0.0241, 0.0211, 0.0253, 0.0241, 0.0225], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:47:09,844 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72190.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:47:10,944 INFO [train.py:892] (2/4) Epoch 39, batch 1700, loss[loss=0.1409, simple_loss=0.2274, pruned_loss=0.02723, over 19902.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2343, pruned_loss=0.03637, over 3950310.79 frames. ], batch size: 94, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:48:41,499 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.323e+02 3.706e+02 4.408e+02 5.454e+02 1.029e+03, threshold=8.816e+02, percent-clipped=3.0 +2023-03-29 12:49:01,614 INFO [train.py:892] (2/4) Epoch 39, batch 1750, loss[loss=0.146, simple_loss=0.2297, pruned_loss=0.03116, over 19778.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2333, pruned_loss=0.03615, over 3951486.18 frames. ], batch size: 66, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:50:03,162 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:50:09,033 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72275.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:50:29,189 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6414, 3.4738, 3.8587, 2.8882, 4.0319, 3.1874, 3.4355, 3.9180], + device='cuda:2'), covar=tensor([0.0788, 0.0450, 0.0533, 0.0806, 0.0341, 0.0431, 0.0587, 0.0291], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0092, 0.0089, 0.0114, 0.0085, 0.0088, 0.0085, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 12:50:40,135 INFO [train.py:892] (2/4) Epoch 39, batch 1800, loss[loss=0.153, simple_loss=0.2296, pruned_loss=0.03824, over 19829.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2334, pruned_loss=0.03623, over 3950262.41 frames. 
], batch size: 202, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:51:36,781 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:51:36,986 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6836, 2.1240, 2.4801, 2.8472, 3.2871, 3.3687, 3.3168, 3.3887], + device='cuda:2'), covar=tensor([0.1097, 0.1736, 0.1452, 0.0800, 0.0536, 0.0404, 0.0451, 0.0461], + device='cuda:2'), in_proj_covar=tensor([0.0165, 0.0170, 0.0183, 0.0155, 0.0141, 0.0137, 0.0130, 0.0121], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 12:51:47,506 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72326.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:51:57,377 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 3.449e+02 3.996e+02 5.050e+02 9.624e+02, threshold=7.991e+02, percent-clipped=1.0 +2023-03-29 12:52:15,295 INFO [train.py:892] (2/4) Epoch 39, batch 1850, loss[loss=0.1435, simple_loss=0.2321, pruned_loss=0.02739, over 19562.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2344, pruned_loss=0.03595, over 3950558.85 frames. ], batch size: 53, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:52:16,221 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2365, 3.4824, 2.9476, 2.5929, 3.0519, 3.5083, 3.3833, 3.3833], + device='cuda:2'), covar=tensor([0.0314, 0.0272, 0.0321, 0.0526, 0.0341, 0.0287, 0.0269, 0.0253], + device='cuda:2'), in_proj_covar=tensor([0.0113, 0.0106, 0.0107, 0.0108, 0.0111, 0.0095, 0.0096, 0.0095], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 12:53:18,931 INFO [train.py:892] (2/4) Epoch 40, batch 0, loss[loss=0.1389, simple_loss=0.2211, pruned_loss=0.02834, over 19691.00 frames. ], tot_loss[loss=0.1389, simple_loss=0.2211, pruned_loss=0.02834, over 19691.00 frames. ], batch size: 74, lr: 3.93e-03, grad_scale: 8.0 +2023-03-29 12:53:18,932 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 12:53:42,476 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5818, 2.6077, 4.0360, 3.0643, 3.3596, 3.0089, 2.3215, 2.3767], + device='cuda:2'), covar=tensor([0.1351, 0.3508, 0.0619, 0.1224, 0.2028, 0.1855, 0.3024, 0.3368], + device='cuda:2'), in_proj_covar=tensor([0.0354, 0.0394, 0.0352, 0.0294, 0.0377, 0.0388, 0.0384, 0.0357], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:53:44,435 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7787, 3.3360, 3.6246, 3.1085, 3.7643, 3.7537, 4.5332, 5.0124], + device='cuda:2'), covar=tensor([0.0410, 0.1714, 0.1357, 0.2277, 0.1613, 0.1574, 0.0503, 0.0391], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0245, 0.0274, 0.0260, 0.0307, 0.0265, 0.0239, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:53:52,920 INFO [train.py:926] (2/4) Epoch 40, validation: loss=0.1851, simple_loss=0.2491, pruned_loss=0.0605, over 2883724.00 frames. +2023-03-29 12:53:52,921 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 12:54:43,487 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.75 vs. 
limit=5.0 +2023-03-29 12:55:01,010 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7534, 4.7327, 5.1270, 4.8887, 5.0047, 4.6208, 4.9080, 4.6648], + device='cuda:2'), covar=tensor([0.1456, 0.1499, 0.0843, 0.1289, 0.0775, 0.0890, 0.1684, 0.1958], + device='cuda:2'), in_proj_covar=tensor([0.0304, 0.0346, 0.0381, 0.0313, 0.0287, 0.0294, 0.0372, 0.0401], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 12:55:30,605 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72387.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:55:53,218 INFO [train.py:892] (2/4) Epoch 40, batch 50, loss[loss=0.1538, simple_loss=0.2344, pruned_loss=0.0366, over 19796.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2317, pruned_loss=0.03428, over 889157.31 frames. ], batch size: 65, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 12:57:04,527 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7687, 4.4668, 4.5150, 4.2581, 4.7369, 3.1529, 3.9041, 2.2163], + device='cuda:2'), covar=tensor([0.0159, 0.0210, 0.0145, 0.0202, 0.0137, 0.1056, 0.0721, 0.1631], + device='cuda:2'), in_proj_covar=tensor([0.0108, 0.0151, 0.0116, 0.0137, 0.0122, 0.0137, 0.0145, 0.0130], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 12:57:12,955 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.328e+02 3.429e+02 4.154e+02 4.820e+02 1.115e+03, threshold=8.307e+02, percent-clipped=2.0 +2023-03-29 12:57:46,537 INFO [train.py:892] (2/4) Epoch 40, batch 100, loss[loss=0.1412, simple_loss=0.2194, pruned_loss=0.0315, over 19889.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.2342, pruned_loss=0.03451, over 1567051.99 frames. ], batch size: 87, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 12:58:35,861 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-29 12:59:13,002 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-29 12:59:14,251 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72485.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:59:37,254 INFO [train.py:892] (2/4) Epoch 40, batch 150, loss[loss=0.1583, simple_loss=0.2416, pruned_loss=0.03744, over 19875.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2338, pruned_loss=0.03482, over 2094835.28 frames. ], batch size: 89, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:00:58,605 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.053e+02 3.352e+02 3.945e+02 4.917e+02 7.870e+02, threshold=7.891e+02, percent-clipped=0.0 +2023-03-29 13:01:02,034 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2086, 3.2780, 2.0310, 3.8532, 3.5327, 3.7900, 3.8701, 3.0478], + device='cuda:2'), covar=tensor([0.0711, 0.0721, 0.1710, 0.0687, 0.0665, 0.0512, 0.0614, 0.0822], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0151, 0.0147, 0.0161, 0.0139, 0.0144, 0.0155, 0.0153], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 13:01:36,371 INFO [train.py:892] (2/4) Epoch 40, batch 200, loss[loss=0.1477, simple_loss=0.2272, pruned_loss=0.03404, over 19830.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2366, pruned_loss=0.03586, over 2503404.75 frames. 
], batch size: 208, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:02:44,250 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72575.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:03:24,719 INFO [train.py:892] (2/4) Epoch 40, batch 250, loss[loss=0.161, simple_loss=0.2408, pruned_loss=0.04058, over 19650.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2346, pruned_loss=0.03515, over 2823403.97 frames. ], batch size: 299, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:04:28,517 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72623.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:04:44,511 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.269e+02 3.429e+02 3.885e+02 4.899e+02 8.561e+02, threshold=7.769e+02, percent-clipped=1.0 +2023-03-29 13:05:16,304 INFO [train.py:892] (2/4) Epoch 40, batch 300, loss[loss=0.1663, simple_loss=0.2489, pruned_loss=0.04183, over 19835.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.2338, pruned_loss=0.03488, over 3074620.09 frames. ], batch size: 90, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:06:40,535 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72682.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:07:13,652 INFO [train.py:892] (2/4) Epoch 40, batch 350, loss[loss=0.1271, simple_loss=0.2121, pruned_loss=0.02104, over 19745.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2354, pruned_loss=0.03579, over 3267384.19 frames. ], batch size: 110, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:08:29,521 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.535e+02 3.481e+02 4.095e+02 5.199e+02 1.202e+03, threshold=8.190e+02, percent-clipped=3.0 +2023-03-29 13:08:53,017 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 13:09:02,641 INFO [train.py:892] (2/4) Epoch 40, batch 400, loss[loss=0.1594, simple_loss=0.237, pruned_loss=0.04092, over 19830.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2343, pruned_loss=0.03523, over 3419209.66 frames. 
], batch size: 146, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:09:03,557 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8468, 3.7409, 4.0863, 3.7549, 3.5604, 3.9967, 3.8283, 4.1546], + device='cuda:2'), covar=tensor([0.0783, 0.0384, 0.0380, 0.0399, 0.1190, 0.0585, 0.0510, 0.0373], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0228, 0.0228, 0.0241, 0.0211, 0.0253, 0.0242, 0.0226], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 13:09:05,778 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6785, 2.1986, 2.4376, 2.8311, 3.2290, 3.3533, 3.1918, 3.3100], + device='cuda:2'), covar=tensor([0.1000, 0.1640, 0.1448, 0.0774, 0.0502, 0.0351, 0.0522, 0.0494], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0171, 0.0184, 0.0156, 0.0143, 0.0137, 0.0131, 0.0121], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 13:10:20,233 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6022, 4.3142, 4.3504, 4.1396, 4.5920, 3.1587, 3.8310, 2.4176], + device='cuda:2'), covar=tensor([0.0177, 0.0232, 0.0155, 0.0192, 0.0137, 0.0984, 0.0720, 0.1434], + device='cuda:2'), in_proj_covar=tensor([0.0108, 0.0151, 0.0117, 0.0139, 0.0123, 0.0138, 0.0146, 0.0131], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 13:10:34,228 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72785.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:10:56,576 INFO [train.py:892] (2/4) Epoch 40, batch 450, loss[loss=0.1386, simple_loss=0.2182, pruned_loss=0.02952, over 19806.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2333, pruned_loss=0.03481, over 3536856.11 frames. ], batch size: 72, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:11:20,908 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6793, 3.8575, 2.3046, 4.1073, 4.2261, 1.8694, 3.1884, 2.9759], + device='cuda:2'), covar=tensor([0.0940, 0.0954, 0.3251, 0.0822, 0.0582, 0.3029, 0.1608, 0.1198], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0268, 0.0237, 0.0286, 0.0267, 0.0208, 0.0246, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 13:12:04,944 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4010, 5.6421, 5.7052, 5.5384, 5.4439, 5.6584, 5.1022, 5.0970], + device='cuda:2'), covar=tensor([0.0443, 0.0474, 0.0444, 0.0462, 0.0571, 0.0510, 0.0647, 0.0989], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0303, 0.0312, 0.0273, 0.0281, 0.0262, 0.0279, 0.0325], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 13:12:17,045 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.344e+02 3.525e+02 4.138e+02 5.025e+02 1.485e+03, threshold=8.275e+02, percent-clipped=3.0 +2023-03-29 13:12:23,711 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72833.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:12:56,037 INFO [train.py:892] (2/4) Epoch 40, batch 500, loss[loss=0.1572, simple_loss=0.2394, pruned_loss=0.03745, over 19795.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2331, pruned_loss=0.03493, over 3627498.51 frames. 
], batch size: 185, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:14:11,223 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.1583, 2.1635, 2.3020, 2.2460, 2.2276, 2.2098, 2.1951, 2.2878], + device='cuda:2'), covar=tensor([0.0429, 0.0368, 0.0352, 0.0395, 0.0529, 0.0479, 0.0523, 0.0405], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0088, 0.0090, 0.0085, 0.0098, 0.0090, 0.0107, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 13:14:44,590 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72894.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:14:47,529 INFO [train.py:892] (2/4) Epoch 40, batch 550, loss[loss=0.1571, simple_loss=0.2327, pruned_loss=0.04079, over 19837.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2334, pruned_loss=0.03538, over 3699595.93 frames. ], batch size: 177, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:16:09,034 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.263e+02 3.703e+02 4.460e+02 5.204e+02 8.287e+02, threshold=8.920e+02, percent-clipped=1.0 +2023-03-29 13:16:45,754 INFO [train.py:892] (2/4) Epoch 40, batch 600, loss[loss=0.151, simple_loss=0.2352, pruned_loss=0.03337, over 19767.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2336, pruned_loss=0.03579, over 3754649.92 frames. ], batch size: 69, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:17:06,947 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72955.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:17:26,556 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72964.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 13:18:08,820 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72982.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:18:38,506 INFO [train.py:892] (2/4) Epoch 40, batch 650, loss[loss=0.1452, simple_loss=0.2178, pruned_loss=0.03626, over 19865.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2338, pruned_loss=0.03596, over 3797861.89 frames. ], batch size: 122, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:19:43,075 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73025.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 13:19:43,637 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-29 13:19:46,968 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3384, 2.5346, 4.3690, 3.7544, 4.1822, 4.3412, 4.0887, 4.1006], + device='cuda:2'), covar=tensor([0.0578, 0.1098, 0.0125, 0.0691, 0.0173, 0.0227, 0.0210, 0.0184], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0106, 0.0091, 0.0154, 0.0090, 0.0102, 0.0092, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 13:19:55,749 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73030.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:19:57,045 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.367e+02 3.602e+02 4.054e+02 4.696e+02 9.081e+02, threshold=8.108e+02, percent-clipped=1.0 +2023-03-29 13:20:29,419 INFO [train.py:892] (2/4) Epoch 40, batch 700, loss[loss=0.1466, simple_loss=0.2312, pruned_loss=0.03103, over 19814.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2346, pruned_loss=0.03593, over 3830084.86 frames. 
], batch size: 72, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:21:47,609 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0875, 2.4003, 3.2742, 2.7269, 2.7673, 2.7299, 2.0088, 2.2450], + device='cuda:2'), covar=tensor([0.1235, 0.2858, 0.0752, 0.1260, 0.2122, 0.1554, 0.2883, 0.2614], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0401, 0.0358, 0.0298, 0.0384, 0.0393, 0.0389, 0.0361], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 13:22:25,653 INFO [train.py:892] (2/4) Epoch 40, batch 750, loss[loss=0.1446, simple_loss=0.2221, pruned_loss=0.03349, over 19839.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2345, pruned_loss=0.03578, over 3857210.25 frames. ], batch size: 190, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:23:43,971 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.740e+02 3.647e+02 4.174e+02 4.953e+02 1.017e+03, threshold=8.348e+02, percent-clipped=2.0 +2023-03-29 13:24:17,080 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6881, 4.6977, 2.7789, 4.9677, 5.2042, 2.2448, 4.4711, 3.8254], + device='cuda:2'), covar=tensor([0.0521, 0.0727, 0.2605, 0.0638, 0.0458, 0.2640, 0.0819, 0.0821], + device='cuda:2'), in_proj_covar=tensor([0.0241, 0.0267, 0.0237, 0.0285, 0.0266, 0.0207, 0.0245, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 13:24:18,103 INFO [train.py:892] (2/4) Epoch 40, batch 800, loss[loss=0.1407, simple_loss=0.2168, pruned_loss=0.03232, over 19469.00 frames. ], tot_loss[loss=0.1525, simple_loss=0.234, pruned_loss=0.03551, over 3877016.00 frames. ], batch size: 43, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:26:08,380 INFO [train.py:892] (2/4) Epoch 40, batch 850, loss[loss=0.1499, simple_loss=0.2181, pruned_loss=0.04083, over 19802.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2337, pruned_loss=0.03539, over 3892933.44 frames. ], batch size: 173, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:26:55,201 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3321, 3.2959, 3.2039, 2.9759, 3.3497, 2.5711, 2.6152, 1.5924], + device='cuda:2'), covar=tensor([0.0287, 0.0283, 0.0225, 0.0258, 0.0217, 0.1601, 0.0701, 0.1993], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0152, 0.0117, 0.0139, 0.0123, 0.0138, 0.0146, 0.0131], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 13:27:30,151 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.270e+02 3.428e+02 4.036e+02 4.943e+02 8.954e+02, threshold=8.072e+02, percent-clipped=2.0 +2023-03-29 13:27:39,907 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.07 vs. limit=5.0 +2023-03-29 13:28:06,278 INFO [train.py:892] (2/4) Epoch 40, batch 900, loss[loss=0.1379, simple_loss=0.2162, pruned_loss=0.0298, over 19764.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.234, pruned_loss=0.03575, over 3906309.83 frames. 
], batch size: 122, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:28:17,278 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73250.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:29:54,043 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73293.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:29:59,518 INFO [train.py:892] (2/4) Epoch 40, batch 950, loss[loss=0.1369, simple_loss=0.2154, pruned_loss=0.02922, over 19817.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2342, pruned_loss=0.03578, over 3916932.90 frames. ], batch size: 132, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:30:56,400 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73320.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 13:31:21,685 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.304e+02 3.673e+02 4.396e+02 5.131e+02 1.124e+03, threshold=8.792e+02, percent-clipped=1.0 +2023-03-29 13:31:53,791 INFO [train.py:892] (2/4) Epoch 40, batch 1000, loss[loss=0.1465, simple_loss=0.2239, pruned_loss=0.03457, over 19691.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2344, pruned_loss=0.03607, over 3921963.12 frames. ], batch size: 75, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:32:04,865 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3852, 1.8731, 2.0222, 2.5840, 2.8532, 2.9417, 2.8427, 2.8967], + device='cuda:2'), covar=tensor([0.1092, 0.1760, 0.1622, 0.0768, 0.0540, 0.0433, 0.0491, 0.0498], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0172, 0.0185, 0.0157, 0.0144, 0.0138, 0.0132, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 13:32:15,141 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73354.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:32:53,964 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7525, 2.2052, 2.6116, 2.9571, 3.3885, 3.5631, 3.4427, 3.4961], + device='cuda:2'), covar=tensor([0.1057, 0.1715, 0.1380, 0.0766, 0.0490, 0.0361, 0.0439, 0.0480], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0172, 0.0185, 0.0157, 0.0143, 0.0138, 0.0132, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 13:33:43,200 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-03-29 13:33:45,004 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2228, 3.2969, 2.2094, 3.8105, 3.5361, 3.8223, 3.8649, 3.0893], + device='cuda:2'), covar=tensor([0.0719, 0.0695, 0.1498, 0.0730, 0.0597, 0.0559, 0.0756, 0.0795], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0149, 0.0146, 0.0160, 0.0138, 0.0143, 0.0154, 0.0152], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 13:33:49,226 INFO [train.py:892] (2/4) Epoch 40, batch 1050, loss[loss=0.1651, simple_loss=0.2387, pruned_loss=0.04578, over 19788.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2347, pruned_loss=0.03635, over 3928375.74 frames. 
], batch size: 151, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:35:09,643 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.437e+02 3.473e+02 4.249e+02 4.984e+02 7.164e+02, threshold=8.499e+02, percent-clipped=0.0 +2023-03-29 13:35:39,490 INFO [train.py:892] (2/4) Epoch 40, batch 1100, loss[loss=0.1376, simple_loss=0.2096, pruned_loss=0.03275, over 19801.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2343, pruned_loss=0.03593, over 3932848.40 frames. ], batch size: 195, lr: 3.90e-03, grad_scale: 16.0 +2023-03-29 13:35:59,908 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.25 vs. limit=5.0 +2023-03-29 13:37:31,473 INFO [train.py:892] (2/4) Epoch 40, batch 1150, loss[loss=0.1339, simple_loss=0.2137, pruned_loss=0.02701, over 19892.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2344, pruned_loss=0.03613, over 3934742.52 frames. ], batch size: 94, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:38:51,535 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.496e+02 3.768e+02 4.402e+02 5.430e+02 9.903e+02, threshold=8.803e+02, percent-clipped=3.0 +2023-03-29 13:39:15,984 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8583, 2.8503, 2.9646, 3.0536, 2.8614, 2.9142, 2.7978, 2.9911], + device='cuda:2'), covar=tensor([0.0384, 0.0413, 0.0412, 0.0295, 0.0457, 0.0393, 0.0414, 0.0372], + device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0089, 0.0091, 0.0086, 0.0099, 0.0092, 0.0108, 0.0080], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 13:39:27,925 INFO [train.py:892] (2/4) Epoch 40, batch 1200, loss[loss=0.1456, simple_loss=0.223, pruned_loss=0.03413, over 19805.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2342, pruned_loss=0.03627, over 3938860.69 frames. ], batch size: 98, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:39:30,747 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5410, 4.2669, 4.3359, 4.5709, 4.2602, 4.6347, 4.6426, 4.8553], + device='cuda:2'), covar=tensor([0.0711, 0.0472, 0.0544, 0.0390, 0.0787, 0.0536, 0.0433, 0.0302], + device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0183, 0.0204, 0.0180, 0.0180, 0.0164, 0.0156, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 13:39:36,968 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73550.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:39:37,720 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-29 13:39:44,186 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. 
limit=2.0 +2023-03-29 13:40:23,201 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1909, 5.4863, 5.5615, 5.4273, 5.2042, 5.5328, 5.0065, 4.9799], + device='cuda:2'), covar=tensor([0.0455, 0.0448, 0.0401, 0.0392, 0.0558, 0.0438, 0.0607, 0.0930], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0306, 0.0314, 0.0275, 0.0284, 0.0264, 0.0279, 0.0327], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 13:40:31,661 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3131, 2.3497, 2.5004, 2.3397, 2.4220, 2.4337, 2.4174, 2.4738], + device='cuda:2'), covar=tensor([0.0502, 0.0404, 0.0360, 0.0407, 0.0492, 0.0404, 0.0500, 0.0365], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0089, 0.0091, 0.0086, 0.0099, 0.0092, 0.0107, 0.0080], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 13:40:35,940 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.72 vs. limit=5.0 +2023-03-29 13:41:06,799 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73590.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:41:18,981 INFO [train.py:892] (2/4) Epoch 40, batch 1250, loss[loss=0.1452, simple_loss=0.2213, pruned_loss=0.03459, over 19773.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2336, pruned_loss=0.03626, over 3942188.82 frames. ], batch size: 182, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:41:23,978 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73598.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:42:00,349 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-29 13:42:14,289 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73620.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 13:42:38,866 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.357e+02 3.537e+02 4.071e+02 5.247e+02 9.229e+02, threshold=8.143e+02, percent-clipped=1.0 +2023-03-29 13:43:04,953 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8471, 1.7228, 1.8910, 1.8730, 1.8287, 1.8584, 1.8121, 1.8997], + device='cuda:2'), covar=tensor([0.0403, 0.0408, 0.0389, 0.0351, 0.0506, 0.0389, 0.0489, 0.0381], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0089, 0.0091, 0.0086, 0.0099, 0.0092, 0.0108, 0.0080], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 13:43:11,855 INFO [train.py:892] (2/4) Epoch 40, batch 1300, loss[loss=0.1546, simple_loss=0.2312, pruned_loss=0.03897, over 19899.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2341, pruned_loss=0.03627, over 3942531.65 frames. ], batch size: 91, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:43:18,811 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73649.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:43:23,114 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73651.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:44:03,519 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73668.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 13:45:04,604 INFO [train.py:892] (2/4) Epoch 40, batch 1350, loss[loss=0.1514, simple_loss=0.2295, pruned_loss=0.03665, over 19900.00 frames. 
], tot_loss[loss=0.1536, simple_loss=0.2345, pruned_loss=0.03635, over 3944663.85 frames. ], batch size: 71, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:46:21,769 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.112e+02 3.360e+02 4.018e+02 4.695e+02 9.172e+02, threshold=8.036e+02, percent-clipped=1.0 +2023-03-29 13:46:24,763 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73732.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:46:56,660 INFO [train.py:892] (2/4) Epoch 40, batch 1400, loss[loss=0.1668, simple_loss=0.258, pruned_loss=0.03783, over 19577.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2336, pruned_loss=0.03621, over 3946017.75 frames. ], batch size: 49, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:48:43,205 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73793.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:48:49,028 INFO [train.py:892] (2/4) Epoch 40, batch 1450, loss[loss=0.1522, simple_loss=0.2295, pruned_loss=0.03746, over 19733.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2346, pruned_loss=0.03641, over 3946676.46 frames. ], batch size: 71, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:50:11,121 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.549e+02 3.468e+02 4.023e+02 4.768e+02 7.367e+02, threshold=8.047e+02, percent-clipped=0.0 +2023-03-29 13:50:28,086 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-29 13:50:45,946 INFO [train.py:892] (2/4) Epoch 40, batch 1500, loss[loss=0.1492, simple_loss=0.241, pruned_loss=0.02876, over 19820.00 frames. ], tot_loss[loss=0.1525, simple_loss=0.2334, pruned_loss=0.03583, over 3948757.57 frames. ], batch size: 57, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:51:53,011 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5172, 2.8662, 3.0138, 3.3791, 2.3935, 3.0456, 2.2803, 2.2820], + device='cuda:2'), covar=tensor([0.0590, 0.1400, 0.1010, 0.0528, 0.2088, 0.0831, 0.1394, 0.1600], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0327, 0.0252, 0.0209, 0.0251, 0.0214, 0.0223, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 13:52:34,857 INFO [train.py:892] (2/4) Epoch 40, batch 1550, loss[loss=0.1528, simple_loss=0.2352, pruned_loss=0.03525, over 19717.00 frames. ], tot_loss[loss=0.1522, simple_loss=0.2335, pruned_loss=0.03547, over 3948039.74 frames. ], batch size: 71, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:53:51,995 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.479e+02 3.617e+02 4.156e+02 5.212e+02 8.023e+02, threshold=8.312e+02, percent-clipped=0.0 +2023-03-29 13:54:26,945 INFO [train.py:892] (2/4) Epoch 40, batch 1600, loss[loss=0.14, simple_loss=0.2174, pruned_loss=0.03127, over 19773.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2337, pruned_loss=0.03517, over 3948513.09 frames. ], batch size: 169, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:54:27,761 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73946.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:54:33,787 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73949.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:56:21,340 INFO [train.py:892] (2/4) Epoch 40, batch 1650, loss[loss=0.189, simple_loss=0.2761, pruned_loss=0.05097, over 19652.00 frames. 
], tot_loss[loss=0.1527, simple_loss=0.2344, pruned_loss=0.03547, over 3946893.71 frames. ], batch size: 343, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:56:27,280 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73997.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:56:49,213 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4110, 4.9462, 5.0334, 4.6946, 5.3338, 3.2861, 4.1899, 2.7443], + device='cuda:2'), covar=tensor([0.0167, 0.0215, 0.0153, 0.0216, 0.0132, 0.0956, 0.0959, 0.1427], + device='cuda:2'), in_proj_covar=tensor([0.0108, 0.0152, 0.0117, 0.0138, 0.0122, 0.0138, 0.0146, 0.0131], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 13:57:48,764 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.164e+02 3.493e+02 4.073e+02 5.082e+02 8.769e+02, threshold=8.147e+02, percent-clipped=1.0 +2023-03-29 13:58:18,979 INFO [train.py:892] (2/4) Epoch 40, batch 1700, loss[loss=0.1459, simple_loss=0.2317, pruned_loss=0.03009, over 19689.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2337, pruned_loss=0.03521, over 3948177.52 frames. ], batch size: 74, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:58:32,007 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0235, 2.6531, 3.1890, 3.1982, 3.7627, 4.2209, 4.0815, 4.1099], + device='cuda:2'), covar=tensor([0.1008, 0.1616, 0.1235, 0.0753, 0.0433, 0.0218, 0.0358, 0.0422], + device='cuda:2'), in_proj_covar=tensor([0.0164, 0.0170, 0.0182, 0.0156, 0.0142, 0.0138, 0.0131, 0.0121], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 13:59:01,494 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74064.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:59:03,642 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2882, 4.8196, 4.9351, 4.5797, 5.1969, 3.3541, 4.1631, 2.7060], + device='cuda:2'), covar=tensor([0.0147, 0.0208, 0.0136, 0.0211, 0.0141, 0.0918, 0.0918, 0.1399], + device='cuda:2'), in_proj_covar=tensor([0.0108, 0.0151, 0.0116, 0.0138, 0.0122, 0.0138, 0.0145, 0.0130], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 13:59:50,936 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74088.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:00:06,306 INFO [train.py:892] (2/4) Epoch 40, batch 1750, loss[loss=0.132, simple_loss=0.2171, pruned_loss=0.02341, over 19835.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.233, pruned_loss=0.03542, over 3949902.66 frames. ], batch size: 90, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 14:00:47,424 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6406, 2.5926, 2.9260, 2.5507, 3.0645, 3.0261, 3.4478, 3.8022], + device='cuda:2'), covar=tensor([0.0668, 0.1841, 0.1660, 0.2252, 0.1671, 0.1470, 0.0734, 0.0637], + device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0248, 0.0275, 0.0261, 0.0307, 0.0267, 0.0241, 0.0269], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:00:47,817 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. 
limit=2.0 +2023-03-29 14:01:03,989 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74125.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:01:13,844 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.104e+02 3.603e+02 4.226e+02 5.080e+02 8.016e+02, threshold=8.451e+02, percent-clipped=0.0 +2023-03-29 14:01:43,816 INFO [train.py:892] (2/4) Epoch 40, batch 1800, loss[loss=0.1343, simple_loss=0.2133, pruned_loss=0.02764, over 19819.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2337, pruned_loss=0.03581, over 3949941.56 frames. ], batch size: 103, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 14:03:18,051 INFO [train.py:892] (2/4) Epoch 40, batch 1850, loss[loss=0.1477, simple_loss=0.2308, pruned_loss=0.03231, over 19827.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2346, pruned_loss=0.03536, over 3949685.62 frames. ], batch size: 57, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 14:04:24,217 INFO [train.py:892] (2/4) Epoch 41, batch 0, loss[loss=0.1595, simple_loss=0.2436, pruned_loss=0.03771, over 19673.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2436, pruned_loss=0.03771, over 19673.00 frames. ], batch size: 325, lr: 3.83e-03, grad_scale: 16.0 +2023-03-29 14:04:24,218 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 14:04:45,232 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7558, 2.4158, 2.9570, 2.6058, 3.1499, 3.2762, 2.9465, 3.1517], + device='cuda:2'), covar=tensor([0.0579, 0.0869, 0.0138, 0.0350, 0.0155, 0.0242, 0.0217, 0.0224], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:04:57,764 INFO [train.py:926] (2/4) Epoch 41, validation: loss=0.1869, simple_loss=0.2502, pruned_loss=0.06181, over 2883724.00 frames. +2023-03-29 14:04:57,766 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 14:06:10,071 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.348e+02 3.328e+02 3.806e+02 4.731e+02 7.088e+02, threshold=7.612e+02, percent-clipped=0.0 +2023-03-29 14:06:30,793 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74240.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:06:46,866 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74246.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:06:57,520 INFO [train.py:892] (2/4) Epoch 41, batch 50, loss[loss=0.1746, simple_loss=0.2661, pruned_loss=0.04157, over 19613.00 frames. ], tot_loss[loss=0.1459, simple_loss=0.2256, pruned_loss=0.03307, over 889342.21 frames. ], batch size: 351, lr: 3.83e-03, grad_scale: 16.0 +2023-03-29 14:07:16,044 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-29 14:08:04,629 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. 
limit=2.0 +2023-03-29 14:08:22,588 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.6464, 5.9113, 5.9612, 5.8457, 5.6861, 5.9156, 5.3344, 5.3336], + device='cuda:2'), covar=tensor([0.0405, 0.0462, 0.0418, 0.0429, 0.0549, 0.0492, 0.0700, 0.0917], + device='cuda:2'), in_proj_covar=tensor([0.0287, 0.0307, 0.0316, 0.0276, 0.0284, 0.0267, 0.0281, 0.0327], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:08:35,492 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74294.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:08:49,409 INFO [train.py:892] (2/4) Epoch 41, batch 100, loss[loss=0.149, simple_loss=0.2292, pruned_loss=0.03438, over 19831.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2285, pruned_loss=0.03292, over 1568018.34 frames. ], batch size: 145, lr: 3.83e-03, grad_scale: 16.0 +2023-03-29 14:08:50,569 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74301.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:09:54,209 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.051e+02 3.488e+02 4.125e+02 4.651e+02 7.521e+02, threshold=8.249e+02, percent-clipped=0.0 +2023-03-29 14:10:04,742 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-29 14:10:38,043 INFO [train.py:892] (2/4) Epoch 41, batch 150, loss[loss=0.1454, simple_loss=0.2267, pruned_loss=0.03209, over 19897.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.2289, pruned_loss=0.03342, over 2095914.36 frames. ], batch size: 63, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:11:50,623 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5917, 2.1485, 3.6329, 3.1018, 3.6169, 3.7169, 3.4744, 3.4941], + device='cuda:2'), covar=tensor([0.0823, 0.1181, 0.0134, 0.0535, 0.0182, 0.0244, 0.0229, 0.0210], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:12:03,001 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74388.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:12:35,705 INFO [train.py:892] (2/4) Epoch 41, batch 200, loss[loss=0.1421, simple_loss=0.2262, pruned_loss=0.02904, over 19811.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2312, pruned_loss=0.03373, over 2507680.20 frames. ], batch size: 288, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:13:19,119 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74420.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:13:44,019 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.532e+02 3.589e+02 4.027e+02 4.762e+02 8.323e+02, threshold=8.054e+02, percent-clipped=1.0 +2023-03-29 14:13:55,054 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74436.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:14:27,196 INFO [train.py:892] (2/4) Epoch 41, batch 250, loss[loss=0.1819, simple_loss=0.2607, pruned_loss=0.0515, over 19791.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2331, pruned_loss=0.03461, over 2827718.40 frames. 
], batch size: 162, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:14:40,038 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.5290, 5.8055, 5.8530, 5.7132, 5.5744, 5.8129, 5.2170, 5.2366], + device='cuda:2'), covar=tensor([0.0415, 0.0411, 0.0420, 0.0415, 0.0543, 0.0441, 0.0637, 0.0960], + device='cuda:2'), in_proj_covar=tensor([0.0285, 0.0305, 0.0314, 0.0274, 0.0283, 0.0266, 0.0279, 0.0325], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:16:20,790 INFO [train.py:892] (2/4) Epoch 41, batch 300, loss[loss=0.1392, simple_loss=0.2182, pruned_loss=0.03006, over 19883.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2328, pruned_loss=0.03468, over 3077896.28 frames. ], batch size: 88, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:17:27,573 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.493e+02 3.756e+02 4.487e+02 5.432e+02 9.829e+02, threshold=8.974e+02, percent-clipped=6.0 +2023-03-29 14:18:14,843 INFO [train.py:892] (2/4) Epoch 41, batch 350, loss[loss=0.165, simple_loss=0.2363, pruned_loss=0.04684, over 19635.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2337, pruned_loss=0.03554, over 3271541.08 frames. ], batch size: 72, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:19:57,788 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74595.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:19:59,629 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74596.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:20:06,550 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 14:20:09,423 INFO [train.py:892] (2/4) Epoch 41, batch 400, loss[loss=0.1409, simple_loss=0.2262, pruned_loss=0.02775, over 19840.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.2331, pruned_loss=0.03499, over 3420371.07 frames. ], batch size: 59, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:20:45,042 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7293, 3.9973, 4.2862, 4.8283, 3.2129, 3.4751, 2.9363, 2.8953], + device='cuda:2'), covar=tensor([0.0448, 0.1832, 0.0732, 0.0346, 0.1863, 0.1082, 0.1304, 0.1560], + device='cuda:2'), in_proj_covar=tensor([0.0253, 0.0328, 0.0253, 0.0211, 0.0252, 0.0215, 0.0223, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 14:20:56,787 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-29 14:21:17,894 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.495e+02 3.791e+02 4.431e+02 5.113e+02 8.392e+02, threshold=8.862e+02, percent-clipped=0.0 +2023-03-29 14:21:54,664 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1044, 2.9065, 3.2072, 2.7796, 3.3778, 3.4133, 3.8997, 4.3714], + device='cuda:2'), covar=tensor([0.0644, 0.1849, 0.1654, 0.2370, 0.1810, 0.1465, 0.0678, 0.0626], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0246, 0.0274, 0.0261, 0.0306, 0.0264, 0.0240, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:22:06,034 INFO [train.py:892] (2/4) Epoch 41, batch 450, loss[loss=0.1373, simple_loss=0.2178, pruned_loss=0.02844, over 19570.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2325, pruned_loss=0.03506, over 3538900.31 frames. 
], batch size: 42, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:22:17,870 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74656.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:24:01,229 INFO [train.py:892] (2/4) Epoch 41, batch 500, loss[loss=0.1477, simple_loss=0.2248, pruned_loss=0.03532, over 19677.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2325, pruned_loss=0.03524, over 3630390.12 frames. ], batch size: 64, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:24:47,976 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74720.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:25:12,467 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.085e+02 3.392e+02 3.989e+02 4.649e+02 9.968e+02, threshold=7.978e+02, percent-clipped=1.0 +2023-03-29 14:25:55,695 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2538, 4.1021, 4.5158, 4.1314, 3.7802, 4.3442, 4.2248, 4.5856], + device='cuda:2'), covar=tensor([0.0766, 0.0354, 0.0353, 0.0383, 0.1196, 0.0594, 0.0454, 0.0338], + device='cuda:2'), in_proj_covar=tensor([0.0287, 0.0229, 0.0228, 0.0240, 0.0212, 0.0253, 0.0241, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:25:56,881 INFO [train.py:892] (2/4) Epoch 41, batch 550, loss[loss=0.1512, simple_loss=0.2226, pruned_loss=0.03995, over 19838.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2322, pruned_loss=0.03527, over 3701883.89 frames. ], batch size: 145, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:26:35,991 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74768.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:27:02,452 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3797, 3.0656, 3.3812, 2.9209, 3.5620, 3.5393, 4.1735, 4.6717], + device='cuda:2'), covar=tensor([0.0559, 0.1690, 0.1477, 0.2293, 0.1602, 0.1449, 0.0612, 0.0457], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0248, 0.0276, 0.0263, 0.0309, 0.0266, 0.0241, 0.0269], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:27:49,003 INFO [train.py:892] (2/4) Epoch 41, batch 600, loss[loss=0.1566, simple_loss=0.2454, pruned_loss=0.03384, over 19860.00 frames. ], tot_loss[loss=0.1512, simple_loss=0.2322, pruned_loss=0.03511, over 3758574.06 frames. ], batch size: 104, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:28:17,334 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8195, 2.5340, 5.0017, 4.1039, 4.6804, 4.9058, 4.7465, 4.5397], + device='cuda:2'), covar=tensor([0.0568, 0.1241, 0.0095, 0.0983, 0.0157, 0.0190, 0.0169, 0.0162], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0103, 0.0094, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:28:53,127 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.538e+02 3.664e+02 4.346e+02 5.320e+02 9.628e+02, threshold=8.692e+02, percent-clipped=3.0 +2023-03-29 14:29:38,234 INFO [train.py:892] (2/4) Epoch 41, batch 650, loss[loss=0.1405, simple_loss=0.2184, pruned_loss=0.03126, over 19799.00 frames. ], tot_loss[loss=0.1512, simple_loss=0.2319, pruned_loss=0.0353, over 3801805.36 frames. 
], batch size: 174, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:30:02,577 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6962, 4.7866, 2.7695, 4.9415, 5.2184, 2.2393, 4.3873, 3.7561], + device='cuda:2'), covar=tensor([0.0557, 0.0611, 0.2540, 0.0645, 0.0545, 0.2807, 0.0878, 0.0867], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0268, 0.0237, 0.0288, 0.0266, 0.0209, 0.0246, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 14:31:21,321 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74896.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:31:27,768 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4324, 1.7522, 2.1475, 2.6087, 2.9073, 3.0123, 2.9354, 2.9757], + device='cuda:2'), covar=tensor([0.1157, 0.2014, 0.1661, 0.0886, 0.0588, 0.0467, 0.0505, 0.0549], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0171, 0.0183, 0.0158, 0.0142, 0.0138, 0.0131, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 14:31:32,908 INFO [train.py:892] (2/4) Epoch 41, batch 700, loss[loss=0.1277, simple_loss=0.2037, pruned_loss=0.02585, over 19700.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2313, pruned_loss=0.03484, over 3835681.11 frames. ], batch size: 46, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:31:51,778 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74909.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:32:42,131 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.552e+02 3.676e+02 4.188e+02 5.143e+02 8.176e+02, threshold=8.375e+02, percent-clipped=0.0 +2023-03-29 14:33:13,567 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74944.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:33:30,567 INFO [train.py:892] (2/4) Epoch 41, batch 750, loss[loss=0.1431, simple_loss=0.2299, pruned_loss=0.02812, over 19667.00 frames. ], tot_loss[loss=0.151, simple_loss=0.2319, pruned_loss=0.03505, over 3862229.35 frames. ], batch size: 64, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:33:31,467 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74951.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:34:02,578 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74964.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:34:15,119 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74970.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:35:26,027 INFO [train.py:892] (2/4) Epoch 41, batch 800, loss[loss=0.1403, simple_loss=0.2215, pruned_loss=0.02956, over 19837.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.233, pruned_loss=0.0354, over 3880658.12 frames. ], batch size: 239, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:36:21,596 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75025.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:36:36,275 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.639e+02 3.669e+02 4.402e+02 5.474e+02 1.379e+03, threshold=8.804e+02, percent-clipped=2.0 +2023-03-29 14:37:20,369 INFO [train.py:892] (2/4) Epoch 41, batch 850, loss[loss=0.1462, simple_loss=0.2287, pruned_loss=0.03181, over 19734.00 frames. 
], tot_loss[loss=0.1518, simple_loss=0.2328, pruned_loss=0.03543, over 3897438.52 frames. ], batch size: 219, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:37:43,006 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7942, 2.8204, 4.2523, 3.2399, 3.5244, 3.2541, 2.4305, 2.5397], + device='cuda:2'), covar=tensor([0.1089, 0.3232, 0.0597, 0.1200, 0.1791, 0.1649, 0.2776, 0.2824], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0401, 0.0355, 0.0297, 0.0382, 0.0395, 0.0388, 0.0361], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:37:56,118 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 14:39:13,356 INFO [train.py:892] (2/4) Epoch 41, batch 900, loss[loss=0.1349, simple_loss=0.2148, pruned_loss=0.02755, over 19753.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2322, pruned_loss=0.03532, over 3909717.62 frames. ], batch size: 188, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:39:46,483 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8466, 2.7935, 2.8809, 2.3484, 2.9695, 2.6126, 2.9175, 2.9476], + device='cuda:2'), covar=tensor([0.0605, 0.0547, 0.0639, 0.0847, 0.0455, 0.0499, 0.0478, 0.0354], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0093, 0.0090, 0.0115, 0.0086, 0.0089, 0.0085, 0.0080], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:39:50,666 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6176, 4.3953, 4.3586, 4.1205, 4.6314, 3.0305, 3.6598, 2.0264], + device='cuda:2'), covar=tensor([0.0285, 0.0268, 0.0222, 0.0256, 0.0233, 0.1332, 0.1104, 0.2231], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0153, 0.0118, 0.0140, 0.0123, 0.0140, 0.0147, 0.0132], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 14:40:21,666 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.262e+02 3.745e+02 4.337e+02 5.272e+02 8.502e+02, threshold=8.673e+02, percent-clipped=0.0 +2023-03-29 14:40:30,161 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.9772, 4.7138, 4.7669, 5.0374, 4.7253, 5.2888, 5.1853, 5.3342], + device='cuda:2'), covar=tensor([0.0754, 0.0450, 0.0578, 0.0382, 0.0762, 0.0433, 0.0428, 0.0387], + device='cuda:2'), in_proj_covar=tensor([0.0164, 0.0190, 0.0211, 0.0186, 0.0186, 0.0170, 0.0161, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 14:41:05,568 INFO [train.py:892] (2/4) Epoch 41, batch 950, loss[loss=0.1395, simple_loss=0.2229, pruned_loss=0.02806, over 19862.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2322, pruned_loss=0.03503, over 3918835.99 frames. 
], batch size: 99, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:42:07,697 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.8735, 1.7241, 1.8971, 1.8949, 1.8091, 1.8688, 1.7845, 1.9132], + device='cuda:2'), covar=tensor([0.0425, 0.0420, 0.0375, 0.0381, 0.0511, 0.0395, 0.0511, 0.0357], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0090, 0.0092, 0.0087, 0.0099, 0.0093, 0.0108, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 14:42:10,046 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.94 vs. limit=5.0 +2023-03-29 14:42:59,238 INFO [train.py:892] (2/4) Epoch 41, batch 1000, loss[loss=0.1301, simple_loss=0.2134, pruned_loss=0.02343, over 19802.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.2321, pruned_loss=0.03488, over 3925571.39 frames. ], batch size: 107, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:44:01,265 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75228.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:44:07,911 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.478e+02 3.475e+02 3.874e+02 4.705e+02 1.241e+03, threshold=7.748e+02, percent-clipped=2.0 +2023-03-29 14:44:48,160 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.5203, 5.8848, 5.9956, 5.8175, 5.7110, 5.6838, 5.6862, 5.5625], + device='cuda:2'), covar=tensor([0.1449, 0.1325, 0.0836, 0.1350, 0.0694, 0.0935, 0.1955, 0.2017], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0351, 0.0385, 0.0318, 0.0291, 0.0297, 0.0376, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 14:44:53,971 INFO [train.py:892] (2/4) Epoch 41, batch 1050, loss[loss=0.1248, simple_loss=0.205, pruned_loss=0.02229, over 19850.00 frames. ], tot_loss[loss=0.1498, simple_loss=0.2309, pruned_loss=0.0343, over 3932682.10 frames. 
], batch size: 112, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:44:54,782 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75251.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:45:01,356 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7035, 4.9042, 2.8713, 5.1218, 5.3350, 2.4117, 4.6157, 3.9214], + device='cuda:2'), covar=tensor([0.0540, 0.0558, 0.2321, 0.0501, 0.0422, 0.2476, 0.0702, 0.0765], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0268, 0.0237, 0.0288, 0.0266, 0.0208, 0.0246, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 14:45:13,400 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0311, 2.5346, 4.1267, 3.7731, 4.0802, 4.2087, 4.0156, 3.8623], + device='cuda:2'), covar=tensor([0.0631, 0.1023, 0.0126, 0.0475, 0.0168, 0.0229, 0.0181, 0.0203], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:45:20,317 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6498, 2.5660, 4.6988, 3.9804, 4.4758, 4.7331, 4.5787, 4.3989], + device='cuda:2'), covar=tensor([0.0602, 0.1152, 0.0113, 0.0938, 0.0169, 0.0194, 0.0153, 0.0163], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:45:26,087 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75265.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:45:38,223 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75270.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:46:19,871 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75289.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:46:40,117 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75299.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:46:43,187 INFO [train.py:892] (2/4) Epoch 41, batch 1100, loss[loss=0.2197, simple_loss=0.2991, pruned_loss=0.07012, over 19469.00 frames. ], tot_loss[loss=0.1504, simple_loss=0.2313, pruned_loss=0.0347, over 3937399.67 frames. 
], batch size: 396, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:46:53,636 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6736, 2.9856, 2.6623, 2.1999, 2.7193, 2.8840, 2.8887, 2.9388], + device='cuda:2'), covar=tensor([0.0401, 0.0338, 0.0357, 0.0532, 0.0415, 0.0390, 0.0304, 0.0297], + device='cuda:2'), in_proj_covar=tensor([0.0114, 0.0106, 0.0108, 0.0108, 0.0111, 0.0096, 0.0097, 0.0096], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 14:47:27,566 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:47:51,229 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.168e+02 3.537e+02 4.080e+02 4.857e+02 1.316e+03, threshold=8.160e+02, percent-clipped=3.0 +2023-03-29 14:47:53,934 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75331.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 14:48:39,015 INFO [train.py:892] (2/4) Epoch 41, batch 1150, loss[loss=0.1632, simple_loss=0.2543, pruned_loss=0.03609, over 19863.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2327, pruned_loss=0.03563, over 3941053.29 frames. ], batch size: 48, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:50:04,590 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-29 14:50:16,160 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4324, 3.5546, 2.2214, 3.6034, 3.7550, 1.7900, 3.0983, 2.8997], + device='cuda:2'), covar=tensor([0.0854, 0.0769, 0.2602, 0.0828, 0.0601, 0.2610, 0.1173, 0.1028], + device='cuda:2'), in_proj_covar=tensor([0.0243, 0.0267, 0.0236, 0.0288, 0.0265, 0.0208, 0.0245, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 14:50:32,184 INFO [train.py:892] (2/4) Epoch 41, batch 1200, loss[loss=0.1653, simple_loss=0.2604, pruned_loss=0.03511, over 19901.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2334, pruned_loss=0.03585, over 3943249.87 frames. ], batch size: 50, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:50:41,293 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2549, 2.5093, 3.5562, 2.9100, 3.0005, 2.8706, 2.1072, 2.2838], + device='cuda:2'), covar=tensor([0.1343, 0.3033, 0.0710, 0.1183, 0.2053, 0.1684, 0.2986, 0.2859], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0401, 0.0355, 0.0296, 0.0381, 0.0395, 0.0387, 0.0360], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:51:41,593 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 3.559e+02 4.233e+02 5.176e+02 1.064e+03, threshold=8.467e+02, percent-clipped=2.0 +2023-03-29 14:52:27,172 INFO [train.py:892] (2/4) Epoch 41, batch 1250, loss[loss=0.1314, simple_loss=0.2025, pruned_loss=0.03014, over 19881.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2329, pruned_loss=0.03593, over 3944895.20 frames. 
], batch size: 134, lr: 3.80e-03, grad_scale: 32.0 +2023-03-29 14:53:17,901 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9451, 2.8042, 2.8931, 2.1986, 3.0337, 2.5320, 2.8828, 2.9795], + device='cuda:2'), covar=tensor([0.0645, 0.0520, 0.0605, 0.1088, 0.0504, 0.0581, 0.0617, 0.0428], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0094, 0.0091, 0.0116, 0.0086, 0.0090, 0.0086, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:53:18,260 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.01 vs. limit=5.0 +2023-03-29 14:53:23,741 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6705, 3.8039, 2.3494, 3.8634, 4.0457, 1.9274, 3.3900, 3.1192], + device='cuda:2'), covar=tensor([0.0772, 0.0875, 0.2794, 0.0941, 0.0590, 0.2738, 0.1122, 0.0974], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0267, 0.0236, 0.0287, 0.0265, 0.0208, 0.0245, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 14:53:58,248 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-29 14:54:20,143 INFO [train.py:892] (2/4) Epoch 41, batch 1300, loss[loss=0.1894, simple_loss=0.2697, pruned_loss=0.05457, over 19619.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2329, pruned_loss=0.03594, over 3944066.12 frames. ], batch size: 351, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 14:54:52,554 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5893, 4.4686, 4.8993, 4.4103, 4.0914, 4.7264, 4.5242, 5.0017], + device='cuda:2'), covar=tensor([0.0770, 0.0332, 0.0339, 0.0369, 0.0902, 0.0461, 0.0466, 0.0298], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0226, 0.0227, 0.0238, 0.0209, 0.0251, 0.0239, 0.0225], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:54:54,815 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4824, 3.6528, 2.3391, 4.2049, 3.9209, 4.1140, 4.2283, 3.2664], + device='cuda:2'), covar=tensor([0.0608, 0.0632, 0.1512, 0.0570, 0.0508, 0.0515, 0.0568, 0.0832], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0150, 0.0146, 0.0159, 0.0139, 0.0144, 0.0155, 0.0153], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 14:55:27,413 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.667e+02 3.652e+02 4.285e+02 4.954e+02 9.983e+02, threshold=8.569e+02, percent-clipped=1.0 +2023-03-29 14:56:10,887 INFO [train.py:892] (2/4) Epoch 41, batch 1350, loss[loss=0.124, simple_loss=0.2021, pruned_loss=0.02294, over 19727.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2333, pruned_loss=0.03565, over 3945645.95 frames. ], batch size: 134, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 14:56:43,318 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-03-29 14:56:46,568 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75565.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:57:28,136 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75584.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:58:05,442 INFO [train.py:892] (2/4) Epoch 41, batch 1400, loss[loss=0.1544, simple_loss=0.2341, pruned_loss=0.0373, over 19567.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.233, pruned_loss=0.03545, over 3947108.42 frames. ], batch size: 47, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 14:58:20,121 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1994, 2.6735, 3.3835, 3.4312, 3.8579, 4.4716, 4.2607, 4.3560], + device='cuda:2'), covar=tensor([0.0889, 0.1649, 0.1206, 0.0646, 0.0396, 0.0222, 0.0324, 0.0368], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0170, 0.0184, 0.0157, 0.0142, 0.0139, 0.0132, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 14:58:33,046 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75613.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:58:49,365 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75620.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:59:04,013 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75626.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 14:59:14,851 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.057e+02 3.782e+02 4.383e+02 5.452e+02 9.029e+02, threshold=8.766e+02, percent-clipped=1.0 +2023-03-29 15:00:00,084 INFO [train.py:892] (2/4) Epoch 41, batch 1450, loss[loss=0.1467, simple_loss=0.2203, pruned_loss=0.03652, over 19849.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.2332, pruned_loss=0.03529, over 3948120.34 frames. ], batch size: 197, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:00:16,441 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8351, 2.2608, 2.6104, 3.0813, 3.5406, 3.7849, 3.6885, 3.6442], + device='cuda:2'), covar=tensor([0.1046, 0.1779, 0.1501, 0.0746, 0.0469, 0.0327, 0.0373, 0.0522], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0170, 0.0184, 0.0157, 0.0142, 0.0139, 0.0132, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 15:00:40,611 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75668.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:01:52,762 INFO [train.py:892] (2/4) Epoch 41, batch 1500, loss[loss=0.1733, simple_loss=0.2475, pruned_loss=0.04952, over 19561.00 frames. ], tot_loss[loss=0.1522, simple_loss=0.2334, pruned_loss=0.03556, over 3946295.94 frames. ], batch size: 41, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:03:00,969 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 3.753e+02 4.206e+02 5.021e+02 8.487e+02, threshold=8.413e+02, percent-clipped=0.0 +2023-03-29 15:03:47,897 INFO [train.py:892] (2/4) Epoch 41, batch 1550, loss[loss=0.1593, simple_loss=0.2459, pruned_loss=0.03631, over 19780.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2339, pruned_loss=0.03559, over 3947259.21 frames. 
], batch size: 321, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:05:36,313 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8598, 3.7443, 3.7083, 3.4657, 3.8693, 2.8385, 3.1615, 1.8118], + device='cuda:2'), covar=tensor([0.0217, 0.0246, 0.0169, 0.0233, 0.0166, 0.1184, 0.0704, 0.1803], + device='cuda:2'), in_proj_covar=tensor([0.0108, 0.0153, 0.0118, 0.0139, 0.0122, 0.0139, 0.0146, 0.0132], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 15:05:37,485 INFO [train.py:892] (2/4) Epoch 41, batch 1600, loss[loss=0.1653, simple_loss=0.2513, pruned_loss=0.03967, over 19746.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2334, pruned_loss=0.03565, over 3948100.51 frames. ], batch size: 276, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:06:45,426 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.043e+02 3.453e+02 4.213e+02 4.846e+02 1.121e+03, threshold=8.426e+02, percent-clipped=1.0 +2023-03-29 15:07:30,009 INFO [train.py:892] (2/4) Epoch 41, batch 1650, loss[loss=0.1618, simple_loss=0.2463, pruned_loss=0.03864, over 19707.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2336, pruned_loss=0.03581, over 3949190.28 frames. ], batch size: 78, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:08:27,116 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 15:08:44,767 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75884.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:09:22,816 INFO [train.py:892] (2/4) Epoch 41, batch 1700, loss[loss=0.1314, simple_loss=0.208, pruned_loss=0.02739, over 19833.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2325, pruned_loss=0.03529, over 3950968.11 frames. ], batch size: 128, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:10:20,641 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75926.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 15:10:32,265 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.390e+02 3.529e+02 4.151e+02 4.845e+02 7.414e+02, threshold=8.303e+02, percent-clipped=0.0 +2023-03-29 15:10:35,226 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75932.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:11:15,198 INFO [train.py:892] (2/4) Epoch 41, batch 1750, loss[loss=0.1562, simple_loss=0.2355, pruned_loss=0.03848, over 19775.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.233, pruned_loss=0.0353, over 3950302.05 frames. ], batch size: 198, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:12:01,872 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75974.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:13:01,525 INFO [train.py:892] (2/4) Epoch 41, batch 1800, loss[loss=0.144, simple_loss=0.2257, pruned_loss=0.03109, over 19658.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2334, pruned_loss=0.03572, over 3950791.81 frames. ], batch size: 58, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:13:57,905 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.757e+02 3.667e+02 4.191e+02 5.134e+02 9.957e+02, threshold=8.381e+02, percent-clipped=3.0 +2023-03-29 15:14:33,344 INFO [train.py:892] (2/4) Epoch 41, batch 1850, loss[loss=0.1498, simple_loss=0.2422, pruned_loss=0.02868, over 19678.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.235, pruned_loss=0.0361, over 3947897.16 frames. 
], batch size: 56, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:15:35,917 INFO [train.py:892] (2/4) Epoch 42, batch 0, loss[loss=0.1504, simple_loss=0.2243, pruned_loss=0.03824, over 19827.00 frames. ], tot_loss[loss=0.1504, simple_loss=0.2243, pruned_loss=0.03824, over 19827.00 frames. ], batch size: 127, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:15:35,917 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 15:16:07,962 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2785, 2.2494, 2.4214, 2.3138, 2.3541, 2.3488, 2.3333, 2.3943], + device='cuda:2'), covar=tensor([0.0414, 0.0401, 0.0350, 0.0385, 0.0495, 0.0417, 0.0499, 0.0425], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0090, 0.0093, 0.0088, 0.0101, 0.0093, 0.0109, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 15:16:12,640 INFO [train.py:926] (2/4) Epoch 42, validation: loss=0.1864, simple_loss=0.2496, pruned_loss=0.06163, over 2883724.00 frames. +2023-03-29 15:16:12,641 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 15:17:12,403 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2520, 3.1265, 3.2616, 2.7099, 3.3150, 2.8517, 3.1509, 3.2757], + device='cuda:2'), covar=tensor([0.0520, 0.0555, 0.0597, 0.0763, 0.0442, 0.0523, 0.0474, 0.0398], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0093, 0.0090, 0.0114, 0.0085, 0.0089, 0.0086, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 15:18:08,446 INFO [train.py:892] (2/4) Epoch 42, batch 50, loss[loss=0.1427, simple_loss=0.2366, pruned_loss=0.02442, over 19583.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.2281, pruned_loss=0.0341, over 892031.94 frames. ], batch size: 49, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:19:07,895 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.581e+02 3.605e+02 4.013e+02 4.746e+02 9.116e+02, threshold=8.026e+02, percent-clipped=1.0 +2023-03-29 15:19:17,094 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-29 15:20:05,988 INFO [train.py:892] (2/4) Epoch 42, batch 100, loss[loss=0.1448, simple_loss=0.2237, pruned_loss=0.03298, over 19823.00 frames. ], tot_loss[loss=0.1497, simple_loss=0.2305, pruned_loss=0.03448, over 1571266.69 frames. ], batch size: 93, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:21:58,763 INFO [train.py:892] (2/4) Epoch 42, batch 150, loss[loss=0.1444, simple_loss=0.2329, pruned_loss=0.02797, over 19703.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.2304, pruned_loss=0.03423, over 2099404.42 frames. 
], batch size: 59, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:22:57,753 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.293e+02 3.477e+02 4.297e+02 5.129e+02 1.202e+03, threshold=8.594e+02, percent-clipped=2.0 +2023-03-29 15:23:17,368 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7300, 3.0170, 2.7163, 2.2488, 2.7951, 2.9765, 2.9180, 3.0333], + device='cuda:2'), covar=tensor([0.0414, 0.0314, 0.0332, 0.0541, 0.0379, 0.0318, 0.0291, 0.0250], + device='cuda:2'), in_proj_covar=tensor([0.0115, 0.0108, 0.0108, 0.0109, 0.0112, 0.0097, 0.0099, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 15:23:55,610 INFO [train.py:892] (2/4) Epoch 42, batch 200, loss[loss=0.1336, simple_loss=0.2103, pruned_loss=0.02849, over 19868.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2319, pruned_loss=0.03433, over 2509303.25 frames. ], batch size: 106, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:24:27,155 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76269.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 15:25:30,163 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76294.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:25:57,511 INFO [train.py:892] (2/4) Epoch 42, batch 250, loss[loss=0.1594, simple_loss=0.2331, pruned_loss=0.0429, over 19771.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2304, pruned_loss=0.03395, over 2828788.15 frames. ], batch size: 198, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:26:38,800 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76323.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:26:56,820 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76330.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 15:26:57,679 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.384e+02 3.381e+02 3.992e+02 4.681e+02 8.709e+02, threshold=7.984e+02, percent-clipped=1.0 +2023-03-29 15:26:58,958 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4512, 4.6027, 2.7558, 4.8164, 5.0535, 2.1500, 4.2389, 3.7568], + device='cuda:2'), covar=tensor([0.0666, 0.0742, 0.2631, 0.0728, 0.0450, 0.2773, 0.1024, 0.0830], + device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0271, 0.0240, 0.0292, 0.0270, 0.0210, 0.0248, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 15:27:53,418 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-29 15:27:54,617 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76355.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:27:55,584 INFO [train.py:892] (2/4) Epoch 42, batch 300, loss[loss=0.161, simple_loss=0.2502, pruned_loss=0.03593, over 19527.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2309, pruned_loss=0.03384, over 3078004.67 frames. 
], batch size: 54, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:28:02,937 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8559, 2.8602, 3.5666, 2.8647, 3.7443, 3.7448, 4.6225, 5.1186], + device='cuda:2'), covar=tensor([0.0467, 0.2006, 0.1507, 0.2632, 0.1773, 0.1550, 0.0536, 0.0402], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0246, 0.0273, 0.0261, 0.0306, 0.0264, 0.0240, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 15:28:18,325 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0961, 3.1744, 3.1797, 3.3670, 3.0070, 3.1259, 2.9914, 3.3260], + device='cuda:2'), covar=tensor([0.0308, 0.0308, 0.0311, 0.0233, 0.0502, 0.0358, 0.0405, 0.0318], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0092, 0.0094, 0.0088, 0.0102, 0.0094, 0.0110, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 15:29:02,667 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76384.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 15:29:47,640 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5389, 3.6229, 2.3081, 3.7013, 3.8400, 1.8640, 3.2235, 3.0258], + device='cuda:2'), covar=tensor([0.0813, 0.0856, 0.2688, 0.0896, 0.0729, 0.2770, 0.1097, 0.0927], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0270, 0.0239, 0.0289, 0.0268, 0.0209, 0.0246, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 15:29:51,765 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7038, 4.9320, 4.9965, 4.8703, 4.6926, 4.9432, 4.5187, 4.4990], + device='cuda:2'), covar=tensor([0.0507, 0.0455, 0.0456, 0.0444, 0.0644, 0.0487, 0.0601, 0.0905], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0309, 0.0319, 0.0278, 0.0288, 0.0268, 0.0282, 0.0331], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 15:29:53,465 INFO [train.py:892] (2/4) Epoch 42, batch 350, loss[loss=0.1487, simple_loss=0.2306, pruned_loss=0.03338, over 19776.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2305, pruned_loss=0.03388, over 3272630.03 frames. ], batch size: 182, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:30:48,910 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1178, 3.0981, 2.0224, 3.6728, 3.4309, 3.6383, 3.6827, 2.9636], + device='cuda:2'), covar=tensor([0.0724, 0.0734, 0.1714, 0.0682, 0.0624, 0.0511, 0.0672, 0.0839], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0151, 0.0147, 0.0161, 0.0140, 0.0145, 0.0156, 0.0153], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 15:30:58,171 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.636e+02 3.511e+02 4.196e+02 4.977e+02 1.197e+03, threshold=8.393e+02, percent-clipped=1.0 +2023-03-29 15:32:05,170 INFO [train.py:892] (2/4) Epoch 42, batch 400, loss[loss=0.1511, simple_loss=0.2411, pruned_loss=0.03054, over 19854.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2307, pruned_loss=0.03364, over 3423450.37 frames. ], batch size: 56, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:32:45,300 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-03-29 15:34:11,925 INFO [train.py:892] (2/4) Epoch 42, batch 450, loss[loss=0.1736, simple_loss=0.2442, pruned_loss=0.05149, over 19761.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.2333, pruned_loss=0.03498, over 3536790.49 frames. ], batch size: 217, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:35:17,323 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.526e+02 3.433e+02 4.142e+02 5.148e+02 8.158e+02, threshold=8.284e+02, percent-clipped=0.0 +2023-03-29 15:36:19,056 INFO [train.py:892] (2/4) Epoch 42, batch 500, loss[loss=0.1501, simple_loss=0.2302, pruned_loss=0.03502, over 19819.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.233, pruned_loss=0.03507, over 3628687.67 frames. ], batch size: 181, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:38:21,664 INFO [train.py:892] (2/4) Epoch 42, batch 550, loss[loss=0.1364, simple_loss=0.2165, pruned_loss=0.02818, over 19900.00 frames. ], tot_loss[loss=0.1508, simple_loss=0.2319, pruned_loss=0.03486, over 3700451.25 frames. ], batch size: 94, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:39:05,729 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76625.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:39:18,272 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.079e+02 3.230e+02 3.815e+02 4.831e+02 8.297e+02, threshold=7.631e+02, percent-clipped=1.0 +2023-03-29 15:40:08,046 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76650.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:40:20,722 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-03-29 15:40:21,751 INFO [train.py:892] (2/4) Epoch 42, batch 600, loss[loss=0.1256, simple_loss=0.2068, pruned_loss=0.02223, over 19878.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2318, pruned_loss=0.0347, over 3755869.65 frames. ], batch size: 84, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:41:04,432 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-29 15:41:18,885 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76679.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 15:41:55,809 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6251, 4.3556, 4.3894, 4.6049, 4.2804, 4.7338, 4.6683, 4.8994], + device='cuda:2'), covar=tensor([0.0609, 0.0405, 0.0501, 0.0367, 0.0703, 0.0455, 0.0439, 0.0297], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0186, 0.0209, 0.0184, 0.0184, 0.0167, 0.0159, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 15:42:25,276 INFO [train.py:892] (2/4) Epoch 42, batch 650, loss[loss=0.2173, simple_loss=0.3065, pruned_loss=0.06403, over 19269.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2312, pruned_loss=0.03466, over 3798800.27 frames. 
], batch size: 483, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:43:30,260 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.354e+02 4.128e+02 5.229e+02 1.136e+03, threshold=8.256e+02, percent-clipped=6.0 +2023-03-29 15:44:09,572 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6647, 2.6862, 1.6223, 3.0469, 2.7690, 2.9404, 3.0645, 2.4043], + device='cuda:2'), covar=tensor([0.0749, 0.0814, 0.1747, 0.0755, 0.0759, 0.0635, 0.0697, 0.0992], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0151, 0.0146, 0.0160, 0.0139, 0.0144, 0.0155, 0.0153], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 15:44:32,832 INFO [train.py:892] (2/4) Epoch 42, batch 700, loss[loss=0.1429, simple_loss=0.2285, pruned_loss=0.02861, over 19639.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2316, pruned_loss=0.03456, over 3833019.50 frames. ], batch size: 72, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:46:12,063 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-29 15:46:33,793 INFO [train.py:892] (2/4) Epoch 42, batch 750, loss[loss=0.1485, simple_loss=0.222, pruned_loss=0.03751, over 19741.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2308, pruned_loss=0.03455, over 3859950.06 frames. ], batch size: 134, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:47:36,697 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.174e+02 3.338e+02 3.927e+02 4.685e+02 7.677e+02, threshold=7.855e+02, percent-clipped=0.0 +2023-03-29 15:47:59,387 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76839.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:48:42,206 INFO [train.py:892] (2/4) Epoch 42, batch 800, loss[loss=0.1595, simple_loss=0.2407, pruned_loss=0.03909, over 19876.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2312, pruned_loss=0.03435, over 3879635.91 frames. ], batch size: 64, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:48:52,996 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4157, 2.5135, 2.7187, 2.3872, 2.8659, 2.8504, 3.2916, 3.5368], + device='cuda:2'), covar=tensor([0.0694, 0.1707, 0.1677, 0.2301, 0.1541, 0.1496, 0.0734, 0.0671], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0247, 0.0275, 0.0263, 0.0308, 0.0266, 0.0241, 0.0269], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 15:50:35,534 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76900.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:50:50,192 INFO [train.py:892] (2/4) Epoch 42, batch 850, loss[loss=0.1363, simple_loss=0.2079, pruned_loss=0.03239, over 19866.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2315, pruned_loss=0.03428, over 3894845.04 frames. 
], batch size: 129, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:51:39,872 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76925.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:51:54,865 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.468e+02 3.585e+02 3.941e+02 4.611e+02 9.107e+02, threshold=7.882e+02, percent-clipped=2.0 +2023-03-29 15:52:43,945 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76950.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:52:58,330 INFO [train.py:892] (2/4) Epoch 42, batch 900, loss[loss=0.1788, simple_loss=0.2578, pruned_loss=0.04984, over 19768.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2314, pruned_loss=0.0343, over 3907367.94 frames. ], batch size: 217, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:53:41,326 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76973.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 15:53:57,207 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76979.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 15:54:37,820 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-03-29 15:54:43,528 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76998.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:55:01,511 INFO [train.py:892] (2/4) Epoch 42, batch 950, loss[loss=0.1586, simple_loss=0.2397, pruned_loss=0.03871, over 19406.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2322, pruned_loss=0.03442, over 3915422.64 frames. ], batch size: 40, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:55:19,862 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 15:55:38,584 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6653, 4.6995, 5.0062, 4.8090, 4.9194, 4.5051, 4.7588, 4.5478], + device='cuda:2'), covar=tensor([0.1447, 0.1639, 0.0860, 0.1238, 0.0846, 0.0961, 0.1762, 0.2004], + device='cuda:2'), in_proj_covar=tensor([0.0310, 0.0356, 0.0388, 0.0320, 0.0295, 0.0299, 0.0377, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 15:55:44,992 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77027.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:55:53,563 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.093e+02 3.626e+02 4.310e+02 4.989e+02 8.639e+02, threshold=8.620e+02, percent-clipped=2.0 +2023-03-29 15:56:11,005 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0156, 3.8731, 3.8594, 3.6460, 4.0074, 2.8939, 3.3102, 1.9448], + device='cuda:2'), covar=tensor([0.0214, 0.0248, 0.0161, 0.0208, 0.0161, 0.1150, 0.0621, 0.1718], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0153, 0.0117, 0.0139, 0.0123, 0.0139, 0.0144, 0.0131], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 15:56:59,631 INFO [train.py:892] (2/4) Epoch 42, batch 1000, loss[loss=0.1483, simple_loss=0.2254, pruned_loss=0.03566, over 19848.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2305, pruned_loss=0.03406, over 3924456.36 frames. 
], batch size: 142, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:58:37,690 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77095.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:58:46,655 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-29 15:59:02,968 INFO [train.py:892] (2/4) Epoch 42, batch 1050, loss[loss=0.1704, simple_loss=0.2531, pruned_loss=0.04383, over 19845.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2318, pruned_loss=0.03439, over 3928054.29 frames. ], batch size: 177, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:59:43,624 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4795, 4.3720, 4.8104, 4.3694, 4.0173, 4.6450, 4.4123, 4.8840], + device='cuda:2'), covar=tensor([0.0764, 0.0363, 0.0339, 0.0378, 0.0999, 0.0501, 0.0477, 0.0330], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0229, 0.0229, 0.0241, 0.0211, 0.0253, 0.0242, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:00:02,947 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.508e+02 3.618e+02 4.248e+02 5.006e+02 7.833e+02, threshold=8.496e+02, percent-clipped=0.0 +2023-03-29 16:01:07,337 INFO [train.py:892] (2/4) Epoch 42, batch 1100, loss[loss=0.1422, simple_loss=0.2182, pruned_loss=0.03311, over 19884.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2317, pruned_loss=0.0343, over 3932804.00 frames. ], batch size: 77, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 16:01:08,454 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77156.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 16:02:45,777 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77195.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:03:12,066 INFO [train.py:892] (2/4) Epoch 42, batch 1150, loss[loss=0.1407, simple_loss=0.2271, pruned_loss=0.0271, over 19790.00 frames. ], tot_loss[loss=0.1512, simple_loss=0.2326, pruned_loss=0.03494, over 3935081.79 frames. 
], batch size: 73, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 16:03:27,280 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77211.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:03:57,448 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7109, 3.3129, 3.4958, 3.2176, 4.0393, 4.0570, 4.4500, 5.0229], + device='cuda:2'), covar=tensor([0.0502, 0.1493, 0.1647, 0.2073, 0.1445, 0.1071, 0.0594, 0.0427], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0245, 0.0273, 0.0261, 0.0306, 0.0264, 0.0239, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:04:16,661 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.545e+02 3.808e+02 4.258e+02 4.963e+02 8.566e+02, threshold=8.515e+02, percent-clipped=1.0 +2023-03-29 16:04:32,165 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7532, 4.5021, 4.5177, 4.7988, 4.4726, 4.9496, 4.8731, 5.0660], + device='cuda:2'), covar=tensor([0.0683, 0.0387, 0.0460, 0.0379, 0.0669, 0.0439, 0.0424, 0.0306], + device='cuda:2'), in_proj_covar=tensor([0.0164, 0.0189, 0.0212, 0.0187, 0.0187, 0.0169, 0.0162, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 16:05:18,595 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9383, 2.2554, 2.8067, 3.0653, 3.5679, 3.7988, 3.7919, 3.7034], + device='cuda:2'), covar=tensor([0.1037, 0.1964, 0.1497, 0.0817, 0.0467, 0.0375, 0.0413, 0.0554], + device='cuda:2'), in_proj_covar=tensor([0.0167, 0.0171, 0.0184, 0.0157, 0.0143, 0.0139, 0.0132, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 16:05:19,675 INFO [train.py:892] (2/4) Epoch 42, batch 1200, loss[loss=0.1398, simple_loss=0.2221, pruned_loss=0.02872, over 19902.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2328, pruned_loss=0.03504, over 3938733.13 frames. 
], batch size: 94, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 16:06:02,003 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:06:04,105 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:06:16,063 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77277.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:06:21,183 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3944, 3.7289, 4.0111, 4.4174, 2.9673, 3.4626, 2.8361, 2.8991], + device='cuda:2'), covar=tensor([0.0496, 0.1802, 0.0790, 0.0386, 0.1966, 0.0933, 0.1292, 0.1496], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0328, 0.0253, 0.0212, 0.0251, 0.0215, 0.0224, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 16:07:26,690 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2971, 1.7835, 1.9804, 2.5634, 2.8013, 2.9269, 2.7773, 2.8420], + device='cuda:2'), covar=tensor([0.1224, 0.1840, 0.1738, 0.0812, 0.0604, 0.0472, 0.0559, 0.0555], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0170, 0.0183, 0.0156, 0.0142, 0.0138, 0.0131, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 16:07:30,235 INFO [train.py:892] (2/4) Epoch 42, batch 1250, loss[loss=0.1608, simple_loss=0.2372, pruned_loss=0.04222, over 19837.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.232, pruned_loss=0.03465, over 3941470.95 frames. ], batch size: 239, lr: 3.70e-03, grad_scale: 32.0 +2023-03-29 16:08:30,859 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.435e+02 3.557e+02 4.199e+02 4.989e+02 7.948e+02, threshold=8.398e+02, percent-clipped=0.0 +2023-03-29 16:08:39,697 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77334.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:08:52,460 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77338.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:09:34,996 INFO [train.py:892] (2/4) Epoch 42, batch 1300, loss[loss=0.1422, simple_loss=0.2247, pruned_loss=0.02987, over 19837.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2321, pruned_loss=0.03457, over 3942659.09 frames. 
], batch size: 239, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:10:28,679 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8898, 3.0422, 2.9980, 3.1980, 2.8519, 2.9756, 2.8632, 2.9618], + device='cuda:2'), covar=tensor([0.0328, 0.0312, 0.0432, 0.0231, 0.0429, 0.0356, 0.0422, 0.0434], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0090, 0.0093, 0.0087, 0.0100, 0.0093, 0.0109, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 16:11:15,548 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5763, 2.7262, 4.0062, 3.1118, 3.2772, 3.0606, 2.3225, 2.4823], + device='cuda:2'), covar=tensor([0.1297, 0.3657, 0.0621, 0.1265, 0.2032, 0.1811, 0.3036, 0.3035], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0403, 0.0359, 0.0298, 0.0382, 0.0397, 0.0390, 0.0364], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:11:31,837 INFO [train.py:892] (2/4) Epoch 42, batch 1350, loss[loss=0.1377, simple_loss=0.2136, pruned_loss=0.03091, over 19877.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2324, pruned_loss=0.03452, over 3944662.66 frames. ], batch size: 139, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:12:30,504 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.502e+02 3.333e+02 3.969e+02 4.891e+02 7.952e+02, threshold=7.937e+02, percent-clipped=0.0 +2023-03-29 16:13:20,631 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77451.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 16:13:34,050 INFO [train.py:892] (2/4) Epoch 42, batch 1400, loss[loss=0.1236, simple_loss=0.2017, pruned_loss=0.02273, over 19817.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2323, pruned_loss=0.03458, over 3946896.72 frames. ], batch size: 103, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:14:41,919 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.58 vs. limit=5.0 +2023-03-29 16:15:10,474 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77495.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:15:36,585 INFO [train.py:892] (2/4) Epoch 42, batch 1450, loss[loss=0.1359, simple_loss=0.2176, pruned_loss=0.02708, over 19901.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2334, pruned_loss=0.03539, over 3946910.18 frames. ], batch size: 116, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:16:40,347 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.038e+02 3.421e+02 4.179e+02 5.316e+02 8.524e+02, threshold=8.358e+02, percent-clipped=3.0 +2023-03-29 16:17:10,157 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77543.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:17:44,774 INFO [train.py:892] (2/4) Epoch 42, batch 1500, loss[loss=0.1657, simple_loss=0.2484, pruned_loss=0.04152, over 19558.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.2322, pruned_loss=0.03483, over 3948393.91 frames. 
], batch size: 47, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:18:16,080 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77567.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:18:33,370 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0122, 1.8962, 2.0149, 2.0677, 2.0092, 2.0370, 1.9634, 2.0629], + device='cuda:2'), covar=tensor([0.0442, 0.0415, 0.0399, 0.0387, 0.0501, 0.0407, 0.0515, 0.0408], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0091, 0.0093, 0.0088, 0.0100, 0.0094, 0.0110, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 16:19:13,580 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4751, 3.3302, 3.5295, 2.7627, 3.6671, 3.0806, 3.3409, 3.6150], + device='cuda:2'), covar=tensor([0.0775, 0.0499, 0.0771, 0.0822, 0.0372, 0.0456, 0.0445, 0.0421], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0094, 0.0090, 0.0114, 0.0085, 0.0089, 0.0087, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:19:49,141 INFO [train.py:892] (2/4) Epoch 42, batch 1550, loss[loss=0.1651, simple_loss=0.2491, pruned_loss=0.04056, over 19944.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.2327, pruned_loss=0.03455, over 3947233.73 frames. ], batch size: 52, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:20:15,239 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77616.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:20:50,235 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77629.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:20:56,262 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.329e+02 3.286e+02 3.992e+02 4.919e+02 1.011e+03, threshold=7.983e+02, percent-clipped=1.0 +2023-03-29 16:20:59,711 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77633.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:21:54,201 INFO [train.py:892] (2/4) Epoch 42, batch 1600, loss[loss=0.1524, simple_loss=0.2221, pruned_loss=0.04136, over 19875.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.232, pruned_loss=0.03416, over 3949337.66 frames. ], batch size: 125, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:22:46,437 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77677.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:22:46,484 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77677.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:23:39,979 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4816, 2.7230, 3.9332, 3.0904, 3.2270, 3.1207, 2.3141, 2.5231], + device='cuda:2'), covar=tensor([0.1264, 0.3094, 0.0641, 0.1244, 0.1934, 0.1646, 0.2818, 0.2818], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0401, 0.0357, 0.0298, 0.0380, 0.0396, 0.0388, 0.0363], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:23:59,701 INFO [train.py:892] (2/4) Epoch 42, batch 1650, loss[loss=0.1425, simple_loss=0.2306, pruned_loss=0.02714, over 19781.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.233, pruned_loss=0.03513, over 3948192.48 frames. 
], batch size: 87, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:25:04,061 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.346e+02 3.655e+02 4.276e+02 5.251e+02 1.145e+03, threshold=8.553e+02, percent-clipped=2.0 +2023-03-29 16:25:19,116 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77738.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:25:51,859 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77751.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 16:26:01,319 INFO [train.py:892] (2/4) Epoch 42, batch 1700, loss[loss=0.1716, simple_loss=0.2475, pruned_loss=0.04785, over 19635.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2327, pruned_loss=0.03493, over 3949298.61 frames. ], batch size: 69, lr: 3.69e-03, grad_scale: 16.0 +2023-03-29 16:26:21,888 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.48 vs. limit=2.0 +2023-03-29 16:27:11,475 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0695, 2.5772, 4.1803, 3.6920, 4.1573, 4.2068, 4.0333, 3.9135], + device='cuda:2'), covar=tensor([0.0637, 0.1013, 0.0121, 0.0688, 0.0160, 0.0229, 0.0194, 0.0205], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0107, 0.0092, 0.0154, 0.0091, 0.0103, 0.0094, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:27:35,830 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77799.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:27:51,257 INFO [train.py:892] (2/4) Epoch 42, batch 1750, loss[loss=0.1369, simple_loss=0.2265, pruned_loss=0.02366, over 19801.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2325, pruned_loss=0.03439, over 3950121.63 frames. ], batch size: 65, lr: 3.69e-03, grad_scale: 16.0 +2023-03-29 16:28:04,387 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.9620, 6.2213, 6.2541, 6.0639, 5.9767, 6.1965, 5.5488, 5.5390], + device='cuda:2'), covar=tensor([0.0391, 0.0436, 0.0460, 0.0407, 0.0487, 0.0508, 0.0617, 0.1052], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0312, 0.0322, 0.0281, 0.0291, 0.0272, 0.0286, 0.0336], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:28:49,552 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.255e+02 3.364e+02 4.068e+02 4.963e+02 7.822e+02, threshold=8.136e+02, percent-clipped=0.0 +2023-03-29 16:29:41,568 INFO [train.py:892] (2/4) Epoch 42, batch 1800, loss[loss=0.1533, simple_loss=0.2291, pruned_loss=0.03875, over 19764.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2319, pruned_loss=0.03437, over 3950968.84 frames. 
], batch size: 155, lr: 3.69e-03, grad_scale: 16.0 +2023-03-29 16:29:42,459 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1539, 3.0310, 2.0712, 3.5825, 3.3208, 3.5104, 3.6426, 2.9889], + device='cuda:2'), covar=tensor([0.0696, 0.0811, 0.1688, 0.0708, 0.0673, 0.0627, 0.0634, 0.0842], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0153, 0.0148, 0.0163, 0.0143, 0.0147, 0.0157, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 16:30:04,895 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77867.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:31:23,148 INFO [train.py:892] (2/4) Epoch 42, batch 1850, loss[loss=0.1602, simple_loss=0.2511, pruned_loss=0.03471, over 19667.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2335, pruned_loss=0.03431, over 3950570.24 frames. ], batch size: 55, lr: 3.69e-03, grad_scale: 16.0 +2023-03-29 16:32:27,852 INFO [train.py:892] (2/4) Epoch 43, batch 0, loss[loss=0.1326, simple_loss=0.2133, pruned_loss=0.026, over 19847.00 frames. ], tot_loss[loss=0.1326, simple_loss=0.2133, pruned_loss=0.026, over 19847.00 frames. ], batch size: 109, lr: 3.65e-03, grad_scale: 16.0 +2023-03-29 16:32:27,852 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 16:33:04,435 INFO [train.py:926] (2/4) Epoch 43, validation: loss=0.1873, simple_loss=0.2496, pruned_loss=0.06254, over 2883724.00 frames. +2023-03-29 16:33:04,438 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22334MB +2023-03-29 16:33:16,458 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77915.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:33:39,077 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5505, 2.6094, 4.6842, 3.9909, 4.4684, 4.6674, 4.4757, 4.3515], + device='cuda:2'), covar=tensor([0.0585, 0.1103, 0.0113, 0.0821, 0.0180, 0.0200, 0.0185, 0.0166], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0155, 0.0092, 0.0104, 0.0095, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:33:52,309 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77929.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:33:58,721 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.066e+02 3.128e+02 3.651e+02 4.371e+02 8.409e+02, threshold=7.303e+02, percent-clipped=1.0 +2023-03-29 16:34:01,737 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77933.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:35:13,124 INFO [train.py:892] (2/4) Epoch 43, batch 50, loss[loss=0.1379, simple_loss=0.2178, pruned_loss=0.02899, over 19811.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2291, pruned_loss=0.03267, over 888909.89 frames. 
], batch size: 123, lr: 3.65e-03, grad_scale: 16.0 +2023-03-29 16:35:44,856 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77972.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:35:55,684 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77977.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:36:05,055 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77981.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:37:24,590 INFO [train.py:892] (2/4) Epoch 43, batch 100, loss[loss=0.1536, simple_loss=0.2306, pruned_loss=0.03828, over 19870.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2294, pruned_loss=0.03269, over 1567748.56 frames. ], batch size: 122, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:37:32,319 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 16:38:13,996 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.210e+02 3.457e+02 4.175e+02 5.002e+02 8.622e+02, threshold=8.350e+02, percent-clipped=3.0 +2023-03-29 16:38:19,893 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78033.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:39:26,780 INFO [train.py:892] (2/4) Epoch 43, batch 150, loss[loss=0.1552, simple_loss=0.2452, pruned_loss=0.03262, over 19584.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2281, pruned_loss=0.03266, over 2097031.00 frames. ], batch size: 53, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:41:35,025 INFO [train.py:892] (2/4) Epoch 43, batch 200, loss[loss=0.1499, simple_loss=0.2404, pruned_loss=0.02976, over 19828.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2306, pruned_loss=0.03326, over 2508034.82 frames. ], batch size: 101, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:42:26,473 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.115e+02 3.506e+02 4.148e+02 4.888e+02 9.722e+02, threshold=8.295e+02, percent-clipped=1.0 +2023-03-29 16:42:32,262 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9352, 2.6795, 4.1968, 3.7590, 4.0917, 4.2219, 3.9649, 3.9270], + device='cuda:2'), covar=tensor([0.0650, 0.0942, 0.0117, 0.0586, 0.0181, 0.0236, 0.0210, 0.0193], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0156, 0.0092, 0.0105, 0.0095, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:42:49,424 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.98 vs. limit=5.0 +2023-03-29 16:42:51,728 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7907, 3.6246, 3.8448, 3.0479, 4.1179, 3.3910, 3.6753, 4.0472], + device='cuda:2'), covar=tensor([0.0670, 0.0414, 0.0782, 0.0775, 0.0342, 0.0430, 0.0487, 0.0317], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0094, 0.0090, 0.0114, 0.0085, 0.0089, 0.0087, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:43:34,477 INFO [train.py:892] (2/4) Epoch 43, batch 250, loss[loss=0.1417, simple_loss=0.2205, pruned_loss=0.0315, over 19875.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2313, pruned_loss=0.03356, over 2827625.79 frames. 
], batch size: 159, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:44:12,191 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6246, 3.4037, 3.6438, 2.7855, 3.8003, 3.1543, 3.5458, 3.8131], + device='cuda:2'), covar=tensor([0.0640, 0.0418, 0.0710, 0.0814, 0.0407, 0.0487, 0.0414, 0.0280], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0093, 0.0090, 0.0114, 0.0085, 0.0089, 0.0086, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:44:43,950 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0 +2023-03-29 16:45:33,928 INFO [train.py:892] (2/4) Epoch 43, batch 300, loss[loss=0.136, simple_loss=0.2245, pruned_loss=0.02377, over 19717.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2309, pruned_loss=0.03404, over 3077693.99 frames. ], batch size: 109, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:46:28,817 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.098e+02 3.467e+02 4.146e+02 4.960e+02 1.292e+03, threshold=8.293e+02, percent-clipped=2.0 +2023-03-29 16:46:31,179 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78232.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:47:06,621 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7678, 4.1492, 4.3813, 4.9076, 3.0133, 3.5036, 2.9797, 2.8256], + device='cuda:2'), covar=tensor([0.0432, 0.1527, 0.0709, 0.0339, 0.2136, 0.1205, 0.1273, 0.1638], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0326, 0.0253, 0.0212, 0.0250, 0.0215, 0.0223, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 16:47:40,850 INFO [train.py:892] (2/4) Epoch 43, batch 350, loss[loss=0.1321, simple_loss=0.2131, pruned_loss=0.02551, over 19756.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2306, pruned_loss=0.03377, over 3271157.42 frames. ], batch size: 88, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:48:09,170 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:48:14,179 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5059, 1.8936, 2.2037, 2.7388, 3.0399, 3.1374, 3.0229, 3.0950], + device='cuda:2'), covar=tensor([0.1134, 0.1931, 0.1643, 0.0795, 0.0630, 0.0441, 0.0518, 0.0562], + device='cuda:2'), in_proj_covar=tensor([0.0167, 0.0171, 0.0184, 0.0157, 0.0143, 0.0140, 0.0132, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 16:48:59,370 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78293.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 16:49:45,835 INFO [train.py:892] (2/4) Epoch 43, batch 400, loss[loss=0.1533, simple_loss=0.2387, pruned_loss=0.03391, over 19771.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2306, pruned_loss=0.03392, over 3421894.84 frames. 
], batch size: 241, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:50:09,270 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:50:37,819 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.238e+02 3.252e+02 3.876e+02 4.451e+02 9.041e+02, threshold=7.752e+02, percent-clipped=1.0 +2023-03-29 16:50:42,514 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78333.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:51:49,634 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 16:51:50,609 INFO [train.py:892] (2/4) Epoch 43, batch 450, loss[loss=0.1561, simple_loss=0.2455, pruned_loss=0.03332, over 19710.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.2302, pruned_loss=0.03351, over 3538959.39 frames. ], batch size: 325, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:52:43,034 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78381.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:53:55,323 INFO [train.py:892] (2/4) Epoch 43, batch 500, loss[loss=0.1546, simple_loss=0.2335, pruned_loss=0.03791, over 19821.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2304, pruned_loss=0.03353, over 3630548.25 frames. ], batch size: 50, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 16:54:44,913 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.422e+02 3.338e+02 4.015e+02 4.887e+02 8.174e+02, threshold=8.030e+02, percent-clipped=1.0 +2023-03-29 16:55:02,299 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 16:55:57,336 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78460.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:55:58,342 INFO [train.py:892] (2/4) Epoch 43, batch 550, loss[loss=0.1461, simple_loss=0.2355, pruned_loss=0.02832, over 19876.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2307, pruned_loss=0.0335, over 3702260.53 frames. ], batch size: 55, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 16:57:09,727 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4632, 2.6241, 4.6770, 4.0002, 4.4141, 4.6092, 4.4414, 4.3161], + device='cuda:2'), covar=tensor([0.0600, 0.1054, 0.0101, 0.0825, 0.0170, 0.0223, 0.0188, 0.0165], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0107, 0.0092, 0.0153, 0.0090, 0.0103, 0.0094, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:57:58,326 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-29 16:58:01,141 INFO [train.py:892] (2/4) Epoch 43, batch 600, loss[loss=0.1368, simple_loss=0.2079, pruned_loss=0.03286, over 19809.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2309, pruned_loss=0.03357, over 3757583.47 frames. 
], batch size: 132, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 16:58:04,755 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5482, 4.6472, 2.7527, 4.8672, 5.0624, 2.2070, 4.3207, 3.7265], + device='cuda:2'), covar=tensor([0.0607, 0.0677, 0.2570, 0.0613, 0.0433, 0.2710, 0.0956, 0.0852], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0269, 0.0240, 0.0290, 0.0270, 0.0209, 0.0246, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 16:58:09,297 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8755, 3.5335, 3.9918, 3.1546, 4.1352, 3.3740, 3.5731, 3.9397], + device='cuda:2'), covar=tensor([0.0712, 0.0450, 0.0524, 0.0712, 0.0370, 0.0434, 0.0562, 0.0491], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0094, 0.0090, 0.0115, 0.0085, 0.0089, 0.0086, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:58:12,125 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6678, 2.6997, 2.8740, 2.7840, 2.7313, 2.7734, 2.6955, 2.7928], + device='cuda:2'), covar=tensor([0.0371, 0.0359, 0.0313, 0.0330, 0.0455, 0.0364, 0.0459, 0.0471], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0091, 0.0094, 0.0088, 0.0100, 0.0094, 0.0109, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 16:58:31,028 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78521.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:58:40,607 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7136, 4.4522, 4.4824, 4.2913, 4.7228, 3.0938, 3.9171, 2.1717], + device='cuda:2'), covar=tensor([0.0167, 0.0214, 0.0152, 0.0189, 0.0134, 0.1087, 0.0745, 0.1599], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0152, 0.0117, 0.0139, 0.0123, 0.0138, 0.0145, 0.0131], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 16:58:57,258 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.184e+02 3.257e+02 4.249e+02 5.407e+02 1.207e+03, threshold=8.497e+02, percent-clipped=3.0 +2023-03-29 16:59:52,254 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78556.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:00:06,556 INFO [train.py:892] (2/4) Epoch 43, batch 650, loss[loss=0.1381, simple_loss=0.2246, pruned_loss=0.02584, over 19788.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.2306, pruned_loss=0.03326, over 3799204.36 frames. ], batch size: 73, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:01:08,190 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78588.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 17:02:02,167 INFO [train.py:892] (2/4) Epoch 43, batch 700, loss[loss=0.1648, simple_loss=0.2524, pruned_loss=0.03856, over 19704.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2308, pruned_loss=0.03341, over 3833735.56 frames. 
], batch size: 305, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:02:16,640 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78617.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:02:27,654 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9010, 3.2561, 2.8273, 2.3967, 2.8705, 3.1414, 3.1482, 3.1992], + device='cuda:2'), covar=tensor([0.0358, 0.0344, 0.0353, 0.0550, 0.0388, 0.0334, 0.0269, 0.0256], + device='cuda:2'), in_proj_covar=tensor([0.0114, 0.0107, 0.0109, 0.0108, 0.0111, 0.0097, 0.0099, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 17:02:54,400 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.290e+02 3.555e+02 4.216e+02 4.848e+02 9.327e+02, threshold=8.432e+02, percent-clipped=1.0 +2023-03-29 17:03:25,474 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4717, 2.6869, 3.8989, 3.0674, 3.1971, 3.0610, 2.2386, 2.5147], + device='cuda:2'), covar=tensor([0.1283, 0.3210, 0.0639, 0.1204, 0.2002, 0.1724, 0.2901, 0.2811], + device='cuda:2'), in_proj_covar=tensor([0.0358, 0.0403, 0.0357, 0.0297, 0.0381, 0.0398, 0.0390, 0.0364], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 17:03:59,443 INFO [train.py:892] (2/4) Epoch 43, batch 750, loss[loss=0.1372, simple_loss=0.2204, pruned_loss=0.02701, over 19691.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2301, pruned_loss=0.03334, over 3860192.76 frames. ], batch size: 59, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:04:46,039 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9406, 3.1508, 3.1401, 3.2679, 2.9579, 3.2875, 3.0724, 3.2624], + device='cuda:2'), covar=tensor([0.0436, 0.0369, 0.0380, 0.0312, 0.0497, 0.0290, 0.0384, 0.0382], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0092, 0.0095, 0.0089, 0.0101, 0.0094, 0.0110, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 17:05:55,916 INFO [train.py:892] (2/4) Epoch 43, batch 800, loss[loss=0.141, simple_loss=0.2298, pruned_loss=0.02613, over 19804.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2301, pruned_loss=0.03335, over 3879283.77 frames. ], batch size: 82, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:06:42,315 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.297e+02 3.365e+02 3.945e+02 4.793e+02 1.520e+03, threshold=7.889e+02, percent-clipped=1.0 +2023-03-29 17:07:52,415 INFO [train.py:892] (2/4) Epoch 43, batch 850, loss[loss=0.12, simple_loss=0.1994, pruned_loss=0.02024, over 19791.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2302, pruned_loss=0.03328, over 3895015.69 frames. ], batch size: 45, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:09:51,484 INFO [train.py:892] (2/4) Epoch 43, batch 900, loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03137, over 19711.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2319, pruned_loss=0.03414, over 3906194.75 frames. 
], batch size: 109, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:10:06,748 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78816.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:10:39,579 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.520e+02 3.462e+02 4.060e+02 5.051e+02 1.168e+03, threshold=8.120e+02, percent-clipped=4.0 +2023-03-29 17:11:49,223 INFO [train.py:892] (2/4) Epoch 43, batch 950, loss[loss=0.1348, simple_loss=0.2204, pruned_loss=0.02458, over 19837.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2313, pruned_loss=0.03364, over 3915783.49 frames. ], batch size: 115, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:12:56,221 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78888.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:13:45,670 INFO [train.py:892] (2/4) Epoch 43, batch 1000, loss[loss=0.1465, simple_loss=0.2305, pruned_loss=0.03124, over 19811.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2325, pruned_loss=0.03392, over 3921593.90 frames. ], batch size: 67, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:13:50,592 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78912.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:14:35,059 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.508e+02 3.436e+02 3.973e+02 4.908e+02 9.421e+02, threshold=7.947e+02, percent-clipped=2.0 +2023-03-29 17:14:44,284 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:15:39,039 INFO [train.py:892] (2/4) Epoch 43, batch 1050, loss[loss=0.1445, simple_loss=0.2314, pruned_loss=0.02875, over 19650.00 frames. ], tot_loss[loss=0.151, simple_loss=0.2334, pruned_loss=0.03432, over 3927529.13 frames. ], batch size: 47, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:15:48,962 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78965.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:17:33,089 INFO [train.py:892] (2/4) Epoch 43, batch 1100, loss[loss=0.182, simple_loss=0.2731, pruned_loss=0.04547, over 19731.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2336, pruned_loss=0.03457, over 3932123.54 frames. 
], batch size: 54, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:17:42,422 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8563, 2.8807, 1.8413, 3.2431, 3.0055, 3.1956, 3.2557, 2.7070], + device='cuda:2'), covar=tensor([0.0766, 0.0836, 0.1771, 0.0727, 0.0679, 0.0560, 0.0691, 0.0910], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0152, 0.0149, 0.0163, 0.0142, 0.0147, 0.0158, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 17:17:57,590 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0585, 3.3769, 3.4649, 3.9907, 2.8043, 3.3749, 2.5050, 2.5656], + device='cuda:2'), covar=tensor([0.0511, 0.1572, 0.0947, 0.0440, 0.1892, 0.0854, 0.1404, 0.1588], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0326, 0.0254, 0.0213, 0.0249, 0.0216, 0.0224, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 17:18:09,852 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79026.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:18:23,398 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.685e+02 3.705e+02 4.386e+02 5.336e+02 1.015e+03, threshold=8.773e+02, percent-clipped=4.0 +2023-03-29 17:19:32,153 INFO [train.py:892] (2/4) Epoch 43, batch 1150, loss[loss=0.1844, simple_loss=0.2697, pruned_loss=0.04951, over 19702.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2334, pruned_loss=0.03461, over 3936542.91 frames. ], batch size: 305, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:21:27,639 INFO [train.py:892] (2/4) Epoch 43, batch 1200, loss[loss=0.1585, simple_loss=0.2386, pruned_loss=0.03917, over 19836.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2328, pruned_loss=0.03495, over 3939049.91 frames. ], batch size: 166, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:21:40,468 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:21:40,715 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5114, 2.5850, 2.8201, 2.4910, 2.9592, 2.9174, 3.3407, 3.6538], + device='cuda:2'), covar=tensor([0.0705, 0.1710, 0.1684, 0.2291, 0.1614, 0.1494, 0.0781, 0.0700], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0247, 0.0275, 0.0263, 0.0307, 0.0267, 0.0241, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 17:22:16,934 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 3.557e+02 4.069e+02 4.739e+02 8.583e+02, threshold=8.137e+02, percent-clipped=0.0 +2023-03-29 17:22:27,226 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.07 vs. limit=2.0 +2023-03-29 17:23:25,137 INFO [train.py:892] (2/4) Epoch 43, batch 1250, loss[loss=0.1523, simple_loss=0.2361, pruned_loss=0.03427, over 19712.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.233, pruned_loss=0.0353, over 3941797.71 frames. 
], batch size: 269, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:23:32,271 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79164.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:24:11,710 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6770, 3.4530, 3.8478, 2.9230, 3.9991, 3.2256, 3.5839, 3.7972], + device='cuda:2'), covar=tensor([0.0695, 0.0467, 0.0441, 0.0781, 0.0375, 0.0522, 0.0490, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0095, 0.0092, 0.0115, 0.0086, 0.0090, 0.0087, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 17:25:19,902 INFO [train.py:892] (2/4) Epoch 43, batch 1300, loss[loss=0.1552, simple_loss=0.2396, pruned_loss=0.03535, over 19935.00 frames. ], tot_loss[loss=0.1512, simple_loss=0.2322, pruned_loss=0.0351, over 3944595.49 frames. ], batch size: 51, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:25:22,988 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79212.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:25:41,086 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8861, 3.9740, 4.2497, 3.9991, 4.1988, 3.8976, 3.9989, 3.7590], + device='cuda:2'), covar=tensor([0.1523, 0.1892, 0.1003, 0.1392, 0.1213, 0.1055, 0.1804, 0.2154], + device='cuda:2'), in_proj_covar=tensor([0.0309, 0.0358, 0.0385, 0.0321, 0.0296, 0.0301, 0.0379, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 17:26:08,982 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.005e+02 3.409e+02 4.094e+02 5.234e+02 1.153e+03, threshold=8.189e+02, percent-clipped=5.0 +2023-03-29 17:27:14,212 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79260.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:27:15,347 INFO [train.py:892] (2/4) Epoch 43, batch 1350, loss[loss=0.1426, simple_loss=0.2268, pruned_loss=0.02916, over 19707.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.2322, pruned_loss=0.03477, over 3943831.19 frames. ], batch size: 101, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:28:09,837 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79285.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:29:03,760 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0049, 2.1942, 1.9705, 1.3567, 2.0164, 2.1779, 2.0465, 2.1135], + device='cuda:2'), covar=tensor([0.0381, 0.0296, 0.0318, 0.0570, 0.0369, 0.0306, 0.0311, 0.0282], + device='cuda:2'), in_proj_covar=tensor([0.0115, 0.0108, 0.0108, 0.0109, 0.0111, 0.0097, 0.0099, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 17:29:06,880 INFO [train.py:892] (2/4) Epoch 43, batch 1400, loss[loss=0.1415, simple_loss=0.2204, pruned_loss=0.03125, over 19830.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2317, pruned_loss=0.03479, over 3945176.74 frames. 
], batch size: 146, lr: 3.61e-03, grad_scale: 16.0 +2023-03-29 17:29:30,296 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79321.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:29:56,355 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 3.305e+02 4.038e+02 5.143e+02 6.996e+02, threshold=8.075e+02, percent-clipped=0.0 +2023-03-29 17:29:59,637 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79333.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:30:19,734 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-29 17:30:29,531 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79346.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:30:49,224 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2121, 3.5141, 2.9357, 2.5891, 3.0163, 3.4067, 3.3122, 3.4125], + device='cuda:2'), covar=tensor([0.0318, 0.0267, 0.0327, 0.0575, 0.0367, 0.0272, 0.0291, 0.0253], + device='cuda:2'), in_proj_covar=tensor([0.0115, 0.0109, 0.0109, 0.0109, 0.0112, 0.0098, 0.0100, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 17:31:04,340 INFO [train.py:892] (2/4) Epoch 43, batch 1450, loss[loss=0.1712, simple_loss=0.2608, pruned_loss=0.04078, over 19688.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2329, pruned_loss=0.03499, over 3943517.86 frames. ], batch size: 305, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:31:12,946 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79364.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:31:37,382 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79375.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 17:31:55,414 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-29 17:32:03,255 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2783, 1.9559, 3.2278, 2.5858, 3.3432, 3.3705, 3.0902, 3.1926], + device='cuda:2'), covar=tensor([0.0995, 0.1332, 0.0148, 0.0420, 0.0187, 0.0271, 0.0251, 0.0227], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0107, 0.0093, 0.0155, 0.0091, 0.0105, 0.0095, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 17:32:19,992 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79394.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:32:59,393 INFO [train.py:892] (2/4) Epoch 43, batch 1500, loss[loss=0.1457, simple_loss=0.2385, pruned_loss=0.02645, over 19777.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2327, pruned_loss=0.03477, over 3945288.62 frames. 
], batch size: 52, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:33:29,287 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79425.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:33:41,749 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.542e+02 3.517e+02 4.248e+02 5.342e+02 8.488e+02, threshold=8.496e+02, percent-clipped=1.0 +2023-03-29 17:33:52,845 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79436.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:34:48,444 INFO [train.py:892] (2/4) Epoch 43, batch 1550, loss[loss=0.1879, simple_loss=0.273, pruned_loss=0.05142, over 19599.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2318, pruned_loss=0.03435, over 3946824.53 frames. ], batch size: 376, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:35:33,534 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5510, 1.9994, 2.3010, 2.7936, 3.1501, 3.2461, 3.1051, 3.1411], + device='cuda:2'), covar=tensor([0.1111, 0.1877, 0.1598, 0.0813, 0.0586, 0.0403, 0.0537, 0.0608], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0172, 0.0184, 0.0158, 0.0143, 0.0139, 0.0133, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 17:35:49,844 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4774, 4.4726, 4.8359, 4.5921, 4.7342, 4.1936, 4.5533, 4.3815], + device='cuda:2'), covar=tensor([0.1415, 0.1984, 0.0895, 0.1350, 0.0857, 0.1118, 0.1848, 0.2034], + device='cuda:2'), in_proj_covar=tensor([0.0309, 0.0360, 0.0386, 0.0322, 0.0296, 0.0302, 0.0380, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 17:36:45,431 INFO [train.py:892] (2/4) Epoch 43, batch 1600, loss[loss=0.1467, simple_loss=0.2238, pruned_loss=0.03474, over 19797.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2313, pruned_loss=0.03423, over 3949229.90 frames. ], batch size: 168, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:37:34,254 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.484e+02 3.446e+02 3.965e+02 4.827e+02 6.779e+02, threshold=7.930e+02, percent-clipped=0.0 +2023-03-29 17:38:42,076 INFO [train.py:892] (2/4) Epoch 43, batch 1650, loss[loss=0.173, simple_loss=0.2546, pruned_loss=0.04572, over 19642.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2308, pruned_loss=0.03409, over 3948548.32 frames. 
], batch size: 68, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:38:57,011 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8666, 3.2006, 3.3656, 3.7998, 2.7001, 3.1826, 2.3935, 2.4595], + device='cuda:2'), covar=tensor([0.0560, 0.1577, 0.0990, 0.0485, 0.1952, 0.0871, 0.1465, 0.1679], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0330, 0.0257, 0.0215, 0.0252, 0.0218, 0.0225, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 17:39:40,508 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79585.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:40:14,460 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6179, 2.8964, 2.5054, 2.1099, 2.6436, 2.8451, 2.8883, 2.8616], + device='cuda:2'), covar=tensor([0.0431, 0.0313, 0.0382, 0.0597, 0.0401, 0.0328, 0.0271, 0.0285], + device='cuda:2'), in_proj_covar=tensor([0.0114, 0.0108, 0.0108, 0.0108, 0.0111, 0.0097, 0.0099, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 17:40:37,519 INFO [train.py:892] (2/4) Epoch 43, batch 1700, loss[loss=0.1823, simple_loss=0.2637, pruned_loss=0.05045, over 19705.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2314, pruned_loss=0.03423, over 3950793.85 frames. ], batch size: 78, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:41:03,021 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79621.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:41:26,789 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.071e+02 3.435e+02 4.157e+02 5.071e+02 8.490e+02, threshold=8.314e+02, percent-clipped=3.0 +2023-03-29 17:41:49,590 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79641.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 17:42:00,372 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79646.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:42:21,499 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79657.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:42:28,893 INFO [train.py:892] (2/4) Epoch 43, batch 1750, loss[loss=0.1648, simple_loss=0.2429, pruned_loss=0.04337, over 19759.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2313, pruned_loss=0.03449, over 3950591.24 frames. ], batch size: 198, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:42:46,741 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79669.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:43:21,526 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5620, 2.7939, 4.0473, 3.1138, 3.3041, 3.1211, 2.3502, 2.5155], + device='cuda:2'), covar=tensor([0.1300, 0.3004, 0.0616, 0.1211, 0.1928, 0.1700, 0.2849, 0.2872], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0404, 0.0358, 0.0298, 0.0381, 0.0400, 0.0390, 0.0365], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 17:43:29,978 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79689.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:44:13,224 INFO [train.py:892] (2/4) Epoch 43, batch 1800, loss[loss=0.1533, simple_loss=0.2288, pruned_loss=0.03888, over 19747.00 frames. 
], tot_loss[loss=0.1505, simple_loss=0.2317, pruned_loss=0.03471, over 3949516.34 frames. ], batch size: 44, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:44:20,774 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9491, 2.9966, 3.0551, 3.1192, 2.9709, 2.9983, 2.9209, 3.1292], + device='cuda:2'), covar=tensor([0.0325, 0.0431, 0.0370, 0.0325, 0.0423, 0.0377, 0.0409, 0.0449], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0091, 0.0094, 0.0088, 0.0100, 0.0094, 0.0110, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 17:44:26,148 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79718.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:44:31,255 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79720.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:44:52,919 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79731.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:44:53,949 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.513e+02 3.637e+02 4.285e+02 5.199e+02 9.167e+02, threshold=8.570e+02, percent-clipped=2.0 +2023-03-29 17:45:00,398 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3494, 4.3266, 4.7009, 4.4319, 4.6028, 4.2080, 4.4568, 4.2094], + device='cuda:2'), covar=tensor([0.1487, 0.1803, 0.0895, 0.1438, 0.0953, 0.1109, 0.1818, 0.2211], + device='cuda:2'), in_proj_covar=tensor([0.0306, 0.0355, 0.0382, 0.0318, 0.0293, 0.0297, 0.0375, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 17:45:18,872 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7508, 3.4865, 3.8388, 3.0059, 3.9690, 3.3114, 3.6741, 3.7142], + device='cuda:2'), covar=tensor([0.0555, 0.0486, 0.0513, 0.0764, 0.0399, 0.0419, 0.0416, 0.0383], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0095, 0.0091, 0.0115, 0.0085, 0.0090, 0.0087, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 17:45:48,630 INFO [train.py:892] (2/4) Epoch 43, batch 1850, loss[loss=0.1639, simple_loss=0.257, pruned_loss=0.03536, over 19844.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2332, pruned_loss=0.03483, over 3949763.50 frames. ], batch size: 58, lr: 3.60e-03, grad_scale: 32.0 +2023-03-29 17:45:50,935 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79762.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:46:59,229 INFO [train.py:892] (2/4) Epoch 44, batch 0, loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02853, over 19806.00 frames. ], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02853, over 19806.00 frames. 
], batch size: 174, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:46:59,230 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 17:47:18,677 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0705, 4.0971, 2.4173, 4.2577, 4.4327, 2.0323, 3.7228, 3.3638], + device='cuda:2'), covar=tensor([0.0677, 0.0782, 0.3010, 0.0764, 0.0657, 0.2819, 0.1004, 0.0931], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0271, 0.0241, 0.0293, 0.0272, 0.0210, 0.0247, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 17:47:26,720 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.8442, 2.7770, 5.0052, 4.1864, 4.7195, 4.9246, 4.6382, 4.6181], + device='cuda:2'), covar=tensor([0.0553, 0.1117, 0.0096, 0.0753, 0.0146, 0.0185, 0.0165, 0.0160], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0156, 0.0091, 0.0104, 0.0095, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 17:47:38,450 INFO [train.py:926] (2/4) Epoch 44, validation: loss=0.1877, simple_loss=0.2498, pruned_loss=0.06277, over 2883724.00 frames. +2023-03-29 17:47:38,452 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22417MB +2023-03-29 17:49:41,870 INFO [train.py:892] (2/4) Epoch 44, batch 50, loss[loss=0.1497, simple_loss=0.2305, pruned_loss=0.03439, over 19683.00 frames. ], tot_loss[loss=0.1464, simple_loss=0.228, pruned_loss=0.03245, over 889407.91 frames. ], batch size: 75, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:49:58,343 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79823.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:50:15,364 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.212e+02 3.427e+02 4.163e+02 5.028e+02 9.332e+02, threshold=8.326e+02, percent-clipped=1.0 +2023-03-29 17:51:32,091 INFO [train.py:892] (2/4) Epoch 44, batch 100, loss[loss=0.1205, simple_loss=0.1964, pruned_loss=0.02226, over 19844.00 frames. ], tot_loss[loss=0.1455, simple_loss=0.227, pruned_loss=0.03197, over 1568816.49 frames. ], batch size: 109, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:51:58,286 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-29 17:52:09,792 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. 
limit=2.0 +2023-03-29 17:52:12,079 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2037, 5.3626, 5.6410, 5.3468, 5.3961, 5.2122, 5.3784, 5.1197], + device='cuda:2'), covar=tensor([0.1403, 0.1242, 0.0749, 0.1241, 0.0649, 0.0782, 0.1633, 0.1970], + device='cuda:2'), in_proj_covar=tensor([0.0306, 0.0354, 0.0382, 0.0317, 0.0293, 0.0297, 0.0375, 0.0406], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 17:52:14,529 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6639, 2.6379, 2.7865, 2.7801, 2.7294, 2.7618, 2.7099, 2.9190], + device='cuda:2'), covar=tensor([0.0380, 0.0368, 0.0342, 0.0325, 0.0448, 0.0321, 0.0436, 0.0342], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0091, 0.0094, 0.0088, 0.0100, 0.0094, 0.0109, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 17:53:02,113 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2065, 4.1088, 4.1382, 4.3073, 4.1898, 4.5036, 4.1905, 4.3760], + device='cuda:2'), covar=tensor([0.0938, 0.0585, 0.0658, 0.0549, 0.0752, 0.0597, 0.0686, 0.0647], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0187, 0.0208, 0.0186, 0.0184, 0.0167, 0.0161, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 17:53:33,231 INFO [train.py:892] (2/4) Epoch 44, batch 150, loss[loss=0.1514, simple_loss=0.2417, pruned_loss=0.03054, over 19586.00 frames. ], tot_loss[loss=0.146, simple_loss=0.228, pruned_loss=0.03205, over 2097627.22 frames. ], batch size: 49, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:54:11,544 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.056e+02 3.280e+02 3.825e+02 4.551e+02 8.712e+02, threshold=7.650e+02, percent-clipped=2.0 +2023-03-29 17:54:33,999 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79941.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:54:34,061 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79941.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 17:54:37,935 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6000, 2.0429, 2.3583, 2.8076, 3.1927, 3.3072, 3.1823, 3.2700], + device='cuda:2'), covar=tensor([0.1139, 0.1843, 0.1509, 0.0811, 0.0560, 0.0396, 0.0508, 0.0522], + device='cuda:2'), in_proj_covar=tensor([0.0165, 0.0171, 0.0183, 0.0156, 0.0142, 0.0138, 0.0133, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 17:55:19,477 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9463, 4.1922, 2.5140, 4.3982, 4.5243, 2.1013, 3.5694, 3.2422], + device='cuda:2'), covar=tensor([0.0769, 0.0734, 0.2713, 0.0716, 0.0540, 0.2841, 0.1250, 0.0990], + device='cuda:2'), in_proj_covar=tensor([0.0241, 0.0267, 0.0238, 0.0289, 0.0268, 0.0207, 0.0244, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 17:55:37,699 INFO [train.py:892] (2/4) Epoch 44, batch 200, loss[loss=0.1366, simple_loss=0.2278, pruned_loss=0.02273, over 19757.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.23, pruned_loss=0.03256, over 2507310.23 frames. 
], batch size: 88, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:56:34,709 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79989.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 17:56:34,749 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79989.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:57:32,893 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80013.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:57:40,398 INFO [train.py:892] (2/4) Epoch 44, batch 250, loss[loss=0.1455, simple_loss=0.2264, pruned_loss=0.03233, over 19711.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2304, pruned_loss=0.03316, over 2825880.38 frames. ], batch size: 85, lr: 3.56e-03, grad_scale: 16.0 +2023-03-29 17:57:53,559 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80020.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:58:17,258 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80031.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 17:58:20,284 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.104e+02 3.486e+02 4.096e+02 4.877e+02 1.090e+03, threshold=8.193e+02, percent-clipped=2.0 +2023-03-29 17:58:30,442 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80037.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:59:38,673 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2286, 3.2380, 2.2165, 3.7554, 3.4818, 3.7746, 3.8474, 2.9964], + device='cuda:2'), covar=tensor([0.0661, 0.0744, 0.1544, 0.0756, 0.0669, 0.0489, 0.0624, 0.0868], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0151, 0.0147, 0.0161, 0.0141, 0.0147, 0.0156, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 17:59:39,671 INFO [train.py:892] (2/4) Epoch 44, batch 300, loss[loss=0.1265, simple_loss=0.2097, pruned_loss=0.02164, over 19810.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2308, pruned_loss=0.03335, over 3076657.43 frames. ], batch size: 96, lr: 3.56e-03, grad_scale: 16.0 +2023-03-29 17:59:44,489 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80068.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:00:09,800 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80079.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 18:01:38,233 INFO [train.py:892] (2/4) Epoch 44, batch 350, loss[loss=0.141, simple_loss=0.213, pruned_loss=0.03453, over 19848.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2308, pruned_loss=0.0335, over 3269477.26 frames. ], batch size: 137, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:01:44,024 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80118.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:02:20,934 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.499e+02 3.590e+02 4.276e+02 5.127e+02 9.462e+02, threshold=8.552e+02, percent-clipped=1.0 +2023-03-29 18:03:35,940 INFO [train.py:892] (2/4) Epoch 44, batch 400, loss[loss=0.1742, simple_loss=0.2577, pruned_loss=0.04532, over 19642.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2302, pruned_loss=0.0333, over 3421732.67 frames. 
], batch size: 299, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:04:17,511 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9313, 2.8424, 5.0499, 4.2782, 4.8908, 4.9799, 4.9192, 4.6780], + device='cuda:2'), covar=tensor([0.0532, 0.1038, 0.0092, 0.0786, 0.0111, 0.0163, 0.0141, 0.0136], + device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0109, 0.0093, 0.0157, 0.0092, 0.0105, 0.0096, 0.0093], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 18:04:43,303 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-29 18:04:47,369 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0400, 3.3876, 3.5396, 4.0163, 2.7473, 3.3198, 2.3945, 2.6299], + device='cuda:2'), covar=tensor([0.0534, 0.1648, 0.0910, 0.0413, 0.1976, 0.0906, 0.1550, 0.1595], + device='cuda:2'), in_proj_covar=tensor([0.0252, 0.0327, 0.0254, 0.0213, 0.0251, 0.0217, 0.0225, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 18:05:38,840 INFO [train.py:892] (2/4) Epoch 44, batch 450, loss[loss=0.1324, simple_loss=0.2129, pruned_loss=0.02598, over 19867.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2302, pruned_loss=0.03309, over 3539453.11 frames. ], batch size: 104, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:06:18,568 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.753e+02 3.555e+02 3.976e+02 4.443e+02 7.591e+02, threshold=7.952e+02, percent-clipped=1.0 +2023-03-29 18:06:39,002 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80241.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:07:34,126 INFO [train.py:892] (2/4) Epoch 44, batch 500, loss[loss=0.1611, simple_loss=0.225, pruned_loss=0.04859, over 19865.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2301, pruned_loss=0.03342, over 3631242.19 frames. ], batch size: 136, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:07:56,050 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1463, 3.0231, 4.9846, 3.4213, 3.7392, 3.4677, 2.7176, 2.8737], + device='cuda:2'), covar=tensor([0.1111, 0.3683, 0.0422, 0.1329, 0.2176, 0.1912, 0.2902, 0.2706], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0404, 0.0359, 0.0299, 0.0381, 0.0401, 0.0390, 0.0367], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 18:08:14,279 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80283.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:08:28,368 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80289.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:08:51,572 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-29 18:09:27,926 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80313.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:09:33,189 INFO [train.py:892] (2/4) Epoch 44, batch 550, loss[loss=0.1422, simple_loss=0.2132, pruned_loss=0.03562, over 19866.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.229, pruned_loss=0.03329, over 3703117.86 frames. ], batch size: 136, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:09:38,953 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. 
limit=2.0 +2023-03-29 18:10:13,724 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 3.379e+02 4.084e+02 4.890e+02 8.912e+02, threshold=8.167e+02, percent-clipped=1.0 +2023-03-29 18:10:41,188 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80344.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:11:16,153 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.54 vs. limit=5.0 +2023-03-29 18:11:21,906 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80361.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:11:31,167 INFO [train.py:892] (2/4) Epoch 44, batch 600, loss[loss=0.1591, simple_loss=0.2431, pruned_loss=0.03753, over 19844.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.23, pruned_loss=0.0336, over 3756472.09 frames. ], batch size: 59, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:12:34,792 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80393.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:13:27,005 INFO [train.py:892] (2/4) Epoch 44, batch 650, loss[loss=0.1642, simple_loss=0.2422, pruned_loss=0.04313, over 19800.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.2294, pruned_loss=0.03339, over 3798506.01 frames. ], batch size: 200, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:13:32,541 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80418.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:14:07,159 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.620e+02 3.666e+02 4.287e+02 5.079e+02 9.209e+02, threshold=8.573e+02, percent-clipped=4.0 +2023-03-29 18:14:24,447 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2132, 2.7937, 3.2882, 3.2635, 3.8356, 4.4181, 4.1044, 4.3298], + device='cuda:2'), covar=tensor([0.0894, 0.1569, 0.1242, 0.0734, 0.0420, 0.0224, 0.0403, 0.0386], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0170, 0.0182, 0.0157, 0.0143, 0.0138, 0.0133, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 18:14:55,316 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80454.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:15:14,863 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80463.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:15:20,973 INFO [train.py:892] (2/4) Epoch 44, batch 700, loss[loss=0.1611, simple_loss=0.2418, pruned_loss=0.04025, over 19691.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2302, pruned_loss=0.03377, over 3832758.03 frames. 
], batch size: 59, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:15:23,434 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80466.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:16:26,695 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80494.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:16:54,083 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5167, 2.5881, 2.6193, 2.1828, 2.6852, 2.2844, 2.6574, 2.5613], + device='cuda:2'), covar=tensor([0.0543, 0.0594, 0.0561, 0.0905, 0.0447, 0.0587, 0.0490, 0.0475], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0095, 0.0091, 0.0114, 0.0086, 0.0090, 0.0086, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 18:17:18,253 INFO [train.py:892] (2/4) Epoch 44, batch 750, loss[loss=0.1393, simple_loss=0.2169, pruned_loss=0.03086, over 19786.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2309, pruned_loss=0.03412, over 3857921.43 frames. ], batch size: 163, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:17:38,346 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80524.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:17:57,764 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.299e+02 3.592e+02 4.231e+02 5.310e+02 9.600e+02, threshold=8.463e+02, percent-clipped=2.0 +2023-03-29 18:18:50,440 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80555.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:19:14,285 INFO [train.py:892] (2/4) Epoch 44, batch 800, loss[loss=0.1626, simple_loss=0.2384, pruned_loss=0.04338, over 19764.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2308, pruned_loss=0.03409, over 3879498.76 frames. ], batch size: 244, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:20:46,584 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.10 vs. limit=5.0 +2023-03-29 18:21:15,047 INFO [train.py:892] (2/4) Epoch 44, batch 850, loss[loss=0.1254, simple_loss=0.1955, pruned_loss=0.02766, over 19750.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2313, pruned_loss=0.03392, over 3892604.95 frames. ], batch size: 129, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:21:54,761 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.578e+02 3.349e+02 3.795e+02 4.586e+02 1.230e+03, threshold=7.591e+02, percent-clipped=1.0 +2023-03-29 18:22:09,700 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80639.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:23:11,190 INFO [train.py:892] (2/4) Epoch 44, batch 900, loss[loss=0.1423, simple_loss=0.219, pruned_loss=0.03282, over 19761.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.23, pruned_loss=0.03358, over 3905837.70 frames. 
], batch size: 217, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:24:56,101 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.8181, 6.1405, 6.2059, 6.0314, 5.8318, 6.1347, 5.5008, 5.5339], + device='cuda:2'), covar=tensor([0.0476, 0.0475, 0.0423, 0.0442, 0.0481, 0.0485, 0.0668, 0.1018], + device='cuda:2'), in_proj_covar=tensor([0.0298, 0.0316, 0.0325, 0.0282, 0.0295, 0.0277, 0.0287, 0.0338], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 18:25:06,274 INFO [train.py:892] (2/4) Epoch 44, batch 950, loss[loss=0.2236, simple_loss=0.3014, pruned_loss=0.07287, over 19451.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2309, pruned_loss=0.03369, over 3914353.30 frames. ], batch size: 396, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:25:43,538 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5091, 3.6093, 2.2663, 3.6968, 3.8191, 1.8185, 3.1985, 2.9940], + device='cuda:2'), covar=tensor([0.0837, 0.0933, 0.2735, 0.0845, 0.0688, 0.2732, 0.1142, 0.0982], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0270, 0.0241, 0.0292, 0.0273, 0.0210, 0.0247, 0.0213], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 18:25:49,383 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.433e+02 3.452e+02 4.047e+02 4.823e+02 9.247e+02, threshold=8.094e+02, percent-clipped=4.0 +2023-03-29 18:25:56,844 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80736.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:26:26,197 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80749.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:27:06,657 INFO [train.py:892] (2/4) Epoch 44, batch 1000, loss[loss=0.1446, simple_loss=0.2133, pruned_loss=0.03797, over 19764.00 frames. ], tot_loss[loss=0.1498, simple_loss=0.2319, pruned_loss=0.03389, over 3922114.00 frames. ], batch size: 122, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:27:24,462 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80773.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:28:19,063 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80797.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:29:01,249 INFO [train.py:892] (2/4) Epoch 44, batch 1050, loss[loss=0.1664, simple_loss=0.2511, pruned_loss=0.04082, over 19845.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2326, pruned_loss=0.03401, over 3926131.00 frames. 
], batch size: 78, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:29:09,839 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80819.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:29:41,372 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.092e+02 3.498e+02 3.993e+02 4.688e+02 1.348e+03, threshold=7.986e+02, percent-clipped=2.0 +2023-03-29 18:29:44,670 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80834.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:30:22,006 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80850.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:30:46,965 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.3899, 5.8392, 5.9450, 5.8272, 5.6341, 5.7061, 5.5912, 5.5710], + device='cuda:2'), covar=tensor([0.1511, 0.1221, 0.0759, 0.1106, 0.0667, 0.0754, 0.1844, 0.1806], + device='cuda:2'), in_proj_covar=tensor([0.0308, 0.0358, 0.0385, 0.0320, 0.0293, 0.0301, 0.0379, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 18:30:57,642 INFO [train.py:892] (2/4) Epoch 44, batch 1100, loss[loss=0.1227, simple_loss=0.2049, pruned_loss=0.0203, over 19801.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2322, pruned_loss=0.03422, over 3932208.97 frames. ], batch size: 105, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:32:57,576 INFO [train.py:892] (2/4) Epoch 44, batch 1150, loss[loss=0.1434, simple_loss=0.2206, pruned_loss=0.03307, over 19880.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2318, pruned_loss=0.03406, over 3937472.70 frames. ], batch size: 95, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:33:35,848 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.488e+02 3.446e+02 4.222e+02 4.835e+02 9.572e+02, threshold=8.444e+02, percent-clipped=1.0 +2023-03-29 18:33:49,665 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80939.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:34:56,394 INFO [train.py:892] (2/4) Epoch 44, batch 1200, loss[loss=0.1412, simple_loss=0.2159, pruned_loss=0.0333, over 19872.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2317, pruned_loss=0.03435, over 3940129.74 frames. ], batch size: 136, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:35:47,305 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80987.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:36:53,884 INFO [train.py:892] (2/4) Epoch 44, batch 1250, loss[loss=0.123, simple_loss=0.2038, pruned_loss=0.0211, over 19754.00 frames. ], tot_loss[loss=0.1497, simple_loss=0.2311, pruned_loss=0.03418, over 3942273.16 frames. ], batch size: 110, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:37:34,760 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.062e+02 3.416e+02 3.866e+02 4.633e+02 7.617e+02, threshold=7.733e+02, percent-clipped=0.0 +2023-03-29 18:38:13,545 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81049.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:38:51,614 INFO [train.py:892] (2/4) Epoch 44, batch 1300, loss[loss=0.137, simple_loss=0.2188, pruned_loss=0.02756, over 19781.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2313, pruned_loss=0.03396, over 3944089.14 frames. 
], batch size: 163, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:39:18,839 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-29 18:39:52,012 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81092.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:40:04,811 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81097.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:40:48,671 INFO [train.py:892] (2/4) Epoch 44, batch 1350, loss[loss=0.1458, simple_loss=0.2219, pruned_loss=0.03487, over 19537.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2308, pruned_loss=0.03372, over 3944939.54 frames. ], batch size: 46, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:40:49,909 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:40:55,675 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81119.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:41:13,495 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-03-29 18:41:18,998 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81129.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:41:27,827 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.454e+02 3.446e+02 4.149e+02 4.801e+02 1.146e+03, threshold=8.297e+02, percent-clipped=2.0 +2023-03-29 18:42:04,482 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81150.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:42:39,485 INFO [train.py:892] (2/4) Epoch 44, batch 1400, loss[loss=0.1511, simple_loss=0.2351, pruned_loss=0.03358, over 19764.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.2299, pruned_loss=0.03312, over 3945432.72 frames. ], batch size: 244, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:42:42,775 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81167.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:43:08,345 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81177.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:43:58,138 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81198.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:44:41,831 INFO [train.py:892] (2/4) Epoch 44, batch 1450, loss[loss=0.1536, simple_loss=0.2272, pruned_loss=0.03998, over 19800.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2303, pruned_loss=0.03304, over 3946449.81 frames. ], batch size: 126, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:45:20,213 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.287e+02 3.615e+02 3.975e+02 5.064e+02 7.888e+02, threshold=7.950e+02, percent-clipped=0.0 +2023-03-29 18:45:56,507 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8593, 2.4213, 3.9120, 3.4128, 3.8436, 3.9320, 3.6893, 3.6982], + device='cuda:2'), covar=tensor([0.0716, 0.1096, 0.0131, 0.0568, 0.0188, 0.0251, 0.0223, 0.0214], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0108, 0.0092, 0.0155, 0.0091, 0.0104, 0.0095, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 18:46:39,804 INFO [train.py:892] (2/4) Epoch 44, batch 1500, loss[loss=0.1305, simple_loss=0.2142, pruned_loss=0.02346, over 19866.00 frames. 
], tot_loss[loss=0.1477, simple_loss=0.2297, pruned_loss=0.03288, over 3946828.57 frames. ], batch size: 104, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:48:01,843 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81299.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:48:06,029 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81301.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:48:19,522 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1025, 2.6553, 4.2226, 3.7870, 4.1134, 4.2320, 4.0737, 3.9580], + device='cuda:2'), covar=tensor([0.0652, 0.0983, 0.0120, 0.0598, 0.0178, 0.0262, 0.0181, 0.0210], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0108, 0.0092, 0.0155, 0.0091, 0.0104, 0.0095, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 18:48:39,251 INFO [train.py:892] (2/4) Epoch 44, batch 1550, loss[loss=0.1438, simple_loss=0.2328, pruned_loss=0.02739, over 19705.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2299, pruned_loss=0.03285, over 3947834.39 frames. ], batch size: 101, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:49:19,857 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.242e+02 3.436e+02 3.963e+02 4.737e+02 1.006e+03, threshold=7.927e+02, percent-clipped=2.0 +2023-03-29 18:50:16,987 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.6210, 3.0172, 2.6410, 2.1993, 2.7233, 2.9805, 2.9253, 2.9986], + device='cuda:2'), covar=tensor([0.0432, 0.0383, 0.0364, 0.0640, 0.0405, 0.0300, 0.0339, 0.0268], + device='cuda:2'), in_proj_covar=tensor([0.0115, 0.0108, 0.0108, 0.0108, 0.0111, 0.0098, 0.0099, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 18:50:23,910 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81360.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:50:28,422 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81362.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:50:36,257 INFO [train.py:892] (2/4) Epoch 44, batch 1600, loss[loss=0.1456, simple_loss=0.2272, pruned_loss=0.03201, over 19922.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2296, pruned_loss=0.03255, over 3948213.26 frames. ], batch size: 51, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:51:24,643 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-29 18:51:40,596 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81392.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:52:07,659 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6336, 3.8034, 2.4114, 4.2825, 3.9335, 4.2697, 4.3337, 3.3989], + device='cuda:2'), covar=tensor([0.0629, 0.0590, 0.1651, 0.0677, 0.0623, 0.0402, 0.0592, 0.0825], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0151, 0.0148, 0.0162, 0.0142, 0.0148, 0.0157, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 18:52:37,037 INFO [train.py:892] (2/4) Epoch 44, batch 1650, loss[loss=0.1337, simple_loss=0.2131, pruned_loss=0.02717, over 19728.00 frames. ], tot_loss[loss=0.1477, simple_loss=0.2299, pruned_loss=0.03274, over 3948120.19 frames. 
], batch size: 80, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:53:06,767 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81429.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:53:16,899 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.426e+02 3.380e+02 3.975e+02 4.611e+02 1.350e+03, threshold=7.949e+02, percent-clipped=2.0 +2023-03-29 18:53:33,573 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81440.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:54:32,129 INFO [train.py:892] (2/4) Epoch 44, batch 1700, loss[loss=0.1363, simple_loss=0.2202, pruned_loss=0.02623, over 19637.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2299, pruned_loss=0.03284, over 3949519.05 frames. ], batch size: 68, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:54:35,033 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2310, 2.7339, 4.4684, 3.8216, 4.2949, 4.4365, 4.2310, 4.1609], + device='cuda:2'), covar=tensor([0.0675, 0.1003, 0.0116, 0.0681, 0.0173, 0.0201, 0.0182, 0.0171], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0108, 0.0092, 0.0155, 0.0091, 0.0105, 0.0095, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 18:54:49,106 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81472.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:54:59,860 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81477.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:56:28,371 INFO [train.py:892] (2/4) Epoch 44, batch 1750, loss[loss=0.1398, simple_loss=0.2185, pruned_loss=0.0305, over 19753.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2306, pruned_loss=0.03326, over 3949367.35 frames. ], batch size: 188, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:57:03,566 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 3.354e+02 3.892e+02 4.607e+02 1.019e+03, threshold=7.783e+02, percent-clipped=2.0 +2023-03-29 18:58:09,168 INFO [train.py:892] (2/4) Epoch 44, batch 1800, loss[loss=0.1491, simple_loss=0.2311, pruned_loss=0.03355, over 19827.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2308, pruned_loss=0.03342, over 3949672.10 frames. ], batch size: 202, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:59:05,574 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1913, 3.1421, 3.2191, 2.6376, 3.3117, 2.8341, 3.1891, 3.2608], + device='cuda:2'), covar=tensor([0.0641, 0.0474, 0.0663, 0.0812, 0.0447, 0.0552, 0.0458, 0.0431], + device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0096, 0.0092, 0.0116, 0.0087, 0.0091, 0.0088, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 18:59:40,701 INFO [train.py:892] (2/4) Epoch 44, batch 1850, loss[loss=0.1585, simple_loss=0.2519, pruned_loss=0.03251, over 19578.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.232, pruned_loss=0.03341, over 3948368.66 frames. ], batch size: 53, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 19:00:42,766 INFO [train.py:892] (2/4) Epoch 45, batch 0, loss[loss=0.1426, simple_loss=0.2233, pruned_loss=0.03092, over 19470.00 frames. ], tot_loss[loss=0.1426, simple_loss=0.2233, pruned_loss=0.03092, over 19470.00 frames. 
], batch size: 43, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:00:42,767 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 19:01:20,025 INFO [train.py:926] (2/4) Epoch 45, validation: loss=0.1889, simple_loss=0.2504, pruned_loss=0.0637, over 2883724.00 frames. +2023-03-29 19:01:20,027 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22417MB +2023-03-29 19:01:47,735 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.448e+02 3.358e+02 4.011e+02 4.800e+02 7.460e+02, threshold=8.023e+02, percent-clipped=0.0 +2023-03-29 19:01:55,984 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 19:02:13,550 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 19:02:42,339 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81655.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:02:48,836 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81657.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:03:18,019 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81670.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:03:18,992 INFO [train.py:892] (2/4) Epoch 45, batch 50, loss[loss=0.1365, simple_loss=0.2257, pruned_loss=0.02371, over 19894.00 frames. ], tot_loss[loss=0.1459, simple_loss=0.2279, pruned_loss=0.03191, over 891766.80 frames. ], batch size: 87, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:04:26,783 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1289, 5.1849, 5.5029, 5.2362, 5.2945, 5.1017, 5.2573, 4.9906], + device='cuda:2'), covar=tensor([0.1507, 0.1506, 0.0796, 0.1255, 0.0748, 0.0804, 0.1691, 0.1940], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0358, 0.0387, 0.0320, 0.0294, 0.0301, 0.0379, 0.0408], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 19:05:21,198 INFO [train.py:892] (2/4) Epoch 45, batch 100, loss[loss=0.1314, simple_loss=0.2175, pruned_loss=0.02271, over 19896.00 frames. ], tot_loss[loss=0.1448, simple_loss=0.2262, pruned_loss=0.03168, over 1570954.12 frames. 
], batch size: 91, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:05:44,706 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81731.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 19:05:49,395 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.145e+02 3.372e+02 3.916e+02 5.007e+02 1.092e+03, threshold=7.832e+02, percent-clipped=2.0 +2023-03-29 19:06:03,101 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9432, 2.4566, 2.9453, 3.1943, 3.6419, 4.0685, 3.8878, 3.8847], + device='cuda:2'), covar=tensor([0.1003, 0.1738, 0.1324, 0.0741, 0.0440, 0.0244, 0.0384, 0.0510], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0169, 0.0183, 0.0157, 0.0144, 0.0138, 0.0133, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 19:06:13,931 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2786, 3.1976, 5.0274, 3.5749, 3.9462, 3.6463, 2.6554, 2.8200], + device='cuda:2'), covar=tensor([0.0917, 0.3001, 0.0337, 0.1141, 0.1748, 0.1535, 0.2773, 0.2665], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0406, 0.0360, 0.0300, 0.0382, 0.0404, 0.0392, 0.0369], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 19:07:18,290 INFO [train.py:892] (2/4) Epoch 45, batch 150, loss[loss=0.1341, simple_loss=0.2225, pruned_loss=0.02287, over 19731.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2286, pruned_loss=0.03242, over 2096024.47 frames. ], batch size: 52, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:07:21,519 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81772.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:09:11,564 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81820.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:09:12,786 INFO [train.py:892] (2/4) Epoch 45, batch 200, loss[loss=0.1447, simple_loss=0.2308, pruned_loss=0.02931, over 19695.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2285, pruned_loss=0.03262, over 2507847.90 frames. ], batch size: 74, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:09:39,852 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.431e+02 3.452e+02 4.222e+02 4.821e+02 9.570e+02, threshold=8.444e+02, percent-clipped=2.0 +2023-03-29 19:10:54,850 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-29 19:11:08,726 INFO [train.py:892] (2/4) Epoch 45, batch 250, loss[loss=0.1369, simple_loss=0.2075, pruned_loss=0.03313, over 19739.00 frames. ], tot_loss[loss=0.1461, simple_loss=0.2272, pruned_loss=0.03248, over 2828183.06 frames. ], batch size: 140, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:12:49,226 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.23 vs. limit=5.0 +2023-03-29 19:13:09,791 INFO [train.py:892] (2/4) Epoch 45, batch 300, loss[loss=0.1315, simple_loss=0.2115, pruned_loss=0.02571, over 19892.00 frames. ], tot_loss[loss=0.146, simple_loss=0.228, pruned_loss=0.03198, over 3077757.23 frames. 
], batch size: 113, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:13:37,288 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.189e+02 3.336e+02 4.013e+02 4.745e+02 8.684e+02, threshold=8.026e+02, percent-clipped=2.0 +2023-03-29 19:14:30,312 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81955.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:14:34,799 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81957.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:15:04,598 INFO [train.py:892] (2/4) Epoch 45, batch 350, loss[loss=0.155, simple_loss=0.2342, pruned_loss=0.03789, over 19750.00 frames. ], tot_loss[loss=0.1466, simple_loss=0.229, pruned_loss=0.03212, over 3269634.91 frames. ], batch size: 221, lr: 3.47e-03, grad_scale: 16.0 +2023-03-29 19:15:45,248 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81987.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:16:22,890 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82003.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:16:26,981 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82005.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:17:03,723 INFO [train.py:892] (2/4) Epoch 45, batch 400, loss[loss=0.1435, simple_loss=0.2194, pruned_loss=0.03383, over 19749.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2297, pruned_loss=0.03256, over 3419969.34 frames. ], batch size: 213, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:17:15,207 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82026.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 19:17:33,547 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.143e+02 3.235e+02 3.968e+02 4.770e+02 8.639e+02, threshold=7.936e+02, percent-clipped=1.0 +2023-03-29 19:18:09,189 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82048.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:18:26,218 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82055.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:18:39,972 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6189, 3.3680, 3.4540, 3.6444, 3.5122, 3.5743, 3.6547, 3.8664], + device='cuda:2'), covar=tensor([0.0736, 0.0539, 0.0623, 0.0477, 0.0789, 0.0697, 0.0572, 0.0401], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0186, 0.0207, 0.0185, 0.0183, 0.0167, 0.0161, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 19:19:03,142 INFO [train.py:892] (2/4) Epoch 45, batch 450, loss[loss=0.1224, simple_loss=0.1948, pruned_loss=0.02496, over 19817.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.2305, pruned_loss=0.03338, over 3537137.86 frames. ], batch size: 133, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:19:06,995 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.54 vs. 
limit=5.0 +2023-03-29 19:20:10,807 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.2869, 1.7553, 1.9832, 2.5264, 2.7115, 2.8757, 2.6634, 2.7668], + device='cuda:2'), covar=tensor([0.1178, 0.1947, 0.1645, 0.0863, 0.0727, 0.0461, 0.0633, 0.0572], + device='cuda:2'), in_proj_covar=tensor([0.0167, 0.0169, 0.0183, 0.0157, 0.0144, 0.0138, 0.0133, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 19:20:51,388 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:21:01,419 INFO [train.py:892] (2/4) Epoch 45, batch 500, loss[loss=0.1527, simple_loss=0.2308, pruned_loss=0.0373, over 19750.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2305, pruned_loss=0.03352, over 3628990.03 frames. ], batch size: 250, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:21:29,426 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 3.621e+02 4.307e+02 5.011e+02 9.871e+02, threshold=8.613e+02, percent-clipped=3.0 +2023-03-29 19:21:30,605 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.4967, 4.6312, 2.7655, 4.8584, 5.0904, 2.2213, 4.3268, 3.6925], + device='cuda:2'), covar=tensor([0.0605, 0.0578, 0.2581, 0.0596, 0.0433, 0.2690, 0.0872, 0.0884], + device='cuda:2'), in_proj_covar=tensor([0.0243, 0.0268, 0.0239, 0.0290, 0.0270, 0.0209, 0.0246, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 19:22:58,969 INFO [train.py:892] (2/4) Epoch 45, batch 550, loss[loss=0.1529, simple_loss=0.2377, pruned_loss=0.03407, over 19800.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2308, pruned_loss=0.03344, over 3700386.46 frames. ], batch size: 51, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:24:02,783 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7684, 2.3109, 2.6201, 3.0168, 3.3599, 3.5689, 3.4552, 3.4256], + device='cuda:2'), covar=tensor([0.1053, 0.1716, 0.1420, 0.0759, 0.0550, 0.0380, 0.0460, 0.0571], + device='cuda:2'), in_proj_covar=tensor([0.0169, 0.0172, 0.0185, 0.0158, 0.0146, 0.0140, 0.0135, 0.0124], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 19:24:56,171 INFO [train.py:892] (2/4) Epoch 45, batch 600, loss[loss=0.1422, simple_loss=0.2195, pruned_loss=0.03244, over 19653.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2314, pruned_loss=0.03416, over 3755344.99 frames. ], batch size: 67, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:25:22,516 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.595e+02 3.638e+02 4.311e+02 5.322e+02 1.783e+03, threshold=8.623e+02, percent-clipped=2.0 +2023-03-29 19:25:33,383 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.3447, 1.7905, 2.0230, 2.5309, 2.7713, 2.8956, 2.7621, 2.8237], + device='cuda:2'), covar=tensor([0.1209, 0.1957, 0.1749, 0.0896, 0.0665, 0.0489, 0.0604, 0.0612], + device='cuda:2'), in_proj_covar=tensor([0.0169, 0.0172, 0.0185, 0.0158, 0.0146, 0.0140, 0.0134, 0.0124], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 19:26:49,033 INFO [train.py:892] (2/4) Epoch 45, batch 650, loss[loss=0.1612, simple_loss=0.239, pruned_loss=0.04166, over 19813.00 frames. 
], tot_loss[loss=0.1496, simple_loss=0.2316, pruned_loss=0.03383, over 3797562.21 frames. ], batch size: 132, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:28:46,104 INFO [train.py:892] (2/4) Epoch 45, batch 700, loss[loss=0.1286, simple_loss=0.2008, pruned_loss=0.02822, over 19793.00 frames. ], tot_loss[loss=0.1498, simple_loss=0.2316, pruned_loss=0.03404, over 3831728.49 frames. ], batch size: 120, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:28:57,052 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1325, 4.2302, 2.5365, 4.4363, 4.6034, 2.0306, 3.8825, 3.5077], + device='cuda:2'), covar=tensor([0.0720, 0.0825, 0.2661, 0.0769, 0.0563, 0.2775, 0.0947, 0.0923], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0267, 0.0239, 0.0289, 0.0269, 0.0208, 0.0246, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 19:29:01,218 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82326.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:29:15,653 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.176e+02 3.457e+02 4.012e+02 4.787e+02 1.008e+03, threshold=8.024e+02, percent-clipped=2.0 +2023-03-29 19:29:40,063 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82343.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:29:46,465 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82346.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:29:46,500 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1682, 3.3993, 2.9147, 2.5924, 2.9566, 3.2659, 3.3197, 3.3109], + device='cuda:2'), covar=tensor([0.0321, 0.0271, 0.0336, 0.0523, 0.0368, 0.0349, 0.0217, 0.0236], + device='cuda:2'), in_proj_covar=tensor([0.0116, 0.0109, 0.0109, 0.0110, 0.0113, 0.0099, 0.0101, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 19:30:08,213 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.6172, 3.5697, 3.9645, 3.5761, 3.4360, 3.9043, 3.6720, 4.0299], + device='cuda:2'), covar=tensor([0.1029, 0.0486, 0.0465, 0.0517, 0.1392, 0.0666, 0.0645, 0.0458], + device='cuda:2'), in_proj_covar=tensor([0.0287, 0.0231, 0.0232, 0.0244, 0.0213, 0.0257, 0.0245, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 19:30:42,583 INFO [train.py:892] (2/4) Epoch 45, batch 750, loss[loss=0.1389, simple_loss=0.2201, pruned_loss=0.02883, over 19795.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2304, pruned_loss=0.03346, over 3858235.47 frames. 
], batch size: 211, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:30:49,661 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82374.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:32:10,225 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82407.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:32:14,041 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.1914, 4.8738, 4.9849, 5.1949, 5.0206, 5.5119, 5.3380, 5.5060], + device='cuda:2'), covar=tensor([0.0685, 0.0442, 0.0426, 0.0351, 0.0629, 0.0395, 0.0474, 0.0361], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0187, 0.0207, 0.0185, 0.0184, 0.0167, 0.0161, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 19:32:19,249 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82411.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:32:38,392 INFO [train.py:892] (2/4) Epoch 45, batch 800, loss[loss=0.1392, simple_loss=0.2234, pruned_loss=0.02746, over 19680.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.23, pruned_loss=0.03295, over 3877902.38 frames. ], batch size: 59, lr: 3.46e-03, grad_scale: 32.0 +2023-03-29 19:32:48,954 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5067, 2.8321, 3.5442, 2.8914, 3.6612, 3.5611, 4.3234, 4.7618], + device='cuda:2'), covar=tensor([0.0533, 0.1871, 0.1372, 0.2237, 0.1869, 0.1523, 0.0587, 0.0453], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0248, 0.0275, 0.0263, 0.0310, 0.0267, 0.0241, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 19:33:03,753 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.510e+02 3.458e+02 4.305e+02 5.045e+02 7.264e+02, threshold=8.610e+02, percent-clipped=0.0 +2023-03-29 19:33:30,022 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.22 vs. limit=5.0 +2023-03-29 19:34:06,466 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 19:34:36,617 INFO [train.py:892] (2/4) Epoch 45, batch 850, loss[loss=0.1256, simple_loss=0.2092, pruned_loss=0.02095, over 19738.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2304, pruned_loss=0.03311, over 3893817.19 frames. ], batch size: 99, lr: 3.46e-03, grad_scale: 32.0 +2023-03-29 19:34:55,627 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3871, 4.0386, 4.1607, 4.3288, 4.1449, 4.3894, 4.4104, 4.6311], + device='cuda:2'), covar=tensor([0.0649, 0.0511, 0.0524, 0.0403, 0.0717, 0.0586, 0.0519, 0.0350], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0186, 0.0206, 0.0184, 0.0182, 0.0166, 0.0160, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 19:35:25,040 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82493.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 19:36:29,761 INFO [train.py:892] (2/4) Epoch 45, batch 900, loss[loss=0.1406, simple_loss=0.223, pruned_loss=0.02906, over 19846.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.2299, pruned_loss=0.03293, over 3906531.05 frames. 
], batch size: 115, lr: 3.46e-03, grad_scale: 32.0 +2023-03-29 19:36:57,911 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.335e+02 3.589e+02 4.303e+02 4.859e+02 1.125e+03, threshold=8.606e+02, percent-clipped=1.0 +2023-03-29 19:37:49,458 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82554.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 19:38:17,334 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7796, 3.2783, 3.6530, 3.1514, 3.9236, 3.8826, 4.5495, 5.0442], + device='cuda:2'), covar=tensor([0.0450, 0.1586, 0.1362, 0.2187, 0.1684, 0.1332, 0.0525, 0.0427], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0248, 0.0276, 0.0264, 0.0310, 0.0267, 0.0242, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 19:38:23,870 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82569.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:38:27,667 INFO [train.py:892] (2/4) Epoch 45, batch 950, loss[loss=0.1454, simple_loss=0.223, pruned_loss=0.0339, over 19563.00 frames. ], tot_loss[loss=0.148, simple_loss=0.23, pruned_loss=0.03297, over 3916380.05 frames. ], batch size: 41, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:38:37,848 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5297, 4.6554, 2.6977, 4.9245, 5.1004, 2.2125, 4.3070, 3.6628], + device='cuda:2'), covar=tensor([0.0635, 0.0723, 0.2665, 0.0739, 0.0398, 0.2770, 0.0880, 0.0894], + device='cuda:2'), in_proj_covar=tensor([0.0243, 0.0269, 0.0240, 0.0291, 0.0270, 0.0209, 0.0247, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 19:39:17,964 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3232, 3.2884, 5.0777, 3.7468, 3.9882, 3.7413, 2.7322, 2.9698], + device='cuda:2'), covar=tensor([0.1004, 0.3123, 0.0374, 0.1068, 0.1753, 0.1496, 0.2868, 0.2637], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0405, 0.0358, 0.0299, 0.0381, 0.0403, 0.0391, 0.0368], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 19:40:26,308 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2907, 4.3354, 2.5454, 4.5409, 4.7253, 2.1117, 3.9709, 3.5377], + device='cuda:2'), covar=tensor([0.0644, 0.0867, 0.2713, 0.0781, 0.0535, 0.2652, 0.0997, 0.0889], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0270, 0.0242, 0.0292, 0.0272, 0.0210, 0.0249, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 19:40:27,134 INFO [train.py:892] (2/4) Epoch 45, batch 1000, loss[loss=0.148, simple_loss=0.2263, pruned_loss=0.03484, over 19717.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2296, pruned_loss=0.03263, over 3923269.69 frames. ], batch size: 78, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:40:51,664 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82630.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:40:56,939 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. 
limit=2.0 +2023-03-29 19:41:01,298 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.945e+02 3.409e+02 3.840e+02 4.535e+02 7.731e+02, threshold=7.679e+02, percent-clipped=0.0 +2023-03-29 19:41:22,503 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82643.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:42:26,378 INFO [train.py:892] (2/4) Epoch 45, batch 1050, loss[loss=0.1493, simple_loss=0.2324, pruned_loss=0.03309, over 19869.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.2304, pruned_loss=0.03293, over 3930085.10 frames. ], batch size: 89, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:42:44,842 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5537, 2.5934, 4.3293, 3.0649, 3.3207, 3.0662, 2.4117, 2.4611], + device='cuda:2'), covar=tensor([0.1465, 0.3975, 0.0547, 0.1348, 0.2387, 0.2086, 0.3066, 0.3180], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0405, 0.0358, 0.0300, 0.0382, 0.0403, 0.0392, 0.0368], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 19:43:12,527 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82691.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:43:39,246 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82702.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:43:59,378 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82711.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:44:18,901 INFO [train.py:892] (2/4) Epoch 45, batch 1100, loss[loss=0.1382, simple_loss=0.2129, pruned_loss=0.03169, over 19796.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2296, pruned_loss=0.03259, over 3935016.66 frames. ], batch size: 162, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:44:29,223 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-29 19:44:49,682 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.363e+02 3.616e+02 4.255e+02 4.865e+02 1.038e+03, threshold=8.510e+02, percent-clipped=5.0 +2023-03-29 19:45:48,338 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82759.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:46:16,999 INFO [train.py:892] (2/4) Epoch 45, batch 1150, loss[loss=0.1452, simple_loss=0.2261, pruned_loss=0.03211, over 19734.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.2298, pruned_loss=0.033, over 3937528.88 frames. ], batch size: 76, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:48:14,772 INFO [train.py:892] (2/4) Epoch 45, batch 1200, loss[loss=0.1547, simple_loss=0.238, pruned_loss=0.03575, over 19849.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2315, pruned_loss=0.03375, over 3940672.94 frames. ], batch size: 58, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:48:45,773 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.607e+02 4.196e+02 5.114e+02 1.465e+03, threshold=8.391e+02, percent-clipped=1.0 +2023-03-29 19:49:22,150 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82849.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 19:50:06,275 INFO [train.py:892] (2/4) Epoch 45, batch 1250, loss[loss=0.1423, simple_loss=0.2292, pruned_loss=0.02769, over 19743.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.231, pruned_loss=0.03339, over 3942860.94 frames. 
], batch size: 259, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:52:02,765 INFO [train.py:892] (2/4) Epoch 45, batch 1300, loss[loss=0.1446, simple_loss=0.2123, pruned_loss=0.03848, over 19800.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2314, pruned_loss=0.0331, over 3943985.80 frames. ], batch size: 150, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:52:15,061 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82925.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:52:34,742 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.586e+02 3.425e+02 4.088e+02 4.844e+02 1.609e+03, threshold=8.176e+02, percent-clipped=1.0 +2023-03-29 19:52:38,420 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2354, 4.1326, 4.5247, 4.1472, 3.8023, 4.3415, 4.1755, 4.5814], + device='cuda:2'), covar=tensor([0.0745, 0.0349, 0.0355, 0.0364, 0.1036, 0.0548, 0.0540, 0.0323], + device='cuda:2'), in_proj_covar=tensor([0.0289, 0.0232, 0.0234, 0.0245, 0.0215, 0.0258, 0.0246, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 19:53:02,968 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 19:53:43,437 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82963.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:54:02,344 INFO [train.py:892] (2/4) Epoch 45, batch 1350, loss[loss=0.1413, simple_loss=0.2353, pruned_loss=0.02365, over 19852.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2313, pruned_loss=0.03289, over 3945854.84 frames. ], batch size: 56, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:54:55,088 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2702, 5.4139, 5.6996, 5.4082, 5.4491, 5.2767, 5.4296, 5.1969], + device='cuda:2'), covar=tensor([0.1331, 0.1368, 0.0728, 0.1223, 0.0710, 0.0772, 0.1681, 0.1904], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0356, 0.0386, 0.0321, 0.0296, 0.0300, 0.0380, 0.0409], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 19:55:17,020 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83002.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:55:59,033 INFO [train.py:892] (2/4) Epoch 45, batch 1400, loss[loss=0.1724, simple_loss=0.2543, pruned_loss=0.04522, over 19561.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2316, pruned_loss=0.03325, over 3946202.31 frames. 
], batch size: 60, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:56:09,257 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83024.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:56:29,944 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.035e+02 3.427e+02 4.004e+02 4.659e+02 9.090e+02, threshold=8.008e+02, percent-clipped=1.0 +2023-03-29 19:57:07,386 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83050.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:57:14,613 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9217, 3.0986, 3.2301, 3.0947, 2.9900, 3.0440, 3.0010, 3.2507], + device='cuda:2'), covar=tensor([0.0438, 0.0355, 0.0294, 0.0374, 0.0456, 0.0374, 0.0405, 0.0299], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0093, 0.0095, 0.0090, 0.0103, 0.0095, 0.0111, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 19:57:56,912 INFO [train.py:892] (2/4) Epoch 45, batch 1450, loss[loss=0.135, simple_loss=0.2119, pruned_loss=0.02902, over 19532.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2315, pruned_loss=0.03313, over 3947489.48 frames. ], batch size: 46, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:58:11,302 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-29 19:58:23,741 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-03-29 19:59:49,363 INFO [train.py:892] (2/4) Epoch 45, batch 1500, loss[loss=0.1582, simple_loss=0.2303, pruned_loss=0.04304, over 19837.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2317, pruned_loss=0.03372, over 3948525.64 frames. ], batch size: 239, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:00:17,875 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.217e+02 3.289e+02 3.880e+02 4.653e+02 8.208e+02, threshold=7.760e+02, percent-clipped=2.0 +2023-03-29 20:00:55,647 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83149.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 20:01:43,838 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.8010, 3.1203, 3.6764, 3.1505, 3.9189, 3.9602, 4.5583, 5.0802], + device='cuda:2'), covar=tensor([0.0468, 0.1743, 0.1509, 0.2254, 0.1541, 0.1207, 0.0560, 0.0444], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0247, 0.0275, 0.0262, 0.0309, 0.0266, 0.0240, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 20:01:45,140 INFO [train.py:892] (2/4) Epoch 45, batch 1550, loss[loss=0.164, simple_loss=0.2564, pruned_loss=0.03576, over 19721.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.231, pruned_loss=0.03342, over 3950667.95 frames. ], batch size: 54, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:02:46,043 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83196.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:02:48,042 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83197.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 20:03:45,851 INFO [train.py:892] (2/4) Epoch 45, batch 1600, loss[loss=0.1507, simple_loss=0.2432, pruned_loss=0.02909, over 19816.00 frames. ], tot_loss[loss=0.1497, simple_loss=0.2321, pruned_loss=0.0337, over 3950342.00 frames. 
], batch size: 57, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:03:57,118 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83225.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:04:15,363 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.230e+02 3.453e+02 4.061e+02 4.762e+02 8.632e+02, threshold=8.122e+02, percent-clipped=1.0 +2023-03-29 20:05:10,886 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83257.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:05:15,537 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.5427, 2.7739, 2.4509, 2.0003, 2.4966, 2.6831, 2.7126, 2.7713], + device='cuda:2'), covar=tensor([0.0408, 0.0388, 0.0397, 0.0621, 0.0476, 0.0346, 0.0380, 0.0304], + device='cuda:2'), in_proj_covar=tensor([0.0116, 0.0109, 0.0110, 0.0109, 0.0113, 0.0100, 0.0101, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 20:05:41,845 INFO [train.py:892] (2/4) Epoch 45, batch 1650, loss[loss=0.1391, simple_loss=0.2161, pruned_loss=0.03104, over 19854.00 frames. ], tot_loss[loss=0.1497, simple_loss=0.2324, pruned_loss=0.03348, over 3947770.18 frames. ], batch size: 157, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:05:47,088 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:06:11,745 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83284.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:07:17,074 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1080, 2.3930, 3.3570, 2.7707, 2.8460, 2.7540, 2.0968, 2.2256], + device='cuda:2'), covar=tensor([0.1247, 0.2737, 0.0729, 0.1229, 0.1929, 0.1621, 0.2690, 0.2652], + device='cuda:2'), in_proj_covar=tensor([0.0359, 0.0404, 0.0357, 0.0299, 0.0380, 0.0404, 0.0391, 0.0367], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 20:07:29,137 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83319.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:07:33,424 INFO [train.py:892] (2/4) Epoch 45, batch 1700, loss[loss=0.1344, simple_loss=0.2184, pruned_loss=0.02516, over 19705.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2316, pruned_loss=0.03334, over 3949312.46 frames. ], batch size: 101, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:07:40,939 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.59 vs. 
limit=5.0 +2023-03-29 20:08:02,060 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.889e+02 3.340e+02 3.879e+02 4.594e+02 1.209e+03, threshold=7.757e+02, percent-clipped=3.0 +2023-03-29 20:08:28,503 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83345.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:08:43,674 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1903, 4.0543, 4.5166, 4.1070, 3.8876, 4.3679, 4.1507, 4.6240], + device='cuda:2'), covar=tensor([0.0949, 0.0458, 0.0464, 0.0485, 0.1096, 0.0658, 0.0602, 0.0418], + device='cuda:2'), in_proj_covar=tensor([0.0291, 0.0233, 0.0236, 0.0248, 0.0216, 0.0261, 0.0249, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 20:09:20,244 INFO [train.py:892] (2/4) Epoch 45, batch 1750, loss[loss=0.157, simple_loss=0.2424, pruned_loss=0.03578, over 19724.00 frames. ], tot_loss[loss=0.148, simple_loss=0.2301, pruned_loss=0.033, over 3949689.70 frames. ], batch size: 50, lr: 3.44e-03, grad_scale: 16.0 +2023-03-29 20:09:23,352 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0 +2023-03-29 20:10:59,579 INFO [train.py:892] (2/4) Epoch 45, batch 1800, loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02918, over 19818.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2318, pruned_loss=0.03414, over 3949165.76 frames. ], batch size: 98, lr: 3.44e-03, grad_scale: 16.0 +2023-03-29 20:11:14,244 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1264, 3.3091, 2.7349, 2.4279, 2.8716, 3.1421, 3.2485, 3.2566], + device='cuda:2'), covar=tensor([0.0330, 0.0344, 0.0401, 0.0624, 0.0417, 0.0403, 0.0261, 0.0257], + device='cuda:2'), in_proj_covar=tensor([0.0116, 0.0109, 0.0110, 0.0109, 0.0113, 0.0099, 0.0101, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 20:11:23,980 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.557e+02 3.459e+02 4.040e+02 5.185e+02 7.885e+02, threshold=8.081e+02, percent-clipped=1.0 +2023-03-29 20:12:16,869 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-29 20:12:35,801 INFO [train.py:892] (2/4) Epoch 45, batch 1850, loss[loss=0.1635, simple_loss=0.249, pruned_loss=0.03901, over 19826.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2321, pruned_loss=0.03355, over 3948392.89 frames. ], batch size: 57, lr: 3.44e-03, grad_scale: 16.0 +2023-03-29 20:13:42,296 INFO [train.py:892] (2/4) Epoch 46, batch 0, loss[loss=0.1527, simple_loss=0.2339, pruned_loss=0.03574, over 19927.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2339, pruned_loss=0.03574, over 19927.00 frames. ], batch size: 51, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:13:42,297 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 20:14:19,282 INFO [train.py:926] (2/4) Epoch 46, validation: loss=0.1879, simple_loss=0.2498, pruned_loss=0.06295, over 2883724.00 frames. +2023-03-29 20:14:19,284 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22417MB +2023-03-29 20:16:21,255 INFO [train.py:892] (2/4) Epoch 46, batch 50, loss[loss=0.1504, simple_loss=0.2363, pruned_loss=0.03227, over 19791.00 frames. ], tot_loss[loss=0.1441, simple_loss=0.2261, pruned_loss=0.03104, over 889748.38 frames. 
], batch size: 45, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:16:38,122 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.206e+02 3.234e+02 3.698e+02 4.952e+02 1.024e+03, threshold=7.395e+02, percent-clipped=5.0 +2023-03-29 20:16:51,087 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-29 20:17:20,760 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83552.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:17:24,118 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7023, 3.2796, 3.5607, 3.1546, 3.8885, 3.8737, 4.4548, 4.9816], + device='cuda:2'), covar=tensor([0.0472, 0.1543, 0.1420, 0.2133, 0.1626, 0.1278, 0.0616, 0.0415], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0249, 0.0278, 0.0265, 0.0312, 0.0268, 0.0243, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 20:18:14,929 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5461, 2.3788, 5.0249, 4.2509, 4.8516, 4.8413, 4.6968, 4.7015], + device='cuda:2'), covar=tensor([0.0781, 0.1484, 0.0129, 0.0945, 0.0159, 0.0242, 0.0203, 0.0167], + device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0108, 0.0093, 0.0155, 0.0091, 0.0105, 0.0095, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 20:18:16,045 INFO [train.py:892] (2/4) Epoch 46, batch 100, loss[loss=0.1513, simple_loss=0.2325, pruned_loss=0.0351, over 19891.00 frames. ], tot_loss[loss=0.1459, simple_loss=0.227, pruned_loss=0.03239, over 1568725.67 frames. ], batch size: 47, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:19:28,040 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.7175, 2.1684, 2.5806, 2.9213, 3.2348, 3.4197, 3.3122, 3.3650], + device='cuda:2'), covar=tensor([0.1046, 0.1771, 0.1342, 0.0763, 0.0581, 0.0395, 0.0519, 0.0468], + device='cuda:2'), in_proj_covar=tensor([0.0168, 0.0171, 0.0183, 0.0158, 0.0145, 0.0139, 0.0134, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 20:19:59,592 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83619.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:20:13,492 INFO [train.py:892] (2/4) Epoch 46, batch 150, loss[loss=0.1345, simple_loss=0.2114, pruned_loss=0.02879, over 19844.00 frames. ], tot_loss[loss=0.1454, simple_loss=0.2263, pruned_loss=0.03222, over 2096698.76 frames. 
], batch size: 109, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:20:31,959 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.341e+02 3.214e+02 3.787e+02 4.556e+02 6.937e+02, threshold=7.575e+02, percent-clipped=0.0 +2023-03-29 20:20:51,265 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83640.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:21:17,883 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2871, 3.2288, 2.1212, 3.8412, 3.5101, 3.8025, 3.8394, 3.0744], + device='cuda:2'), covar=tensor([0.0637, 0.0680, 0.1710, 0.0657, 0.0634, 0.0439, 0.0719, 0.0847], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0152, 0.0149, 0.0163, 0.0141, 0.0148, 0.0157, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 20:21:25,215 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3903, 3.4698, 2.1688, 3.5719, 3.6776, 1.7610, 3.0854, 2.8318], + device='cuda:2'), covar=tensor([0.0892, 0.0990, 0.2891, 0.0953, 0.0754, 0.2766, 0.1176, 0.1077], + device='cuda:2'), in_proj_covar=tensor([0.0243, 0.0270, 0.0242, 0.0292, 0.0271, 0.0209, 0.0249, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 20:21:54,625 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83667.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:22:16,396 INFO [train.py:892] (2/4) Epoch 46, batch 200, loss[loss=0.1284, simple_loss=0.2108, pruned_loss=0.02303, over 19891.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2282, pruned_loss=0.0332, over 2508226.24 frames. ], batch size: 87, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:24:05,082 INFO [train.py:892] (2/4) Epoch 46, batch 250, loss[loss=0.1314, simple_loss=0.2085, pruned_loss=0.02716, over 19655.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.2289, pruned_loss=0.03309, over 2827192.28 frames. ], batch size: 47, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:24:21,765 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.227e+02 3.614e+02 4.201e+02 5.267e+02 1.212e+03, threshold=8.403e+02, percent-clipped=3.0 +2023-03-29 20:25:25,758 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83760.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:26:06,555 INFO [train.py:892] (2/4) Epoch 46, batch 300, loss[loss=0.1584, simple_loss=0.2313, pruned_loss=0.04277, over 19812.00 frames. ], tot_loss[loss=0.1466, simple_loss=0.2282, pruned_loss=0.03255, over 3075988.84 frames. 
], batch size: 132, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:26:45,529 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0462, 2.9582, 3.0556, 2.5593, 3.2395, 2.7907, 3.1079, 3.2009], + device='cuda:2'), covar=tensor([0.0687, 0.0558, 0.0836, 0.0866, 0.0421, 0.0551, 0.0527, 0.0424], + device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0096, 0.0092, 0.0115, 0.0088, 0.0091, 0.0088, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 20:27:10,540 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5032, 3.7477, 3.9502, 4.5219, 3.0922, 3.1691, 3.2193, 3.0516], + device='cuda:2'), covar=tensor([0.0512, 0.1926, 0.0895, 0.0408, 0.1865, 0.1263, 0.1104, 0.1485], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0332, 0.0258, 0.0216, 0.0253, 0.0221, 0.0227, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 20:28:00,379 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83821.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:28:11,311 INFO [train.py:892] (2/4) Epoch 46, batch 350, loss[loss=0.1344, simple_loss=0.2178, pruned_loss=0.02551, over 19412.00 frames. ], tot_loss[loss=0.1466, simple_loss=0.2286, pruned_loss=0.03232, over 3271091.25 frames. ], batch size: 40, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:28:31,211 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.272e+02 3.501e+02 4.159e+02 5.209e+02 9.522e+02, threshold=8.317e+02, percent-clipped=2.0 +2023-03-29 20:29:18,686 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83852.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:30:17,325 INFO [train.py:892] (2/4) Epoch 46, batch 400, loss[loss=0.1332, simple_loss=0.2146, pruned_loss=0.02592, over 19801.00 frames. ], tot_loss[loss=0.1462, simple_loss=0.2281, pruned_loss=0.03219, over 3423114.37 frames. ], batch size: 114, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:31:22,494 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83900.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:32:25,502 INFO [train.py:892] (2/4) Epoch 46, batch 450, loss[loss=0.1415, simple_loss=0.2191, pruned_loss=0.03193, over 19717.00 frames. ], tot_loss[loss=0.1466, simple_loss=0.2285, pruned_loss=0.03235, over 3541178.25 frames. ], batch size: 85, lr: 3.40e-03, grad_scale: 8.0 +2023-03-29 20:32:48,219 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.948e+02 3.420e+02 3.775e+02 4.552e+02 8.094e+02, threshold=7.550e+02, percent-clipped=0.0 +2023-03-29 20:33:01,539 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83940.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:33:22,818 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83949.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:34:29,682 INFO [train.py:892] (2/4) Epoch 46, batch 500, loss[loss=0.1512, simple_loss=0.2291, pruned_loss=0.03668, over 19786.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2287, pruned_loss=0.03251, over 3630880.68 frames. 
], batch size: 83, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:34:42,966 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.9033, 2.9530, 1.9936, 3.3509, 3.1068, 3.2750, 3.3851, 2.6812], + device='cuda:2'), covar=tensor([0.0754, 0.0781, 0.1591, 0.0711, 0.0677, 0.0659, 0.0636, 0.0975], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0152, 0.0148, 0.0163, 0.0141, 0.0148, 0.0158, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 20:34:51,586 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83985.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:34:59,178 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83988.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:35:35,331 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-29 20:35:59,507 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84010.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 20:36:35,487 INFO [train.py:892] (2/4) Epoch 46, batch 550, loss[loss=0.135, simple_loss=0.2202, pruned_loss=0.02493, over 19828.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2292, pruned_loss=0.03276, over 3700960.02 frames. ], batch size: 93, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:36:58,608 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.358e+02 3.554e+02 4.105e+02 5.072e+02 7.939e+02, threshold=8.210e+02, percent-clipped=2.0 +2023-03-29 20:37:02,483 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-29 20:37:27,515 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84046.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:38:20,887 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5570, 2.3627, 4.8710, 3.9804, 4.5571, 4.7608, 4.5300, 4.4346], + device='cuda:2'), covar=tensor([0.0636, 0.1302, 0.0099, 0.0946, 0.0149, 0.0198, 0.0172, 0.0166], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0108, 0.0092, 0.0154, 0.0092, 0.0105, 0.0095, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 20:38:42,374 INFO [train.py:892] (2/4) Epoch 46, batch 600, loss[loss=0.158, simple_loss=0.2453, pruned_loss=0.03541, over 19938.00 frames. ], tot_loss[loss=0.1471, simple_loss=0.229, pruned_loss=0.03258, over 3755725.89 frames. ], batch size: 52, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:39:34,334 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84097.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:40:17,198 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:40:40,177 INFO [train.py:892] (2/4) Epoch 46, batch 650, loss[loss=0.1395, simple_loss=0.2243, pruned_loss=0.02732, over 19857.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2294, pruned_loss=0.03267, over 3799132.30 frames. ], batch size: 58, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:41:04,400 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.462e+02 3.455e+02 4.092e+02 4.989e+02 7.207e+02, threshold=8.185e+02, percent-clipped=0.0 +2023-03-29 20:41:39,486 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. 
limit=2.0 +2023-03-29 20:41:39,565 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.06 vs. limit=5.0 +2023-03-29 20:41:46,492 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-29 20:42:03,767 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84158.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:42:46,667 INFO [train.py:892] (2/4) Epoch 46, batch 700, loss[loss=0.1357, simple_loss=0.2171, pruned_loss=0.0271, over 19715.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2295, pruned_loss=0.03271, over 3833148.09 frames. ], batch size: 85, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:43:45,185 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84199.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:44:03,359 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.0011, 2.2546, 2.0964, 1.5083, 2.0917, 2.2716, 2.1160, 2.2196], + device='cuda:2'), covar=tensor([0.0509, 0.0412, 0.0436, 0.0687, 0.0497, 0.0375, 0.0401, 0.0349], + device='cuda:2'), in_proj_covar=tensor([0.0117, 0.0110, 0.0110, 0.0110, 0.0114, 0.0100, 0.0102, 0.0100], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 20:44:54,024 INFO [train.py:892] (2/4) Epoch 46, batch 750, loss[loss=0.1476, simple_loss=0.2383, pruned_loss=0.02842, over 19660.00 frames. ], tot_loss[loss=0.1465, simple_loss=0.2286, pruned_loss=0.03219, over 3860491.41 frames. ], batch size: 57, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:45:18,700 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.966e+02 3.322e+02 4.189e+02 5.032e+02 8.486e+02, threshold=8.378e+02, percent-clipped=1.0 +2023-03-29 20:45:22,228 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84236.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:46:21,601 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84260.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:46:38,545 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7080, 3.7426, 2.2742, 3.9437, 4.0841, 1.9175, 3.4622, 3.1805], + device='cuda:2'), covar=tensor([0.0863, 0.1039, 0.3030, 0.0930, 0.0682, 0.2877, 0.1120, 0.0999], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0268, 0.0240, 0.0289, 0.0269, 0.0208, 0.0247, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 20:46:56,845 INFO [train.py:892] (2/4) Epoch 46, batch 800, loss[loss=0.1387, simple_loss=0.2198, pruned_loss=0.02876, over 19764.00 frames. ], tot_loss[loss=0.1463, simple_loss=0.228, pruned_loss=0.03231, over 3880678.05 frames. 
], batch size: 217, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:47:33,159 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5337, 4.5493, 2.7260, 4.8591, 5.0915, 2.2421, 4.3364, 3.7656], + device='cuda:2'), covar=tensor([0.0611, 0.0796, 0.2776, 0.0688, 0.0512, 0.2854, 0.0881, 0.0881], + device='cuda:2'), in_proj_covar=tensor([0.0242, 0.0269, 0.0241, 0.0290, 0.0270, 0.0208, 0.0247, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 20:47:48,363 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84297.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:48:06,896 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84305.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 20:48:30,107 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5258, 4.4041, 4.8713, 4.4557, 4.0535, 4.6471, 4.5232, 4.9474], + device='cuda:2'), covar=tensor([0.0779, 0.0369, 0.0349, 0.0386, 0.0953, 0.0558, 0.0485, 0.0355], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0233, 0.0235, 0.0246, 0.0215, 0.0260, 0.0248, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 20:48:59,548 INFO [train.py:892] (2/4) Epoch 46, batch 850, loss[loss=0.1459, simple_loss=0.2297, pruned_loss=0.03103, over 19809.00 frames. ], tot_loss[loss=0.1461, simple_loss=0.228, pruned_loss=0.03211, over 3896738.46 frames. ], batch size: 231, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:49:22,798 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.218e+02 3.307e+02 4.048e+02 4.791e+02 1.064e+03, threshold=8.096e+02, percent-clipped=1.0 +2023-03-29 20:49:27,013 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84336.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:49:41,456 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84341.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:49:51,594 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-29 20:50:08,147 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-29 20:51:08,424 INFO [train.py:892] (2/4) Epoch 46, batch 900, loss[loss=0.137, simple_loss=0.2163, pruned_loss=0.02882, over 19781.00 frames. ], tot_loss[loss=0.1464, simple_loss=0.2281, pruned_loss=0.03238, over 3909958.59 frames. ], batch size: 65, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:51:41,045 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1545, 3.1123, 3.1436, 2.6444, 3.2873, 2.7948, 3.0983, 3.2141], + device='cuda:2'), covar=tensor([0.0549, 0.0452, 0.0674, 0.0799, 0.0401, 0.0501, 0.0531, 0.0515], + device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0096, 0.0093, 0.0116, 0.0088, 0.0092, 0.0088, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 20:51:59,852 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84397.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:52:44,323 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.29 vs. 
limit=5.0 +2023-03-29 20:52:47,315 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84416.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:53:09,208 INFO [train.py:892] (2/4) Epoch 46, batch 950, loss[loss=0.1452, simple_loss=0.2326, pruned_loss=0.02888, over 19742.00 frames. ], tot_loss[loss=0.146, simple_loss=0.2277, pruned_loss=0.03214, over 3919485.99 frames. ], batch size: 221, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:53:32,134 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.409e+02 3.247e+02 3.846e+02 4.740e+02 1.002e+03, threshold=7.692e+02, percent-clipped=2.0 +2023-03-29 20:53:58,311 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-29 20:54:17,772 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84453.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:54:43,067 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84464.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:55:12,994 INFO [train.py:892] (2/4) Epoch 46, batch 1000, loss[loss=0.1476, simple_loss=0.2288, pruned_loss=0.03316, over 19752.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2293, pruned_loss=0.03279, over 3925726.23 frames. ], batch size: 155, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 20:55:26,666 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-03-29 20:56:45,489 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-03-29 20:57:09,428 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84525.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:57:10,393 INFO [train.py:892] (2/4) Epoch 46, batch 1050, loss[loss=0.186, simple_loss=0.2731, pruned_loss=0.0495, over 19700.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2307, pruned_loss=0.03353, over 3930080.41 frames. ], batch size: 337, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 20:57:31,650 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.128e+02 3.422e+02 4.030e+02 5.116e+02 8.679e+02, threshold=8.059e+02, percent-clipped=4.0 +2023-03-29 20:58:20,644 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84555.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:59:10,821 INFO [train.py:892] (2/4) Epoch 46, batch 1100, loss[loss=0.1241, simple_loss=0.2064, pruned_loss=0.02093, over 19802.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2299, pruned_loss=0.03324, over 3934741.00 frames. 
], batch size: 98, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 20:59:32,303 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.2163, 4.7811, 4.8640, 4.6048, 5.1513, 3.2808, 4.1947, 2.6412], + device='cuda:2'), covar=tensor([0.0160, 0.0215, 0.0157, 0.0209, 0.0145, 0.0997, 0.0907, 0.1460], + device='cuda:2'), in_proj_covar=tensor([0.0111, 0.0156, 0.0120, 0.0142, 0.0126, 0.0141, 0.0147, 0.0134], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 20:59:34,784 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84586.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:59:49,096 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84592.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:00:22,899 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84605.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:01:12,922 INFO [train.py:892] (2/4) Epoch 46, batch 1150, loss[loss=0.1414, simple_loss=0.2256, pruned_loss=0.02856, over 19759.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2301, pruned_loss=0.03343, over 3937906.12 frames. ], batch size: 100, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:01:31,611 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84633.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:01:34,970 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.372e+02 3.414e+02 3.912e+02 4.793e+02 1.001e+03, threshold=7.825e+02, percent-clipped=1.0 +2023-03-29 21:01:52,765 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84641.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:02:22,341 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84653.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:03:17,298 INFO [train.py:892] (2/4) Epoch 46, batch 1200, loss[loss=0.1256, simple_loss=0.2076, pruned_loss=0.02182, over 19837.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2306, pruned_loss=0.03345, over 3941201.09 frames. ], batch size: 177, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:03:50,256 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84689.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:03:56,729 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84692.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:04:01,932 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84694.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:05:19,015 INFO [train.py:892] (2/4) Epoch 46, batch 1250, loss[loss=0.1354, simple_loss=0.2176, pruned_loss=0.02655, over 19662.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2306, pruned_loss=0.03344, over 3941318.64 frames. 
], batch size: 50, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:05:19,959 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7660, 4.4613, 4.5109, 4.2246, 4.7460, 3.2008, 3.8879, 2.4259], + device='cuda:2'), covar=tensor([0.0166, 0.0225, 0.0151, 0.0200, 0.0141, 0.0968, 0.0797, 0.1479], + device='cuda:2'), in_proj_covar=tensor([0.0110, 0.0156, 0.0119, 0.0142, 0.0125, 0.0141, 0.0147, 0.0134], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 21:05:39,531 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 3.466e+02 3.877e+02 4.842e+02 1.121e+03, threshold=7.755e+02, percent-clipped=3.0 +2023-03-29 21:06:28,557 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84753.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:07:25,107 INFO [train.py:892] (2/4) Epoch 46, batch 1300, loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03014, over 19824.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2299, pruned_loss=0.03325, over 3944335.65 frames. ], batch size: 121, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:07:54,690 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.5811, 6.0703, 6.1076, 5.7875, 5.7312, 5.8917, 5.7690, 5.4940], + device='cuda:2'), covar=tensor([0.1365, 0.1304, 0.0742, 0.1155, 0.0653, 0.0656, 0.1627, 0.1780], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0356, 0.0387, 0.0319, 0.0295, 0.0300, 0.0378, 0.0410], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 21:08:25,892 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84800.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:08:27,677 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84801.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:09:27,320 INFO [train.py:892] (2/4) Epoch 46, batch 1350, loss[loss=0.1279, simple_loss=0.2057, pruned_loss=0.02506, over 19872.00 frames. ], tot_loss[loss=0.1477, simple_loss=0.2292, pruned_loss=0.03313, over 3946229.32 frames. ], batch size: 108, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:09:50,717 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.135e+02 3.546e+02 4.239e+02 5.224e+02 8.019e+02, threshold=8.477e+02, percent-clipped=4.0 +2023-03-29 21:10:05,596 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 21:10:45,668 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84855.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:10:48,279 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1372, 2.8793, 3.2425, 2.7061, 3.3631, 3.3031, 3.9163, 4.3357], + device='cuda:2'), covar=tensor([0.0526, 0.1709, 0.1443, 0.2370, 0.1596, 0.1398, 0.0671, 0.0525], + device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0248, 0.0276, 0.0265, 0.0311, 0.0268, 0.0242, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 21:11:00,445 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84861.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:11:35,545 INFO [train.py:892] (2/4) Epoch 46, batch 1400, loss[loss=0.1289, simple_loss=0.2077, pruned_loss=0.0251, over 19726.00 frames. 
], tot_loss[loss=0.1475, simple_loss=0.229, pruned_loss=0.03297, over 3947032.08 frames. ], batch size: 134, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:11:46,579 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84881.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:11:46,829 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7597, 3.0201, 3.2187, 3.6105, 2.5716, 2.9735, 2.4044, 2.4117], + device='cuda:2'), covar=tensor([0.0591, 0.1895, 0.1107, 0.0561, 0.2069, 0.1039, 0.1481, 0.1729], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0333, 0.0259, 0.0216, 0.0252, 0.0220, 0.0227, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 21:11:55,659 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2230, 2.7893, 3.3081, 3.2348, 3.9085, 4.3889, 4.2207, 4.3338], + device='cuda:2'), covar=tensor([0.0917, 0.1646, 0.1287, 0.0810, 0.0416, 0.0233, 0.0346, 0.0382], + device='cuda:2'), in_proj_covar=tensor([0.0167, 0.0171, 0.0183, 0.0159, 0.0145, 0.0139, 0.0135, 0.0124], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 21:12:09,113 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84892.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:12:20,335 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.4467, 4.9869, 5.0093, 4.7443, 5.3771, 3.3293, 4.3599, 2.7383], + device='cuda:2'), covar=tensor([0.0163, 0.0208, 0.0152, 0.0203, 0.0140, 0.0946, 0.0851, 0.1472], + device='cuda:2'), in_proj_covar=tensor([0.0111, 0.0156, 0.0119, 0.0143, 0.0125, 0.0140, 0.0147, 0.0134], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 21:12:38,549 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84903.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:12:43,567 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0184, 2.2928, 4.3066, 3.6978, 4.0744, 4.2571, 4.0440, 3.9023], + device='cuda:2'), covar=tensor([0.0696, 0.1216, 0.0127, 0.0738, 0.0189, 0.0226, 0.0201, 0.0226], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0107, 0.0092, 0.0153, 0.0091, 0.0105, 0.0094, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 21:13:32,298 INFO [train.py:892] (2/4) Epoch 46, batch 1450, loss[loss=0.138, simple_loss=0.2188, pruned_loss=0.02858, over 19580.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.2289, pruned_loss=0.03231, over 3945932.81 frames. ], batch size: 42, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:13:55,751 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.367e+02 3.403e+02 4.016e+02 4.651e+02 7.911e+02, threshold=8.032e+02, percent-clipped=0.0 +2023-03-29 21:14:08,242 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84940.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:14:36,266 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84951.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:15:44,126 INFO [train.py:892] (2/4) Epoch 46, batch 1500, loss[loss=0.1352, simple_loss=0.223, pruned_loss=0.02365, over 19858.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2294, pruned_loss=0.03259, over 3945212.12 frames. 
], batch size: 58, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:16:15,453 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84989.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:16:22,846 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84992.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:17:17,004 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85012.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:17:49,549 INFO [train.py:892] (2/4) Epoch 46, batch 1550, loss[loss=0.1456, simple_loss=0.227, pruned_loss=0.03207, over 19675.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2288, pruned_loss=0.03256, over 3947824.83 frames. ], batch size: 52, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:17:58,208 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2402, 3.2349, 3.1259, 2.7998, 3.2169, 2.5682, 2.4275, 1.6718], + device='cuda:2'), covar=tensor([0.0244, 0.0259, 0.0180, 0.0232, 0.0179, 0.0861, 0.0570, 0.1700], + device='cuda:2'), in_proj_covar=tensor([0.0111, 0.0156, 0.0119, 0.0143, 0.0125, 0.0140, 0.0147, 0.0134], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 21:18:12,485 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.101e+02 3.669e+02 4.198e+02 5.006e+02 8.856e+02, threshold=8.396e+02, percent-clipped=3.0 +2023-03-29 21:18:26,244 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85040.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:19:20,403 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.0509, 3.9242, 4.4313, 4.0042, 3.8590, 4.3432, 4.1507, 4.5309], + device='cuda:2'), covar=tensor([0.1065, 0.0496, 0.0503, 0.0483, 0.0993, 0.0696, 0.0555, 0.0435], + device='cuda:2'), in_proj_covar=tensor([0.0291, 0.0233, 0.0236, 0.0247, 0.0215, 0.0261, 0.0247, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 21:19:59,540 INFO [train.py:892] (2/4) Epoch 46, batch 1600, loss[loss=0.1567, simple_loss=0.2469, pruned_loss=0.03323, over 19740.00 frames. ], tot_loss[loss=0.148, simple_loss=0.23, pruned_loss=0.03302, over 3948071.65 frames. ], batch size: 219, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:21:21,409 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85107.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:21:42,156 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85115.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:22:09,310 INFO [train.py:892] (2/4) Epoch 46, batch 1650, loss[loss=0.1345, simple_loss=0.2136, pruned_loss=0.02769, over 19572.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2287, pruned_loss=0.03261, over 3949218.62 frames. 
], batch size: 42, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:22:31,393 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.611e+02 3.617e+02 4.144e+02 5.075e+02 9.786e+02, threshold=8.288e+02, percent-clipped=2.0 +2023-03-29 21:23:27,019 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85156.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:23:57,608 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85168.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 21:24:15,585 INFO [train.py:892] (2/4) Epoch 46, batch 1700, loss[loss=0.1286, simple_loss=0.2053, pruned_loss=0.02593, over 19866.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2304, pruned_loss=0.03325, over 3946891.90 frames. ], batch size: 129, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:24:16,756 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85176.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:24:29,987 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85181.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:26:00,824 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2987, 2.4665, 3.8029, 2.9448, 3.0353, 2.8613, 2.2073, 2.3494], + device='cuda:2'), covar=tensor([0.1429, 0.3405, 0.0674, 0.1306, 0.2287, 0.1834, 0.2893, 0.2975], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0408, 0.0359, 0.0302, 0.0384, 0.0406, 0.0393, 0.0371], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 21:26:14,881 INFO [train.py:892] (2/4) Epoch 46, batch 1750, loss[loss=0.1377, simple_loss=0.2206, pruned_loss=0.02745, over 19846.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2294, pruned_loss=0.03277, over 3948482.47 frames. ], batch size: 112, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:26:22,876 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85229.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:26:35,386 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.122e+02 3.389e+02 3.888e+02 4.671e+02 1.140e+03, threshold=7.776e+02, percent-clipped=1.0 +2023-03-29 21:28:04,572 INFO [train.py:892] (2/4) Epoch 46, batch 1800, loss[loss=0.182, simple_loss=0.2728, pruned_loss=0.04561, over 19660.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2289, pruned_loss=0.0324, over 3948933.55 frames. ], batch size: 55, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:28:30,784 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85289.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:28:47,452 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7444, 3.9415, 4.1293, 4.7760, 3.0217, 3.3781, 2.9054, 3.0074], + device='cuda:2'), covar=tensor([0.0438, 0.1761, 0.0796, 0.0360, 0.2013, 0.1190, 0.1292, 0.1480], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0327, 0.0256, 0.0213, 0.0250, 0.0217, 0.0224, 0.0221], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-29 21:29:00,488 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85307.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:29:38,039 INFO [train.py:892] (2/4) Epoch 46, batch 1850, loss[loss=0.1554, simple_loss=0.2483, pruned_loss=0.03122, over 19842.00 frames. 
], tot_loss[loss=0.1478, simple_loss=0.2305, pruned_loss=0.03252, over 3948344.62 frames. ], batch size: 58, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:30:45,978 INFO [train.py:892] (2/4) Epoch 47, batch 0, loss[loss=0.1714, simple_loss=0.251, pruned_loss=0.0459, over 19708.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.251, pruned_loss=0.0459, over 19708.00 frames. ], batch size: 305, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:30:45,988 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 21:31:18,898 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1274, 2.9478, 4.8571, 3.5560, 3.8321, 3.3597, 2.5464, 2.6947], + device='cuda:2'), covar=tensor([0.1025, 0.3624, 0.0402, 0.1080, 0.1957, 0.1767, 0.3166, 0.2962], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0408, 0.0358, 0.0301, 0.0383, 0.0406, 0.0394, 0.0371], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 21:31:22,866 INFO [train.py:926] (2/4) Epoch 47, validation: loss=0.1894, simple_loss=0.2504, pruned_loss=0.06424, over 2883724.00 frames. +2023-03-29 21:31:22,870 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22417MB +2023-03-29 21:31:31,583 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.328e+02 3.387e+02 4.010e+02 4.758e+02 1.602e+03, threshold=8.020e+02, percent-clipped=2.0 +2023-03-29 21:31:39,728 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85337.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:33:29,574 INFO [train.py:892] (2/4) Epoch 47, batch 50, loss[loss=0.1322, simple_loss=0.2205, pruned_loss=0.02191, over 19875.00 frames. ], tot_loss[loss=0.1439, simple_loss=0.2256, pruned_loss=0.03113, over 889340.31 frames. ], batch size: 84, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:35:26,313 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85428.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:35:31,534 INFO [train.py:892] (2/4) Epoch 47, batch 100, loss[loss=0.1849, simple_loss=0.2751, pruned_loss=0.04734, over 19579.00 frames. ], tot_loss[loss=0.1464, simple_loss=0.2272, pruned_loss=0.03278, over 1568840.13 frames. ], batch size: 376, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:35:42,089 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.536e+02 3.357e+02 4.311e+02 4.946e+02 7.786e+02, threshold=8.621e+02, percent-clipped=0.0 +2023-03-29 21:36:34,077 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85456.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:36:49,979 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85463.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 21:37:08,394 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85471.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:37:33,232 INFO [train.py:892] (2/4) Epoch 47, batch 150, loss[loss=0.1566, simple_loss=0.24, pruned_loss=0.03664, over 19805.00 frames. ], tot_loss[loss=0.1466, simple_loss=0.2276, pruned_loss=0.03275, over 2097714.62 frames. 
], batch size: 162, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:37:37,251 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2214, 2.4901, 2.7125, 3.0905, 2.0725, 2.7886, 2.0193, 2.0995], + device='cuda:2'), covar=tensor([0.0657, 0.1262, 0.1090, 0.0583, 0.2200, 0.0869, 0.1444, 0.1543], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0331, 0.0258, 0.0216, 0.0253, 0.0220, 0.0227, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 21:37:40,200 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85483.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:37:55,923 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85489.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:38:35,697 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:38:40,781 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5213, 4.6494, 2.7423, 4.8622, 5.1068, 2.1460, 4.3877, 3.6858], + device='cuda:2'), covar=tensor([0.0639, 0.0671, 0.2638, 0.0688, 0.0475, 0.2772, 0.0798, 0.0915], + device='cuda:2'), in_proj_covar=tensor([0.0245, 0.0272, 0.0241, 0.0293, 0.0272, 0.0210, 0.0249, 0.0213], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 21:39:40,858 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85530.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:39:42,189 INFO [train.py:892] (2/4) Epoch 47, batch 200, loss[loss=0.1305, simple_loss=0.2175, pruned_loss=0.0218, over 19798.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.2293, pruned_loss=0.033, over 2506097.99 frames. ], batch size: 51, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:39:43,573 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1174, 4.1657, 2.3808, 4.3896, 4.5857, 1.9839, 3.8518, 3.3884], + device='cuda:2'), covar=tensor([0.0709, 0.0761, 0.3021, 0.0674, 0.0512, 0.2883, 0.0963, 0.0931], + device='cuda:2'), in_proj_covar=tensor([0.0246, 0.0273, 0.0242, 0.0295, 0.0274, 0.0212, 0.0251, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 21:39:50,794 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.307e+02 3.468e+02 4.248e+02 4.914e+02 6.825e+02, threshold=8.497e+02, percent-clipped=0.0 +2023-03-29 21:40:12,911 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85544.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:40:26,423 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-03-29 21:40:39,354 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.20 vs. limit=5.0 +2023-03-29 21:41:45,827 INFO [train.py:892] (2/4) Epoch 47, batch 250, loss[loss=0.1565, simple_loss=0.2424, pruned_loss=0.03525, over 19771.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.2286, pruned_loss=0.03255, over 2826750.72 frames. ], batch size: 273, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:42:01,018 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.61 vs. 
limit=5.0 +2023-03-29 21:42:12,246 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85591.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:42:54,948 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85607.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:43:40,674 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85625.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:43:54,574 INFO [train.py:892] (2/4) Epoch 47, batch 300, loss[loss=0.1326, simple_loss=0.2131, pruned_loss=0.02602, over 19760.00 frames. ], tot_loss[loss=0.1463, simple_loss=0.2283, pruned_loss=0.03215, over 3075498.12 frames. ], batch size: 217, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:44:06,340 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.290e+02 3.428e+02 3.993e+02 4.702e+02 7.624e+02, threshold=7.986e+02, percent-clipped=0.0 +2023-03-29 21:44:50,634 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85655.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:45:49,351 INFO [train.py:892] (2/4) Epoch 47, batch 350, loss[loss=0.1434, simple_loss=0.218, pruned_loss=0.03442, over 19756.00 frames. ], tot_loss[loss=0.1465, simple_loss=0.2282, pruned_loss=0.03244, over 3270252.96 frames. ], batch size: 188, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:46:02,152 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85686.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 21:47:05,318 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.5797, 3.8381, 4.0812, 4.7086, 2.9966, 3.3696, 3.2242, 2.9761], + device='cuda:2'), covar=tensor([0.0494, 0.2063, 0.0881, 0.0386, 0.2150, 0.1258, 0.1116, 0.1528], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0333, 0.0260, 0.0218, 0.0255, 0.0221, 0.0228, 0.0224], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 21:47:46,159 INFO [train.py:892] (2/4) Epoch 47, batch 400, loss[loss=0.1899, simple_loss=0.2793, pruned_loss=0.05026, over 19568.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2306, pruned_loss=0.03313, over 3418274.16 frames. ], batch size: 376, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:47:56,620 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.491e+02 3.390e+02 3.969e+02 4.785e+02 1.194e+03, threshold=7.938e+02, percent-clipped=3.0 +2023-03-29 21:48:51,523 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0676, 3.1678, 2.0374, 3.5820, 3.3109, 3.5622, 3.5817, 3.0071], + device='cuda:2'), covar=tensor([0.0688, 0.0662, 0.1559, 0.0689, 0.0660, 0.0507, 0.0660, 0.0804], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0154, 0.0150, 0.0164, 0.0143, 0.0149, 0.0159, 0.0157], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0004], + device='cuda:2') +2023-03-29 21:49:02,467 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85763.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:49:20,545 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85771.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:49:46,502 INFO [train.py:892] (2/4) Epoch 47, batch 450, loss[loss=0.1281, simple_loss=0.2074, pruned_loss=0.02446, over 19825.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.231, pruned_loss=0.03321, over 3535183.48 frames. 
], batch size: 127, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:49:53,649 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85784.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:50:56,274 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85811.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:51:16,878 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85819.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:51:43,851 INFO [train.py:892] (2/4) Epoch 47, batch 500, loss[loss=0.1566, simple_loss=0.245, pruned_loss=0.03411, over 19655.00 frames. ], tot_loss[loss=0.148, simple_loss=0.2305, pruned_loss=0.03282, over 3627657.70 frames. ], batch size: 343, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:51:52,517 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.359e+02 3.439e+02 4.112e+02 5.000e+02 9.207e+02, threshold=8.225e+02, percent-clipped=1.0 +2023-03-29 21:52:01,469 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85839.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:53:17,993 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.6470, 5.9528, 5.9826, 5.8163, 5.6946, 5.9528, 5.3520, 5.3082], + device='cuda:2'), covar=tensor([0.0415, 0.0462, 0.0441, 0.0450, 0.0542, 0.0482, 0.0655, 0.1033], + device='cuda:2'), in_proj_covar=tensor([0.0298, 0.0318, 0.0326, 0.0285, 0.0297, 0.0277, 0.0289, 0.0337], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 21:53:23,915 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85875.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:53:36,863 INFO [train.py:892] (2/4) Epoch 47, batch 550, loss[loss=0.1249, simple_loss=0.2091, pruned_loss=0.02033, over 19870.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2313, pruned_loss=0.03308, over 3696895.24 frames. ], batch size: 99, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:53:50,104 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85886.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:55:33,623 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85930.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 21:55:34,540 INFO [train.py:892] (2/4) Epoch 47, batch 600, loss[loss=0.1376, simple_loss=0.2178, pruned_loss=0.02872, over 19707.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2304, pruned_loss=0.03322, over 3753814.73 frames. 
], batch size: 60, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 21:55:42,772 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.322e+02 3.636e+02 4.237e+02 5.117e+02 1.424e+03, threshold=8.474e+02, percent-clipped=3.0 +2023-03-29 21:55:45,832 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:56:37,157 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85957.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:56:49,138 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0526, 2.4107, 3.2465, 2.7904, 2.8022, 2.7476, 2.0584, 2.2663], + device='cuda:2'), covar=tensor([0.1321, 0.2956, 0.0769, 0.1249, 0.2068, 0.1627, 0.2885, 0.2588], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0408, 0.0358, 0.0301, 0.0382, 0.0406, 0.0393, 0.0370], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 21:57:01,859 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0865, 5.2560, 5.5046, 5.2328, 5.3289, 5.1251, 5.2093, 5.0051], + device='cuda:2'), covar=tensor([0.1521, 0.1350, 0.0859, 0.1325, 0.0686, 0.0764, 0.1755, 0.1972], + device='cuda:2'), in_proj_covar=tensor([0.0309, 0.0359, 0.0389, 0.0320, 0.0296, 0.0301, 0.0380, 0.0412], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-29 21:57:31,375 INFO [train.py:892] (2/4) Epoch 47, batch 650, loss[loss=0.1357, simple_loss=0.2062, pruned_loss=0.03259, over 19795.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.2303, pruned_loss=0.03296, over 3796511.01 frames. ], batch size: 185, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 21:57:32,312 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85981.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 21:57:58,392 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85991.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 21:58:06,876 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1904, 4.0117, 4.4816, 4.0558, 3.8219, 4.3009, 4.1464, 4.5095], + device='cuda:2'), covar=tensor([0.0799, 0.0443, 0.0367, 0.0435, 0.1072, 0.0622, 0.0544, 0.0388], + device='cuda:2'), in_proj_covar=tensor([0.0294, 0.0234, 0.0237, 0.0249, 0.0217, 0.0264, 0.0249, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 21:58:30,340 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86002.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:58:47,836 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-03-29 21:59:05,251 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86018.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:59:14,444 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.4910, 2.4680, 2.5873, 2.5223, 2.5703, 2.6218, 2.5568, 2.5465], + device='cuda:2'), covar=tensor([0.0452, 0.0384, 0.0424, 0.0407, 0.0521, 0.0360, 0.0480, 0.0434], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0093, 0.0096, 0.0091, 0.0103, 0.0096, 0.0112, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 21:59:35,629 INFO [train.py:892] (2/4) Epoch 47, batch 700, loss[loss=0.1483, simple_loss=0.234, pruned_loss=0.03128, over 19793.00 frames. ], tot_loss[loss=0.148, simple_loss=0.2304, pruned_loss=0.03283, over 3830776.64 frames. ], batch size: 162, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 21:59:44,268 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.350e+02 3.531e+02 4.077e+02 5.107e+02 6.974e+02, threshold=8.153e+02, percent-clipped=0.0 +2023-03-29 22:00:24,760 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86052.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:00:51,400 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86063.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:01:00,259 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3977, 3.1947, 3.6544, 2.8146, 3.6369, 3.1106, 3.3651, 3.6102], + device='cuda:2'), covar=tensor([0.0698, 0.0586, 0.0530, 0.0823, 0.0447, 0.0480, 0.0503, 0.0351], + device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0097, 0.0093, 0.0117, 0.0089, 0.0092, 0.0089, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:01:07,077 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86069.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:01:36,215 INFO [train.py:892] (2/4) Epoch 47, batch 750, loss[loss=0.1408, simple_loss=0.2298, pruned_loss=0.02586, over 19840.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.2293, pruned_loss=0.03217, over 3857558.01 frames. 
], batch size: 90, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 22:01:43,120 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86084.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:01:49,038 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0596, 4.6670, 4.7230, 4.4291, 5.0244, 3.2897, 4.0363, 2.6145], + device='cuda:2'), covar=tensor([0.0180, 0.0229, 0.0148, 0.0206, 0.0139, 0.0919, 0.0891, 0.1426], + device='cuda:2'), in_proj_covar=tensor([0.0111, 0.0157, 0.0120, 0.0143, 0.0126, 0.0141, 0.0148, 0.0134], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 22:02:29,447 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4694, 2.5628, 4.1008, 2.9193, 3.1656, 2.9872, 2.3733, 2.4650], + device='cuda:2'), covar=tensor([0.1490, 0.4121, 0.0613, 0.1477, 0.2580, 0.2039, 0.3143, 0.3165], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0411, 0.0361, 0.0303, 0.0385, 0.0409, 0.0396, 0.0372], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:02:47,781 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86113.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:03:24,219 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86130.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:03:25,061 INFO [train.py:892] (2/4) Epoch 47, batch 800, loss[loss=0.1336, simple_loss=0.2154, pruned_loss=0.02589, over 19886.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2291, pruned_loss=0.03242, over 3878756.92 frames. ], batch size: 88, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 22:03:27,848 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86132.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:03:35,179 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.984e+02 3.596e+02 4.313e+02 5.169e+02 1.005e+03, threshold=8.627e+02, percent-clipped=4.0 +2023-03-29 22:03:46,718 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86139.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:05:09,921 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.7748, 1.5821, 1.7843, 1.7733, 1.7013, 1.7740, 1.5734, 1.7643], + device='cuda:2'), covar=tensor([0.0438, 0.0435, 0.0401, 0.0390, 0.0547, 0.0412, 0.0628, 0.0386], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0094, 0.0097, 0.0091, 0.0104, 0.0097, 0.0112, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 22:05:24,296 INFO [train.py:892] (2/4) Epoch 47, batch 850, loss[loss=0.1311, simple_loss=0.2145, pruned_loss=0.02384, over 19696.00 frames. ], tot_loss[loss=0.1465, simple_loss=0.2286, pruned_loss=0.0322, over 3895410.20 frames. ], batch size: 74, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:05:37,614 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86186.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:05:40,999 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86187.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:07:21,649 INFO [train.py:892] (2/4) Epoch 47, batch 900, loss[loss=0.1521, simple_loss=0.2383, pruned_loss=0.0329, over 19657.00 frames. 
], tot_loss[loss=0.1461, simple_loss=0.2282, pruned_loss=0.03203, over 3908064.65 frames. ], batch size: 57, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:07:22,521 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86231.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:07:29,073 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86234.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:07:30,574 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.206e+02 3.419e+02 4.069e+02 4.713e+02 1.195e+03, threshold=8.139e+02, percent-clipped=3.0 +2023-03-29 22:07:56,311 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86246.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:09:15,442 INFO [train.py:892] (2/4) Epoch 47, batch 950, loss[loss=0.122, simple_loss=0.2076, pruned_loss=0.01825, over 19900.00 frames. ], tot_loss[loss=0.1459, simple_loss=0.2281, pruned_loss=0.03188, over 3917926.38 frames. ], batch size: 113, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:09:16,261 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86281.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:09:16,508 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5968, 2.5643, 4.3614, 3.0685, 3.2436, 2.9692, 2.2741, 2.4197], + device='cuda:2'), covar=tensor([0.1306, 0.3969, 0.0496, 0.1328, 0.2591, 0.2044, 0.3181, 0.3385], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0410, 0.0360, 0.0301, 0.0383, 0.0407, 0.0395, 0.0371], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:09:24,371 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([5.0148, 4.7858, 4.7994, 5.0212, 4.8218, 5.2400, 5.0839, 5.3180], + device='cuda:2'), covar=tensor([0.0672, 0.0356, 0.0368, 0.0350, 0.0584, 0.0361, 0.0439, 0.0287], + device='cuda:2'), in_proj_covar=tensor([0.0166, 0.0192, 0.0213, 0.0192, 0.0189, 0.0172, 0.0166, 0.0213], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 22:09:27,194 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. 
limit=2.0 +2023-03-29 22:09:28,511 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86286.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 22:10:16,843 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86307.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:10:18,949 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.2784, 3.3455, 2.1729, 3.9179, 3.5805, 3.8627, 3.9307, 3.1290], + device='cuda:2'), covar=tensor([0.0666, 0.0645, 0.1601, 0.0694, 0.0623, 0.0489, 0.0586, 0.0843], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0152, 0.0148, 0.0162, 0.0141, 0.0148, 0.0157, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-29 22:10:29,344 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86313.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:10:44,065 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1757, 2.4818, 3.5880, 2.9208, 2.9462, 2.8509, 2.1065, 2.3425], + device='cuda:2'), covar=tensor([0.1416, 0.3196, 0.0709, 0.1219, 0.2114, 0.1750, 0.2856, 0.2765], + device='cuda:2'), in_proj_covar=tensor([0.0362, 0.0411, 0.0360, 0.0302, 0.0384, 0.0408, 0.0396, 0.0372], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:11:06,093 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-29 22:11:07,297 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86329.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:11:10,607 INFO [train.py:892] (2/4) Epoch 47, batch 1000, loss[loss=0.1351, simple_loss=0.2144, pruned_loss=0.02786, over 19688.00 frames. ], tot_loss[loss=0.1457, simple_loss=0.2278, pruned_loss=0.03183, over 3923693.90 frames. ], batch size: 82, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:11:20,546 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.209e+02 3.279e+02 4.003e+02 4.723e+02 7.348e+02, threshold=8.007e+02, percent-clipped=0.0 +2023-03-29 22:12:16,477 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86358.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:13:07,764 INFO [train.py:892] (2/4) Epoch 47, batch 1050, loss[loss=0.1345, simple_loss=0.2183, pruned_loss=0.0254, over 19765.00 frames. ], tot_loss[loss=0.1456, simple_loss=0.2275, pruned_loss=0.03187, over 3930677.04 frames. 
], batch size: 88, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:14:11,569 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86408.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:14:51,452 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86425.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:14:58,741 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.2562, 3.0322, 3.3318, 2.8719, 3.5407, 3.5103, 4.1249, 4.4949], + device='cuda:2'), covar=tensor([0.0540, 0.1683, 0.1570, 0.2247, 0.1602, 0.1392, 0.0612, 0.0531], + device='cuda:2'), in_proj_covar=tensor([0.0267, 0.0250, 0.0280, 0.0267, 0.0314, 0.0269, 0.0244, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:15:05,563 INFO [train.py:892] (2/4) Epoch 47, batch 1100, loss[loss=0.1631, simple_loss=0.2497, pruned_loss=0.0382, over 19763.00 frames. ], tot_loss[loss=0.1453, simple_loss=0.2273, pruned_loss=0.0316, over 3934784.73 frames. ], batch size: 179, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:15:16,003 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.572e+02 3.379e+02 4.084e+02 4.897e+02 1.462e+03, threshold=8.168e+02, percent-clipped=5.0 +2023-03-29 22:15:27,659 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86440.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:15:40,240 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86445.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:17:00,652 INFO [train.py:892] (2/4) Epoch 47, batch 1150, loss[loss=0.1244, simple_loss=0.2042, pruned_loss=0.02232, over 19845.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.2289, pruned_loss=0.03231, over 3937589.31 frames. ], batch size: 104, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:17:19,447 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9695, 2.9823, 4.5627, 3.4550, 3.5698, 3.4315, 2.4895, 2.6544], + device='cuda:2'), covar=tensor([0.1108, 0.3533, 0.0455, 0.1171, 0.2046, 0.1671, 0.2882, 0.2658], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0408, 0.0357, 0.0300, 0.0382, 0.0405, 0.0393, 0.0370], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:17:46,878 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86501.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:17:59,743 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86506.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 22:18:58,208 INFO [train.py:892] (2/4) Epoch 47, batch 1200, loss[loss=0.1366, simple_loss=0.2185, pruned_loss=0.02731, over 19712.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2294, pruned_loss=0.03253, over 3941167.51 frames. ], batch size: 61, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:18:59,341 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86531.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:19:07,247 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.118e+02 3.301e+02 3.835e+02 4.475e+02 8.063e+02, threshold=7.670e+02, percent-clipped=0.0 +2023-03-29 22:19:40,181 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. 
limit=2.0 +2023-03-29 22:20:44,570 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86579.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:20:47,334 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86580.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:20:49,872 INFO [train.py:892] (2/4) Epoch 47, batch 1250, loss[loss=0.1578, simple_loss=0.2432, pruned_loss=0.03622, over 19647.00 frames. ], tot_loss[loss=0.1461, simple_loss=0.228, pruned_loss=0.03212, over 3944015.90 frames. ], batch size: 66, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:21:04,077 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86586.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 22:21:41,984 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86602.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:22:06,370 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86613.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:22:49,123 INFO [train.py:892] (2/4) Epoch 47, batch 1300, loss[loss=0.1529, simple_loss=0.2436, pruned_loss=0.03117, over 19804.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2286, pruned_loss=0.0324, over 3945823.57 frames. ], batch size: 65, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:22:57,921 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86634.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 22:22:59,172 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.242e+02 3.251e+02 3.887e+02 4.565e+02 9.819e+02, threshold=7.775e+02, percent-clipped=3.0 +2023-03-29 22:23:12,315 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86641.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:23:21,691 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8984, 3.1376, 2.7086, 2.3616, 2.8083, 3.0385, 3.0883, 3.0544], + device='cuda:2'), covar=tensor([0.0331, 0.0357, 0.0393, 0.0577, 0.0414, 0.0366, 0.0247, 0.0281], + device='cuda:2'), in_proj_covar=tensor([0.0119, 0.0112, 0.0112, 0.0112, 0.0116, 0.0102, 0.0104, 0.0102], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 22:23:53,674 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86658.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:24:00,031 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86661.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:24:43,562 INFO [train.py:892] (2/4) Epoch 47, batch 1350, loss[loss=0.1436, simple_loss=0.2262, pruned_loss=0.03054, over 19875.00 frames. ], tot_loss[loss=0.1465, simple_loss=0.2286, pruned_loss=0.03224, over 3946718.46 frames. ], batch size: 125, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:25:44,662 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86706.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:25:48,883 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86708.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:26:27,147 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86725.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:26:40,894 INFO [train.py:892] (2/4) Epoch 47, batch 1400, loss[loss=0.1605, simple_loss=0.2496, pruned_loss=0.03573, over 19795.00 frames. 
], tot_loss[loss=0.1465, simple_loss=0.2284, pruned_loss=0.03227, over 3947391.85 frames. ], batch size: 79, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:26:49,070 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.330e+02 3.187e+02 3.920e+02 5.050e+02 7.981e+02, threshold=7.841e+02, percent-clipped=2.0 +2023-03-29 22:27:34,536 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86756.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:28:14,246 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86773.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:28:33,040 INFO [train.py:892] (2/4) Epoch 47, batch 1450, loss[loss=0.1333, simple_loss=0.2154, pruned_loss=0.02557, over 19885.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2291, pruned_loss=0.03235, over 3946017.31 frames. ], batch size: 97, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:29:06,576 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86796.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:29:20,750 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86801.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 22:29:49,521 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86814.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:30:28,317 INFO [train.py:892] (2/4) Epoch 47, batch 1500, loss[loss=0.1718, simple_loss=0.2525, pruned_loss=0.04549, over 19760.00 frames. ], tot_loss[loss=0.1464, simple_loss=0.2284, pruned_loss=0.03214, over 3947447.38 frames. ], batch size: 253, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:30:37,007 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.591e+02 3.342e+02 4.163e+02 4.864e+02 6.569e+02, threshold=8.326e+02, percent-clipped=0.0 +2023-03-29 22:32:11,867 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86875.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:32:18,208 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86878.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:32:23,385 INFO [train.py:892] (2/4) Epoch 47, batch 1550, loss[loss=0.1374, simple_loss=0.219, pruned_loss=0.02791, over 19887.00 frames. ], tot_loss[loss=0.1463, simple_loss=0.2287, pruned_loss=0.03196, over 3947780.32 frames. ], batch size: 47, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:33:10,790 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86902.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:34:19,986 INFO [train.py:892] (2/4) Epoch 47, batch 1600, loss[loss=0.1329, simple_loss=0.2163, pruned_loss=0.02479, over 19831.00 frames. ], tot_loss[loss=0.1459, simple_loss=0.2282, pruned_loss=0.03177, over 3949796.85 frames. 
], batch size: 144, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:34:27,989 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.263e+02 3.386e+02 4.039e+02 4.750e+02 1.112e+03, threshold=8.078e+02, percent-clipped=1.0 +2023-03-29 22:34:31,029 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:34:37,159 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86939.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:35:03,421 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86950.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:35:44,900 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-29 22:35:48,595 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0877, 2.6337, 2.9867, 3.2209, 3.8483, 4.3031, 4.0838, 4.1717], + device='cuda:2'), covar=tensor([0.0983, 0.1681, 0.1395, 0.0731, 0.0379, 0.0250, 0.0363, 0.0364], + device='cuda:2'), in_proj_covar=tensor([0.0169, 0.0173, 0.0185, 0.0160, 0.0147, 0.0142, 0.0137, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 22:36:13,801 INFO [train.py:892] (2/4) Epoch 47, batch 1650, loss[loss=0.1357, simple_loss=0.2103, pruned_loss=0.03052, over 19810.00 frames. ], tot_loss[loss=0.1463, simple_loss=0.2288, pruned_loss=0.0319, over 3949796.39 frames. ], batch size: 181, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:36:25,254 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6150, 3.8635, 4.1531, 4.6303, 3.1478, 3.3568, 2.9400, 2.9564], + device='cuda:2'), covar=tensor([0.0454, 0.1919, 0.0822, 0.0380, 0.1916, 0.1081, 0.1284, 0.1571], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0329, 0.0256, 0.0215, 0.0253, 0.0217, 0.0226, 0.0222], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 22:37:07,408 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87002.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 22:37:19,283 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.3234, 2.6947, 4.7066, 4.0637, 4.3569, 4.6660, 4.3839, 4.2767], + device='cuda:2'), covar=tensor([0.0662, 0.1076, 0.0095, 0.0723, 0.0171, 0.0189, 0.0170, 0.0172], + device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0108, 0.0093, 0.0154, 0.0092, 0.0105, 0.0095, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:38:05,802 INFO [train.py:892] (2/4) Epoch 47, batch 1700, loss[loss=0.1467, simple_loss=0.2268, pruned_loss=0.0333, over 19795.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2293, pruned_loss=0.03234, over 3950422.17 frames. ], batch size: 185, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:38:14,347 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.986e+02 3.322e+02 4.069e+02 4.865e+02 7.645e+02, threshold=8.138e+02, percent-clipped=0.0 +2023-03-29 22:39:20,649 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87063.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 22:39:56,684 INFO [train.py:892] (2/4) Epoch 47, batch 1750, loss[loss=0.1472, simple_loss=0.2313, pruned_loss=0.03158, over 19805.00 frames. 
], tot_loss[loss=0.1477, simple_loss=0.2299, pruned_loss=0.03278, over 3950650.67 frames. ], batch size: 74, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:40:26,966 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87096.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:40:36,163 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87101.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 22:41:33,776 INFO [train.py:892] (2/4) Epoch 47, batch 1800, loss[loss=0.1557, simple_loss=0.229, pruned_loss=0.04116, over 19712.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2304, pruned_loss=0.033, over 3950238.14 frames. ], batch size: 78, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:41:41,050 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.449e+02 3.587e+02 4.161e+02 4.922e+02 1.323e+03, threshold=8.323e+02, percent-clipped=3.0 +2023-03-29 22:41:59,480 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87144.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:42:08,354 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87149.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:42:12,213 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.5298, 3.3946, 3.5876, 2.8963, 3.7426, 3.1134, 3.4598, 3.6619], + device='cuda:2'), covar=tensor([0.0593, 0.0435, 0.0681, 0.0783, 0.0348, 0.0478, 0.0441, 0.0374], + device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0097, 0.0093, 0.0117, 0.0089, 0.0092, 0.0088, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:42:36,037 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.7833, 3.1664, 3.5645, 3.0737, 3.8477, 3.9271, 4.4900, 5.0640], + device='cuda:2'), covar=tensor([0.0435, 0.1656, 0.1515, 0.2308, 0.1655, 0.1302, 0.0596, 0.0364], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0251, 0.0281, 0.0267, 0.0315, 0.0271, 0.0244, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:42:47,118 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87170.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:43:06,720 INFO [train.py:892] (2/4) Epoch 47, batch 1850, loss[loss=0.1411, simple_loss=0.2224, pruned_loss=0.02989, over 19847.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.2307, pruned_loss=0.03256, over 3950117.65 frames. ], batch size: 58, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:44:09,123 INFO [train.py:892] (2/4) Epoch 48, batch 0, loss[loss=0.1544, simple_loss=0.234, pruned_loss=0.03736, over 19887.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.234, pruned_loss=0.03736, over 19887.00 frames. ], batch size: 62, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:44:09,124 INFO [train.py:917] (2/4) Computing validation loss +2023-03-29 22:44:44,074 INFO [train.py:926] (2/4) Epoch 48, validation: loss=0.1901, simple_loss=0.2508, pruned_loss=0.06469, over 2883724.00 frames. 
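[Editor's note] The per-epoch learning rates logged here step down from 3.37e-03 (epoch 46) to 3.33e-03 (epoch 47) to 3.26e-03 (epoch 48), with a slow drift inside each epoch (3.33e-03 down to 3.30e-03 across epoch 47). These values are consistent with icefall's Eden schedule, which decays the rate with both the global batch index and the number of completed epochs. A minimal sketch, assuming this run's settings (base_lr=0.05, lr_batches=5000, lr_epochs=3.5) and assuming the epoch factor is driven by completed epochs rather than the current epoch number:

```python
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    """Eden-style schedule: decay by batch index and by completed epochs."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Reproduces the logged rates to ~3 significant figures:
print(f"{eden_lr(0.05, batch=85156, epoch=45):.2e}")  # ~3.37e-03 (epoch 46)
print(f"{eden_lr(0.05, batch=85337, epoch=46):.2e}")  # ~3.33e-03 (epoch 47)
print(f"{eden_lr(0.05, batch=87230, epoch=47):.2e}")  # ~3.26e-03 (epoch 48)
```

Under this reading, both factors share the same -0.25 exponent, so the visible step at each epoch boundary comes from the epoch term, while the gradual within-epoch drift (3.33e-03 to 3.30e-03 over epoch 47) comes from the batch term alone.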
+2023-03-29 22:44:44,075 INFO [train.py:927] (2/4) Maximum memory allocated so far is 22417MB +2023-03-29 22:46:30,423 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87230.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:46:40,137 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87234.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:46:41,366 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.137e+02 3.310e+02 3.743e+02 4.381e+02 8.999e+02, threshold=7.487e+02, percent-clipped=2.0 +2023-03-29 22:46:45,733 INFO [train.py:892] (2/4) Epoch 48, batch 50, loss[loss=0.133, simple_loss=0.2198, pruned_loss=0.02307, over 19902.00 frames. ], tot_loss[loss=0.1431, simple_loss=0.2258, pruned_loss=0.03015, over 890381.10 frames. ], batch size: 91, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:46:46,612 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87236.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:48:06,439 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.0957, 2.7729, 3.0147, 3.2788, 3.8421, 4.3471, 4.1864, 4.2171], + device='cuda:2'), covar=tensor([0.0983, 0.1627, 0.1410, 0.0782, 0.0456, 0.0264, 0.0385, 0.0378], + device='cuda:2'), in_proj_covar=tensor([0.0169, 0.0173, 0.0184, 0.0161, 0.0147, 0.0142, 0.0137, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-29 22:48:06,518 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.6255, 3.8443, 4.0142, 4.6523, 3.0293, 3.4008, 3.0065, 2.7509], + device='cuda:2'), covar=tensor([0.0534, 0.1854, 0.0933, 0.0442, 0.2141, 0.1245, 0.1398, 0.1817], + device='cuda:2'), in_proj_covar=tensor([0.0255, 0.0331, 0.0258, 0.0217, 0.0253, 0.0218, 0.0227, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 22:48:38,626 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87284.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:48:42,219 INFO [train.py:892] (2/4) Epoch 48, batch 100, loss[loss=0.1374, simple_loss=0.2173, pruned_loss=0.02878, over 19710.00 frames. ], tot_loss[loss=0.1443, simple_loss=0.2272, pruned_loss=0.03071, over 1568142.84 frames. ], batch size: 78, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:48:53,640 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87291.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:50:36,647 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.455e+02 3.250e+02 3.994e+02 4.855e+02 8.208e+02, threshold=7.988e+02, percent-clipped=2.0 +2023-03-29 22:50:39,121 INFO [train.py:892] (2/4) Epoch 48, batch 150, loss[loss=0.137, simple_loss=0.2111, pruned_loss=0.03142, over 19875.00 frames. ], tot_loss[loss=0.1431, simple_loss=0.2256, pruned_loss=0.03031, over 2097184.47 frames. 
], batch size: 159, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:50:44,618 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.1649, 3.1577, 4.7587, 3.5387, 3.7335, 3.5885, 2.6601, 2.9225], + device='cuda:2'), covar=tensor([0.1025, 0.3101, 0.0429, 0.1215, 0.1839, 0.1607, 0.2794, 0.2717], + device='cuda:2'), in_proj_covar=tensor([0.0364, 0.0409, 0.0358, 0.0303, 0.0383, 0.0407, 0.0395, 0.0373], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:51:30,853 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([1.9476, 1.7634, 2.6892, 1.8282, 2.7731, 2.8751, 2.4873, 2.7936], + device='cuda:2'), covar=tensor([0.1129, 0.1289, 0.0194, 0.0405, 0.0213, 0.0290, 0.0322, 0.0263], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0154, 0.0092, 0.0106, 0.0094, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:51:32,879 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87358.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 22:51:41,657 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87362.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:52:33,968 INFO [train.py:892] (2/4) Epoch 48, batch 200, loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03095, over 19741.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.2297, pruned_loss=0.03192, over 2504404.92 frames. ], batch size: 140, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:52:38,500 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.9238, 2.7751, 5.0319, 4.1217, 4.6412, 4.8764, 4.6900, 4.5708], + device='cuda:2'), covar=tensor([0.0500, 0.1043, 0.0086, 0.0940, 0.0137, 0.0192, 0.0148, 0.0154], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0154, 0.0092, 0.0106, 0.0094, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 22:54:01,034 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87423.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:54:01,554 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.48 vs. limit=5.0 +2023-03-29 22:54:24,399 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.416e+02 4.017e+02 4.841e+02 7.070e+02, threshold=8.034e+02, percent-clipped=0.0 +2023-03-29 22:54:26,339 INFO [train.py:892] (2/4) Epoch 48, batch 250, loss[loss=0.1378, simple_loss=0.2226, pruned_loss=0.02656, over 19696.00 frames. ], tot_loss[loss=0.1452, simple_loss=0.2278, pruned_loss=0.0313, over 2826761.10 frames. ], batch size: 46, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:55:23,562 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87461.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:55:44,950 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87470.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:56:14,922 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 22:56:19,976 INFO [train.py:892] (2/4) Epoch 48, batch 300, loss[loss=0.1317, simple_loss=0.2176, pruned_loss=0.02285, over 19828.00 frames. ], tot_loss[loss=0.145, simple_loss=0.2271, pruned_loss=0.03148, over 3075948.95 frames. 
], batch size: 93, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 22:56:41,394 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([4.3210, 4.0396, 4.1503, 4.3570, 4.1382, 4.3365, 4.4003, 4.6070], + device='cuda:2'), covar=tensor([0.0708, 0.0448, 0.0532, 0.0379, 0.0639, 0.0589, 0.0437, 0.0299], + device='cuda:2'), in_proj_covar=tensor([0.0165, 0.0190, 0.0211, 0.0189, 0.0187, 0.0171, 0.0163, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-29 22:56:56,816 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.07 vs. limit=2.0 +2023-03-29 22:57:07,921 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87505.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:57:20,145 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([2.8750, 2.8677, 1.9044, 3.3079, 3.0963, 3.2022, 3.3017, 2.7761], + device='cuda:2'), covar=tensor([0.0738, 0.0794, 0.1787, 0.0723, 0.0731, 0.0635, 0.0667, 0.0852], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0154, 0.0149, 0.0164, 0.0143, 0.0149, 0.0159, 0.0156], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0004], + device='cuda:2') +2023-03-29 22:57:37,238 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87518.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:57:45,590 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87522.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:58:14,254 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87534.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:58:15,390 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.562e+02 3.502e+02 4.019e+02 4.844e+02 7.845e+02, threshold=8.037e+02, percent-clipped=0.0 +2023-03-29 22:58:18,644 INFO [train.py:892] (2/4) Epoch 48, batch 350, loss[loss=0.1346, simple_loss=0.2134, pruned_loss=0.0279, over 19726.00 frames. ], tot_loss[loss=0.1453, simple_loss=0.2274, pruned_loss=0.03163, over 3270778.66 frames. ], batch size: 99, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 22:59:31,488 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87566.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 23:00:10,287 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87582.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:00:14,732 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87584.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:00:17,800 INFO [train.py:892] (2/4) Epoch 48, batch 400, loss[loss=0.1413, simple_loss=0.2195, pruned_loss=0.03149, over 19879.00 frames. ], tot_loss[loss=0.1457, simple_loss=0.2277, pruned_loss=0.03186, over 3421262.80 frames. ], batch size: 88, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:00:18,758 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87586.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:00:25,593 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87589.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:00:40,349 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-29 23:00:57,005 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. 
limit=2.0 +2023-03-29 23:01:03,149 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87605.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:02:10,866 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.523e+02 3.389e+02 3.932e+02 4.658e+02 8.787e+02, threshold=7.864e+02, percent-clipped=1.0 +2023-03-29 23:02:13,007 INFO [train.py:892] (2/4) Epoch 48, batch 450, loss[loss=0.1447, simple_loss=0.2253, pruned_loss=0.03205, over 19623.00 frames. ], tot_loss[loss=0.1458, simple_loss=0.2277, pruned_loss=0.03195, over 3538798.53 frames. ], batch size: 65, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:02:36,847 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87645.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 23:02:48,711 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87650.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:03:07,072 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87658.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 23:03:24,707 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87666.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 23:04:11,529 INFO [train.py:892] (2/4) Epoch 48, batch 500, loss[loss=0.1275, simple_loss=0.2046, pruned_loss=0.02515, over 19841.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.229, pruned_loss=0.0323, over 3630668.04 frames. ], batch size: 109, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:04:58,681 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87706.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 23:05:28,032 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87718.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:06:06,496 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.147e+02 3.363e+02 3.924e+02 4.897e+02 8.586e+02, threshold=7.848e+02, percent-clipped=2.0 +2023-03-29 23:06:09,073 INFO [train.py:892] (2/4) Epoch 48, batch 550, loss[loss=0.1395, simple_loss=0.2158, pruned_loss=0.03163, over 19633.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.23, pruned_loss=0.03249, over 3700736.36 frames. ], batch size: 68, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:07:06,346 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.4428, 2.7006, 3.8965, 3.0825, 3.1573, 3.0110, 2.2972, 2.4598], + device='cuda:2'), covar=tensor([0.1348, 0.3238, 0.0703, 0.1257, 0.2138, 0.1839, 0.2778, 0.2986], + device='cuda:2'), in_proj_covar=tensor([0.0364, 0.0410, 0.0359, 0.0304, 0.0384, 0.0409, 0.0395, 0.0373], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 23:08:07,548 INFO [train.py:892] (2/4) Epoch 48, batch 600, loss[loss=0.142, simple_loss=0.2268, pruned_loss=0.02855, over 19781.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.2308, pruned_loss=0.03255, over 3753985.08 frames. ], batch size: 91, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:08:15,522 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-29 23:09:18,911 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87817.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:09:27,667 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. 
limit=5.0 +2023-03-29 23:09:59,432 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.332e+02 3.329e+02 4.109e+02 5.019e+02 8.243e+02, threshold=8.218e+02, percent-clipped=1.0 +2023-03-29 23:10:01,371 INFO [train.py:892] (2/4) Epoch 48, batch 650, loss[loss=0.1408, simple_loss=0.2222, pruned_loss=0.0297, over 19833.00 frames. ], tot_loss[loss=0.1461, simple_loss=0.2286, pruned_loss=0.03176, over 3797433.14 frames. ], batch size: 128, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:10:07,343 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.64 vs. limit=5.0 +2023-03-29 23:11:01,547 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87861.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 23:11:56,098 INFO [train.py:892] (2/4) Epoch 48, batch 700, loss[loss=0.1467, simple_loss=0.2299, pruned_loss=0.03174, over 19531.00 frames. ], tot_loss[loss=0.146, simple_loss=0.2283, pruned_loss=0.03184, over 3832156.31 frames. ], batch size: 54, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:11:57,078 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87886.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:12:56,945 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.7186, 3.6295, 3.9879, 3.6308, 3.4336, 3.8814, 3.7380, 4.0364], + device='cuda:2'), covar=tensor([0.0788, 0.0372, 0.0370, 0.0415, 0.1251, 0.0576, 0.0487, 0.0396], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0233, 0.0235, 0.0246, 0.0215, 0.0263, 0.0248, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-29 23:13:47,289 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87934.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:13:48,367 INFO [optim.py:368] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.639e+02 3.556e+02 4.149e+02 4.938e+02 1.091e+03, threshold=8.298e+02, percent-clipped=2.0 +2023-03-29 23:13:50,830 INFO [train.py:892] (2/4) Epoch 48, batch 750, loss[loss=0.1581, simple_loss=0.2398, pruned_loss=0.03826, over 19623.00 frames. ], tot_loss[loss=0.1459, simple_loss=0.2285, pruned_loss=0.03169, over 3856651.77 frames. ], batch size: 65, lr: 3.25e-03, grad_scale: 32.0 +2023-03-29 23:14:38,876 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87940.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 23:14:50,645 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87945.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:15:26,365 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87961.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 23:17:04,578 INFO [zipformer.py:1454] (2/4) attn_weights_entropy = tensor([3.1908, 2.7877, 3.1506, 3.3738, 3.8365, 4.4673, 4.1708, 4.2103], + device='cuda:2'), covar=tensor([0.0943, 0.1582, 0.1329, 0.0711, 0.0470, 0.0227, 0.0342, 0.0448], + device='cuda:2'), in_proj_covar=tensor([0.0170, 0.0174, 0.0186, 0.0162, 0.0148, 0.0143, 0.0138, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.00 \ No newline at end of file
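[Editor's note] The recurring optim.py:368 lines summarize gradient-norm statistics once per logging interval (the train-loss lines advance in 50-batch steps, matching the log interval). In every such line in this section, the reported threshold equals clipping_scale (2.0) times the third of the five "quartile" values, up to rounding in the last digit: e.g. 4.144e+02 -> 8.288e+02, 3.888e+02 -> 7.776e+02, 4.010e+02 -> 8.020e+02. That strongly suggests the five values read as min/25%/50%/75%/max of recent gradient norms, with the clipping threshold set to twice the running median. A minimal re-creation of that bookkeeping follows; the class name, the 128-step history window, and the running percent-clipped accounting are all illustrative assumptions, not icefall's actual implementation (the log also hints the real percent-clipped resets per interval, e.g. 2.0 with a 50-batch interval is about one clipped batch):

```python
from collections import deque

import torch


class QuartileClipper:
    """Sketch of median-based gradient clipping; names are hypothetical."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 128):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)  # recent total grad norms
        self.seen = 0
        self.clipped = 0

    def step(self, parameters):
        grads = [p.grad for p in parameters if p.grad is not None]
        # Total gradient norm across all parameters for this step.
        norm = torch.norm(torch.stack([g.detach().norm() for g in grads])).item()
        self.norms.append(norm)
        self.seen += 1

        hist = torch.tensor(list(self.norms))
        # Five summary stats, matching the five logged "quartile" values.
        q = torch.quantile(hist, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * q[2].item()  # 2.0 * median

        if norm > threshold:
            # Rescale this step's gradients down to the threshold.
            self.clipped += 1
            for g in grads:
                g.mul_(threshold / norm)

        return q.tolist(), threshold, 100.0 * self.clipped / self.seen
```

Clipping to a multiple of the running median, rather than to a fixed constant, lets the threshold track the natural scale of the gradients as training progresses, which fits the slow decline of the logged quartiles across epochs here.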